pixman: Branch 'master' - 11 commits

Søren Sandmann Pedersen sandmann at kemper.freedesktop.org
Sat Feb 13 14:42:25 PST 2010


 Makefile.am               |    2 
 pixman/pixman-arm-neon.c  |   93 +++++++++---------
 pixman/pixman-arm-simd.c  |   26 ++---
 pixman/pixman-fast-path.c |  114 +++++++++++-----------
 pixman/pixman-mmx.c       |  130 +++++++++++++------------
 pixman/pixman-private.h   |   68 ++++++++++++-
 pixman/pixman-region.c    |   35 ++----
 pixman/pixman-sse2.c      |  152 +++++++++++++++---------------
 pixman/pixman-utils.c     |  231 ++++++++++++++++------------------------------
 9 files changed, 420 insertions(+), 431 deletions(-)

New commits:
commit 337e916473069a76a44757b3664f8d49da350773
Merge: bdc4a6a... 8e85059...
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Feb 13 12:26:09 2010 -0500

    Merge branch 'bitmasks'

commit bdc4a6afe0bcea6dfb0df221006f4fe188324678
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Feb 13 11:18:13 2010 -0500

    Makefile.am: Remove 'check' from release-check
    
    It's already included in distcheck.

diff --git a/Makefile.am b/Makefile.am
index 9fd3a37..63b08c1 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -77,7 +77,7 @@ ensure-prev:
 		false;									\
 	fi
 
-release-check: ensure-prev release-verify-newer release-remove-old check distcheck
+release-check: ensure-prev release-verify-newer release-remove-old distcheck
 
 release-tag:
 	git tag -u $(GPGKEY) -m "$(PACKAGE) $(VERSION) release" $(PACKAGE)-$(VERSION)
commit edee4be052cf0d466922759efd2613e5a2be9e2b
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Feb 13 09:40:33 2010 -0500

    Turn off asserts in development snapshots (bug 26314).
    
    There is not much real benefit in having asserts turned on in
    snapshots: it doesn't lead to any new bug reports, just to people
    not installing development snapshots because they cause X server
    crashes. So just turn them off.
    
    While we are at it, limit the number of messages to stderr to 5
    instead of 50.

diff --git a/pixman/pixman-region.c b/pixman/pixman-region.c
index ec2ddf8..9f7515c 100644
--- a/pixman/pixman-region.c
+++ b/pixman/pixman-region.c
@@ -68,32 +68,21 @@
 
 /* Turn on debugging depending on what type of release this is
  */
-
-#if ((PIXMAN_VERSION_MICRO % 2) == 1)
-/* Random git checkout.
- * 
- * Those are often used for performance work, so we don't turn on the
- * full self-checking, but we do turn on the asserts.
- */
-#    define   FATAL_BUGS
-#    define noSELF_CHECKS
-#elif ((PIXMAN_VERSION_MINOR % 2) == 0)
-/* Stable release.
- *
- * We don't want assertions because the X server should stay alive
- * if possible. We also don't want self-checks for performance-reasons.
+#if (((PIXMAN_VERSION_MICRO % 2) == 0) && ((PIXMAN_VERSION_MINOR % 2) == 1))
+/* This is a development snapshot, so we want self-checking in order to
+ * catch as many bugs as possible. However, we don't turn on the asserts
+ * because that just leads to the X server crashing which leads to
+ * people not running the snapshots.
  */
 #    define noFATAL_BUGS
-#    define noSELF_CHECKS
+#    define SELF_CHECKS
 #else
-/* Development snapshot.
- *
- * These are the things that get shipped in development distributions
- * such as Rawhide. We want both self-checking and fatal assertions
- * to catch as many bugs as possible.
+/* This is either a stable release or a random git checkout. We don't
+ * want self checks in either case for performance reasons. (Random
+ * git checkouts are often used for performance work
  */
-#    define FATAL_BUGS
-#    define SELF_CHECKS
+#    define noFATAL_BUGS
+#    define noSELF_CHECKS
 #endif
 
 #ifndef FATAL_BUGS
@@ -110,7 +99,7 @@ log_region_error (const char *function, const char *message)
 {
     static int n_messages = 0;
 
-    if (n_messages < 50)
+    if (n_messages < 5)
     {
 	fprintf (stderr,
 		 "*** BUG ***\n"
commit 8e8505943651ac46e0ad5a2dd0b9e85704095cc1
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sun Sep 13 04:28:20 2009 -0400

    Move checks for src/mask repeat right before walking the region.
    
    Also add a couple of additional conditions to the src/mask repeat check.
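
As a rough illustration of the relocated condition (a condensed sketch of the hunk below; the helper name is made up and not part of the patch):

    /* Sketch: only a bits image with an identity transform and NORMAL
     * repeat, whose format code was not reduced to PIXMAN_solid, still
     * needs the 1x1-repeat handling; everything else skips it.
     */
    static pixman_bool_t
    needs_normal_repeat (pixman_image_t       *image,
                         uint32_t              flags,
                         pixman_format_code_t  format)
    {
        return image                                        &&
               image->type == BITS                          &&
               (flags & FAST_PATH_ID_TRANSFORM) != 0        &&
               image->common.repeat == PIXMAN_REPEAT_NORMAL &&
               format != PIXMAN_solid;
    }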

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index c30f09c..5441b6b 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -609,17 +609,13 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
                        int32_t                   width,
                        int32_t                   height)
 {
-    pixman_bool_t src_repeat
-	= src->common.repeat == PIXMAN_REPEAT_NORMAL;
-    pixman_bool_t mask_repeat
-	= mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
     pixman_format_code_t src_format, mask_format, dest_format;
     uint32_t src_flags, mask_flags, dest_flags;
-    pixman_composite_func_t func = NULL;
+    pixman_composite_func_t func;
     const pixman_fast_path_t *info;
     pixman_bool_t result;
 
-    get_image_info (src, &src_format, &src_flags);
+    get_image_info (src,  &src_format,  &src_flags);
     get_image_info (mask, &mask_format, &mask_flags);
     get_image_info (dest, &dest_format, &dest_flags);
     
@@ -635,6 +631,7 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
 	    src_format = mask_format = PIXMAN_rpixbuf;
     }
 
+    func = NULL;
     for (info = paths; info->op != PIXMAN_OP_NONE; ++info)
     {
 	if (info->op == op					&&
@@ -646,13 +643,6 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
 	    (info->dest_flags & dest_flags) == info->dest_flags)
 	{
 	    func = info->func;
-	    
-	    if (info->src_format == PIXMAN_solid)
-		src_repeat = FALSE;
-	    
-	    if (info->mask_format == PIXMAN_solid)
-		mask_repeat = FALSE;
-
 	    break;
 	}
     }
@@ -674,6 +664,21 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
 		    src, mask, extents,
 		    src_x, src_y, mask_x, mask_y, dest_x, dest_y))
 	    {
+		pixman_bool_t src_repeat, mask_repeat;
+
+		src_repeat =
+		    src->type == BITS					&&
+		    src_flags & FAST_PATH_ID_TRANSFORM			&&
+		    src->common.repeat == PIXMAN_REPEAT_NORMAL		&&
+		    src_format != PIXMAN_solid;
+		
+		mask_repeat =
+		    mask						&&
+		    mask->type == BITS					&&
+		    mask_flags & FAST_PATH_ID_TRANSFORM			&&
+		    mask->common.repeat == PIXMAN_REPEAT_NORMAL		&&
+		    mask_format != PIXMAN_solid;
+		
 		walk_region_internal (imp, op,
 		                      src, mask, dest,
 		                      src_x, src_y, mask_x, mask_y,
commit eea58eab93aefd4430544754f8a0f5460b4a30aa
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sun Sep 13 03:43:16 2009 -0400

    Compute src, mask, dest flags and base fast path decisions on them.
    
    This sets the stage for caching the information per image instead
    of computing it on each composite invocation.
    
    This patch also computes format codes such as PIXMAN_solid for
    images, so that we can no longer end up in a situation where a fast
    path is selected for a 1x1 solid image even though that fast path
    doesn't actually understand repeating.
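
A sketch of the new matching rule (the helper below is hypothetical; the flag names and the subset test itself come from the patch): a fast path is taken only if every flag it requires is present in the flags computed for the corresponding image.

    /* Sketch: the same subset test is applied to src, mask and dest flags. */
    static pixman_bool_t
    flags_match (uint32_t required, uint32_t actual)
    {
        return (required & actual) == required;
    }

    /* E.g. a path requiring FAST_PATH_ID_TRANSFORM | FAST_PATH_NO_ALPHA_MAP
     * still matches an image whose computed flags additionally include
     * FAST_PATH_NO_ACCESSORS.
     */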

diff --git a/pixman/pixman-fast-path.c b/pixman/pixman-fast-path.c
index 49dcf79..54b7422 100644
--- a/pixman/pixman-fast-path.c
+++ b/pixman/pixman-fast-path.c
@@ -1361,12 +1361,12 @@ static const pixman_fast_path_t c_fast_paths[] =
     PIXMAN_STD_FAST_PATH (OVER, solid, a1, x8b8g8r8, fast_composite_over_n_1_8888),
     PIXMAN_STD_FAST_PATH (OVER, solid, a1, r5g6b5,   fast_composite_over_n_1_0565),
     PIXMAN_STD_FAST_PATH (OVER, solid, a1, b5g6r5,   fast_composite_over_n_1_0565),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, a8r8g8b8, fast_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, x8r8g8b8, fast_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, r5g6b5, fast_composite_over_n_8888_0565_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, a8b8g8r8, fast_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, x8b8g8r8, fast_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, b5g6r5, fast_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, a8r8g8b8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, x8r8g8b8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, r5g6b5, fast_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, a8b8g8r8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, x8b8g8r8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, b5g6r5, fast_composite_over_n_8888_0565_ca),
     PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, x8r8g8b8, fast_composite_over_x888_8_8888),
     PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, a8r8g8b8, fast_composite_over_x888_8_8888),
     PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, a8, x8b8g8r8, fast_composite_over_x888_8_8888),
@@ -1381,7 +1381,7 @@ static const pixman_fast_path_t c_fast_paths[] =
     PIXMAN_STD_FAST_PATH (ADD, a8b8g8r8, null, a8b8g8r8, fast_composite_add_8888_8888),
     PIXMAN_STD_FAST_PATH (ADD, a8, null, a8, fast_composite_add_8000_8000),
     PIXMAN_STD_FAST_PATH (ADD, a1, null, a1, fast_composite_add_1000_1000),
-    PIXMAN_STD_FAST_PATH (ADD, solid, a8r8g8b8_ca, a8r8g8b8, fast_composite_add_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (ADD, solid, a8r8g8b8, a8r8g8b8, fast_composite_add_n_8888_8888_ca),
     PIXMAN_STD_FAST_PATH (ADD, solid, a8, a8, fast_composite_add_n_8_8),
     PIXMAN_STD_FAST_PATH (SRC, solid, null, a8r8g8b8, fast_composite_solid_fill),
     PIXMAN_STD_FAST_PATH (SRC, solid, null, x8r8g8b8, fast_composite_solid_fill),
diff --git a/pixman/pixman-mmx.c b/pixman/pixman-mmx.c
index b7961cf..a4affa5 100644
--- a/pixman/pixman-mmx.c
+++ b/pixman/pixman-mmx.c
@@ -3216,74 +3216,74 @@ mmx_composite_over_x888_8_8888 (pixman_implementation_t *imp,
 
 static const pixman_fast_path_t mmx_fast_paths[] =
 {
-    PIXMAN_STD_FAST_PATH (OVER, solid,	  a8,          r5g6b5,   mmx_composite_over_n_8_0565       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,	  a8,          b5g6r5,   mmx_composite_over_n_8_0565       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          a8r8g8b8, mmx_composite_over_n_8_8888       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          x8r8g8b8, mmx_composite_over_n_8_8888       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          a8b8g8r8, mmx_composite_over_n_8_8888       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          x8b8g8r8, mmx_composite_over_n_8_8888       ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, a8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, x8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, r5g6b5,   mmx_composite_over_n_8888_0565_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, a8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, x8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, b5g6r5,   mmx_composite_over_n_8888_0565_ca ),
-    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      a8r8g8b8, mmx_composite_over_pixbuf_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      x8r8g8b8, mmx_composite_over_pixbuf_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      r5g6b5,   mmx_composite_over_pixbuf_0565    ),
-    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     a8b8g8r8, mmx_composite_over_pixbuf_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     x8b8g8r8, mmx_composite_over_pixbuf_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     b5g6r5,   mmx_composite_over_pixbuf_0565    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid,       a8r8g8b8, mmx_composite_over_x888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid,       x8r8g8b8, mmx_composite_over_x888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid,       a8b8g8r8, mmx_composite_over_x888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid,       x8b8g8r8, mmx_composite_over_x888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,       a8r8g8b8, mmx_composite_over_8888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,       x8r8g8b8, mmx_composite_over_8888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid,       a8b8g8r8, mmx_composite_over_8888_n_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid,       x8b8g8r8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       r5g6b5,   mmx_composite_over_n_8_0565       ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       b5g6r5,   mmx_composite_over_n_8_0565       ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       a8r8g8b8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       x8r8g8b8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       a8b8g8r8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    a8,       x8b8g8r8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8r8g8b8, a8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8r8g8b8, x8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8r8g8b8, r5g6b5,   mmx_composite_over_n_8888_0565_ca ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8b8g8r8, a8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8b8g8r8, x8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid,    a8b8g8r8, b5g6r5,   mmx_composite_over_n_8888_0565_ca ),
+    PIXMAN_STD_FAST_PATH    (OVER, pixbuf,   pixbuf,   a8r8g8b8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, pixbuf,   pixbuf,   x8r8g8b8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, pixbuf,   pixbuf,   r5g6b5,   mmx_composite_over_pixbuf_0565    ),
+    PIXMAN_STD_FAST_PATH    (OVER, rpixbuf,  rpixbuf,  a8b8g8r8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, rpixbuf,  rpixbuf,  x8b8g8r8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, rpixbuf,  rpixbuf,  b5g6r5,   mmx_composite_over_pixbuf_0565    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8r8g8b8, solid,    a8r8g8b8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8r8g8b8, solid,    x8r8g8b8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8b8g8r8, solid,    a8b8g8r8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8b8g8r8, solid,    x8b8g8r8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8r8g8b8, solid,    a8r8g8b8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8r8g8b8, solid,    x8r8g8b8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8b8g8r8, solid,    a8b8g8r8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8b8g8r8, solid,    x8b8g8r8, mmx_composite_over_8888_n_8888    ),
 #if 0
     /* FIXME: This code is commented out since it's apparently
      * not actually faster than the generic code.
      */
-    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8,          x8r8g8b8, mmx_composite_over_x888_8_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8,          a8r8g8b8, mmx_composite_over_x888_8_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8b8r8g8, a8,          x8b8g8r8, mmx_composite_over_x888_8_8888    ),
-    PIXMAN_STD_FAST_PATH (OVER, x8b8r8g8, a8,          a8r8g8b8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8r8g8b8, a8,       x8r8g8b8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8r8g8b8, a8,       a8r8g8b8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8b8r8g8, a8,       x8b8g8r8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8b8r8g8, a8,       a8r8g8b8, mmx_composite_over_x888_8_8888    ),
 #endif
-    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        a8r8g8b8, mmx_composite_over_n_8888         ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        x8r8g8b8, mmx_composite_over_n_8888         ),
-    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        r5g6b5,   mmx_composite_over_n_0565         ),
-    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
-
-    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        a8r8g8b8, mmx_composite_over_8888_8888      ),
-    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        x8r8g8b8, mmx_composite_over_8888_8888      ),
-    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        r5g6b5,   mmx_composite_over_8888_0565      ),
-    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        a8b8g8r8, mmx_composite_over_8888_8888      ),
-    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        x8b8g8r8, mmx_composite_over_8888_8888      ),
-    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        b5g6r5,   mmx_composite_over_8888_0565      ),
-
-    PIXMAN_STD_FAST_PATH (ADD,  a8r8g8b8, null,        a8r8g8b8, mmx_composite_add_8888_8888       ),
-    PIXMAN_STD_FAST_PATH (ADD,  a8b8g8r8, null,        a8b8g8r8, mmx_composite_add_8888_8888       ),
-    PIXMAN_STD_FAST_PATH (ADD,  a8,       null,        a8,       mmx_composite_add_8000_8000       ),
-    PIXMAN_STD_FAST_PATH (ADD,  solid,    a8,          a8,       mmx_composite_add_n_8_8           ),
-
-    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          a8r8g8b8, mmx_composite_src_n_8_8888        ),
-    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          x8r8g8b8, mmx_composite_src_n_8_8888        ),
-    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          a8b8g8r8, mmx_composite_src_n_8_8888        ),
-    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          x8b8g8r8, mmx_composite_src_n_8_8888        ),
-    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,        a8r8g8b8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,        a8b8g8r8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  x8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  x8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  r5g6b5,   null,        r5g6b5,   mmx_composite_copy_area           ),
-    PIXMAN_STD_FAST_PATH (SRC,  b5g6r5,   null,        b5g6r5,   mmx_composite_copy_area           ),
-
-    PIXMAN_STD_FAST_PATH (IN,   a8,       null,        a8,       mmx_composite_in_8_8              ),
-    PIXMAN_STD_FAST_PATH (IN,   solid,    a8,          a8,       mmx_composite_in_n_8_8            ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    null,     a8r8g8b8, mmx_composite_over_n_8888         ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    null,     x8r8g8b8, mmx_composite_over_n_8888         ),
+    PIXMAN_STD_FAST_PATH    (OVER, solid,    null,     r5g6b5,   mmx_composite_over_n_0565         ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8r8g8b8, null,     x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (OVER, x8b8g8r8, null,     x8b8g8r8, mmx_composite_copy_area           ),
+
+    PIXMAN_STD_FAST_PATH    (OVER, a8r8g8b8, null,     a8r8g8b8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8r8g8b8, null,     x8r8g8b8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8r8g8b8, null,     r5g6b5,   mmx_composite_over_8888_0565      ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8b8g8r8, null,     a8b8g8r8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8b8g8r8, null,     x8b8g8r8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH    (OVER, a8b8g8r8, null,     b5g6r5,   mmx_composite_over_8888_0565      ),
+
+    PIXMAN_STD_FAST_PATH    (ADD,  a8r8g8b8, null,     a8r8g8b8, mmx_composite_add_8888_8888       ),
+    PIXMAN_STD_FAST_PATH    (ADD,  a8b8g8r8, null,     a8b8g8r8, mmx_composite_add_8888_8888       ),
+    PIXMAN_STD_FAST_PATH    (ADD,  a8,       null,     a8,       mmx_composite_add_8000_8000       ),
+    PIXMAN_STD_FAST_PATH    (ADD,  solid,    a8,       a8,       mmx_composite_add_n_8_8           ),
+
+    PIXMAN_STD_FAST_PATH    (SRC,  solid,    a8,       a8r8g8b8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH    (SRC,  solid,    a8,       x8r8g8b8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH    (SRC,  solid,    a8,       a8b8g8r8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH    (SRC,  solid,    a8,       x8b8g8r8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH    (SRC,  a8r8g8b8, null,     a8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  a8b8g8r8, null,     a8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  a8r8g8b8, null,     x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  a8b8g8r8, null,     x8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  x8r8g8b8, null,     x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  x8b8g8r8, null,     x8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  r5g6b5,   null,     r5g6b5,   mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH    (SRC,  b5g6r5,   null,     b5g6r5,   mmx_composite_copy_area           ),
+
+    PIXMAN_STD_FAST_PATH    (IN,   a8,       null,     a8,       mmx_composite_in_8_8              ),
+    PIXMAN_STD_FAST_PATH    (IN,   solid,    a8,       a8,       mmx_composite_in_n_8_8            ),
 
     { PIXMAN_OP_NONE },
 };
diff --git a/pixman/pixman-private.h b/pixman/pixman-private.h
index 2055f9a..c99b101 100644
--- a/pixman/pixman-private.h
+++ b/pixman/pixman-private.h
@@ -561,15 +561,41 @@ _pixman_choose_implementation (void);
  */
 #define PIXMAN_null             PIXMAN_FORMAT (0, 0, 0, 0, 0, 0)
 #define PIXMAN_solid            PIXMAN_FORMAT (0, 1, 0, 0, 0, 0)
-#define PIXMAN_a8r8g8b8_ca	PIXMAN_FORMAT (0, 2, 0, 0, 0, 0)
-#define PIXMAN_a8b8g8r8_ca	PIXMAN_FORMAT (0, 3, 0, 0, 0, 0)
-#define PIXMAN_pixbuf		PIXMAN_FORMAT (0, 4, 0, 0, 0, 0)
-#define PIXMAN_rpixbuf		PIXMAN_FORMAT (0, 5, 0, 0, 0, 0)
-
-#define FAST_PATH_STD_SRC_FLAGS		0
-#define FAST_PATH_STD_MASK_U_FLAGS	0
-#define FAST_PATH_STD_MASK_CA_FLAGS	0
-#define FAST_PATH_STD_DEST_FLAGS	0
+#define PIXMAN_pixbuf		PIXMAN_FORMAT (0, 2, 0, 0, 0, 0)
+#define PIXMAN_rpixbuf		PIXMAN_FORMAT (0, 3, 0, 0, 0, 0)
+#define PIXMAN_unknown		PIXMAN_FORMAT (0, 4, 0, 0, 0, 0)
+
+#define FAST_PATH_ID_TRANSFORM			(1 << 0)
+#define FAST_PATH_NO_ALPHA_MAP			(1 << 1)
+#define FAST_PATH_NO_CONVOLUTION_FILTER		(1 << 2)
+#define FAST_PATH_NO_PAD_REPEAT			(1 << 3)
+#define FAST_PATH_NO_REFLECT_REPEAT		(1 << 4)
+#define FAST_PATH_NO_ACCESSORS			(1 << 5)
+#define FAST_PATH_NO_WIDE_FORMAT		(1 << 6)
+#define FAST_PATH_reserved			(1 << 7)
+#define FAST_PATH_COMPONENT_ALPHA		(1 << 8)
+#define FAST_PATH_UNIFIED_ALPHA			(1 << 9)
+
+#define _FAST_PATH_STANDARD_FLAGS					\
+    (FAST_PATH_ID_TRANSFORM		|				\
+     FAST_PATH_NO_ALPHA_MAP		|				\
+     FAST_PATH_NO_CONVOLUTION_FILTER	|				\
+     FAST_PATH_NO_PAD_REPEAT		|				\
+     FAST_PATH_NO_REFLECT_REPEAT	|				\
+     FAST_PATH_NO_ACCESSORS		|				\
+     FAST_PATH_NO_WIDE_FORMAT)
+
+#define FAST_PATH_STD_SRC_FLAGS						\
+    _FAST_PATH_STANDARD_FLAGS
+#define FAST_PATH_STD_MASK_U_FLAGS					\
+    (_FAST_PATH_STANDARD_FLAGS		|				\
+     FAST_PATH_UNIFIED_ALPHA)
+#define FAST_PATH_STD_MASK_CA_FLAGS					\
+    (_FAST_PATH_STANDARD_FLAGS		|				\
+     FAST_PATH_COMPONENT_ALPHA)
+#define FAST_PATH_STD_DEST_FLAGS					\
+    (FAST_PATH_NO_ACCESSORS		|				\
+     FAST_PATH_NO_WIDE_FORMAT)
 
 typedef struct
 {
@@ -583,16 +609,31 @@ typedef struct
     pixman_composite_func_t func;
 } pixman_fast_path_t;
 
+#define FAST_PATH(op, src, src_flags, mask, mask_flags, dest, dest_flags, func) \
+    PIXMAN_OP_ ## op,							\
+    PIXMAN_ ## src,							\
+    src_flags,							        \
+    PIXMAN_ ## mask,						        \
+    mask_flags,							        \
+    PIXMAN_ ## dest,	                                                \
+    dest_flags,							        \
+    func
+
 #define PIXMAN_STD_FAST_PATH(op, src, mask, dest, func)			\
-    {	    PIXMAN_OP_ ## op,						\
-	    PIXMAN_ ## src, FAST_PATH_STD_SRC_FLAGS,			\
-	    PIXMAN_ ## mask,						\
-	    ((PIXMAN_ ## mask == PIXMAN_a8r8g8b8_ca ||			\
-	      PIXMAN_ ## mask == PIXMAN_a8b8g8r8_ca) ?			\
-	     FAST_PATH_STD_MASK_CA_FLAGS : FAST_PATH_STD_MASK_U_FLAGS),	\
-	    PIXMAN_ ## dest, FAST_PATH_STD_DEST_FLAGS,			\
-	    func							\
-    }
+    { FAST_PATH (							\
+	  op,								\
+	  src, FAST_PATH_STD_SRC_FLAGS,					\
+	  mask, (PIXMAN_ ## mask) ? FAST_PATH_STD_MASK_U_FLAGS : 0,	\
+	  dest, FAST_PATH_STD_DEST_FLAGS,				\
+	  func) }
+
+#define PIXMAN_STD_FAST_PATH_CA(op, src, mask, dest, func)		\
+    { FAST_PATH (							\
+	  op,								\
+	  src, FAST_PATH_STD_SRC_FLAGS,					\
+	  mask, FAST_PATH_STD_MASK_CA_FLAGS,				\
+	  dest, FAST_PATH_STD_DEST_FLAGS,				\
+	  func) }
 
 /* Memory allocation helpers */
 void *
diff --git a/pixman/pixman-sse2.c b/pixman/pixman-sse2.c
index df3fee0..2bade74 100644
--- a/pixman/pixman-sse2.c
+++ b/pixman/pixman-sse2.c
@@ -5799,12 +5799,12 @@ static const pixman_fast_path_t sse2_fast_paths[] =
     PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid, x8r8g8b8, sse2_composite_over_8888_n_8888),
     PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid, a8b8g8r8, sse2_composite_over_8888_n_8888),
     PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid, x8b8g8r8, sse2_composite_over_8888_n_8888),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, a8r8g8b8, sse2_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, x8r8g8b8, sse2_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, a8b8g8r8, sse2_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, x8b8g8r8, sse2_composite_over_n_8888_8888_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, r5g6b5, sse2_composite_over_n_8888_0565_ca),
-    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, b5g6r5, sse2_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, a8r8g8b8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, x8r8g8b8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, a8b8g8r8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, x8b8g8r8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8r8g8b8, r5g6b5, sse2_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH_CA (OVER, solid, a8b8g8r8, b5g6r5, sse2_composite_over_n_8888_0565_ca),
     PIXMAN_STD_FAST_PATH (OVER, pixbuf, pixbuf, a8r8g8b8, sse2_composite_over_pixbuf_8888),
     PIXMAN_STD_FAST_PATH (OVER, pixbuf, pixbuf, x8r8g8b8, sse2_composite_over_pixbuf_8888),
     PIXMAN_STD_FAST_PATH (OVER, rpixbuf, rpixbuf, a8b8g8r8, sse2_composite_over_pixbuf_8888),
@@ -5815,7 +5815,7 @@ static const pixman_fast_path_t sse2_fast_paths[] =
     PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, null, x8b8g8r8, sse2_composite_copy_area),
 
     /* PIXMAN_OP_ADD */
-    PIXMAN_STD_FAST_PATH (ADD, solid, a8r8g8b8_ca, a8r8g8b8, sse2_composite_add_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH_CA (ADD, solid, a8r8g8b8, a8r8g8b8, sse2_composite_add_n_8888_8888_ca),
     PIXMAN_STD_FAST_PATH (ADD, a8, null, a8, sse2_composite_add_8000_8000),
     PIXMAN_STD_FAST_PATH (ADD, a8r8g8b8, null, a8r8g8b8, sse2_composite_add_8888_8888),
     PIXMAN_STD_FAST_PATH (ADD, a8b8g8r8, null, a8b8g8r8, sse2_composite_add_8888_8888),
diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index cc3e6ba..c30f09c 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -498,76 +498,55 @@ _pixman_walk_composite_region (pixman_implementation_t *imp,
     }
 }
 
-static pixman_bool_t
-source_is_fastpathable (pixman_image_t *image)
-{
-    if (image->common.transform					||
-	image->common.alpha_map					||
-	image->common.filter == PIXMAN_FILTER_CONVOLUTION	||
-	image->common.repeat == PIXMAN_REPEAT_PAD		||
-	image->common.repeat == PIXMAN_REPEAT_REFLECT)
-    {
-	return FALSE;
-    }
-
-    if (image->type == BITS					&&
-	(image->bits.read_func					||
-	 image->bits.write_func					||
-	 PIXMAN_FORMAT_IS_WIDE (image->bits.format)))
-    {
-	return FALSE;
-    }
-
-    return TRUE;
-}
-
-static pixman_bool_t
-get_source_format_code (pixman_image_t *image, pixman_format_code_t *code)
+static void
+get_image_info (pixman_image_t       *image,
+		pixman_format_code_t *code,
+		uint32_t	     *flags)
 {
+    *flags = 0;
+    
     if (!image)
     {
 	*code = PIXMAN_null;
-	
-	return TRUE;
     }
     else
     {
-	if (!source_is_fastpathable (image))
-	    return FALSE;
-	
-	if (image->common.component_alpha)
+	if (!image->common.transform)
+	    *flags |= FAST_PATH_ID_TRANSFORM;
+
+	if (!image->common.alpha_map)
+	    *flags |= FAST_PATH_NO_ALPHA_MAP;
+
+	if (image->common.filter != PIXMAN_FILTER_CONVOLUTION)
+	    *flags |= FAST_PATH_NO_CONVOLUTION_FILTER;
+
+	if (image->common.repeat != PIXMAN_REPEAT_PAD)
+	    *flags |= FAST_PATH_NO_PAD_REPEAT;
+
+	if (image->common.repeat != PIXMAN_REPEAT_REFLECT)
+	    *flags |= FAST_PATH_NO_REFLECT_REPEAT;
+
+	*flags |= (FAST_PATH_NO_ACCESSORS | FAST_PATH_NO_WIDE_FORMAT);
+	if (image->type == BITS)
 	{
-	    if (image->type == BITS)
-	    {
-		/* These are the *only* component_alpha formats
-		 * we support for fast paths
-		 */
-		if (image->bits.format == PIXMAN_a8r8g8b8)
-		    *code = PIXMAN_a8r8g8b8_ca;
-		else if (image->bits.format == PIXMAN_a8b8g8r8)
-		    *code = PIXMAN_a8b8g8r8_ca;
-		else
-		    return FALSE;
-	    }
-	    else
-	    {
-		return FALSE;
-	    }
+	    if (image->bits.read_func || image->bits.write_func)
+		*flags &= ~FAST_PATH_NO_ACCESSORS;
+
+	    if (PIXMAN_FORMAT_IS_WIDE (image->bits.format))
+		*flags &= ~FAST_PATH_NO_WIDE_FORMAT;
 	}
-	else if (_pixman_image_is_solid (image))
-	{
+
+	if (image->common.component_alpha)
+	    *flags |= FAST_PATH_COMPONENT_ALPHA;
+	else
+	    *flags |= FAST_PATH_UNIFIED_ALPHA;
+
+	if (_pixman_image_is_solid (image))
 	    *code = PIXMAN_solid;
-	}
 	else if (image->common.type == BITS)
-	{
 	    *code = image->bits.format;
-	}
 	else
-	{
-	    return FALSE;
-	}
-	
-	return TRUE;
+	    *code = PIXMAN_unknown;
     }
 }
 
@@ -630,28 +609,20 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
                        int32_t                   width,
                        int32_t                   height)
 {
-    pixman_bool_t src_repeat = src->common.repeat == PIXMAN_REPEAT_NORMAL;
-    pixman_bool_t mask_repeat = mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
+    pixman_bool_t src_repeat
+	= src->common.repeat == PIXMAN_REPEAT_NORMAL;
+    pixman_bool_t mask_repeat
+	= mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
     pixman_format_code_t src_format, mask_format, dest_format;
+    uint32_t src_flags, mask_flags, dest_flags;
     pixman_composite_func_t func = NULL;
     const pixman_fast_path_t *info;
     pixman_bool_t result;
 
-    if (!get_source_format_code (src, &src_format))
-	return FALSE;
-
-    if (!get_source_format_code (mask, &mask_format))
-	return FALSE;
-
-    if (dest->common.alpha_map	||
-	dest->bits.read_func	||
-	dest->bits.write_func)
-    {
-	return FALSE;
-    }
-
-    dest_format = dest->bits.format;
-
+    get_image_info (src, &src_format, &src_flags);
+    get_image_info (mask, &mask_format, &mask_flags);
+    get_image_info (dest, &dest_format, &dest_flags);
+    
     /* Check for pixbufs */
     if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
 	(src->type == BITS && src->bits.bits == mask->bits.bits)	   &&
@@ -666,10 +637,13 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
 
     for (info = paths; info->op != PIXMAN_OP_NONE; ++info)
     {
-	if (info->op == op			&&
-	    info->src_format == src_format	&&
-	    info->mask_format == mask_format	&&
-	    info->dest_format == dest_format)
+	if (info->op == op					&&
+	    (info->src_format == src_format)			&&
+	    (info->src_flags & src_flags) == info->src_flags	&&
+	    (info->mask_format == mask_format)			&&
+	    (info->mask_flags & mask_flags) == info->mask_flags	&&
+	    (info->dest_format == dest_format)			&&
+	    (info->dest_flags & dest_flags) == info->dest_flags)
 	{
 	    func = info->func;
 	    
@@ -678,24 +652,7 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
 	    
 	    if (info->mask_format == PIXMAN_solid)
 		mask_repeat = FALSE;
-	    
-	    if ((src_repeat				&&
-		 src->bits.width == 1		&&
-		 src->bits.height == 1)		||
-		(mask_repeat			&&
-		 mask->bits.width == 1		&&
-		 mask->bits.height == 1))
-	    {
-		/* If src or mask are repeating 1x1 images and src_repeat or
-		 * mask_repeat are still TRUE, it means the fast path we
-		 * selected does not actually handle repeating images.
-		 *
-		 * So rather than calling the "fast path" with a zillion
-		 * 1x1 requests, we just fall back to the general code (which
-		 * does do something sensible with 1x1 repeating images).
-		 */
-		func = NULL;
-	    }
+
 	    break;
 	}
     }
commit 6197db91a32da7ea281fd87b59f5bb74b989361b
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sun Sep 13 02:34:32 2009 -0400

    Add src_, mask_, and dest_flags fields to fast path arrays
    
    Update all the fast path tables to match using a new
    PIXMAN_STD_FAST_PATH macro.
    
    For now, use 0 for the flags fields.
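
Roughly, a converted table entry now carries the extra flag fields like this (an illustrative expansion, not literal preprocessor output; the all-zero flags follow the patch):

    /* PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, a8r8g8b8,
     *                       arm_composite_over_8888_8888) becomes roughly:
     */
    { PIXMAN_OP_OVER,
      PIXMAN_a8r8g8b8, 0,    /* src format,  src flags  (0 for now) */
      PIXMAN_null,     0,    /* mask format, mask flags (0 for now) */
      PIXMAN_a8r8g8b8, 0,    /* dest format, dest flags (0 for now) */
      arm_composite_over_8888_8888 },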

diff --git a/pixman/pixman-arm-neon.c b/pixman/pixman-arm-neon.c
index efeabeb..26f7267 100644
--- a/pixman/pixman-arm-neon.c
+++ b/pixman/pixman-arm-neon.c
@@ -385,52 +385,53 @@ pixman_blt_neon (uint32_t *src_bits,
 
 static const pixman_fast_path_t arm_neon_fast_path_array[] =
 {
-    { PIXMAN_OP_SRC,  PIXMAN_r5g6b5,   PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_src_0565_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_b5g6r5,   PIXMAN_null,     PIXMAN_b5g6r5,   neon_composite_src_0565_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_src_8888_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_x8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_src_8888_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   neon_composite_src_8888_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_x8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   neon_composite_src_8888_0565    },
-    { PIXMAN_OP_SRC,  PIXMAN_r5g6b5,   PIXMAN_null,     PIXMAN_a8r8g8b8, neon_composite_src_0565_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_r5g6b5,   PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_src_0565_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_b5g6r5,   PIXMAN_null,     PIXMAN_a8b8g8r8, neon_composite_src_0565_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_b5g6r5,   PIXMAN_null,     PIXMAN_x8b8g8r8, neon_composite_src_0565_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_src_8888_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_x8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_src_8888_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, neon_composite_src_8888_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_x8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, neon_composite_src_8888_8888    },
-    { PIXMAN_OP_SRC,  PIXMAN_r8g8b8,   PIXMAN_null,     PIXMAN_r8g8b8,   neon_composite_src_0888_0888    },
-    { PIXMAN_OP_SRC,  PIXMAN_b8g8r8,   PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_src_0888_8888_rev },
-    { PIXMAN_OP_SRC,  PIXMAN_b8g8r8,   PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_src_0888_0565_rev },
-    { PIXMAN_OP_SRC,  PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_a8r8g8b8, neon_composite_src_pixbuf_8888  },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_r5g6b5,   neon_composite_over_n_8_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_b5g6r5,   neon_composite_over_n_8_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, neon_composite_over_n_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, neon_composite_over_n_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, neon_composite_over_n_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, neon_composite_over_n_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_over_n_0565      },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_a8r8g8b8, neon_composite_over_n_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_over_n_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, neon_composite_over_8888_n_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, neon_composite_over_8888_n_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_a8,       PIXMAN_a8r8g8b8, neon_composite_over_8888_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_a8,       PIXMAN_x8r8g8b8, neon_composite_over_8888_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_a8,       PIXMAN_a8b8g8r8, neon_composite_over_8888_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_a8,       PIXMAN_x8b8g8r8, neon_composite_over_8888_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_a8r8g8b8, PIXMAN_a8r8g8b8, neon_composite_over_8888_8888_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   neon_composite_over_8888_0565   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   neon_composite_over_8888_0565   },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, neon_composite_over_8888_8888   },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, neon_composite_over_8888_8888   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, neon_composite_over_8888_8888   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, neon_composite_over_8888_8888   },
-    { PIXMAN_OP_ADD,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8,       neon_composite_add_n_8_8        },
-    { PIXMAN_OP_ADD,  PIXMAN_a8,       PIXMAN_a8,       PIXMAN_a8,       neon_composite_add_8_8_8        },
-    { PIXMAN_OP_ADD,  PIXMAN_a8r8g8b8, PIXMAN_a8r8g8b8, PIXMAN_a8r8g8b8, neon_composite_add_8888_8888_8888 },
-    { PIXMAN_OP_ADD,  PIXMAN_a8,       PIXMAN_null,     PIXMAN_a8,       neon_composite_add_8000_8000    },
-    { PIXMAN_OP_ADD,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, neon_composite_add_8888_8888    },
-    { PIXMAN_OP_ADD,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, neon_composite_add_8888_8888    },
+    PIXMAN_STD_FAST_PATH (SRC,  r5g6b5,   null,     r5g6b5,   neon_composite_src_0565_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  b5g6r5,   null,     b5g6r5,   neon_composite_src_0565_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,     r5g6b5,   neon_composite_src_8888_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  x8r8g8b8, null,     r5g6b5,   neon_composite_src_8888_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,     b5g6r5,   neon_composite_src_8888_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  x8b8g8r8, null,     b5g6r5,   neon_composite_src_8888_0565),
+    PIXMAN_STD_FAST_PATH (SRC,  r5g6b5,   null,     a8r8g8b8, neon_composite_src_0565_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  r5g6b5,   null,     x8r8g8b8, neon_composite_src_0565_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  b5g6r5,   null,     a8b8g8r8, neon_composite_src_0565_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  b5g6r5,   null,     x8b8g8r8, neon_composite_src_0565_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,     x8r8g8b8, neon_composite_src_8888_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  x8r8g8b8, null,     x8r8g8b8, neon_composite_src_8888_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,     x8b8g8r8, neon_composite_src_8888_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  x8b8g8r8, null,     x8b8g8r8, neon_composite_src_8888_8888),
+    PIXMAN_STD_FAST_PATH (SRC,  r8g8b8,   null,     r8g8b8,   neon_composite_src_0888_0888),
+    PIXMAN_STD_FAST_PATH (SRC,  b8g8r8,   null,     x8r8g8b8, neon_composite_src_0888_8888_rev),
+    PIXMAN_STD_FAST_PATH (SRC,  b8g8r8,   null,     r5g6b5,   neon_composite_src_0888_0565_rev),
+    PIXMAN_STD_FAST_PATH (SRC,  pixbuf,   pixbuf,   a8r8g8b8, neon_composite_src_pixbuf_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       r5g6b5,   neon_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       b5g6r5,   neon_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       a8r8g8b8, neon_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       x8r8g8b8, neon_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       a8b8g8r8, neon_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,       x8b8g8r8, neon_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,     r5g6b5,   neon_composite_over_n_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,     a8r8g8b8, neon_composite_over_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,     x8r8g8b8, neon_composite_over_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,    a8r8g8b8, neon_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,    x8r8g8b8, neon_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, a8,       a8r8g8b8, neon_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, a8,       x8r8g8b8, neon_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, a8,       a8b8g8r8, neon_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, a8,       x8b8g8r8, neon_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, a8r8g8b8, a8r8g8b8, neon_composite_over_8888_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,     r5g6b5,   neon_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,     b5g6r5,   neon_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,     a8r8g8b8, neon_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,     x8r8g8b8, neon_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,     a8b8g8r8, neon_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,     x8b8g8r8, neon_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD,  solid,    a8,       a8,       neon_composite_add_n_8_8),
+    PIXMAN_STD_FAST_PATH (ADD,  a8,       a8,       a8,       neon_composite_add_8_8_8),
+    PIXMAN_STD_FAST_PATH (ADD,  a8r8g8b8, a8r8g8b8, a8r8g8b8, neon_composite_add_8888_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD,  a8,       null,     a8,       neon_composite_add_8000_8000),
+    PIXMAN_STD_FAST_PATH (ADD,  a8r8g8b8, null,     a8r8g8b8, neon_composite_add_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD,  a8b8g8r8, null,     a8b8g8r8, neon_composite_add_8888_8888),
+
     { PIXMAN_OP_NONE },
 };
 
diff --git a/pixman/pixman-arm-simd.c b/pixman/pixman-arm-simd.c
index 04946f2..dd8dc5c 100644
--- a/pixman/pixman-arm-simd.c
+++ b/pixman/pixman-arm-simd.c
@@ -421,19 +421,19 @@ arm_composite_over_n_8_8888 (pixman_implementation_t * impl,
 
 static const pixman_fast_path_t arm_simd_fast_path_array[] =
 {
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, arm_composite_over_8888_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, arm_composite_over_8888_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, arm_composite_over_8888_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, arm_composite_over_8888_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, arm_composite_over_8888_n_8888  },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, arm_composite_over_8888_n_8888  },
-
-    { PIXMAN_OP_ADD, PIXMAN_a8,        PIXMAN_null,     PIXMAN_a8,       arm_composite_add_8000_8000     },
-
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, arm_composite_over_n_8_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, arm_composite_over_n_8_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, arm_composite_over_n_8_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, arm_composite_over_n_8_8888     },
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, a8r8g8b8, arm_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, x8r8g8b8, arm_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, a8b8g8r8, arm_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, x8b8g8r8, arm_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid, a8r8g8b8, arm_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid, x8r8g8b8, arm_composite_over_8888_n_8888),
+
+    PIXMAN_STD_FAST_PATH (ADD, a8, null, a8, arm_composite_add_8000_8000),
+
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8r8g8b8, arm_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8r8g8b8, arm_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8b8g8r8, arm_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8b8g8r8, arm_composite_over_n_8_8888),
 
     { PIXMAN_OP_NONE },
 };
diff --git a/pixman/pixman-fast-path.c b/pixman/pixman-fast-path.c
index 7a3bf52..49dcf79 100644
--- a/pixman/pixman-fast-path.c
+++ b/pixman/pixman-fast-path.c
@@ -1033,10 +1033,10 @@ fast_composite_add_n_8_8 (pixman_implementation_t *imp,
 #define UPDATE_BITMASK(n) ((n) << 1)
 #endif
 
-#define TEST_BIT(p, n) \
-	(*((p) + ((n) >> 5)) & CREATE_BITMASK ((n) & 31))
-#define SET_BIT(p, n) \
-	do { *((p) + ((n) >> 5)) |= CREATE_BITMASK ((n) & 31); } while (0);
+#define TEST_BIT(p, n)						\
+    (*((p) + ((n) >> 5)) & CREATE_BITMASK ((n) & 31))
+#define SET_BIT(p, n)							\
+    do { *((p) + ((n) >> 5)) |= CREATE_BITMASK ((n) & 31); } while (0);
 
 static void
 fast_composite_add_1000_1000 (pixman_implementation_t *imp,
@@ -1347,58 +1347,59 @@ fast_composite_src_8888_x888 (pixman_implementation_t *imp,
 
 static const pixman_fast_path_t c_fast_paths[] =
 {
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_r5g6b5,   fast_composite_over_n_8_0565 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_b5g6r5,   fast_composite_over_n_8_0565 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_r8g8b8,   fast_composite_over_n_8_0888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_b8g8r8,   fast_composite_over_n_8_0888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, fast_composite_over_n_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, fast_composite_over_n_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, fast_composite_over_n_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, fast_composite_over_n_8_8888 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_a8r8g8b8, fast_composite_over_n_1_8888, },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_x8r8g8b8, fast_composite_over_n_1_8888, },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_a8b8g8r8, fast_composite_over_n_1_8888, },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_x8b8g8r8, fast_composite_over_n_1_8888, },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_r5g6b5,   fast_composite_over_n_1_0565 },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a1,       PIXMAN_b5g6r5,   fast_composite_over_n_1_0565 },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_a8r8g8b8, fast_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_x8r8g8b8, fast_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_r5g6b5,   fast_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_a8b8g8r8, fast_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_x8b8g8r8, fast_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_b5g6r5,   fast_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_x8r8g8b8, fast_composite_over_x888_8_8888,      },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_a8r8g8b8, fast_composite_over_x888_8_8888,      },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_a8,       PIXMAN_x8b8g8r8, fast_composite_over_x888_8_8888,      },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_a8,       PIXMAN_a8b8g8r8, fast_composite_over_x888_8_8888,      },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, fast_composite_over_8888_8888,   },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, fast_composite_over_8888_8888,   },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   fast_composite_over_8888_0565,   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, fast_composite_over_8888_8888,   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, fast_composite_over_8888_8888,   },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   fast_composite_over_8888_0565,   },
-    { PIXMAN_OP_ADD, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_a8r8g8b8, fast_composite_add_8888_8888,  },
-    { PIXMAN_OP_ADD, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_a8b8g8r8, fast_composite_add_8888_8888,  },
-    { PIXMAN_OP_ADD, PIXMAN_a8,        PIXMAN_null,     PIXMAN_a8,       fast_composite_add_8000_8000,  },
-    { PIXMAN_OP_ADD, PIXMAN_a1,        PIXMAN_null,     PIXMAN_a1,       fast_composite_add_1000_1000,   },
-    { PIXMAN_OP_ADD, PIXMAN_solid,   PIXMAN_a8r8g8b8_ca, PIXMAN_a8r8g8b8, fast_composite_add_n_8888_8888_ca },
-    { PIXMAN_OP_ADD, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8,       fast_composite_add_n_8_8,   },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_a8r8g8b8, fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_x8r8g8b8, fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_a8b8g8r8, fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_x8b8g8r8, fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_a8,       fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_null,     PIXMAN_r5g6b5,   fast_composite_solid_fill },
-    { PIXMAN_OP_SRC, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_x8r8g8b8, fast_composite_src_8888_x888 },
-    { PIXMAN_OP_SRC, PIXMAN_x8r8g8b8,  PIXMAN_null,     PIXMAN_x8r8g8b8, fast_composite_src_8888_x888 },
-    { PIXMAN_OP_SRC, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_x8b8g8r8, fast_composite_src_8888_x888 },
-    { PIXMAN_OP_SRC, PIXMAN_x8b8g8r8,  PIXMAN_null,     PIXMAN_x8b8g8r8, fast_composite_src_8888_x888 },
-    { PIXMAN_OP_SRC, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_r5g6b5,   fast_composite_src_x888_0565 },
-    { PIXMAN_OP_SRC, PIXMAN_x8r8g8b8,  PIXMAN_null,     PIXMAN_r5g6b5,   fast_composite_src_x888_0565 },
-    { PIXMAN_OP_SRC, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_b5g6r5,   fast_composite_src_x888_0565 },
-    { PIXMAN_OP_SRC, PIXMAN_x8b8g8r8,  PIXMAN_null,     PIXMAN_b5g6r5,   fast_composite_src_x888_0565 },
-    { PIXMAN_OP_IN,  PIXMAN_a8,        PIXMAN_null,     PIXMAN_a8,       fast_composite_in_8_8,  },
-    { PIXMAN_OP_IN,  PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8,       fast_composite_in_n_8_8 },
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, r5g6b5, fast_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, b5g6r5, fast_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, r8g8b8, fast_composite_over_n_8_0888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, b8g8r8, fast_composite_over_n_8_0888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8r8g8b8, fast_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8r8g8b8, fast_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8b8g8r8, fast_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8b8g8r8, fast_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, a8r8g8b8, fast_composite_over_n_1_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, x8r8g8b8, fast_composite_over_n_1_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, a8b8g8r8, fast_composite_over_n_1_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, x8b8g8r8, fast_composite_over_n_1_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, r5g6b5,   fast_composite_over_n_1_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a1, b5g6r5,   fast_composite_over_n_1_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, a8r8g8b8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, x8r8g8b8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, r5g6b5, fast_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, a8b8g8r8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, x8b8g8r8, fast_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, b5g6r5, fast_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, x8r8g8b8, fast_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, a8r8g8b8, fast_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, a8, x8b8g8r8, fast_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, a8, a8b8g8r8, fast_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, a8r8g8b8, fast_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, x8r8g8b8, fast_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, r5g6b5, fast_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, a8b8g8r8, fast_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, x8b8g8r8, fast_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, b5g6r5, fast_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (ADD, a8r8g8b8, null, a8r8g8b8, fast_composite_add_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD, a8b8g8r8, null, a8b8g8r8, fast_composite_add_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD, a8, null, a8, fast_composite_add_8000_8000),
+    PIXMAN_STD_FAST_PATH (ADD, a1, null, a1, fast_composite_add_1000_1000),
+    PIXMAN_STD_FAST_PATH (ADD, solid, a8r8g8b8_ca, a8r8g8b8, fast_composite_add_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (ADD, solid, a8, a8, fast_composite_add_n_8_8),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, a8r8g8b8, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, x8r8g8b8, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, a8b8g8r8, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, x8b8g8r8, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, a8, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, solid, null, r5g6b5, fast_composite_solid_fill),
+    PIXMAN_STD_FAST_PATH (SRC, a8r8g8b8, null, x8r8g8b8, fast_composite_src_8888_x888),
+    PIXMAN_STD_FAST_PATH (SRC, x8r8g8b8, null, x8r8g8b8, fast_composite_src_8888_x888),
+    PIXMAN_STD_FAST_PATH (SRC, a8b8g8r8, null, x8b8g8r8, fast_composite_src_8888_x888),
+    PIXMAN_STD_FAST_PATH (SRC, x8b8g8r8, null, x8b8g8r8, fast_composite_src_8888_x888),
+    PIXMAN_STD_FAST_PATH (SRC, a8r8g8b8, null, r5g6b5, fast_composite_src_x888_0565),
+    PIXMAN_STD_FAST_PATH (SRC, x8r8g8b8, null, r5g6b5, fast_composite_src_x888_0565),
+    PIXMAN_STD_FAST_PATH (SRC, a8b8g8r8, null, b5g6r5, fast_composite_src_x888_0565),
+    PIXMAN_STD_FAST_PATH (SRC, x8b8g8r8, null, b5g6r5, fast_composite_src_x888_0565),
+    PIXMAN_STD_FAST_PATH (IN, a8, null, a8, fast_composite_in_8_8),
+    PIXMAN_STD_FAST_PATH (IN, solid, a8, a8, fast_composite_in_n_8_8),
+
     { PIXMAN_OP_NONE },
 };
 
@@ -1694,4 +1695,3 @@ _pixman_implementation_create_fast_path (void)
 
     return imp;
 }
-
diff --git a/pixman/pixman-mmx.c b/pixman/pixman-mmx.c
index 69d2493..b7961cf 100644
--- a/pixman/pixman-mmx.c
+++ b/pixman/pixman-mmx.c
@@ -3216,72 +3216,74 @@ mmx_composite_over_x888_8_8888 (pixman_implementation_t *imp,
 
 static const pixman_fast_path_t mmx_fast_paths[] =
 {
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_r5g6b5,   mmx_composite_over_n_8_0565       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_b5g6r5,   mmx_composite_over_n_8_0565       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, mmx_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, mmx_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, mmx_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, mmx_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_a8r8g8b8, mmx_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_x8r8g8b8, mmx_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_r5g6b5,   mmx_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_a8b8g8r8, mmx_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_x8b8g8r8, mmx_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_b5g6r5,   mmx_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_a8r8g8b8, mmx_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_x8r8g8b8, mmx_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_r5g6b5,   mmx_composite_over_pixbuf_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_a8b8g8r8, mmx_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_x8b8g8r8, mmx_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_b5g6r5,   mmx_composite_over_pixbuf_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, mmx_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, mmx_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_solid,    PIXMAN_a8b8g8r8, mmx_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_solid,    PIXMAN_x8b8g8r8, mmx_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, mmx_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, mmx_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_solid,    PIXMAN_a8b8g8r8, mmx_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_solid,    PIXMAN_x8b8g8r8, mmx_composite_over_8888_n_8888    },
+    PIXMAN_STD_FAST_PATH (OVER, solid,	  a8,          r5g6b5,   mmx_composite_over_n_8_0565       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,	  a8,          b5g6r5,   mmx_composite_over_n_8_0565       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          a8r8g8b8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          x8r8g8b8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          a8b8g8r8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8,          x8b8g8r8, mmx_composite_over_n_8_8888       ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, a8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, x8r8g8b8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8r8g8b8_ca, r5g6b5,   mmx_composite_over_n_8888_0565_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, a8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, x8b8g8r8, mmx_composite_over_n_8888_8888_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    a8b8g8r8_ca, b5g6r5,   mmx_composite_over_n_8888_0565_ca ),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      a8r8g8b8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      x8r8g8b8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf,   pixbuf,      r5g6b5,   mmx_composite_over_pixbuf_0565    ),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     a8b8g8r8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     x8b8g8r8, mmx_composite_over_pixbuf_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf,  rpixbuf,     b5g6r5,   mmx_composite_over_pixbuf_0565    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid,       a8r8g8b8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid,       x8r8g8b8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid,       a8b8g8r8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid,       x8b8g8r8, mmx_composite_over_x888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,       a8r8g8b8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid,       x8r8g8b8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid,       a8b8g8r8, mmx_composite_over_8888_n_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid,       x8b8g8r8, mmx_composite_over_8888_n_8888    ),
 #if 0
-    /* FIXME: This code is commented out since it's apparently not actually faster than the generic code. */
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_x8r8g8b8, mmx_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_a8r8g8b8, mmx_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8r8g8, PIXMAN_a8,       PIXMAN_x8b8g8r8, mmx_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8r8g8, PIXMAN_a8,       PIXMAN_a8r8g8b8, mmx_composite_over_x888_8_8888    },
+    /* FIXME: This code is commented out since it's apparently
+     * not actually faster than the generic code.
+     */
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8,          x8r8g8b8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8,          a8r8g8b8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8r8g8, a8,          x8b8g8r8, mmx_composite_over_x888_8_8888    ),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8r8g8, a8,          a8r8g8b8, mmx_composite_over_x888_8_8888    ),
 #endif
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_a8r8g8b8, mmx_composite_over_n_8888        },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_x8r8g8b8, mmx_composite_over_n_8888        },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_r5g6b5,   mmx_composite_over_n_0565        },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, mmx_composite_copy_area          },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, mmx_composite_copy_area          },
-
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, mmx_composite_over_8888_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, mmx_composite_over_8888_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   mmx_composite_over_8888_0565     },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, mmx_composite_over_8888_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, mmx_composite_over_8888_8888     },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   mmx_composite_over_8888_0565     },
-
-    { PIXMAN_OP_ADD, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_a8r8g8b8, mmx_composite_add_8888_8888   },
-    { PIXMAN_OP_ADD, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_a8b8g8r8, mmx_composite_add_8888_8888   },
-    { PIXMAN_OP_ADD, PIXMAN_a8,        PIXMAN_null,     PIXMAN_a8,       mmx_composite_add_8000_8000   },
-    { PIXMAN_OP_ADD, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8,       mmx_composite_add_n_8_8    },
-
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8r8g8b8, mmx_composite_src_n_8_8888 },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_x8r8g8b8, mmx_composite_src_n_8_8888 },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8b8g8r8, mmx_composite_src_n_8_8888 },
-    { PIXMAN_OP_SRC, PIXMAN_solid,     PIXMAN_a8,       PIXMAN_x8b8g8r8, mmx_composite_src_n_8_8888 },
-    { PIXMAN_OP_SRC, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_a8r8g8b8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_a8b8g8r8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_a8r8g8b8,  PIXMAN_null,     PIXMAN_x8r8g8b8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_a8b8g8r8,  PIXMAN_null,     PIXMAN_x8b8g8r8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_x8r8g8b8,  PIXMAN_null,     PIXMAN_x8r8g8b8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_x8b8g8r8,  PIXMAN_null,     PIXMAN_x8b8g8r8, mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_r5g6b5,    PIXMAN_null,     PIXMAN_r5g6b5,   mmx_composite_copy_area },
-    { PIXMAN_OP_SRC, PIXMAN_b5g6r5,    PIXMAN_null,     PIXMAN_b5g6r5,   mmx_composite_copy_area },
-
-    { PIXMAN_OP_IN,  PIXMAN_a8,        PIXMAN_null,     PIXMAN_a8,       mmx_composite_in_8_8    },
-    { PIXMAN_OP_IN,  PIXMAN_solid,     PIXMAN_a8,       PIXMAN_a8,       mmx_composite_in_n_8_8  },
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        a8r8g8b8, mmx_composite_over_n_8888         ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        x8r8g8b8, mmx_composite_over_n_8888         ),
+    PIXMAN_STD_FAST_PATH (OVER, solid,    null,        r5g6b5,   mmx_composite_over_n_0565         ),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
+
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        a8r8g8b8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        x8r8g8b8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null,        r5g6b5,   mmx_composite_over_8888_0565      ),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        a8b8g8r8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        x8b8g8r8, mmx_composite_over_8888_8888      ),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null,        b5g6r5,   mmx_composite_over_8888_0565      ),
+
+    PIXMAN_STD_FAST_PATH (ADD,  a8r8g8b8, null,        a8r8g8b8, mmx_composite_add_8888_8888       ),
+    PIXMAN_STD_FAST_PATH (ADD,  a8b8g8r8, null,        a8b8g8r8, mmx_composite_add_8888_8888       ),
+    PIXMAN_STD_FAST_PATH (ADD,  a8,       null,        a8,       mmx_composite_add_8000_8000       ),
+    PIXMAN_STD_FAST_PATH (ADD,  solid,    a8,          a8,       mmx_composite_add_n_8_8           ),
+
+    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          a8r8g8b8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          x8r8g8b8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          a8b8g8r8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH (SRC,  solid,    a8,          x8b8g8r8, mmx_composite_src_n_8_8888        ),
+    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,        a8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,        a8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  a8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  a8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  x8r8g8b8, null,        x8r8g8b8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  x8b8g8r8, null,        x8b8g8r8, mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  r5g6b5,   null,        r5g6b5,   mmx_composite_copy_area           ),
+    PIXMAN_STD_FAST_PATH (SRC,  b5g6r5,   null,        b5g6r5,   mmx_composite_copy_area           ),
+
+    PIXMAN_STD_FAST_PATH (IN,   a8,       null,        a8,       mmx_composite_in_8_8              ),
+    PIXMAN_STD_FAST_PATH (IN,   solid,    a8,          a8,       mmx_composite_in_n_8_8            ),
 
     { PIXMAN_OP_NONE },
 };
diff --git a/pixman/pixman-private.h b/pixman/pixman-private.h
index 40388aa..2055f9a 100644
--- a/pixman/pixman-private.h
+++ b/pixman/pixman-private.h
@@ -566,15 +566,34 @@ _pixman_choose_implementation (void);
 #define PIXMAN_pixbuf		PIXMAN_FORMAT (0, 4, 0, 0, 0, 0)
 #define PIXMAN_rpixbuf		PIXMAN_FORMAT (0, 5, 0, 0, 0, 0)
 
+#define FAST_PATH_STD_SRC_FLAGS		0
+#define FAST_PATH_STD_MASK_U_FLAGS	0
+#define FAST_PATH_STD_MASK_CA_FLAGS	0
+#define FAST_PATH_STD_DEST_FLAGS	0
+
 typedef struct
 {
     pixman_op_t             op;
     pixman_format_code_t    src_format;
+    uint32_t		    src_flags;
     pixman_format_code_t    mask_format;
+    uint32_t		    mask_flags;
     pixman_format_code_t    dest_format;
+    uint32_t		    dest_flags;
     pixman_composite_func_t func;
 } pixman_fast_path_t;
 
+#define PIXMAN_STD_FAST_PATH(op, src, mask, dest, func)			\
+    {	    PIXMAN_OP_ ## op,						\
+	    PIXMAN_ ## src, FAST_PATH_STD_SRC_FLAGS,			\
+	    PIXMAN_ ## mask,						\
+	    ((PIXMAN_ ## mask == PIXMAN_a8r8g8b8_ca ||			\
+	      PIXMAN_ ## mask == PIXMAN_a8b8g8r8_ca) ?			\
+	     FAST_PATH_STD_MASK_CA_FLAGS : FAST_PATH_STD_MASK_U_FLAGS),	\
+	    PIXMAN_ ## dest, FAST_PATH_STD_DEST_FLAGS,			\
+	    func							\
+    }
+
 /* Memory allocation helpers */
 void *
 pixman_malloc_ab (unsigned int n, unsigned int b);
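
For reference, here is roughly what a single table entry expands to under the
new macro: a sketch based on the definition above, using the
OVER/solid/a8/r5g6b5 entry as an example. The mask-flags ternary is a constant
expression the compiler folds, and all of the FAST_PATH_STD_*_FLAGS values are
0 for now:

    /* PIXMAN_STD_FAST_PATH (OVER, solid, a8, r5g6b5, fast_composite_over_n_8_0565)
     * expands to approximately:
     */
    {
        PIXMAN_OP_OVER,
        PIXMAN_solid,  FAST_PATH_STD_SRC_FLAGS,
        PIXMAN_a8,     FAST_PATH_STD_MASK_U_FLAGS,  /* a8 is not a _ca mask format */
        PIXMAN_r5g6b5, FAST_PATH_STD_DEST_FLAGS,
        fast_composite_over_n_8_0565
    }
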
diff --git a/pixman/pixman-sse2.c b/pixman/pixman-sse2.c
index 696a514..df3fee0 100644
--- a/pixman/pixman-sse2.c
+++ b/pixman/pixman-sse2.c
@@ -998,7 +998,7 @@ core_combine_reverse_out_u_sse2 (uint32_t*       pd,
 	    pix_multiply_1x64 (
 		unpack_32_1x64 (d), negate_1x64 (
 		    expand_alpha_1x64 (unpack_32_1x64 (s)))));
-	
+
 	if (pm)
 	    pm++;
 	ps++;
@@ -2652,8 +2652,8 @@ create_mask_2x32_64 (uint32_t mask0,
 
 /* Work around a code generation bug in Sun Studio 12. */
 #if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
-# define create_mask_2x32_128(mask0, mask1) \
-	(_mm_set_epi32 ((mask0), (mask1), (mask0), (mask1)))
+# define create_mask_2x32_128(mask0, mask1)				\
+    (_mm_set_epi32 ((mask0), (mask1), (mask0), (mask1)))
 #else
 static force_inline __m128i
 create_mask_2x32_128 (uint32_t mask0,
@@ -3152,7 +3152,7 @@ sse2_composite_add_n_8888_8888_ca (pixman_implementation_t *imp,
 
     src = _pixman_image_get_solid (src_image, dst_image->bits.format);
     srca = src >> 24;
-    
+
     if (src == 0)
 	return;
 
@@ -3187,7 +3187,7 @@ sse2_composite_add_n_8888_8888_ca (pixman_implementation_t *imp,
 	    if (m)
 	    {
 		d = *pd;
-		
+
 		mmx_mask = unpack_32_1x64 (m);
 		mmx_dest = unpack_32_1x64 (d);
 
@@ -3226,7 +3226,7 @@ sse2_composite_add_n_8888_8888_ca (pixman_implementation_t *imp,
 				    &xmm_mask_lo, &xmm_mask_hi,
 				    &xmm_mask_lo, &xmm_mask_hi);
 		xmm_mask_hi = pack_2x128_128 (xmm_mask_lo, xmm_mask_hi);
-		
+
 		save_128_aligned (
 		    (__m128i*)pd, _mm_adds_epu8 (xmm_mask_hi, xmm_dst));
 	    }
@@ -5713,7 +5713,7 @@ sse2_composite_over_8888_8_8888 (pixman_implementation_t *imp,
 
 		    expand_alpha_2x128 (xmm_src_lo, xmm_src_hi, &xmm_srca_lo, &xmm_srca_hi);
 		    expand_alpha_rev_2x128 (xmm_mask_lo, xmm_mask_hi, &xmm_mask_lo, &xmm_mask_hi);
-		    
+
 		    in_over_2x128 (&xmm_src_lo, &xmm_src_hi, &xmm_srca_lo, &xmm_srca_hi,
 				   &xmm_mask_lo, &xmm_mask_hi, &xmm_dst_lo, &xmm_dst_hi);
 
@@ -5767,73 +5767,77 @@ sse2_composite_over_8888_8_8888 (pixman_implementation_t *imp,
 
 static const pixman_fast_path_t sse2_fast_paths[] =
 {
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_r5g6b5,   sse2_composite_over_n_8_0565       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_b5g6r5,   sse2_composite_over_n_8_0565       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_a8r8g8b8, sse2_composite_over_n_8888         },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_x8r8g8b8, sse2_composite_over_n_8888         },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_null,     PIXMAN_r5g6b5,   sse2_composite_over_n_0565         },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, sse2_composite_over_8888_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, sse2_composite_over_8888_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, sse2_composite_over_8888_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, sse2_composite_over_8888_8888      },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_r5g6b5,   sse2_composite_over_8888_0565      },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_b5g6r5,   sse2_composite_over_8888_0565      },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, sse2_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, sse2_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, sse2_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, sse2_composite_over_n_8_8888       },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_a8,       PIXMAN_x8r8g8b8, sse2_composite_over_8888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_a8,       PIXMAN_a8r8g8b8, sse2_composite_over_8888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_a8,       PIXMAN_x8b8g8r8, sse2_composite_over_8888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_a8,       PIXMAN_a8b8g8r8, sse2_composite_over_8888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_x8r8g8b8, sse2_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_a8,       PIXMAN_a8r8g8b8, sse2_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_a8,       PIXMAN_x8b8g8r8, sse2_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_a8,       PIXMAN_a8b8g8r8, sse2_composite_over_x888_8_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, sse2_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, sse2_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_solid,    PIXMAN_a8b8g8r8, sse2_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_solid,    PIXMAN_x8b8g8r8, sse2_composite_over_x888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_a8r8g8b8, sse2_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8r8g8b8, PIXMAN_solid,    PIXMAN_x8r8g8b8, sse2_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_solid,    PIXMAN_a8b8g8r8, sse2_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_a8b8g8r8, PIXMAN_solid,    PIXMAN_x8b8g8r8, sse2_composite_over_8888_n_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_a8r8g8b8, sse2_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_x8r8g8b8, sse2_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_a8b8g8r8, sse2_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_x8b8g8r8, sse2_composite_over_n_8888_8888_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_r5g6b5,   sse2_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_solid, PIXMAN_a8b8g8r8_ca, PIXMAN_b5g6r5,   sse2_composite_over_n_8888_0565_ca },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_a8r8g8b8, sse2_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_x8r8g8b8, sse2_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_a8b8g8r8, sse2_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_x8b8g8r8, sse2_composite_over_pixbuf_8888    },
-    { PIXMAN_OP_OVER, PIXMAN_pixbuf,   PIXMAN_pixbuf,   PIXMAN_r5g6b5,   sse2_composite_over_pixbuf_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_rpixbuf,  PIXMAN_rpixbuf,  PIXMAN_b5g6r5,   sse2_composite_over_pixbuf_0565    },
-    { PIXMAN_OP_OVER, PIXMAN_x8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, sse2_composite_copy_area           },
-    { PIXMAN_OP_OVER, PIXMAN_x8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, sse2_composite_copy_area           },
-
-    { PIXMAN_OP_ADD,  PIXMAN_solid, PIXMAN_a8r8g8b8_ca, PIXMAN_a8r8g8b8, sse2_composite_add_n_8888_8888_ca  },
-    { PIXMAN_OP_ADD,  PIXMAN_a8,       PIXMAN_null,     PIXMAN_a8,       sse2_composite_add_8000_8000       },
-    { PIXMAN_OP_ADD,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, sse2_composite_add_8888_8888       },
-    { PIXMAN_OP_ADD,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, sse2_composite_add_8888_8888       },
-    { PIXMAN_OP_ADD,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8,       sse2_composite_add_n_8_8           },
-
-    { PIXMAN_OP_SRC,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8r8g8b8, sse2_composite_src_n_8_8888        },
-    { PIXMAN_OP_SRC,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8r8g8b8, sse2_composite_src_n_8_8888        },
-    { PIXMAN_OP_SRC,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8b8g8r8, sse2_composite_src_n_8_8888        },
-    { PIXMAN_OP_SRC,  PIXMAN_solid,    PIXMAN_a8,       PIXMAN_x8b8g8r8, sse2_composite_src_n_8_8888        },
-    { PIXMAN_OP_SRC,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_a8r8g8b8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_a8b8g8r8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_a8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_a8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_x8r8g8b8, PIXMAN_null,     PIXMAN_x8r8g8b8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_x8b8g8r8, PIXMAN_null,     PIXMAN_x8b8g8r8, sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_r5g6b5,   PIXMAN_null,     PIXMAN_r5g6b5,   sse2_composite_copy_area           },
-    { PIXMAN_OP_SRC,  PIXMAN_b5g6r5,   PIXMAN_null,     PIXMAN_b5g6r5,   sse2_composite_copy_area           },
-
-    { PIXMAN_OP_IN,   PIXMAN_a8,       PIXMAN_null,     PIXMAN_a8,       sse2_composite_in_8_8              },
-    { PIXMAN_OP_IN,   PIXMAN_solid,    PIXMAN_a8,       PIXMAN_a8,       sse2_composite_in_n_8_8            },
+    /* PIXMAN_OP_OVER */
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, r5g6b5, sse2_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, b5g6r5, sse2_composite_over_n_8_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, null, a8r8g8b8, sse2_composite_over_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, null, x8r8g8b8, sse2_composite_over_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, null, r5g6b5, sse2_composite_over_n_0565),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, a8r8g8b8, sse2_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, x8r8g8b8, sse2_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, a8b8g8r8, sse2_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, x8b8g8r8, sse2_composite_over_8888_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, null, r5g6b5, sse2_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, null, b5g6r5, sse2_composite_over_8888_0565),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8r8g8b8, sse2_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8r8g8b8, sse2_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, a8b8g8r8, sse2_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8, x8b8g8r8, sse2_composite_over_n_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, a8, x8r8g8b8, sse2_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, a8, a8r8g8b8, sse2_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, a8, x8b8g8r8, sse2_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, a8, a8b8g8r8, sse2_composite_over_8888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, x8r8g8b8, sse2_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, a8, a8r8g8b8, sse2_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, a8, x8b8g8r8, sse2_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, a8, a8b8g8r8, sse2_composite_over_x888_8_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid, a8r8g8b8, sse2_composite_over_x888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, solid, x8r8g8b8, sse2_composite_over_x888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid, a8b8g8r8, sse2_composite_over_x888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, solid, x8b8g8r8, sse2_composite_over_x888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid, a8r8g8b8, sse2_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8r8g8b8, solid, x8r8g8b8, sse2_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid, a8b8g8r8, sse2_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, a8b8g8r8, solid, x8b8g8r8, sse2_composite_over_8888_n_8888),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, a8r8g8b8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, x8r8g8b8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, a8b8g8r8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, x8b8g8r8, sse2_composite_over_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8r8g8b8_ca, r5g6b5, sse2_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH (OVER, solid, a8b8g8r8_ca, b5g6r5, sse2_composite_over_n_8888_0565_ca),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf, pixbuf, a8r8g8b8, sse2_composite_over_pixbuf_8888),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf, pixbuf, x8r8g8b8, sse2_composite_over_pixbuf_8888),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf, rpixbuf, a8b8g8r8, sse2_composite_over_pixbuf_8888),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf, rpixbuf, x8b8g8r8, sse2_composite_over_pixbuf_8888),
+    PIXMAN_STD_FAST_PATH (OVER, pixbuf, pixbuf, r5g6b5, sse2_composite_over_pixbuf_0565),
+    PIXMAN_STD_FAST_PATH (OVER, rpixbuf, rpixbuf, b5g6r5, sse2_composite_over_pixbuf_0565),
+    PIXMAN_STD_FAST_PATH (OVER, x8r8g8b8, null, x8r8g8b8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (OVER, x8b8g8r8, null, x8b8g8r8, sse2_composite_copy_area),
+
+    /* PIXMAN_OP_ADD */
+    PIXMAN_STD_FAST_PATH (ADD, solid, a8r8g8b8_ca, a8r8g8b8, sse2_composite_add_n_8888_8888_ca),
+    PIXMAN_STD_FAST_PATH (ADD, a8, null, a8, sse2_composite_add_8000_8000),
+    PIXMAN_STD_FAST_PATH (ADD, a8r8g8b8, null, a8r8g8b8, sse2_composite_add_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD, a8b8g8r8, null, a8b8g8r8, sse2_composite_add_8888_8888),
+    PIXMAN_STD_FAST_PATH (ADD, solid, a8, a8, sse2_composite_add_n_8_8),
+
+    /* PIXMAN_OP_SRC */
+    PIXMAN_STD_FAST_PATH (SRC, solid, a8, a8r8g8b8, sse2_composite_src_n_8_8888),
+    PIXMAN_STD_FAST_PATH (SRC, solid, a8, x8r8g8b8, sse2_composite_src_n_8_8888),
+    PIXMAN_STD_FAST_PATH (SRC, solid, a8, a8b8g8r8, sse2_composite_src_n_8_8888),
+    PIXMAN_STD_FAST_PATH (SRC, solid, a8, x8b8g8r8, sse2_composite_src_n_8_8888),
+    PIXMAN_STD_FAST_PATH (SRC, a8r8g8b8, null, a8r8g8b8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, a8b8g8r8, null, a8b8g8r8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, a8r8g8b8, null, x8r8g8b8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, a8b8g8r8, null, x8b8g8r8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, x8r8g8b8, null, x8r8g8b8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, x8b8g8r8, null, x8b8g8r8, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, r5g6b5, null, r5g6b5, sse2_composite_copy_area),
+    PIXMAN_STD_FAST_PATH (SRC, b5g6r5, null, b5g6r5, sse2_composite_copy_area),
+
+    /* PIXMAN_OP_IN */
+    PIXMAN_STD_FAST_PATH (IN, a8, null, a8, sse2_composite_in_8_8),
+    PIXMAN_STD_FAST_PATH (IN, solid, a8, a8, sse2_composite_in_n_8_8),
 
     { PIXMAN_OP_NONE },
 };
commit ff6eaac50eaa8778ba15fd0f796e94cc751dea0a
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Sep 12 05:26:50 2009 -0400

    Move calls to source_is_fastpathable() into get_source_format()
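
With this change a caller no longer needs a separate fast-path eligibility
check before asking for the format code. A minimal sketch of the resulting
call pattern in _pixman_run_fast_path(), matching the hunks below:

    pixman_format_code_t src_format, mask_format;

    /* get_source_format_code() now rejects non-fastpathable images itself;
     * a NULL image (no mask) yields PIXMAN_null and still succeeds.
     */
    if (!get_source_format_code (src, &src_format))
        return FALSE;

    if (!get_source_format_code (mask, &mask_format))
        return FALSE;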

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index 4a9e616..cc3e6ba 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -527,40 +527,48 @@ get_source_format_code (pixman_image_t *image, pixman_format_code_t *code)
     if (!image)
     {
 	*code = PIXMAN_null;
+	
+	return TRUE;
     }
-    else if (image->common.component_alpha)
+    else
     {
-	if (image->type == BITS)
+	if (!source_is_fastpathable (image))
+	    return FALSE;
+	
+	if (image->common.component_alpha)
 	{
-	    /* These are the *only* component_alpha formats
-	     * we support for fast paths
-	     */
-	    if (image->bits.format == PIXMAN_a8r8g8b8)
-		*code = PIXMAN_a8r8g8b8_ca;
-	    else if (image->bits.format == PIXMAN_a8b8g8r8)
-		*code = PIXMAN_a8b8g8r8_ca;
+	    if (image->type == BITS)
+	    {
+		/* These are the *only* component_alpha formats
+		 * we support for fast paths
+		 */
+		if (image->bits.format == PIXMAN_a8r8g8b8)
+		    *code = PIXMAN_a8r8g8b8_ca;
+		else if (image->bits.format == PIXMAN_a8b8g8r8)
+		    *code = PIXMAN_a8b8g8r8_ca;
+		else
+		    return FALSE;
+	    }
 	    else
+	    {
 		return FALSE;
+	    }
+	}
+	else if (_pixman_image_is_solid (image))
+	{
+	    *code = PIXMAN_solid;
+	}
+	else if (image->common.type == BITS)
+	{
+	    *code = image->bits.format;
 	}
 	else
 	{
 	    return FALSE;
 	}
+	
+	return TRUE;
     }
-    else if (_pixman_image_is_solid (image))
-    {
-	*code = PIXMAN_solid;
-    }
-    else if (image->common.type == BITS)
-    {
-	*code = image->bits.format;
-    }
-    else
-    {
-	return FALSE;
-    }
-
-    return TRUE;
 }
 
 static force_inline pixman_bool_t
@@ -622,23 +630,16 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
                        int32_t                   width,
                        int32_t                   height)
 {
-    pixman_composite_func_t func = NULL;
     pixman_bool_t src_repeat = src->common.repeat == PIXMAN_REPEAT_NORMAL;
     pixman_bool_t mask_repeat = mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
-    pixman_bool_t result;
     pixman_format_code_t src_format, mask_format, dest_format;
+    pixman_composite_func_t func = NULL;
     const pixman_fast_path_t *info;
-
-    /* Source */
-    if (!source_is_fastpathable (src))
-	return FALSE;
+    pixman_bool_t result;
 
     if (!get_source_format_code (src, &src_format))
 	return FALSE;
 
-    if (mask && !source_is_fastpathable (mask))
-	return FALSE;
-
     if (!get_source_format_code (mask, &mask_format))
 	return FALSE;
 
commit 171dc4875644f72d65ff2e31533edacc781069ec
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Sep 12 05:09:53 2009 -0400

    Fold get_fast_path() into _pixman_run_fast_path()
    
    Also factor out the source format code computation to its own
    function.
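
The table lookup that used to live in get_fast_path() now runs inline in
_pixman_run_fast_path(). A simplified sketch of the resulting loop; the
1x1 repeating-image fallback shown in the diff is left out here:

    for (info = paths; info->op != PIXMAN_OP_NONE; ++info)
    {
        if (info->op == op                     &&
            info->src_format == src_format     &&
            info->mask_format == mask_format   &&
            info->dest_format == dest_format)
        {
            func = info->func;

            /* Solid sources and masks never need repeat handling */
            if (info->src_format == PIXMAN_solid)
                src_repeat = FALSE;
            if (info->mask_format == PIXMAN_solid)
                mask_repeat = FALSE;

            break;
        }
    }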

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index b2c5c51..4a9e616 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -510,7 +510,7 @@ source_is_fastpathable (pixman_image_t *image)
 	return FALSE;
     }
 
-    if (image->type == BITS	&&
+    if (image->type == BITS					&&
 	(image->bits.read_func					||
 	 image->bits.write_func					||
 	 PIXMAN_FORMAT_IS_WIDE (image->bits.format)))
@@ -521,116 +521,46 @@ source_is_fastpathable (pixman_image_t *image)
     return TRUE;
 }
 
-static const pixman_fast_path_t *
-get_fast_path (const pixman_fast_path_t *fast_paths,
-               pixman_op_t               op,
-               pixman_image_t *          src,
-               pixman_image_t *          mask,
-               pixman_image_t *          dest,
-	       int			 src_x,
-	       int			 src_y,
-	       int			 mask_x,
-	       int			 mask_y)
+static pixman_bool_t
+get_source_format_code (pixman_image_t *image, pixman_format_code_t *code)
 {
-    pixman_format_code_t src_format, mask_format, dest_format;
-    const pixman_fast_path_t *info;
-
-    /* Source */
-    if (!source_is_fastpathable (src))
-	return NULL;
-
-    if (mask && !source_is_fastpathable (mask))
-	return NULL;
-
-    /* Destination */
-
-    if (dest->common.alpha_map	||
-	dest->bits.read_func	||
-	dest->bits.write_func)
-    {
-	return NULL;
-    }
-
-    /* Source */
-    
-    if (_pixman_image_is_solid (src))
-    {
-	src_format = PIXMAN_solid;
-    }
-    else if (src->type == BITS)
-    {
-	src_format = src->bits.format;
-    }
-    else
+    if (!image)
     {
-	return NULL;
+	*code = PIXMAN_null;
     }
-    
-    /* Mask */
-    if (!mask)
+    else if (image->common.component_alpha)
     {
-	mask_format = PIXMAN_null;
-    }
-    else if (mask->common.component_alpha)
-    {
-	if (mask->type == BITS)
+	if (image->type == BITS)
 	{
 	    /* These are the *only* component_alpha formats
 	     * we support for fast paths
 	     */
-	    if (mask->bits.format == PIXMAN_a8r8g8b8)
-		mask_format = PIXMAN_a8r8g8b8_ca;
-	    else if (mask->bits.format == PIXMAN_a8b8g8r8)
-		mask_format = PIXMAN_a8b8g8r8_ca;
+	    if (image->bits.format == PIXMAN_a8r8g8b8)
+		*code = PIXMAN_a8r8g8b8_ca;
+	    else if (image->bits.format == PIXMAN_a8b8g8r8)
+		*code = PIXMAN_a8b8g8r8_ca;
 	    else
-		return NULL;
+		return FALSE;
 	}
 	else
 	{
-	    return NULL;
+	    return FALSE;
 	}
     }
-    else if (_pixman_image_is_solid (mask))
+    else if (_pixman_image_is_solid (image))
     {
-	mask_format = PIXMAN_solid;
+	*code = PIXMAN_solid;
     }
-    else if (mask->common.type == BITS)
+    else if (image->common.type == BITS)
     {
-	mask_format = mask->bits.format;
+	*code = image->bits.format;
     }
     else
     {
-	return NULL;
-    }
-
-    dest_format = dest->bits.format;
-
-    /* Check for pixbufs */
-    if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
-	(src->type == BITS && src->bits.bits == mask->bits.bits)	   &&
-	(src->common.repeat == mask->common.repeat)			   &&
-	(src_x == mask_x && src_y == mask_y))
-    {
-	if (src_format == PIXMAN_x8b8g8r8)
-	    src_format = mask_format = PIXMAN_pixbuf;
-	else if (src_format == PIXMAN_x8r8g8b8)
-	    src_format = mask_format = PIXMAN_rpixbuf;
-	else
-	    return NULL;
-    }
-
-    for (info = fast_paths; info->op != PIXMAN_OP_NONE; ++info)
-    {
-	if (info->op == op			&&
-	    info->src_format == src_format	&&
-	    info->mask_format == mask_format	&&
-	    info->dest_format == dest_format)
-	{
-	    return info;
-	}
+	return FALSE;
     }
 
-    return NULL;
+    return TRUE;
 }
 
 static force_inline pixman_bool_t
@@ -693,44 +623,84 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
                        int32_t                   height)
 {
     pixman_composite_func_t func = NULL;
-    pixman_bool_t src_repeat =
-	src->common.repeat == PIXMAN_REPEAT_NORMAL;
-    pixman_bool_t mask_repeat =
-	mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
+    pixman_bool_t src_repeat = src->common.repeat == PIXMAN_REPEAT_NORMAL;
+    pixman_bool_t mask_repeat = mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
     pixman_bool_t result;
+    pixman_format_code_t src_format, mask_format, dest_format;
     const pixman_fast_path_t *info;
-    
-    if ((info = get_fast_path (paths, op, src, mask, dest, src_x, src_y, mask_x, mask_y)))
+
+    /* Source */
+    if (!source_is_fastpathable (src))
+	return FALSE;
+
+    if (!get_source_format_code (src, &src_format))
+	return FALSE;
+
+    if (mask && !source_is_fastpathable (mask))
+	return FALSE;
+
+    if (!get_source_format_code (mask, &mask_format))
+	return FALSE;
+
+    if (dest->common.alpha_map	||
+	dest->bits.read_func	||
+	dest->bits.write_func)
+    {
+	return FALSE;
+    }
+
+    dest_format = dest->bits.format;
+
+    /* Check for pixbufs */
+    if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
+	(src->type == BITS && src->bits.bits == mask->bits.bits)	   &&
+	(src->common.repeat == mask->common.repeat)			   &&
+	(src_x == mask_x && src_y == mask_y))
+    {
+	if (src_format == PIXMAN_x8b8g8r8)
+	    src_format = mask_format = PIXMAN_pixbuf;
+	else if (src_format == PIXMAN_x8r8g8b8)
+	    src_format = mask_format = PIXMAN_rpixbuf;
+    }
+
+    for (info = paths; info->op != PIXMAN_OP_NONE; ++info)
     {
-	func = info->func;
-	
-	if (info->src_format == PIXMAN_solid)
-	    src_repeat = FALSE;
-	
-	if (info->mask_format == PIXMAN_solid)
-	    mask_repeat = FALSE;
-	
-	if ((src_repeat                     &&
-	     src->bits.width == 1           &&
-	     src->bits.height == 1)		||
-	    (mask_repeat			&&
-	     mask->bits.width == 1		&&
-	     mask->bits.height == 1))
+	if (info->op == op			&&
+	    info->src_format == src_format	&&
+	    info->mask_format == mask_format	&&
+	    info->dest_format == dest_format)
 	{
-	    /* If src or mask are repeating 1x1 images and src_repeat or
-	     * mask_repeat are still TRUE, it means the fast path we
-	     * selected does not actually handle repeating images.
-	     *
-	     * So rather than calling the "fast path" with a zillion
-	     * 1x1 requests, we just fall back to the general code (which
-	     * does do something sensible with 1x1 repeating images).
-	     */
-	    func = NULL;
+	    func = info->func;
+	    
+	    if (info->src_format == PIXMAN_solid)
+		src_repeat = FALSE;
+	    
+	    if (info->mask_format == PIXMAN_solid)
+		mask_repeat = FALSE;
+	    
+	    if ((src_repeat				&&
+		 src->bits.width == 1		&&
+		 src->bits.height == 1)		||
+		(mask_repeat			&&
+		 mask->bits.width == 1		&&
+		 mask->bits.height == 1))
+	    {
+		/* If src or mask are repeating 1x1 images and src_repeat or
+		 * mask_repeat are still TRUE, it means the fast path we
+		 * selected does not actually handle repeating images.
+		 *
+		 * So rather than calling the "fast path" with a zillion
+		 * 1x1 requests, we just fall back to the general code (which
+		 * does do something sensible with 1x1 repeating images).
+		 */
+		func = NULL;
+	    }
+	    break;
 	}
     }
 
     result = FALSE;
-
+    
     if (func)
     {
 	pixman_region32_t region;
commit 459c7a52f67c9628e94107599e3abbc6463cbd0f
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Sep 12 04:30:22 2009 -0400

    Consolidate the source and mask sanity checks into a function
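
With the helper in place, both call sites reduce to one check each. A minimal
sketch of the resulting callers, as in the hunk below:

    /* Source */
    if (!source_is_fastpathable (src))
        return NULL;

    /* The mask is optional, so it is only checked when present */
    if (mask && !source_is_fastpathable (mask))
        return NULL;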

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index 7b57743..b2c5c51 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -498,6 +498,29 @@ _pixman_walk_composite_region (pixman_implementation_t *imp,
     }
 }
 
+static pixman_bool_t
+source_is_fastpathable (pixman_image_t *image)
+{
+    if (image->common.transform					||
+	image->common.alpha_map					||
+	image->common.filter == PIXMAN_FILTER_CONVOLUTION	||
+	image->common.repeat == PIXMAN_REPEAT_PAD		||
+	image->common.repeat == PIXMAN_REPEAT_REFLECT)
+    {
+	return FALSE;
+    }
+
+    if (image->type == BITS	&&
+	(image->bits.read_func					||
+	 image->bits.write_func					||
+	 PIXMAN_FORMAT_IS_WIDE (image->bits.format)))
+    {
+	return FALSE;
+    }
+
+    return TRUE;
+}
+
 static const pixman_fast_path_t *
 get_fast_path (const pixman_fast_path_t *fast_paths,
                pixman_op_t               op,
@@ -513,45 +536,11 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
     const pixman_fast_path_t *info;
 
     /* Source */
-    
-    if (src->common.transform				||
-	src->common.alpha_map				||
-	src->common.filter == PIXMAN_FILTER_CONVOLUTION	||
-	src->common.repeat == PIXMAN_REPEAT_PAD		||
-	src->common.repeat == PIXMAN_REPEAT_REFLECT)
-    {
+    if (!source_is_fastpathable (src))
 	return NULL;
-    }
 
-    if (src->type == BITS		&&
-	(src->bits.read_func				||
-	 src->bits.write_func				||
-	 PIXMAN_FORMAT_IS_WIDE (src->bits.format)))
-    {
+    if (mask && !source_is_fastpathable (mask))
 	return NULL;
-    }
-
-    /* Mask */
-    
-    if (mask)
-    {
-	if (mask->common.transform				||
-	    mask->common.alpha_map				||
-	    mask->common.filter == PIXMAN_FILTER_CONVOLUTION	||
-	    mask->common.repeat == PIXMAN_REPEAT_PAD		||
-	    mask->common.repeat == PIXMAN_REPEAT_REFLECT)
-	{
-	    return NULL;
-	}
-
-	if (mask->type == BITS		&&
-	    (mask->bits.read_func				||
-	     mask->bits.write_func				||
-	     PIXMAN_FORMAT_IS_WIDE (src->bits.format)))
-	{
-	    return NULL;
-	}
-    }
 
     /* Destination */
 
commit 27a4fb4747426ee935d2149cca2197a369c4556d
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Sep 12 04:10:30 2009 -0400

    Move pixbuf checks after src_format and mask_format have been computed.
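
The point of the reordering is that the pixbuf test can be expressed purely in
terms of the already-computed format codes: when the same bits are used both
as the color source (x8r8g8b8/x8b8g8r8) and, with its alpha channel, as the
mask (a8r8g8b8/a8b8g8r8), the pair is collapsed into a single pixbuf/rpixbuf
source. A condensed sketch of the check; the full version is in the hunk
below:

    if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
        (src->type == BITS && src->bits.bits == mask->bits.bits)           &&
        (src->common.repeat == mask->common.repeat)                        &&
        (src_x == mask_x && src_y == mask_y))
    {
        if (src_format == PIXMAN_x8b8g8r8)
            src_format = mask_format = PIXMAN_pixbuf;
        else if (src_format == PIXMAN_x8r8g8b8)
            src_format = mask_format = PIXMAN_rpixbuf;
        else
            return NULL;
    }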

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index 60940da..7b57743 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -562,77 +562,74 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
 	return NULL;
     }
 
-    /* Check for pixbufs */
-    if (mask_image && mask_image->type == BITS								&&
-	(mask_image->bits.format == PIXMAN_a8r8g8b8 || mask_image->bits.format == PIXMAN_a8b8g8r8)	&&
-	(src_image->type == BITS && src_image->bits.bits == mask_image->bits.bits)			&&
-	(src_image->common.repeat == mask_image->common.repeat)						&&
-	(src_x == mask_x && src_y == mask_y))
+    /* Source */
+    
+    if (_pixman_image_is_solid (src))
     {
-	if (src_image->bits.format == PIXMAN_x8b8g8r8)
-	    src_format = mask_format = PIXMAN_pixbuf;
-	else if (src_image->bits.format == PIXMAN_x8r8g8b8)
-	    src_format = mask_format = PIXMAN_rpixbuf;
-	else
-	    return NULL;
+	src_format = PIXMAN_solid;
+    }
+    else if (src->type == BITS)
+    {
+	src_format = src->bits.format;
     }
     else
     {
-	/* Source */
-	
-	if (_pixman_image_is_solid (src))
-	{
-	    src_format = PIXMAN_solid;
-	}
-	else if (src->type == BITS)
-	{
-	    src_format = src->bits.format;
-	}
-	else
-	{
-	    return NULL;
-	}
-
-	/* Mask */
-	if (!mask)
-	{
-	    mask_format = PIXMAN_null;
-	}
-	else if (mask->common.component_alpha)
+	return NULL;
+    }
+    
+    /* Mask */
+    if (!mask)
+    {
+	mask_format = PIXMAN_null;
+    }
+    else if (mask->common.component_alpha)
+    {
+	if (mask->type == BITS)
 	{
-	    if (mask->type == BITS)
-	    {
-		/* These are the *only* component_alpha formats
-		 * we support for fast paths
-		 */
-		if (mask->bits.format == PIXMAN_a8r8g8b8)
-		    mask_format = PIXMAN_a8r8g8b8_ca;
-		else if (mask->bits.format == PIXMAN_a8b8g8r8)
-		    mask_format = PIXMAN_a8b8g8r8_ca;
-		else
-		    return NULL;
-	    }
+	    /* These are the *only* component_alpha formats
+	     * we support for fast paths
+	     */
+	    if (mask->bits.format == PIXMAN_a8r8g8b8)
+		mask_format = PIXMAN_a8r8g8b8_ca;
+	    else if (mask->bits.format == PIXMAN_a8b8g8r8)
+		mask_format = PIXMAN_a8b8g8r8_ca;
 	    else
-	    {
 		return NULL;
-	    }
-	}
-	else if (_pixman_image_is_solid (mask))
-	{
-	    mask_format = PIXMAN_solid;
-	}
-	else if (mask->common.type == BITS)
-	{
-	    mask_format = mask->bits.format;
 	}
 	else
 	{
 	    return NULL;
 	}
     }
+    else if (_pixman_image_is_solid (mask))
+    {
+	mask_format = PIXMAN_solid;
+    }
+    else if (mask->common.type == BITS)
+    {
+	mask_format = mask->bits.format;
+    }
+    else
+    {
+	return NULL;
+    }
 
     dest_format = dest->bits.format;
-    
+
+    /* Check for pixbufs */
+    if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
+	(src->type == BITS && src->bits.bits == mask->bits.bits)	   &&
+	(src->common.repeat == mask->common.repeat)			   &&
+	(src_x == mask_x && src_y == mask_y))
+    {
+	if (src_format == PIXMAN_x8b8g8r8)
+	    src_format = mask_format = PIXMAN_pixbuf;
+	else if (src_format == PIXMAN_x8r8g8b8)
+	    src_format = mask_format = PIXMAN_rpixbuf;
+	else
+	    return NULL;
+    }
+
     for (info = fast_paths; info->op != PIXMAN_OP_NONE; ++info)
     {
 	if (info->op == op			&&
commit 2def1a8867a1ab0ccab720d1cc3f3c7b61c74619
Author: Søren Sandmann Pedersen <sandmann at redhat.com>
Date:   Sat Sep 12 04:03:25 2009 -0400

    Move the sanity checks for src, mask and destination into get_fast_path()

diff --git a/pixman/pixman-utils.c b/pixman/pixman-utils.c
index a80a226..60940da 100644
--- a/pixman/pixman-utils.c
+++ b/pixman/pixman-utils.c
@@ -501,9 +501,9 @@ _pixman_walk_composite_region (pixman_implementation_t *imp,
 static const pixman_fast_path_t *
 get_fast_path (const pixman_fast_path_t *fast_paths,
                pixman_op_t               op,
-               pixman_image_t *          src_image,
-               pixman_image_t *          mask_image,
-               pixman_image_t *          dst_image,
+               pixman_image_t *          src,
+               pixman_image_t *          mask,
+               pixman_image_t *          dest,
 	       int			 src_x,
 	       int			 src_y,
 	       int			 mask_x,
@@ -512,6 +512,56 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
     pixman_format_code_t src_format, mask_format, dest_format;
     const pixman_fast_path_t *info;
 
+    /* Source */
+    
+    if (src->common.transform				||
+	src->common.alpha_map				||
+	src->common.filter == PIXMAN_FILTER_CONVOLUTION	||
+	src->common.repeat == PIXMAN_REPEAT_PAD		||
+	src->common.repeat == PIXMAN_REPEAT_REFLECT)
+    {
+	return NULL;
+    }
+
+    if (src->type == BITS		&&
+	(src->bits.read_func				||
+	 src->bits.write_func				||
+	 PIXMAN_FORMAT_IS_WIDE (src->bits.format)))
+    {
+	return NULL;
+    }
+
+    /* Mask */
+    
+    if (mask)
+    {
+	if (mask->common.transform				||
+	    mask->common.alpha_map				||
+	    mask->common.filter == PIXMAN_FILTER_CONVOLUTION	||
+	    mask->common.repeat == PIXMAN_REPEAT_PAD		||
+	    mask->common.repeat == PIXMAN_REPEAT_REFLECT)
+	{
+	    return NULL;
+	}
+
+	if (mask->type == BITS		&&
+	    (mask->bits.read_func				||
+	     mask->bits.write_func				||
+	     PIXMAN_FORMAT_IS_WIDE (src->bits.format)))
+	{
+	    return NULL;
+	}
+    }
+
+    /* Destination */
+
+    if (dest->common.alpha_map	||
+	dest->bits.read_func	||
+	dest->bits.write_func)
+    {
+	return NULL;
+    }
+
     /* Check for pixbufs */
     if (mask_image && mask_image->type == BITS								&&
 	(mask_image->bits.format == PIXMAN_a8r8g8b8 || mask_image->bits.format == PIXMAN_a8b8g8r8)	&&
@@ -529,13 +579,14 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
     else
     {
 	/* Source */
-	if (_pixman_image_is_solid (src_image))
+	
+	if (_pixman_image_is_solid (src))
 	{
 	    src_format = PIXMAN_solid;
 	}
-	else if (src_image->type == BITS)
+	else if (src->type == BITS)
 	{
-	    src_format = src_image->bits.format;
+	    src_format = src->bits.format;
 	}
 	else
 	{
@@ -543,20 +594,20 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
 	}
 
 	/* Mask */
-	if (!mask_image)
+	if (!mask)
 	{
 	    mask_format = PIXMAN_null;
 	}
-	else if (mask_image->common.component_alpha)
+	else if (mask->common.component_alpha)
 	{
-	    if (mask_image->type == BITS)
+	    if (mask->type == BITS)
 	    {
 		/* These are the *only* component_alpha formats
 		 * we support for fast paths
 		 */
-		if (mask_image->bits.format == PIXMAN_a8r8g8b8)
+		if (mask->bits.format == PIXMAN_a8r8g8b8)
 		    mask_format = PIXMAN_a8r8g8b8_ca;
-		else if (mask_image->bits.format == PIXMAN_a8b8g8r8)
+		else if (mask->bits.format == PIXMAN_a8b8g8r8)
 		    mask_format = PIXMAN_a8b8g8r8_ca;
 		else
 		    return NULL;
@@ -566,13 +617,13 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
 		return NULL;
 	    }
 	}
-	else if (_pixman_image_is_solid (mask_image))
+	else if (_pixman_image_is_solid (mask))
 	{
 	    mask_format = PIXMAN_solid;
 	}
-	else if (mask_image->common.type == BITS)
+	else if (mask->common.type == BITS)
 	{
-	    mask_format = mask_image->bits.format;
+	    mask_format = mask->bits.format;
 	}
 	else
 	{
@@ -580,7 +631,7 @@ get_fast_path (const pixman_fast_path_t *fast_paths,
 	}
     }
 
-    dest_format = dst_image->bits.format;
+    dest_format = dest->bits.format;
     
     for (info = fast_paths; info->op != PIXMAN_OP_NONE; ++info)
     {
@@ -661,71 +712,34 @@ _pixman_run_fast_path (const pixman_fast_path_t *paths,
     pixman_bool_t mask_repeat =
 	mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
     pixman_bool_t result;
-    pixman_bool_t has_fast_path;
-
-    has_fast_path = !dest->common.alpha_map &&
-		    !dest->bits.read_func &&
-		    !dest->bits.write_func;
-
-    if (has_fast_path)
-    {
-	has_fast_path = !src->common.transform &&
-	                !src->common.alpha_map &&
-			src->common.filter != PIXMAN_FILTER_CONVOLUTION &&
-			src->common.repeat != PIXMAN_REPEAT_PAD &&
-			src->common.repeat != PIXMAN_REPEAT_REFLECT;
-	if (has_fast_path && src->type == BITS)
-	{
-	    has_fast_path = !src->bits.read_func &&
-	                    !src->bits.write_func &&
-		            !PIXMAN_FORMAT_IS_WIDE (src->bits.format);
-	}
-    }
-
-    if (mask && has_fast_path)
-    {
-	has_fast_path =
-	    !mask->common.transform &&
-	    !mask->common.alpha_map &&
-	    !mask->bits.read_func &&
-	    !mask->bits.write_func &&
-	    mask->common.filter != PIXMAN_FILTER_CONVOLUTION &&
-	    mask->common.repeat != PIXMAN_REPEAT_PAD &&
-	    mask->common.repeat != PIXMAN_REPEAT_REFLECT &&
-	    !PIXMAN_FORMAT_IS_WIDE (mask->bits.format);
-    }
-
-    if (has_fast_path)
+    const pixman_fast_path_t *info;
+    
+    if ((info = get_fast_path (paths, op, src, mask, dest, src_x, src_y, mask_x, mask_y)))
     {
-	const pixman_fast_path_t *info;
-
-	if ((info = get_fast_path (paths, op, src, mask, dest, src_x, src_y, mask_x, mask_y)))
+	func = info->func;
+	
+	if (info->src_format == PIXMAN_solid)
+	    src_repeat = FALSE;
+	
+	if (info->mask_format == PIXMAN_solid)
+	    mask_repeat = FALSE;
+	
+	if ((src_repeat                     &&
+	     src->bits.width == 1           &&
+	     src->bits.height == 1)		||
+	    (mask_repeat			&&
+	     mask->bits.width == 1		&&
+	     mask->bits.height == 1))
 	{
-	    func = info->func;
-
-	    if (info->src_format == PIXMAN_solid)
-		src_repeat = FALSE;
-
-	    if (info->mask_format == PIXMAN_solid)
-		mask_repeat = FALSE;
-
-	    if ((src_repeat                     &&
-		 src->bits.width == 1           &&
-		 src->bits.height == 1)		||
-		(mask_repeat			&&
-		 mask->bits.width == 1		&&
-		 mask->bits.height == 1))
-	    {
-		/* If src or mask are repeating 1x1 images and src_repeat or
-		 * mask_repeat are still TRUE, it means the fast path we
-		 * selected does not actually handle repeating images.
-		 *
-		 * So rather than calling the "fast path" with a zillion
-		 * 1x1 requests, we just fall back to the general code (which
-		 * does do something sensible with 1x1 repeating images).
-		 */
-		func = NULL;
-	    }
+	    /* If src or mask are repeating 1x1 images and src_repeat or
+	     * mask_repeat are still TRUE, it means the fast path we
+	     * selected does not actually handle repeating images.
+	     *
+	     * So rather than calling the "fast path" with a zillion
+	     * 1x1 requests, we just fall back to the general code (which
+	     * does do something sensible with 1x1 repeating images).
+	     */
+	    func = NULL;
 	}
     }
 

