xf86-video-intel: 2 commits - man/intel.man src/i810.h src/i830_display.c src/i830_driver.c src/i830.h

Zhenyu Wang zhen at kemper.freedesktop.org
Tue Apr 8 23:36:50 PDT 2008


 man/intel.man      |   17 +++++++++++++++++
 src/i810.h         |    1 +
 src/i830.h         |    2 ++
 src/i830_display.c |   19 +++++++++++++++----
 src/i830_driver.c  |    8 ++++++++
 5 files changed, 43 insertions(+), 4 deletions(-)

New commits:
commit e031cc02e65acfbafb48136dad414751e04425c5
Author: Mike Isely <isely at pobox.com>
Date:   Wed Apr 9 14:15:03 2008 +0800

    Implement support for 24 bit pixel format
    
    The Intel driver appears to be coded to only work with displays
    expecting 18 bit pixels.  However I have an application using a LCD
    display that expects pixel data in 24 bit format.  The difference is
    only 2 bits in a single GPU register.  This patch implements that
    change, controlled by a new driver option, "LVDS24Bit".  The default
    value is false, which is the previous behavior.  When set to true,
    then 24 bit panels should work (at least the one I'm testing here
    does).
    
    Fd.o bug #15201
    
    Signed-off-by: Mike Isely <isely at pobox.com>

diff --git a/man/intel.man b/man/intel.man
index 69af25e..8a8b7a0 100644
--- a/man/intel.man
+++ b/man/intel.man
@@ -183,6 +183,23 @@ causing a crash.  If you find that your platform needs this option, please file
 a bug against xf86-video-intel at http://bugs.freedesktop.org which includes
 the output of 'lspci -v' and 'lspci -vn'.
 .TP
+.BI "Option \*qLVDS24Bit\*q \*q" boolean \*q
+Specify 24 bit pixel format (i.e. 8 bits per color) to be used for the
+LVDS output.  Some newer LCD panels expect pixels to be formatted and
+sent as 8 bits per color channel instead of the more common 6 bits per
+color channel.  Set this option to true to enable the newer format.
+Note that this concept is entirely different and independent from the
+frame buffer color depth - which is still controlled in the usual way
+within the X server.  This option instead selects the physical format
+/ sequencing of the digital bits sent to the display.  Setting the
+frame buffer color depth is really a matter of preference by the user,
+while setting the pixel format here is a requirement of the connected
+hardware.  Leaving this unset implies the default value of false,
+which is almost always going to be the right choice.  If your
+LVDS-connected display on the other hand is extremely washed out
+(e.g. white on a lighter white), trying this option might clear the
+problem.
+.TP
 .BI "Option \*qXvMC\*q \*q" boolean \*q
 Enable XvMC driver. Current support MPEG2 MC on 915/945 and G33 series.
 User should provide absolute path to libIntelXvMC.so in XvMCConfig file.
diff --git a/src/i830.h b/src/i830.h
index bd41a2f..834e4dc 100644
--- a/src/i830.h
+++ b/src/i830.h
@@ -548,6 +548,8 @@ typedef struct _I830Rec {
    /* Broken-out options. */
    OptionInfoPtr Options;
 
+   Bool lvds_24_bit_mode;
+
    Bool StolenOnly;
 
    Bool swfSaved;
diff --git a/src/i830_display.c b/src/i830_display.c
index 4f3f8ef..4091e79 100644
--- a/src/i830_display.c
+++ b/src/i830_display.c
@@ -1289,10 +1289,21 @@ i830_crtc_mode_set(xf86CrtcPtr crtc, DisplayModePtr mode,
 	else
 	    lvds &= ~(LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP);
 
-	/* It would be nice to set 24 vs 18-bit mode (LVDS_A3_POWER_UP)
-	 * appropriately here, but we need to look more thoroughly into how
-	 * panels behave in the two modes.
-	 */
+	if (pI830->lvds_24_bit_mode) {
+	    /* Option set which requests 24-bit mode
+	     * (LVDS_A3_POWER_UP, as opposed to 18-bit mode) here; we
+	     * still need to look more thoroughly into how panels
+	     * behave in the two modes.  This option enables that
+	     * experimentation.
+	     */
+	    xf86DrvMsg(pScrn->scrnIndex, X_INFO,
+		       "Selecting less common 24 bit TMDS pixel format.\n");
+	    lvds |= LVDS_A3_POWER_UP;
+	    lvds |= LVDS_DATA_FORMAT_DOT_ONE;
+	} else {
+	    xf86DrvMsg(pScrn->scrnIndex, X_INFO,
+		       "Selecting standard 18 bit TMDS pixel format.\n");
+	}
 
 	/* Enable dithering if we're in 18-bit mode. */
 	if (IS_I965G(pI830))
diff --git a/src/i830_driver.c b/src/i830_driver.c
index a19c8eb..66153b7 100644
--- a/src/i830_driver.c
+++ b/src/i830_driver.c
@@ -296,6 +296,7 @@ typedef enum {
    OPTION_COLOR_KEY,
    OPTION_CHECKDEVICES,
    OPTION_MODEDEBUG,
+   OPTION_LVDS24BITMODE,
    OPTION_FBC,
    OPTION_TILING,
 #ifdef XF86DRI_MM
@@ -322,6 +323,7 @@ static OptionInfoRec I830Options[] = {
    {OPTION_VIDEO_KEY,	"VideoKey",	OPTV_INTEGER,	{0},	FALSE},
    {OPTION_CHECKDEVICES, "CheckDevices",OPTV_BOOLEAN,	{0},	FALSE},
    {OPTION_MODEDEBUG,	"ModeDebug",	OPTV_BOOLEAN,	{0},	FALSE},
+   {OPTION_LVDS24BITMODE, "LVDS24Bit",	OPTV_BOOLEAN,	{0},	FALSE},
    {OPTION_FBC,		"FramebufferCompression", OPTV_BOOLEAN, {0}, TRUE},
    {OPTION_TILING,	"Tiling",	OPTV_BOOLEAN,	{0},	TRUE},
 #ifdef XF86DRI_MM
@@ -1398,6 +1400,12 @@ I830PreInit(ScrnInfoPtr pScrn, int flags)
       pI830->debug_modes = FALSE;
    }
 
+   if (xf86ReturnOptValBool(pI830->Options, OPTION_LVDS24BITMODE, FALSE)) {
+      pI830->lvds_24_bit_mode = TRUE;
+   } else {
+      pI830->lvds_24_bit_mode = FALSE;
+   }
+
    if (xf86ReturnOptValBool(pI830->Options, OPTION_FORCEENABLEPIPEA, FALSE))
        pI830->quirk_flag |= QUIRK_PIPEA_FORCE;
 
commit 79b18980ac9f60b1978abe421352df965aed1681
Author: Zhenyu Wang <zhenyu.z.wang at intel.com>
Date:   Wed Apr 9 14:03:45 2008 +0800

    Include <stdint.h> in i810.h for integer types definition

diff --git a/src/i810.h b/src/i810.h
index b798021..e7331f6 100644
--- a/src/i810.h
+++ b/src/i810.h
@@ -39,6 +39,7 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 #ifndef _I810_H_
 #define _I810_H_
 
+#include <stdint.h>
 #include "compiler.h"
 #include "xf86PciInfo.h"
 #include "xf86Pci.h"


More information about the xorg-commit mailing list