
On 01/22/2018 01:29 AM, Simon Glass wrote:
Hi Heinrich,
On 17 January 2018 at 19:42, Heinrich Schuchardt xypron.glpk@gmx.de wrote:
Support special rendition code 0 - reset attributes. Support special rendition code 1 - increased intensity (bold). Get RGB sequence in pixels right (swap blue and red). Do not set reserved bits. Use u32 instead of unsigned for color bit mask.
qemu-system-i386 -display sdl -vga virtio and qemu-system-i386 -display sdl -vga cirrus now display the same colors as qemu-system-i386 -nographic
Testing is possible via
setenv efi_selftest test output bootefi selftest
Signed-off-by: Heinrich Schuchardt xypron.glpk@gmx.de
v2: SGR 0 should reset the colors and the attributes.
drivers/video/vidconsole-uclass.c | 53 ++++++++++++++++++++++++++------------ drivers/video/video-uclass.c | 54 +++++++++++++++++++++++++++++++++------ include/video.h | 13 ++++++++-- test/dm/video.c | 2 +- 4 files changed, 94 insertions(+), 28 deletions(-)
Reviewed-by: Simon Glass sjg@chromium.org
Some comments below. IMO there is a bit too much going on in this patch.
Do you want me to split the patch into a series?
diff --git a/drivers/video/vidconsole-uclass.c b/drivers/video/vidconsole-uclass.c index 5f63c12d6c..3c39ee06dd 100644 --- a/drivers/video/vidconsole-uclass.c +++ b/drivers/video/vidconsole-uclass.c @@ -114,28 +114,35 @@ static const struct { unsigned b; } colors[] = { { 0x00, 0x00, 0x00 }, /* black */
Perhaps we should have an enum for these colours and then:
[BLACK] = {...}, [RED] = ...
{ 0xff, 0x00, 0x00 }, /* red */
{ 0x00, 0xff, 0x00 }, /* green */
{ 0xc0, 0x00, 0x00 }, /* red */
{ 0x00, 0xc0, 0x00 }, /* green */
{ 0xc0, 0x60, 0x00 }, /* brown */
{ 0x00, 0x00, 0xc0 }, /* blue */
{ 0xc0, 0x00, 0xc0 }, /* magenta */
{ 0x00, 0xc0, 0xc0 }, /* cyan */
{ 0xc0, 0xc0, 0xc0 }, /* light gray */
{ 0x80, 0x80, 0x80 }, /* gray */
{ 0xff, 0x00, 0x00 }, /* bright red */
{ 0x00, 0xff, 0x00 }, /* bright green */ { 0xff, 0xff, 0x00 }, /* yellow */
{ 0x00, 0x00, 0xff }, /* blue */
{ 0xff, 0x00, 0xff }, /* magenta */
{ 0x00, 0xff, 0xff }, /* cyan */
{ 0x00, 0x00, 0xff }, /* bright blue */
{ 0xff, 0x00, 0xff }, /* bright magenta */
{ 0x00, 0xff, 0xff }, /* bright cyan */ { 0xff, 0xff, 0xff }, /* white */
};
-static void set_color(struct video_priv *priv, unsigned idx, unsigned *c) +static void set_color(struct video_priv *priv, unsigned int idx, u32 *c)
Can you add a comment for this?
Also, why is *c a u32?
Because a pixel has a maximum of 32 bits. The only thing guaranteed about the standard integer types is sizeof(char) <= sizeof(short) <= sizeof(int) <= sizeof(long)
{ switch (priv->bpix) { case VIDEO_BPP16:
*c = ((colors[idx].r >> 3) << 0) |
((colors[idx].g >> 2) << 5) |
((colors[idx].b >> 3) << 11);
*c = ((colors[idx].r >> 3) << 11) |
((colors[idx].g >> 2) << 5) |
((colors[idx].b >> 3) << 0); break; case VIDEO_BPP32:
*c = 0xff000000 |
(colors[idx].r << 0) |
(colors[idx].g << 8) |
(colors[idx].b << 16);
*c = (colors[idx].r << 16) |
(colors[idx].g << 8) |
(colors[idx].b << 0); break; default: /* unsupported, leave current color in place */
@@ -270,18 +277,30 @@ static void vidconsole_escape_char(struct udevice *dev, char ch) s++;
switch (val) {
case 0:
/* all attributes off */
video_set_default_colors(vid_priv);
break;
case 1:
/* bold */
vid_priv->fg |= 8;
set_color(vid_priv, vid_priv->fg,
(unsigned int *)&vid_priv->colour_fg);
break; case 30 ... 37: /* fg color */
set_color(vid_priv, val - 30,
(unsigned *)&vid_priv->colour_fg);
vid_priv->fg &= ~7;
vid_priv->fg |= val - 30;
set_color(vid_priv, vid_priv->fg,
&vid_priv->colour_fg); break; case 40 ... 47: /* bg color */ set_color(vid_priv, val - 40,
(unsigned *)&vid_priv->colour_bg);
&vid_priv->colour_bg); break; default:
/* unknown/unsupported */
/* ignore unsupported SGR parameter */ break; } }
diff --git a/drivers/video/video-uclass.c b/drivers/video/video-uclass.c index dcaceed42c..ec6ee10c67 100644 --- a/drivers/video/video-uclass.c +++ b/drivers/video/video-uclass.c @@ -91,17 +91,59 @@ void video_clear(struct udevice *dev) { struct video_priv *priv = dev_get_uclass_priv(dev);
if (priv->bpix == VIDEO_BPP32) {
switch (priv->bpix) {
case VIDEO_BPP16: {
u16 *ppix = priv->fb;
u16 *end = priv->fb + priv->fb_size;
while (ppix < end)
*ppix++ = priv->colour_bg;
break;
}
case VIDEO_BPP32: { u32 *ppix = priv->fb; u32 *end = priv->fb + priv->fb_size; while (ppix < end) *ppix++ = priv->colour_bg;
} else {
break;
}
default: memset(priv->fb, priv->colour_bg, priv->fb_size);
break; }
}
+void video_set_default_colors(struct video_priv *priv) +{ +#ifdef CONFIG_SYS_WHITE_ON_BLACK
switch (priv->bpix) {
case VIDEO_BPP16:
priv->colour_fg = 0xc618;
Can you add a comment as to what this colour is? Also for constants below.
Wouldn't it be better to put all color constants (see array above) into an include. Put a macro into the same include to convert from 24 bit colors to 16 bit colors.
break;
case VIDEO_BPP32:
priv->colour_fg = 0xc0c0c0;
break;
default:
priv->colour_fg = 0xffffff;
break;
}
priv->colour_bg = 0;
priv->fg = 7;
+#else
priv->colour_fg = 0;
switch (priv->bpix) {
case VIDEO_BPP16:
priv->colour_bg = 0xcffff;
break;
default:
priv->colour_bg = 0xffffff;
break;
}
priv->fg = 0;
+#endif +}
/* Flush video activity to the caches */ void video_sync(struct udevice *vid) { @@ -191,12 +233,8 @@ static int video_post_probe(struct udevice *dev) priv->line_length = priv->xsize * VNBYTES(priv->bpix); priv->fb_size = priv->line_length * priv->ysize;
/* Set up colours - we could in future support other colours */
-#ifdef CONFIG_SYS_WHITE_ON_BLACK
priv->colour_fg = 0xffffff;
-#else
priv->colour_bg = 0xffffff;
-#endif
/* Set up colors */
video_set_default_colors(priv); if (!CONFIG_IS_ENABLED(NO_FB_CLEAR)) video_clear(dev);
diff --git a/include/video.h b/include/video.h index 61ff653121..3101459d2a 100644 --- a/include/video.h +++ b/include/video.h @@ -67,6 +67,7 @@ enum video_log2_bpp {
- @flush_dcache: true to enable flushing of the data cache after
the LCD is updated
- @cmap: Colour map for 8-bit-per-pixel displays
- @fg: Foreground color code (bit 3 = bold, bit 0-2 = color)
This name is not very descriptive. Is it the ansi colour? If so, how about ansi_fg?
*/ struct video_priv { /* Things set up by the driver: */ @@ -84,10 +85,11 @@ struct video_priv { void *fb; int fb_size; int line_length;
int colour_fg;
int colour_bg;
u32 colour_fg;
u32 colour_bg;
Should be uint I think, or ulong.
Why? A graphics card uses a maximum of 32 bits per pixel. I want 32 bits irrespective of what sizes int or long have.
The only rule coded in scripts/checkpatch.pl is that u32 is preferred over uint32_t.
Best regards
Heinrich
bool flush_dcache; ushort *cmap;
u8 fg;
};
/* Placeholder - there are no video operations at present */ @@ -183,6 +185,13 @@ int video_get_ysize(struct udevice *dev); */ void video_set_flush_dcache(struct udevice *dev, bool flush);
+/**
- Set default colors and attributes
- @priv device information
- */
+void video_set_default_colors(struct video_priv *priv);
#endif /* CONFIG_DM_VIDEO */
#ifndef CONFIG_DM_VIDEO diff --git a/test/dm/video.c b/test/dm/video.c index 29917d0c2d..caca496902 100644 --- a/test/dm/video.c +++ b/test/dm/video.c @@ -186,7 +186,7 @@ static int dm_test_video_ansi(struct unit_test_state *uts) /* test colors (30-37 fg color, 40-47 bg color) */ vidconsole_put_string(con, ANSI_ESC"[30;41mfoo"); /* black on red */ vidconsole_put_string(con, ANSI_ESC"[33;44mbar"); /* yellow on blue */
ut_asserteq(268, compress_frame_buffer(dev));
ut_asserteq(265, compress_frame_buffer(dev)); return 0;
}
2.15.1
Regards, Simon