summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDr.Smile <vabnick@gmail.com>2018-12-08 21:00:42 +0300
committerDr.Smile <vabnick@gmail.com>2019-05-20 01:05:10 +0300
commitd1feaf3684c14a24c707b6c50033d83014a8053b (patch)
tree56eed7c3526a0f1353c91a24844747f85c04df6e
parent20a26e98c27387004151ce291c1e5e0bda3cc0dd (diff)
downloadlibass-d1feaf3684c14a24c707b6c50033d83014a8053b.tar.bz2
libass-d1feaf3684c14a24c707b6c50033d83014a8053b.tar.xz
renderer: eliminate use of bitmap pointers as mode flags
Render logic should depend only on the input subtitles and not on internal state such as bitmap pointers. This prevents incorrect behavior in the case of allocation failure.
-rw-r--r--libass/ass_bitmap.c6
-rw-r--r--libass/ass_render.c65
2 files changed, 29 insertions, 42 deletions
diff --git a/libass/ass_bitmap.c b/libass/ass_bitmap.c
index 9ae8d7a..e43f727 100644
--- a/libass/ass_bitmap.c
+++ b/libass/ass_bitmap.c
@@ -199,6 +199,9 @@ bool outline_to_bitmap(ASS_Renderer *render_priv, Bitmap *bm,
*/
void fix_outline(Bitmap *bm_g, Bitmap *bm_o)
{
+ if (!bm_g->buffer || !bm_o->buffer)
+ return;
+
int32_t l = FFMAX(bm_o->left, bm_g->left);
int32_t t = FFMAX(bm_o->top, bm_g->top);
int32_t r = FFMIN(bm_o->left + bm_o->stride, bm_g->left + bm_g->stride);
@@ -223,6 +226,9 @@ void shift_bitmap(Bitmap *bm, int shift_x, int shift_y)
{
assert((shift_x & ~63) == 0 && (shift_y & ~63) == 0);
+ if (!bm->buffer)
+ return;
+
int32_t w = bm->w, h = bm->h;
ptrdiff_t s = bm->stride;
uint8_t *buf = bm->buffer;
diff --git a/libass/ass_render.c b/libass/ass_render.c
index 0a2a279..8338ef3 100644
--- a/libass/ass_render.c
+++ b/libass/ass_render.c
@@ -1846,43 +1846,6 @@ static bool is_new_bm_run(GlyphInfo *info, GlyphInfo *last)
((last->flags ^ info->flags) & ~DECO_ROTATE);
}
-static void make_shadow_bitmap(ASS_Renderer *render_priv,
- CompositeHashValue *val, const FilterDesc *filter)
-{
- Bitmap *bm = &val->bm, *bm_o = &val->bm_o, *bm_s = &val->bm_s;
-
- if (!(filter->flags & FILTER_NONZERO_SHADOW)) {
- if (bm->buffer && bm_o->buffer && !(filter->flags & FILTER_BORDER_STYLE_3)) {
- fix_outline(bm, bm_o);
- } else if (bm_o->buffer && !(filter->flags & FILTER_NONZERO_BORDER)) {
- ass_free_bitmap(bm_o);
- memset(bm_o, 0, sizeof(*bm_o));
- }
- return;
- }
-
- // Create shadow and fix outline as needed
- if (bm->buffer && bm_o->buffer && !(filter->flags & FILTER_BORDER_STYLE_3)) {
- copy_bitmap(render_priv->engine, bm_s, bm_o);
- fix_outline(bm, bm_o);
- } else if (bm_o->buffer && (filter->flags & FILTER_NONZERO_BORDER)) {
- copy_bitmap(render_priv->engine, bm_s, bm_o);
- } else if (bm_o->buffer) {
- *bm_s = *bm_o;
- memset(bm_o, 0, sizeof(*bm_o));
- } else if (bm->buffer)
- copy_bitmap(render_priv->engine, bm_s, bm);
-
- if (!bm_s->buffer)
- return;
-
- // Works right even for negative offsets
- // '>>' rounds toward negative infinity, '&' returns correct remainder
- bm_s->left += filter->shadow.x >> 6;
- bm_s->top += filter->shadow.y >> 6;
- shift_bitmap(bm_s, filter->shadow.x & SUBPIXEL_MASK, filter->shadow.y & SUBPIXEL_MASK);
-}
-
// Parse event text.
// Fill render_priv->text_info.
static bool parse_events(ASS_Renderer *render_priv, ASS_Event *event)
@@ -2428,13 +2391,31 @@ size_t ass_composite_construct(void *key, void *value, void *priv)
}
}
- if (v->bm.buffer || v->bm_o.buffer) {
- if (!v->bm_o.buffer || (k->filter.flags & FILTER_BORDER_STYLE_3))
- ass_synth_blur(render_priv->engine, &v->bm, k->filter.be, k->filter.blur);
- ass_synth_blur(render_priv->engine, &v->bm_o, k->filter.be, k->filter.blur);
- make_shadow_bitmap(render_priv, v, &k->filter);
+ int flags = k->filter.flags;
+ bool no_blur = (flags & ~FILTER_NONZERO_SHADOW) == FILTER_NONZERO_BORDER;
+ if (!no_blur)
+ ass_synth_blur(render_priv->engine, &v->bm, k->filter.be, k->filter.blur);
+ ass_synth_blur(render_priv->engine, &v->bm_o, k->filter.be, k->filter.blur);
+
+ if (flags & FILTER_NONZERO_SHADOW) {
+ if (flags & FILTER_NONZERO_BORDER)
+ copy_bitmap(render_priv->engine, &v->bm_s, &v->bm_o);
+ else if (flags & FILTER_BORDER_STYLE_3) {
+ v->bm_s = v->bm_o;
+ memset(&v->bm_o, 0, sizeof(v->bm_o));
+ } else
+ copy_bitmap(render_priv->engine, &v->bm_s, &v->bm);
+
+ // Works right even for negative offsets
+ // '>>' rounds toward negative infinity, '&' returns correct remainder
+ v->bm_s.left += k->filter.shadow.x >> 6;
+ v->bm_s.top += k->filter.shadow.y >> 6;
+ shift_bitmap(&v->bm_s, k->filter.shadow.x & SUBPIXEL_MASK, k->filter.shadow.y & SUBPIXEL_MASK);
}
+ if (no_blur)
+ fix_outline(&v->bm, &v->bm_o);
+
return sizeof(CompositeHashKey) + sizeof(CompositeHashValue) +
bitmap_size(&v->bm) + bitmap_size(&v->bm_o) + bitmap_size(&v->bm_s);
}