Use movdqu instead of movdqa for an unaligned load, avoiding a segfault (bug 10265).

This commit is contained in:
Roland Scheidegger 2007-03-13 13:44:23 +01:00
parent eb4db4c4ec
commit 14f0b7ea98

View file

@@ -369,7 +369,7 @@ _generic_read_RGBA_span_BGRA8888_REV_SSE2:
 movdqa mask, %xmm1
 movdqa mask+16, %xmm2
 */
-LOAD_MASK(movdqa,%xmm1,%xmm2)
+LOAD_MASK(movdqu,%xmm1,%xmm2)
 movl 12(%esp), %ebx /* source pointer */
 movl 20(%esp), %edx /* number of pixels to copy */