author    H. Peter Anvin <hpa@zytor.com>    2008-10-25 17:48:33 -0700
committer H. Peter Anvin <hpa@zytor.com>    2008-10-25 17:48:33 -0700
commit    a168ab1e018ca98b7880daae9e277d3efb5d7f0d (patch)
tree      3af0f6f7999e3bf80ab6727fa6e10d7d011809c6 /macros
parent    4adf21c08c23276adc7190f3e0f00f4f08ce8824 (diff)
smartalign: clean up unnecessary duplication; tweak dependencies
Remove unnecessary duplicated patterns; with indirection we can handle
lists of any length.  For 16-bit generic padding, alternate between SI
and DI dependencies.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
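The "indirection" mentioned above can be illustrated with a minimal,
self-contained sketch (hypothetical PAD_* names, not the actual
smartalign.mac internals): a single %rep loop can emit padding of any
length by building the pattern-macro name with %[ ] indirection, so only
one definition per distinct pattern length is needed rather than a fixed
list of eight.

    %define PAD_1B  0x90
    %define PAD_2B  0x66,0x90
    %define PAD_3B  0x66,0x66,0x90
    %define PAD_4B  0x66,0x66,0x66,0x90
    %define PAD_GROUP 4              ; longest pattern defined

    %assign pad 11                   ; bytes of padding still needed
    %rep 256
      %if pad <= 0
        %exitrep
      %endif
      %if pad >= PAD_GROUP
        %assign chunk PAD_GROUP      ; emit the longest pattern
      %else
        %assign chunk pad            ; emit the exact remaining length
      %endif
      db PAD_%[chunk]B               ; indirection forms PAD_4B, PAD_3B, ...
      %assign pad pad - chunk
    %endrep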
Diffstat (limited to 'macros')
-rw-r--r--    macros/smartalign.mac    77
1 file changed, 13 insertions(+), 64 deletions(-)
diff --git a/macros/smartalign.mac b/macros/smartalign.mac
index 48c07de..3b5b9b9 100644
--- a/macros/smartalign.mac
+++ b/macros/smartalign.mac
@@ -8,34 +8,13 @@ USE: smartalign
%define __ALIGN_JMP_THRESHOLD__ 16
%define __ALIGN_16BIT_1B__ 0x90
- %define __ALIGN_16BIT_2B__ 0x90,0x90
- %define __ALIGN_16BIT_3B__ 0x90,0x90,0x90
- %define __ALIGN_16BIT_4B__ 0x90,0x90,0x90,0x90
- %define __ALIGN_16BIT_5B__ 0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_16BIT_6B__ 0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_16BIT_7B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_16BIT_8B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_16BIT_GROUP__ 8
+ %define __ALIGN_16BIT_GROUP__ 1
%define __ALIGN_32BIT_1B__ 0x90
- %define __ALIGN_32BIT_2B__ 0x90,0x90
- %define __ALIGN_32BIT_3B__ 0x90,0x90,0x90
- %define __ALIGN_32BIT_4B__ 0x90,0x90,0x90,0x90
- %define __ALIGN_32BIT_5B__ 0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_32BIT_6B__ 0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_32BIT_7B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_32BIT_8B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_32BIT_GROUP__ 8
+ %define __ALIGN_32BIT_GROUP__ 1
%define __ALIGN_64BIT_1B__ 0x90
- %define __ALIGN_64BIT_2B__ 0x90,0x90
- %define __ALIGN_64BIT_3B__ 0x90,0x90,0x90
- %define __ALIGN_64BIT_4B__ 0x90,0x90,0x90,0x90
- %define __ALIGN_64BIT_5B__ 0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_64BIT_6B__ 0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_64BIT_7B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_64BIT_8B__ 0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90
- %define __ALIGN_64BIT_GROUP__ 8
+ %define __ALIGN_64BIT_GROUP__ 1
%elifidni %1,generic
%define __ALIGN_JMP_THRESHOLD__ 8
@@ -43,9 +22,9 @@ USE: smartalign
%define __ALIGN_16BIT_2B__ 0x89,0xf6
%define __ALIGN_16BIT_3B__ 0x8d,0x74,0x00
%define __ALIGN_16BIT_4B__ 0x8d,0xb4,0x00,0x00
- %define __ALIGN_16BIT_5B__ 0x90,0x8d,0xb4,0x00,0x00
- %define __ALIGN_16BIT_6B__ 0x89,0xf6,0x8d,0xbd,0x00,0x00
- %define __ALIGN_16BIT_7B__ 0x8d,0x74,0x00,0x8d,0xbd,0x00,0x00
+ %define __ALIGN_16BIT_5B__ 0x8d,0xb4,0x00,0x00,0x90
+ %define __ALIGN_16BIT_6B__ 0x8d,0xb4,0x00,0x00,0x89,0xff
+ %define __ALIGN_16BIT_7B__ 0x8d,0xb4,0x00,0x00,0x8d,0x7d,0x00
%define __ALIGN_16BIT_8B__ 0x8d,0xb4,0x00,0x00,0x8d,0xbd,0x00,0x00
%define __ALIGN_16BIT_GROUP__ 8
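For reference, the 16-bit generic patterns in the hunk above decode as
shown below (hand-decoded, assuming 16-bit addressing; not part of the
patch): each longer pattern is the 4-byte SI-dependent LEA followed by a
DI-dependent (or neutral) tail, which is how the SI/DI alternation
described in the commit message is achieved.

    lea si, [si+0x0]     ; 8D B4 00 00  -- 4-byte filler, depends on SI
    nop                  ; 90           -- 5B tail: plain NOP
    mov di, di           ; 89 FF        -- 6B tail: 2 bytes, depends on DI
    lea di, [di+0x0]     ; 8D 7D 00     -- 7B tail: 3 bytes, depends on DI
    lea di, [di+0x0]     ; 8D BD 00 00  -- 8B tail: 4 bytes, depends on DI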
@@ -56,18 +35,13 @@ USE: smartalign
%define __ALIGN_32BIT_5B__ 0x90,0x8d,0x74,0x26,0x00
%define __ALIGN_32BIT_6B__ 0x8d,0xb6,0x00,0x00,0x00,0x00
%define __ALIGN_32BIT_7B__ 0x8d,0xb4,0x26,0x00,0x00,0x00,0x00
- %undef __ALIGN_32BIT_8B__
%define __ALIGN_32BIT_GROUP__ 7
%define __ALIGN_64BIT_1B__ 0x90
%define __ALIGN_64BIT_2B__ 0x66,0x90
%define __ALIGN_64BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_64BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_64BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_GROUP__ 8
+ %define __ALIGN_64BIT_GROUP__ 4
%elifidni %1,k8
%define __ALIGN_JMP_THRESHOLD__ 16
@@ -75,31 +49,19 @@ USE: smartalign
%define __ALIGN_16BIT_2B__ 0x66,0x90
%define __ALIGN_16BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_16BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_16BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_16BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_16BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_16BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_16BIT_GROUP__ 8
+ %define __ALIGN_16BIT_GROUP__ 4
%define __ALIGN_32BIT_1B__ 0x90
%define __ALIGN_32BIT_2B__ 0x66,0x90
%define __ALIGN_32BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_32BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_32BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_32BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_32BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_32BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_32BIT_GROUP__ 8
+ %define __ALIGN_32BIT_GROUP__ 4
%define __ALIGN_64BIT_1B__ 0x90
%define __ALIGN_64BIT_2B__ 0x66,0x90
%define __ALIGN_64BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_64BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_64BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_GROUP__ 8
+ %define __ALIGN_64BIT_GROUP__ 4
%elifidni %1,k7
%define __ALIGN_JMP_THRESHOLD__ 16
@@ -107,11 +69,7 @@ USE: smartalign
%define __ALIGN_16BIT_2B__ 0x66,0x90
%define __ALIGN_16BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_16BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_16BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_16BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_16BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_16BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_GROUP__ 8
+ %define __ALIGN_64BIT_GROUP__ 4
%define __ALIGN_32BIT_1B__ 0x90
%define __ALIGN_32BIT_2B__ 0x8b,0xc0
@@ -120,18 +78,13 @@ USE: smartalign
%define __ALIGN_32BIT_5B__ 0x8d,0x44,0x20,0x00,0x90
%define __ALIGN_32BIT_6B__ 0x8d,0x80,0x00,0x00,0x00,0x00
%define __ALIGN_32BIT_7B__ 0x8d,0x04,0x05,0x00,0x00,0x00,0x00
- %undef __ALIGN_32BIT_8B__
%define __ALIGN_32BIT_GROUP__ 7
%define __ALIGN_64BIT_1B__ 0x90
%define __ALIGN_64BIT_2B__ 0x66,0x90
%define __ALIGN_64BIT_3B__ 0x66,0x66,0x90
%define __ALIGN_64BIT_4B__ 0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_5B__ 0x66,0x66,0x90,0x66,0x90
- %define __ALIGN_64BIT_6B__ 0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_7B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x90
- %define __ALIGN_64BIT_8B__ 0x66,0x66,0x66,0x90,0x66,0x66,0x66,0x90
- %define __ALIGN_64BIT_GROUP__ 8
+ %define __ALIGN_64BIT_GROUP__ 4
%elifidni %1,p6
%define __ALIGN_JMP_THRESHOLD__ 16
@@ -139,11 +92,7 @@ USE: smartalign
%define __ALIGN_16BIT_2B__ 0x66,0x90
%define __ALIGN_16BIT_3B__ 0x0f,0x1f,0x00
%define __ALIGN_16BIT_4B__ 0x0f,0x1f,0x40,0x00
- %define __ALIGN_16BIT_5B__ 0x90,0x0f,0x1f,0x40,0x00
- %define __ALIGN_16BIT_6B__ 0x0f,0x1f,0x00,0x0f,0x1f,0x00
- %define __ALIGN_16BIT_7B__ 0x0f,0x1f,0x00,0x0f,0x1f,0x40,0x00
- %define __ALIGN_16BIT_8B__ 0x0f,0x1f,0x40,0x00,0x0f,0x1f,0x40,0x00
- %define __ALIGN_16BIT_GROUP__ 8
+ %define __ALIGN_16BIT_GROUP__ 4
%define __ALIGN_32BIT_1B__ 0x90
%define __ALIGN_32BIT_2B__ 0x66,0x90