aboutsummaryrefslogtreecommitdiff
path: root/src/libmp/386/mpvecsub.s
diff options
context:
space:
mode:
authorwkj <devnull@localhost>2004-04-21 04:45:31 +0000
committerwkj <devnull@localhost>2004-04-21 04:45:31 +0000
commitac0e2db600593d5b30550453b78874bfa0611751 (patch)
treeb5532ae46d9dae89a4ba22e30fb7ff15e4aa173b /src/libmp/386/mpvecsub.s
parentf2a700e22af3cf792837935f8e8ed42d533d552e (diff)
downloadplan9port-ac0e2db600593d5b30550453b78874bfa0611751.tar.gz
plan9port-ac0e2db600593d5b30550453b78874bfa0611751.tar.bz2
plan9port-ac0e2db600593d5b30550453b78874bfa0611751.zip
Add basic libmp support for the x86.
Diffstat (limited to 'src/libmp/386/mpvecsub.s')
-rw-r--r--src/libmp/386/mpvecsub.s62
1 files changed, 62 insertions, 0 deletions
diff --git a/src/libmp/386/mpvecsub.s b/src/libmp/386/mpvecsub.s
new file mode 100644
index 00000000..a56b4968
--- /dev/null
+++ b/src/libmp/386/mpvecsub.s
@@ -0,0 +1,62 @@
+/*
+ * mpvecsub(mpdigit *a, int alen, mpdigit *b, int blen, mpdigit *diff)
+ *
+ * diff[0:alen-1] = a[0:alen-1] - b[0:blen-1]
+ *
+ * prereq: alen >= blen, diff has room for alen digits
+ */
+.text
+
+.p2align 2,0x90				/* 4-byte align entry; pad with nop (0x90) */
+.globl mpvecsub
+	.type mpvecsub, @function
+mpvecsub:
+	/* Prelude */
+	pushl %ebp			/* save %ebp; it is reused as the loop index below */
+	movl %ebx, -4(%esp)		/* save callee-saved regs on stack... */
+	movl %esi, -8(%esp)		/* NOTE(review): these stores are below %esp; */
+	movl %edi, -12(%esp)		/* x86-32 has no red zone — a signal handler could clobber them. Confirm intent. */
+
+	movl 8(%esp), %esi		/* a */
+	movl 16(%esp), %ebx		/* b */
+	movl 12(%esp), %edx		/* alen */
+	movl 20(%esp), %ecx		/* blen */
+	movl 24(%esp), %edi		/* diff */
+	subl %ecx,%edx			/* edx = alen - blen = # of tail digits */
+	xorl %ebp,%ebp			/* i = 0; this also sets carry (borrow) to 0 */
+
+	/* skip subtraction loop if b is zero length; testl also leaves CF = 0 */
+	testl %ecx,%ecx
+	jz _sub1
+
+	/* diff[0:blen-1],borrow = a[0:blen-1] - b[0:blen-1] */
+_subloop1:
+	movl (%esi, %ebp, 4), %eax	/* eax = a[i] */
+	sbbl (%ebx, %ebp, 4), %eax	/* eax -= b[i] + borrow; new borrow in CF */
+	movl %eax, (%edi, %ebp, 4)	/* diff[i] = eax */
+	incl %ebp			/* i++; incl does not touch CF, borrow survives */
+	loop _subloop1			/* ecx--, loop while ecx != 0; flags untouched */
+
+_sub1:
+	incl %edx			/* bias: loop below decrements before testing; incl keeps CF */
+	movl %edx,%ecx			/* ecx = (alen - blen) + 1 */
+	loop _subloop2			/* ecx--; enter tail loop iff tail digits remain */
+	jmp done			/* alen == blen: no tail digits to propagate into */
+
+	/* diff[blen:alen-1] = a[blen:alen-1] - 0 (minus the carried borrow) */
+_subloop2:
+	movl (%esi, %ebp, 4), %eax	/* eax = a[i] */
+	sbbl $0, %eax			/* propagate the borrow through the tail */
+	movl %eax, (%edi, %ebp, 4)	/* diff[i] = eax */
+	INCL %ebp			/* NOTE(review): uppercase mnemonics — GAS accepts */
+	LOOP _subloop2			/* either case, but this is inconsistent with the rest */
+
+done:
+	/* Postlude */
+	movl -4(%esp), %ebx		/* restore from stack (same below-%esp caveat as above) */
+	movl -8(%esp), %esi
+	movl -12(%esp), %edi
+	movl %esp, %ebp			/* point %ebp at the saved %ebp slot... */
+	leave				/* ...so leave = movl %ebp,%esp; popl %ebp — restores caller's %ebp */
+	ret
+