Source-Changes-HG archive


[src/trunk]: src/lib/libpthread/arch/vax Merge updates to algorithms from i386 switch code



details:   https://anonhg.NetBSD.org/src/rev/1e2fd38458f8
branches:  trunk
changeset: 580101:1e2fd38458f8
user:      matt <matt@NetBSD.org>
date:      Sat Apr 09 20:53:19 2005 +0000

description:
Merge updates to algorithms from i386 switch code.

diffstat:

 lib/libpthread/arch/vax/pthread_switch.S |  72 ++++++++++++++++++++------------
 1 files changed, 45 insertions(+), 27 deletions(-)

diffs (174 lines):

diff -r a68e36f13d19 -r 1e2fd38458f8 lib/libpthread/arch/vax/pthread_switch.S
--- a/lib/libpthread/arch/vax/pthread_switch.S  Sat Apr 09 20:50:27 2005 +0000
+++ b/lib/libpthread/arch/vax/pthread_switch.S  Sat Apr 09 20:53:19 2005 +0000
@@ -1,4 +1,4 @@
-/*     $NetBSD: pthread_switch.S,v 1.5 2003/09/07 14:47:56 cl Exp $    */
+/*     $NetBSD: pthread_switch.S,v 1.6 2005/04/09 20:53:19 matt Exp $  */
 
 /*-
  * Copyright (c) 2001, 2003 The NetBSD Foundation, Inc.
@@ -62,13 +62,17 @@
  */
        
 #define STACK_SWITCH(pt)       \
-       movl    PT_TRAPUC(pt),%sp ; \
+       movl    PT_TRAPUC(pt),%sp; \
        bneq    1f; \
        movl    PT_UC(pt),%sp; \
-1:     clrl    PT_TRAPUC(pt)
+1:     subl2   $STACKSPACE,%sp; \
+       clrl    PT_TRAPUC(pt)
 
+/*
+ * void pthread__switch(pthread_t self, pthread_t next);
+ */
 ENTRY(pthread__switch, 0)
-       movab   -(CONTEXTSIZE)(%sp), %sp
+       subl2   $CONTEXTSIZE, %sp
 
        /* Get the current context */
        pushl   %sp
@@ -92,9 +96,15 @@
 /*
  * Helper switch code used by pthread__locked_switch() and 
  * pthread__upcall_switch() when they discover spinlock preemption.
+ *
+ * r3 = new pthread_t
+ * r4 = lock flags
+ * r5 = old pthread_t
  */
 
+       .globl  pthread__switch_away
 pthread__switch_away:
+switch_away:
        STACK_SWITCH(%r3)
 
        /* If we're invoked from the switch-to-next provisions of
@@ -111,8 +121,8 @@
  *           pt_spin_t *lock);
  */
 ENTRY(pthread__locked_switch, 0)
-       movab   -(CONTEXTSIZE)(%sp),%sp
-       movl    8(%ap),%r5
+       subl2   $CONTEXTSIZE,%sp
+       movl    8(%ap),%r5              /* r5 = next */
 
 	/* Make sure we get continued */
        incl    PT_SPINLOCKS(%r5)
@@ -121,7 +131,7 @@
        pushl   %sp
        calls   $1, _getcontext_u
 
-       movq    4(%ap),%r4
+       movl    4(%ap),%r4              /* r4 = self */
 
        /* Edit the context to make it continue below, rather than here */
        movab   locked_return_point, (UC_GREGS + _REG_PC)(%sp)
@@ -129,27 +139,31 @@
 
        STACK_SWITCH(%r5)       /* sp = next->pt_uc */
 
-       /* Check if the switcher was preempted and continued to here. */
+       /*
+        * Check if the original thread was preempted while holding
+        * its queue lock.
+        */
        movl    PT_NEXT(%r4),%r3
        beql    1f
 
-       /* Yes, it was. Stash the thread we were going to switch to,
-        * the lock the original thread was holding, 
-        * and switch to the next thread in the continuation chain.
+       /*
+        * Yes, it was. Stash the thread we were going to
+        * switch to, the lock the original thread was holding, 
+        * and go to the next thread in the chain.
         * Mark the fact that this was a locked switch, and so the
         * thread does not need to be put on a run queue.
         * Don't release the lock. It's possible that if we do so,
         * PT_SWITCHTO will be stomped by another switch_lock and
         * preemption.
         */
-       movl    12(%ap), PT_HELDLOCK(%r4)
        movl    %sp, PT_SWITCHTOUC(%r4)
        movl    %r5, PT_SWITCHTO(%r4)
-       incl    PT_SPINLOCKS(%r4)
+       movl    12(%ap), PT_HELDLOCK(%r4)
+       decl    PT_SPINLOCKS(%r4)
 
-       /* %r3 = self->pt_next */
+       movl    %r3, %r5                /* r3 = self->pt_next */
        movl    $1, %r4
-       brw     pthread__switch_away
+       jbr     switch_away             /* r3 = next, r5 = next */
 
        /* No locked old-preemption */
 1:     /* We've moved to the new stack, and the old context has been 
@@ -162,17 +176,18 @@
        clrl    *12(%ap)
 
        /* .. and remove the fake lock */
-       incl    PT_SPINLOCKS(%r5)
+       decl    PT_SPINLOCKS(%r5)
        
        /* Check if we were preempted while holding the fake lock. */
        movl    PT_NEXT(%r5),%r3
        jeql    setcontext
        
-       /* Yes, we were. Go to the next element in the chain. */
+       /* Yes, we were. Bummer. Go to the next element in the chain. */
        movl    %sp, PT_SWITCHTOUC(%r5)
        movl    %r5, PT_SWITCHTO(%r5)
-       clrl    %r4
-       brw     pthread__switch_away
+       movl    PT_NEXT(%r5), %r3
+       movl    $2, %r4
+       jbr     switch_away
        NOTREACHED
 
 locked_return_point:   
@@ -185,16 +200,16 @@
  */
 
 ENTRY(pthread__upcall_switch, 0)
-       movq    4(%ap),%r4
+       movq    4(%ap),%r4              /* r4 = self, r5 = next */
 
        /*
-        * this code never returns, so we can treat s0-s6 as
+        * this code never returns, so we can treat r0-r5 as
         * convenient registers that will be saved for us by callees,
         * but that we do not have to save.
         */
        
        /* Create a "fake" lock count so that this code will be continued */
-       incl    PT_SPINLOCKS(%r4)
+       incl    PT_SPINLOCKS(%r5)
 
        STACK_SWITCH(%r5)
 
@@ -205,10 +220,12 @@
        /* Yes, it was. Stash the thread we were going to switch to,
         * and switch to the next thread in the continuation chain.
         */
-       movl    %sp, PT_SWITCHTOUC(%r4)
+       movl    %sp,PT_SWITCHTOUC(%r4)
        movq    %r5,PT_SWITCHTO(%r4)
-       movl    $1,%r4
-       brw     pthread__switch_away
+       movl    PT_NEXT(%r4), %r3
+       movl    %r4, %r5
+       movl    $1, %r4
+       jbr     switch_away
        
        /* No old-upcall-preemption */
 1:     movq    %r4,-(%sp)
@@ -229,5 +246,6 @@
         */
        movl    %sp, PT_SWITCHTOUC(%r5)
        movl    %r5, PT_SWITCHTO(%r5)
-       clrl    %r4
-       brw     pthread__switch_away    
+       movl    PT_NEXT(%r5), %r3
+       movl    $1, %r4
+       jbr     switch_away     
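
For readers who do not speak VAX assembly, the STACK_SWITCH change above
boils down to the following. This is a minimal C sketch under assumed
names: the struct and its pt_trapuc/pt_uc fields merely mirror the
PT_TRAPUC/PT_UC offsets used by the macro and are not the real libpthread
layout.

	#include <stddef.h>

	/* Hypothetical stand-in for the fields STACK_SWITCH touches. */
	struct pt_sketch {
		char	*pt_trapuc;	/* context saved by a trap/upcall, if any */
		char	*pt_uc;		/* context saved by a voluntary switch */
	};

	static char *
	stack_switch(struct pt_sketch *pt, unsigned long stackspace)
	{
		/* Prefer the trap context; otherwise use the ordinary one. */
		char *sp = (pt->pt_trapuc != NULL) ? pt->pt_trapuc : pt->pt_uc;

		/*
		 * New in this revision: always reserve STACKSPACE below the
		 * saved context, so that later pushes cannot scribble on the
		 * ucontext about to be restored.
		 */
		sp -= stackspace;
		pt->pt_trapuc = NULL;	/* the trap context is consumed once */
		return sp;		/* becomes the new %sp */
	}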
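The preempted-while-locked path in pthread__locked_switch can be sketched
the same way. Again the struct and field names (pt_next, pt_switchto,
pt_switchtouc, pt_heldlock, pt_spinlocks) are stand-ins modelled on the
PT_* offsets in the assembly, and the helper below is illustrative, not
the actual implementation.

	#include <stddef.h>

	struct pt_sketch2 {
		struct pt_sketch2 *pt_next;	/* continuation chain when preempted */
		struct pt_sketch2 *pt_switchto;	/* stashed switch target */
		void	*pt_switchtouc;		/* stashed target context */
		void	*pt_heldlock;		/* lock still held at preemption */
		int	 pt_spinlocks;		/* spinlock/preemption guard count */
	};

	/* Returns the thread to continue with, or NULL for the normal path. */
	static struct pt_sketch2 *
	locked_preemption_check(struct pt_sketch2 *self, struct pt_sketch2 *next,
	    void *lock, void *next_sp)
	{
		if (self->pt_next == NULL)
			return NULL;		/* not preempted: switch normally */

		/*
		 * Stash where we were going so the preemption machinery can
		 * finish the switch later, hand over the held lock, and drop
		 * the guard count (the incl -> decl fix in this change).
		 */
		self->pt_switchtouc = next_sp;
		self->pt_switchto = next;
		self->pt_heldlock = lock;
		self->pt_spinlocks--;
		return self->pt_next;		/* continue via switch_away */
	}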
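Finally, the one-line fix in pthread__upcall_switch (incl PT_SPINLOCKS(%r4)
becoming incl PT_SPINLOCKS(%r5)) moves the fake lock count from self (%r4)
to next (%r5), the thread actually being switched to. A short sketch, with
the same caveat that the names are hypothetical:

	struct pt_guard_sketch {
		int pt_spinlocks;	/* preemption guard count */
	};

	static void
	upcall_fake_lock(struct pt_guard_sketch *self, struct pt_guard_sketch *next)
	{
		(void)self;		/* the upcall context never returns */
		next->pt_spinlocks++;	/* keep the switch into next continuable */
	}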


