Source-Changes-HG archive


[src/trunk]: src/sys/external/bsd/common/include/linux Implement find_next_ze...



details:   https://anonhg.NetBSD.org/src/rev/eb3bedbeabe7
branches:  trunk
changeset: 834975:eb3bedbeabe7
user:      riastradh <riastradh%NetBSD.org@localhost>
date:      Mon Aug 27 07:08:37 2018 +0000

description:
Implement find_next_zero_bit.  Define find_first_zero_bit in terms of it.
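
For reference, the Linux bitmap API these functions emulate expects
find_next_zero_bit(ptr, nbits, startbit) to return the index of the first
clear bit at or after startbit, or nbits if every bit in that range is set;
find_first_zero_bit(ptr, nbits) is the same with a start of 0.  The
following standalone sketch (not the committed code; ref_find_next_zero_bit
is a made-up name) illustrates those semantics with a naive bit-by-bit loop:

#include <limits.h>
#include <stdio.h>

/* Naive reference: first clear bit at or after startbit, else nbits. */
static unsigned long
ref_find_next_zero_bit(const unsigned long *ptr, unsigned long nbits,
    unsigned long startbit)
{
	const unsigned long bpl = CHAR_BIT * sizeof(*ptr);
	unsigned long i;

	for (i = startbit; i < nbits; i++) {
		if ((ptr[i/bpl] & (1UL << (i % bpl))) == 0)
			return i;
	}
	return nbits;
}

int
main(void)
{
	unsigned long bitmap[1] = { 0x17 };	/* bits 0,1,2,4 set */
	const unsigned long n = CHAR_BIT * sizeof(bitmap);

	printf("%lu\n", ref_find_next_zero_bit(bitmap, n, 0));	/* prints 3 */
	printf("%lu\n", ref_find_next_zero_bit(bitmap, n, 4));	/* prints 5 */
	return 0;
}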

diffstat:

 sys/external/bsd/common/include/linux/bitops.h |  55 ++++++++++++++++++++++---
 1 files changed, 47 insertions(+), 8 deletions(-)

diffs (76 lines):

diff -r a9099272ecf4 -r eb3bedbeabe7 sys/external/bsd/common/include/linux/bitops.h
--- a/sys/external/bsd/common/include/linux/bitops.h    Mon Aug 27 07:08:20 2018 +0000
+++ b/sys/external/bsd/common/include/linux/bitops.h    Mon Aug 27 07:08:37 2018 +0000
@@ -1,4 +1,4 @@
-/*     $NetBSD: bitops.h,v 1.4 2018/08/27 07:04:32 riastradh Exp $     */
+/*     $NetBSD: bitops.h,v 1.5 2018/08/27 07:08:37 riastradh Exp $     */
 
 /*-
  * Copyright (c) 2013 The NetBSD Foundation, Inc.
@@ -177,20 +177,59 @@
 }
 
 static inline unsigned long
-find_first_zero_bit(const unsigned long *ptr, unsigned long nbits)
+find_next_zero_bit(const unsigned long *ptr, unsigned long nbits,
+    unsigned long startbit)
 {
        const size_t bpl = (CHAR_BIT * sizeof(*ptr));
-       const unsigned long *p;
-       unsigned long result = 0;
+       const unsigned long *p = ptr + startbit/bpl;
+       unsigned long result = rounddown(startbit, bpl);
+       uint64_t word;
+
+       /*
+        * We use ffs64 because NetBSD doesn't have a handy ffsl that
+        * works on unsigned long.  This is a waste on 32-bit systems
+        * but I'd rather not maintain multiple copies of this -- the
+        * first version had enough bugs already.
+        */
 
-       for (p = ptr; bpl < nbits; nbits -= bpl, p++, result += bpl) {
+       /* Do we need to examine a partial starting word?  */
+       if (startbit % bpl) {
+               /* Are any of the first startbit%bpl bits zero?  */
+               if (~(*p | (~0UL << (startbit % bpl)))) {
+                       /* Invert the bits and convert to 64 bits.  */
+                       word = ~(uint64_t)*p;
+
+                       /* Clear the low startbit%bpl bits.  */
+                       word &= ~(~0UL << (startbit % bpl));
+
+                       /* Find the first set bit in this word. */
+                       result += ffs64(word);
+
+                       /* Clamp down to at most nbits.  */
+                       return MIN(result, nbits);
+               }
+       }
+
+       /* Find the first word with zeros in it.  */
+       for (; bpl < nbits; p++, result += bpl) {
                if (~*p)
                        break;
        }
 
-       CTASSERT(sizeof(unsigned long) <= sizeof(uint64_t));
-       result += ffs64(~(uint64_t)*p | (~0UL << MIN(nbits, bpl)));
-       return result;
+       /* Invert the bits and convert to 64 bits for ffs64.  */
+       word = ~(uint64_t)*p;
+
+       /* Find the first set bit in this word.  */
+       result += ffs64(word);
+
+       /* Clamp down to at most nbits.  */
+       return MIN(result, nbits);
+}
+
+static inline unsigned long
+find_first_zero_bit(const unsigned long *ptr, unsigned long nbits)
+{
+       return find_next_zero_bit(ptr, nbits, 0);
 }
 
 static inline unsigned

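The core of the change is the single-word step: treat the bits below the
starting position as if they were set, invert the word, and let a 1-based
ffs-style primitive (which returns 0 for an all-zero argument) locate the
first remaining set bit.  A minimal standalone sketch of that step, using
GCC's __builtin_ffsll in place of NetBSD's ffs64 so it compiles on its own
(the helper name is made up for illustration, not taken from the header):

/* First zero bit of w at index >= k, or -1 if there is none. */
static int
first_zero_bit_from(unsigned long long w, unsigned int k)
{
	/* Treat the bits below k as set so they cannot be reported. */
	unsigned long long zeros = ~w & (k < 64 ? ~0ULL << k : 0);

	/* __builtin_ffsll is 1-based and returns 0 for an all-zero word. */
	return __builtin_ffsll((long long)zeros) - 1;
}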


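A typical caller walks a bitmap by repeatedly asking for the next clear bit
until the answer reaches the bitmap size, e.g. when hunting for a free slot
in an allocation bitmap.  A sketch of that pattern for code that includes
this header (map and mapsize are illustrative names, not from the change):

	unsigned long bit;

	for (bit = find_first_zero_bit(map, mapsize);
	     bit < mapsize;
	     bit = find_next_zero_bit(map, mapsize, bit + 1)) {
		/* bit indexes a clear bit in map; try to claim it. */
	}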