[v2,06/12] range: Introduce range_inverse_array()

Message ID: 20230913080423.523953-7-eric.auger@redhat.com
State: New
Series: VIRTIO-IOMMU/VFIO: Don't assume 64b IOVA space

Commit Message

Eric Auger Sept. 13, 2023, 8:01 a.m. UTC
This helper inverts an array of regions: the original regions become
holes and the original holes become actual regions, covering the
[low, high] span passed by the caller.

Signed-off-by: Eric Auger <eric.auger@redhat.com>

---

v1 -> v2:
- Move range_inverse_array description comment in the header
- Take low/high params
---
 include/qemu/range.h |  8 ++++++++
 util/range.c         | 45 ++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 53 insertions(+)

Comments

Alex Williamson Sept. 19, 2023, 4:29 p.m. UTC | #1
On Wed, 13 Sep 2023 10:01:41 +0200
Eric Auger <eric.auger@redhat.com> wrote:

> This helper inverts an array of regions: the original regions become
> holes and the original holes become actual regions, covering the
> [low, high] span passed by the caller.
> 
> Signed-off-by: Eric Auger <eric.auger@redhat.com>
> 
> ---
> 
> v1 -> v2:
> - Move range_inverse_array description comment in the header
> - Take low/high params
> ---
>  include/qemu/range.h |  8 ++++++++
>  util/range.c         | 45 ++++++++++++++++++++++++++++++++++++++++++++
>  2 files changed, 53 insertions(+)
> 
> diff --git a/include/qemu/range.h b/include/qemu/range.h
> index 7e2b1cc447..2b59e3bf0c 100644
> --- a/include/qemu/range.h
> +++ b/include/qemu/range.h
> @@ -219,4 +219,12 @@ static inline int ranges_overlap(uint64_t first1, uint64_t len1,
>  
>  GList *range_list_insert(GList *list, Range *data);
>  
> +/*
> + * Invert an array of sorted ranges over the [low, high] span, i.e.
> + * the original ranges become holes in the newly allocated inv_ranges
> + */
> +void range_inverse_array(uint32_t nr_ranges, Range *ranges,
> +                         uint32_t *nr_inv_ranges, Range **inv_ranges,
> +                         uint64_t low, uint64_t high);
> +
>  #endif
> diff --git a/util/range.c b/util/range.c
> index 098d9d2dc0..4baeb588cc 100644
> --- a/util/range.c
> +++ b/util/range.c
> @@ -70,3 +70,48 @@ GList *range_list_insert(GList *list, Range *data)
>  
>      return list;
>  }
> +
> +void range_inverse_array(uint32_t nr_ranges, Range *ranges,
> +                         uint32_t *nr_inv_ranges, Range **inv_ranges,
> +                         uint64_t low, uint64_t high)

Rare be it for me to suggest GLib, but we already appear to have
range_list_insert() making use of GList for an ordered list of Ranges.
Doesn't this function become a lot easier if we take a sorted GList,
walk it to create the inverse, and return a new GList of the inverted
Ranges?  Seems the initial sorted GList would be created by making use
of the existing range_list_insert() function.  Thanks,

Alex

> +{
> +    Range *resv;
> +    int i = 0, j = 0;
> +
> +    resv = g_malloc0_n(nr_ranges + 1, sizeof(Range));
> +
> +    for (; j < nr_ranges && (range_upb(&ranges[j]) < low); j++) {
> +        continue; /* skip all ranges below low */
> +    }
> +
> +    if (j == nr_ranges) {
> +        range_set_bounds(&resv[i++], low, high);
> +        goto realloc;
> +    }
> +
> +    /* first range lob is greater than low, insert a first range */
> +    if (range_lob(&ranges[j]) > low) {
> +        range_set_bounds(&resv[i++], low,
> +                         MIN(range_lob(&ranges[j]) - 1, high));
> +    }
> +
> +    /* insert a range between each pair of original ranges until we reach high */
> +    for (; j < nr_ranges - 1; j++) {
> +        if (range_lob(&ranges[j]) >= high) {
> +            goto realloc;
> +        }
> +        if (range_compare(&ranges[j], &ranges[j + 1])) {
> +            range_set_bounds(&resv[i++], range_upb(&ranges[j]) + 1,
> +                             MIN(range_lob(&ranges[j + 1]) - 1, high));
> +        }
> +    }
> +    /* last range upb is less than high, insert a last range */
> +    if (range_upb(&ranges[j]) < high) {
> +        range_set_bounds(&resv[i++],
> +                          range_upb(&ranges[j]) + 1, high);
> +    }
> +realloc:
> +    *nr_inv_ranges = i;
> +    resv = g_realloc(resv, i * sizeof(Range));
> +    *inv_ranges = resv;
> +}
Eric Auger Sept. 20, 2023, 7:24 a.m. UTC | #2
Hi Alex,

On 9/19/23 18:29, Alex Williamson wrote:
> On Wed, 13 Sep 2023 10:01:41 +0200
> Eric Auger <eric.auger@redhat.com> wrote:
>
>> This helper inverts an array of regions: the original regions become
>> holes and the original holes become actual regions, covering the
>> [low, high] span passed by the caller.
>>
>> Signed-off-by: Eric Auger <eric.auger@redhat.com>
>>
>> ---
>>
>> v1 -> v2:
>> - Move range_inverse_array description comment in the header
>> - Take low/high params
>> ---
>>  include/qemu/range.h |  8 ++++++++
>>  util/range.c         | 45 ++++++++++++++++++++++++++++++++++++++++++++
>>  2 files changed, 53 insertions(+)
>>
>> diff --git a/include/qemu/range.h b/include/qemu/range.h
>> index 7e2b1cc447..2b59e3bf0c 100644
>> --- a/include/qemu/range.h
>> +++ b/include/qemu/range.h
>> @@ -219,4 +219,12 @@ static inline int ranges_overlap(uint64_t first1, uint64_t len1,
>>  
>>  GList *range_list_insert(GList *list, Range *data);
>>  
>> +/*
>> + * Invert an array of sorted ranges over the [low, high] span, i.e.
>> + * the original ranges become holes in the newly allocated inv_ranges
>> + */
>> +void range_inverse_array(uint32_t nr_ranges, Range *ranges,
>> +                         uint32_t *nr_inv_ranges, Range **inv_ranges,
>> +                         uint64_t low, uint64_t high);
>> +
>>  #endif
>> diff --git a/util/range.c b/util/range.c
>> index 098d9d2dc0..4baeb588cc 100644
>> --- a/util/range.c
>> +++ b/util/range.c
>> @@ -70,3 +70,48 @@ GList *range_list_insert(GList *list, Range *data)
>>  
>>      return list;
>>  }
>> +
>> +void range_inverse_array(uint32_t nr_ranges, Range *ranges,
>> +                         uint32_t *nr_inv_ranges, Range **inv_ranges,
>> +                         uint64_t low, uint64_t high)
> Rare be it for me to suggest GLib, but we already appear to have
> range_list_insert() making use of GList for an ordered list of Ranges.
> Doesn't this function become a lot easier if we take a sorted GList,
> walk it to create the inverse, and return a new GList of the inverted
> Ranges?  Seems the initial sorted GList would be created by making use
> of the existing range_list_insert() function.  Thanks,

I am not sure this greatly simplifies things. It definitely removes the
realloc logic, but to me what complicates the algorithm is the low/high
parameter support instead of hardcoding them to 0/UINT64_MAX (a suggestion
by Philippe which I can understand for such a helper, though). You can see
the diff with

https://lore.kernel.org/all/20230904080451.424731-7-eric.auger@redhat.com/

I will look further at your suggestion, though.

Eric
>
> Alex
>
>> +{
>> +    Range *resv;
>> +    int i = 0, j = 0;
>> +
>> +    resv = g_malloc0_n(nr_ranges + 1, sizeof(Range));
>> +
>> +    for (; j < nr_ranges && (range_upb(&ranges[j]) < low); j++) {
>> +        continue; /* skip all ranges below low */
>> +    }
>> +
>> +    if (j == nr_ranges) {
>> +        range_set_bounds(&resv[i++], low, high);
>> +        goto realloc;
>> +    }
>> +
>> +    /* first range lob is greater than low, insert a first range */
>> +    if (range_lob(&ranges[j]) > low) {
>> +        range_set_bounds(&resv[i++], low,
>> +                         MIN(range_lob(&ranges[j]) - 1, high));
>> +    }
>> +
>> +    /* insert a range between each pair of original ranges until we reach high */
>> +    for (; j < nr_ranges - 1; j++) {
>> +        if (range_lob(&ranges[j]) >= high) {
>> +            goto realloc;
>> +        }
>> +        if (range_compare(&ranges[j], &ranges[j + 1])) {
>> +            range_set_bounds(&resv[i++], range_upb(&ranges[j]) + 1,
>> +                             MIN(range_lob(&ranges[j + 1]) - 1, high));
>> +        }
>> +    }
>> +    /* last range upb is less than high, insert a last range */
>> +    if (range_upb(&ranges[j]) < high) {
>> +        range_set_bounds(&resv[i++],
>> +                          range_upb(&ranges[j]) + 1, high);
>> +    }
>> +realloc:
>> +    *nr_inv_ranges = i;
>> +    resv = g_realloc(resv, i * sizeof(Range));
>> +    *inv_ranges = resv;
>> +}
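
For illustration, a GList-based variant along the lines Alex suggests could look roughly like the sketch below. This is not the code posted in this series; the function name, signature and ownership rules are assumptions made for the example. The input list is assumed to be sorted and non-overlapping, e.g. built with the existing range_list_insert(), and the caller would free the result with g_list_free_full(inv, g_free).

#include "qemu/osdep.h"
#include "qemu/range.h"

/*
 * Sketch of a GList-based inversion (hypothetical helper, not part of
 * this series).  'ranges' is a sorted, non-overlapping list of Range *,
 * e.g. built with range_list_insert().  Returns a newly allocated list
 * of Range * covering the holes within [low, high]; assumes low <= high.
 */
static GList *range_list_inverse(GList *ranges, uint64_t low, uint64_t high)
{
    GList *inv = NULL, *l;
    uint64_t next_lob = low;    /* lower bound of the next candidate hole */

    for (l = ranges; l; l = l->next) {
        Range *r = l->data;

        if (range_upb(r) < next_lob) {
            continue;           /* entirely below the remaining span */
        }
        if (range_lob(r) > high) {
            break;              /* entirely above the span */
        }
        if (range_lob(r) > next_lob) {
            Range *hole = g_new0(Range, 1);

            range_set_bounds(hole, next_lob, range_lob(r) - 1);
            inv = g_list_append(inv, hole);
        }
        if (range_upb(r) >= high) {
            return inv;         /* the rest of the span is covered */
        }
        next_lob = range_upb(r) + 1;
    }
    /* trailing hole up to 'high', if any */
    if (next_lob <= high) {
        Range *hole = g_new0(Range, 1);

        range_set_bounds(hole, next_lob, high);
        inv = g_list_append(inv, hole);
    }
    return inv;
}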
Patch

diff --git a/include/qemu/range.h b/include/qemu/range.h
index 7e2b1cc447..2b59e3bf0c 100644
--- a/include/qemu/range.h
+++ b/include/qemu/range.h
@@ -219,4 +219,12 @@  static inline int ranges_overlap(uint64_t first1, uint64_t len1,
 
 GList *range_list_insert(GList *list, Range *data);
 
+/*
+ * Invert an array of sorted ranges over the [low, high] span, i.e.
+ * the original ranges become holes in the newly allocated inv_ranges
+ */
+void range_inverse_array(uint32_t nr_ranges, Range *ranges,
+                         uint32_t *nr_inv_ranges, Range **inv_ranges,
+                         uint64_t low, uint64_t high);
+
 #endif
diff --git a/util/range.c b/util/range.c
index 098d9d2dc0..4baeb588cc 100644
--- a/util/range.c
+++ b/util/range.c
@@ -70,3 +70,48 @@  GList *range_list_insert(GList *list, Range *data)
 
     return list;
 }
+
+void range_inverse_array(uint32_t nr_ranges, Range *ranges,
+                         uint32_t *nr_inv_ranges, Range **inv_ranges,
+                         uint64_t low, uint64_t high)
+{
+    Range *resv;
+    int i = 0, j = 0;
+
+    resv = g_malloc0_n(nr_ranges + 1, sizeof(Range));
+
+    for (; j < nr_ranges && (range_upb(&ranges[j]) < low); j++) {
+        continue; /* skip all ranges below low */
+    }
+
+    if (j == nr_ranges) {
+        range_set_bounds(&resv[i++], low, high);
+        goto realloc;
+    }
+
+    /* first range lob is greater than low, insert a first range */
+    if (range_lob(&ranges[j]) > low) {
+        range_set_bounds(&resv[i++], low,
+                         MIN(range_lob(&ranges[j]) - 1, high));
+    }
+
+    /* insert a range between each pair of original ranges until we reach high */
+    for (; j < nr_ranges - 1; j++) {
+        if (range_lob(&ranges[j]) >= high) {
+            goto realloc;
+        }
+        if (range_compare(&ranges[j], &ranges[j + 1])) {
+            range_set_bounds(&resv[i++], range_upb(&ranges[j]) + 1,
+                             MIN(range_lob(&ranges[j + 1]) - 1, high));
+        }
+    }
+    /* last range upb is less than high, insert a last range */
+    if (range_upb(&ranges[j]) < high) {
+        range_set_bounds(&resv[i++],
+                          range_upb(&ranges[j]) + 1, high);
+    }
+realloc:
+    *nr_inv_ranges = i;
+    resv = g_realloc(resv, i * sizeof(Range));
+    *inv_ranges = resv;
+}
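
For reference, a minimal usage sketch of the helper as defined in this patch. The two reserved regions and the 39-bit span are made-up values for illustration; the input array must be sorted, and the caller frees the returned array with g_free().

#include "qemu/osdep.h"
#include "qemu/range.h"

/*
 * Hypothetical caller: compute the usable IOVA ranges left by two
 * reserved regions within a 39-bit address space.
 */
static void example_usable_iova_ranges(void)
{
    Range reserved[2];          /* must be sorted and non-overlapping */
    Range *usable = NULL;
    uint32_t nr_usable = 0;

    range_set_bounds(&reserved[0], 0x0, 0xFFFF);
    range_set_bounds(&reserved[1], 0xFEE00000, 0xFEEFFFFF);

    range_inverse_array(ARRAY_SIZE(reserved), reserved,
                        &nr_usable, &usable,
                        0x0, (1ULL << 39) - 1);

    /*
     * nr_usable == 2:
     *   usable[0] = [0x10000,    0xFEDFFFFF]
     *   usable[1] = [0xFEF00000, (1ULL << 39) - 1]
     */
    g_free(usable);
}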