1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
|
/*
* Copyright 2021 Google LLC
* SPDX-License-Identifier: MIT
*/
#include "vn_renderer_util.h"
/* Initialize \p pool to an empty state: no backing shmem, zero size and
 * usage.  \p min_alloc_size is rounded up to a power of two so that grow
 * requests hit the renderer's shmem cache.  The renderer is unused here.
 */
void
vn_renderer_shmem_pool_init(UNUSED struct vn_renderer *renderer,
                            struct vn_renderer_shmem_pool *pool,
                            size_t min_alloc_size)
{
   const struct vn_renderer_shmem_pool initial = {
      /* power-of-two to hit shmem cache */
      .min_alloc_size = util_next_power_of_two(min_alloc_size),
   };
   *pool = initial;
}
/* Release the pool's reference on its backing shmem, if it ever grew one.
 * Suballocations handed out by vn_renderer_shmem_pool_alloc hold their own
 * references and are unaffected.
 */
void
vn_renderer_shmem_pool_fini(struct vn_renderer *renderer,
                            struct vn_renderer_shmem_pool *pool)
{
   struct vn_renderer_shmem *shmem = pool->shmem;
   if (!shmem)
      return;

   vn_renderer_shmem_unref(renderer, shmem);
}
/* Replace the pool's backing shmem with a fresh one large enough to hold a
 * suballocation of \p size bytes, resetting the used counter to zero.
 * Outstanding suballocations stay valid through the references they hold;
 * the pool merely drops its own reference to the old shmem.
 *
 * Returns false if the size computation overflows or the shmem allocation
 * fails; the pool is left unmodified in that case.
 */
static bool
vn_renderer_shmem_pool_grow(struct vn_renderer *renderer,
                            struct vn_renderer_shmem_pool *pool,
                            size_t size)
{
   VN_TRACE_FUNC();

   /* double from min_alloc_size until size fits: keeps the allocation a
    * power-of-two (assuming min_alloc_size is one) to hit the shmem cache
    */
   size_t alloc_size = pool->min_alloc_size;
   while (alloc_size < size) {
      alloc_size *= 2;
      /* doubling wrapped around size_t */
      if (alloc_size == 0)
         return false;
   }

   struct vn_renderer_shmem *shmem =
      vn_renderer_shmem_create(renderer, alloc_size);
   if (!shmem)
      return false;

   struct vn_renderer_shmem *old_shmem = pool->shmem;
   pool->shmem = shmem;
   pool->size = alloc_size;
   pool->used = 0;

   if (old_shmem)
      vn_renderer_shmem_unref(renderer, old_shmem);

   return true;
}
/* Suballocate \p size bytes from the pool, growing the backing shmem when
 * the remaining space is insufficient.  On success, returns a new reference
 * to the backing shmem and stores the suballocation's byte offset into
 * \p *out_offset; the caller owns the returned reference.  Returns NULL if
 * growing the pool fails.
 */
struct vn_renderer_shmem *
vn_renderer_shmem_pool_alloc(struct vn_renderer *renderer,
                             struct vn_renderer_shmem_pool *pool,
                             size_t size,
                             size_t *out_offset)
{
   const size_t avail = pool->size - pool->used;
   if (unlikely(avail < size)) {
      if (!vn_renderer_shmem_pool_grow(renderer, pool, size))
         return NULL;

      assert(pool->size - pool->used >= size);
   }

   const size_t offset = pool->used;
   *out_offset = offset;
   pool->used = offset + size;

   return vn_renderer_shmem_ref(renderer, pool->shmem);
}
|