1818#include <sof/trace/trace.h>
1919#include <rtos/symbol.h>
2020#include <rtos/wait.h>
21+ #if CONFIG_VIRTUAL_HEAP
22+ #include <sof/lib/regions_mm.h>
23+
24+ struct vmh_heap * virtual_buffers_heap ;
25+ struct k_spinlock vmh_lock ;
26+ #endif /* CONFIG_VIRTUAL_HEAP */
27+
2128
2229/* Zephyr includes */
2330#include <zephyr/init.h>
@@ -189,6 +196,85 @@ static void l3_heap_free(struct k_heap *h, void *mem)
189196
190197#endif
191198
199+ #if CONFIG_VIRTUAL_HEAP
200+ static void * virtual_heap_alloc (struct vmh_heap * heap , uint32_t flags , uint32_t caps , size_t bytes ,
201+ uint32_t align )
202+ {
203+ void * mem ;
204+
205+ K_SPINLOCK (& vmh_lock ) {
206+ heap -> core_id = cpu_get_id ();
207+ mem = vmh_alloc (heap , bytes );
208+ }
209+
210+ if (!mem )
211+ return NULL ;
212+
213+ assert (IS_ALIGNED (mem , align ));
214+
215+ if (flags & SOF_MEM_FLAG_COHERENT )
216+ return sys_cache_uncached_ptr_get ((__sparse_force void __sparse_cache * )mem );
217+
218+ return mem ;
219+ }
220+
221+ /**
222+ * Checks whether pointer is from virtual memory range.
223+ * @param ptr Pointer to memory being checked.
224+ * @return True if pointer falls into virtual memory region, false otherwise.
225+ */
226+ static bool is_virtual_heap_pointer (void * ptr )
227+ {
228+ uintptr_t virtual_heap_start = POINTER_TO_UINT (sys_cache_cached_ptr_get (& heapmem )) +
229+ HEAPMEM_SIZE ;
230+ uintptr_t virtual_heap_end = CONFIG_KERNEL_VM_BASE + CONFIG_KERNEL_VM_SIZE ;
231+
232+ if (!is_cached (ptr ))
233+ ptr = sys_cache_cached_ptr_get ((__sparse_force void * )ptr );
234+
235+ return ((POINTER_TO_UINT (ptr ) >= virtual_heap_start ) &&
236+ (POINTER_TO_UINT (ptr ) < virtual_heap_end ));
237+ }
238+
/**
 * Frees a buffer back to the virtual buffers heap.
 *
 * @param ptr Pointer previously returned by virtual_heap_alloc(); may be
 *	      either the cached or the uncached alias of the allocation.
 */
static void virtual_heap_free(void *ptr)
{
	/* The heap hands out cached pointers, so translate back before freeing. */
	ptr = sys_cache_cached_ptr_get((__sparse_force void *)ptr);

	K_SPINLOCK(&vmh_lock) {
		vmh_free(virtual_buffers_heap, ptr);
	}
}
247+
/*
 * Pre-configured block bundles for the virtual buffers heap.
 * Each entry is presumably { block_size_in_bytes, number_of_blocks } —
 * TODO confirm against struct vmh_heap_config in sof/lib/regions_mm.h.
 */
static const struct vmh_heap_config static_hp_buffers = {
	{
		{ 128, 32 },
		{ 512, 8 },
		{ 1024, 16 },
		{ 2048, 8 },
		{ 4096, 11 },
		{ 8192, 13 },
		{ 65536, 3 },
		{ 131072, 1 },
		{ 524288, 1 } /* buffer for kpb */
	},
};
261+
/**
 * Boot-time initialization of the virtual buffers heap.
 *
 * A failure is logged but deliberately not propagated: allocation paths
 * check virtual_buffers_heap for NULL and fall back to the regular system
 * heap, so the system stays functional without the virtual heap.
 *
 * @return Always 0.
 */
static int virtual_heap_init(void)
{
	k_spinlock_init(&vmh_lock);

	virtual_buffers_heap = vmh_init_heap(&static_hp_buffers, MEM_REG_ATTR_SHARED_HEAP, 0,
					     false);
	if (!virtual_buffers_heap)
		tr_err(&zephyr_tr, "Unable to init virtual buffers heap!");

	return 0;
}

/* Run after the kernel is up; priority 1 within POST_KERNEL. */
SYS_INIT(virtual_heap_init, POST_KERNEL, 1);
275+
276+ #endif /* CONFIG_VIRTUAL_HEAP */
277+
192278static void * heap_alloc_aligned (struct k_heap * h , size_t min_align , size_t bytes )
193279{
194280 k_spinlock_key_t key ;
@@ -395,6 +481,12 @@ void *rballoc_align(uint32_t flags, uint32_t caps, size_t bytes,
395481 heap = & sof_heap ;
396482 }
397483
484+ #if CONFIG_VIRTUAL_HEAP
485+ /* Use virtual heap if it is available */
486+ if (virtual_buffers_heap )
487+ return virtual_heap_alloc (virtual_buffers_heap , flags , caps , bytes , align );
488+ #endif /* CONFIG_VIRTUAL_HEAP */
489+
398490 if (flags & SOF_MEM_FLAG_COHERENT )
399491 return heap_alloc_aligned (heap , align , bytes );
400492
@@ -417,6 +509,13 @@ void rfree(void *ptr)
417509 }
418510#endif
419511
512+ #if CONFIG_VIRTUAL_HEAP
513+ if (is_virtual_heap_pointer (ptr )) {
514+ virtual_heap_free (ptr );
515+ return ;
516+ }
517+ #endif
518+
420519 heap_free (& sof_heap , ptr );
421520}
422521EXPORT_SYMBOL (rfree );
@@ -428,7 +527,6 @@ static int heap_init(void)
428527#if CONFIG_L3_HEAP
429528 sys_heap_init (& l3_heap .heap , UINT_TO_POINTER (get_l3_heap_start ()), get_l3_heap_size ());
430529#endif
431-
432530 return 0 ;
433531}
434532
0 commit comments