#include <common.h>
#include <malloc.h>
#include <memalign.h>
#include <asm/cache.h>
#include <linux/compat.h>

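/*
 * Minimal stand-in for the Linux "current" task pointer: U-Boot runs
 * single-threaded, so one static descriptor with pid 1 is enough for
 * compat code that only dereferences it (typically current->pid).
 */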
struct p_current cur = {
	.pid = 1,
};
__maybe_unused struct p_current *current = &cur;

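/*
 * U-Boot has no separate user address space, so copy_from_user() is a
 * plain memcpy(). Returning 0 means "no bytes left uncopied", matching
 * the Linux convention.
 */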
unsigned long copy_from_user(void *dest, const void *src,
			     unsigned long count)
{
	memcpy(dest, src, count);
	return 0;
}

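/*
 * kmalloc() is backed by U-Boot's cache-aligned allocator, so returned
 * buffers are suitable for DMA. Only __GFP_ZERO is honoured from the
 * flags; other GFP bits are ignored.
 */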
void *kmalloc(size_t size, int flags)
{
	void *p;

	p = malloc_cache_aligned(size);
	if (p && flags & __GFP_ZERO)
		memset(p, 0, size);

	return p;
}

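/*
 * get_mem() creates a trivial kmem_cache descriptor that only records
 * the element size; kmem_cache_alloc() then satisfies each request
 * with a fresh cache-aligned allocation of that size.
 */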
struct kmem_cache *get_mem(int element_sz)
{
	struct kmem_cache *ret;

	ret = memalign(ARCH_DMA_MINALIGN, sizeof(struct kmem_cache));
	if (!ret)
		return NULL;
	ret->sz = element_sz;

	return ret;
}

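/* The allocation flag is ignored; every object is freshly allocated. */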
void *kmem_cache_alloc(struct kmem_cache *obj, int flag)
{
	return malloc_cache_aligned(obj->sz);
}

/**
 * kmemdup - duplicate region of memory
 *
 * @src: memory region to duplicate
 * @len: memory region length
 * @gfp: GFP mask to use
 *
 * Return: newly allocated copy of @src or %NULL in case of error
 */
void *kmemdup(const void *src, size_t len, gfp_t gfp)
{
	void *p;

	p = kmalloc(len, gfp);
	if (p)
		memcpy(p, src, len);
	return p;
}
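/*
 * Typical use (illustrative only, assuming a caller that already holds a
 * buffer "buf" of length "len"):
 *
 *	void *copy = kmemdup(buf, len, GFP_KERNEL);
 *	if (!copy)
 *		return -ENOMEM;
 */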