#include <common.h>
#include <memalign.h>
#include <linux/compat.h>

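/*
 * Minimal stand-in for the kernel's "current" task pointer: a single
 * static task with pid 1, since U-Boot has no processes.
 */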
struct p_current cur = {
	.pid = 1,
};
__maybe_unused struct p_current *current = &cur;

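/*
 * U-Boot has no separate user address space, so copy_from_user() is a
 * plain memcpy(). Returning 0 means all bytes were copied.
 */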
unsigned long copy_from_user(void *dest, const void *src,
			     unsigned long count)
{
	memcpy(dest, src, count);
	return 0;
}

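/*
 * kmalloc() shim: returns a cache-aligned buffer (safe for DMA) and
 * zeroes it when __GFP_ZERO is set in flags.
 */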
void *kmalloc(size_t size, int flags)
{
	void *p;

	p = malloc_cache_aligned(size);
	if (p && flags & __GFP_ZERO)
		memset(p, 0, size);

	return p;
}

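/*
 * Create a trivial "slab cache" descriptor that only records the element
 * size; kmem_cache_alloc() below uses it to size each allocation.
 */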
struct kmem_cache *get_mem(int element_sz)
{
	struct kmem_cache *ret;

	ret = memalign(ARCH_DMA_MINALIGN, sizeof(struct kmem_cache));
	if (!ret)
		return NULL;
	ret->sz = element_sz;

	return ret;
}

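/*
 * Allocate one element from the "cache": simply a cache-aligned
 * allocation of the recorded element size. The gfp flag is ignored.
 */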
void *kmem_cache_alloc(struct kmem_cache *obj, int flag)
{
	return malloc_cache_aligned(obj->sz);
}