forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-01-31 f70575805708cabdedea7498aaa3f710fde4d920
kernel/drivers/gpu/drm/etnaviv/etnaviv_mmu.h
....@@ -16,61 +16,111 @@
1616
1717 struct etnaviv_gpu;
1818 struct etnaviv_vram_mapping;
19
-struct etnaviv_iommu_domain;
19
+struct etnaviv_iommu_global;
20
+struct etnaviv_iommu_context;
2021
21
-struct etnaviv_iommu_domain_ops {
22
- void (*free)(struct etnaviv_iommu_domain *);
23
- int (*map)(struct etnaviv_iommu_domain *domain, unsigned long iova,
22
+struct etnaviv_iommu_ops {
23
+ struct etnaviv_iommu_context *(*init)(struct etnaviv_iommu_global *);
24
+ void (*free)(struct etnaviv_iommu_context *);
25
+ int (*map)(struct etnaviv_iommu_context *context, unsigned long iova,
2426 phys_addr_t paddr, size_t size, int prot);
25
- size_t (*unmap)(struct etnaviv_iommu_domain *domain, unsigned long iova,
27
+ size_t (*unmap)(struct etnaviv_iommu_context *context, unsigned long iova,
2628 size_t size);
27
- size_t (*dump_size)(struct etnaviv_iommu_domain *);
28
- void (*dump)(struct etnaviv_iommu_domain *, void *);
29
+ size_t (*dump_size)(struct etnaviv_iommu_context *);
30
+ void (*dump)(struct etnaviv_iommu_context *, void *);
31
+ void (*restore)(struct etnaviv_gpu *, struct etnaviv_iommu_context *);
2932 };
/* the two backend implementations, selected by MMU version */
extern const struct etnaviv_iommu_ops etnaviv_iommuv1_ops;
extern const struct etnaviv_iommu_ops etnaviv_iommuv2_ops;

/* MMUv2 Page Table Array: one 64-bit descriptor slot per context */
#define ETNAVIV_PTA_SIZE	SZ_4K
#define ETNAVIV_PTA_ENTRIES	(ETNAVIV_PTA_SIZE / sizeof(u64))
40
+struct etnaviv_iommu_global {
3241 struct device *dev;
42
+ enum etnaviv_iommu_version version;
43
+ const struct etnaviv_iommu_ops *ops;
44
+ unsigned int use;
45
+ struct mutex lock;
46
+
3347 void *bad_page_cpu;
3448 dma_addr_t bad_page_dma;
35
- u64 base;
36
- u64 size;
3749
38
- const struct etnaviv_iommu_domain_ops *ops;
50
+ u32 memory_base;
51
+
52
+ /*
53
+ * This union holds members needed by either MMUv1 or MMUv2, which
54
+ * can not exist at the same time.
55
+ */
56
+ union {
57
+ struct {
58
+ struct etnaviv_iommu_context *shared_context;
59
+ } v1;
60
+ struct {
61
+ /* P(age) T(able) A(rray) */
62
+ u64 *pta_cpu;
63
+ dma_addr_t pta_dma;
64
+ struct spinlock pta_lock;
65
+ DECLARE_BITMAP(pta_alloc, ETNAVIV_PTA_ENTRIES);
66
+ } v2;
67
+ };
3968 };
4069
41
-struct etnaviv_iommu {
42
- struct etnaviv_gpu *gpu;
43
- struct etnaviv_iommu_domain *domain;
44
-
45
- enum etnaviv_iommu_version version;
70
+struct etnaviv_iommu_context {
71
+ struct kref refcount;
72
+ struct etnaviv_iommu_global *global;
4673
4774 /* memory manager for GPU address area */
4875 struct mutex lock;
4976 struct list_head mappings;
5077 struct drm_mm mm;
5178 unsigned int flush_seq;
79
+
80
+ /* Not part of the context, but needs to have the same lifetime */
81
+ struct etnaviv_vram_mapping cmdbuf_mapping;
5282 };
83
+
84
+int etnaviv_iommu_global_init(struct etnaviv_gpu *gpu);
85
+void etnaviv_iommu_global_fini(struct etnaviv_gpu *gpu);
5386
5487 struct etnaviv_gem_object;
5588
56
-int etnaviv_iommu_map_gem(struct etnaviv_iommu *mmu,
89
+int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context,
5790 struct etnaviv_gem_object *etnaviv_obj, u32 memory_base,
58
- struct etnaviv_vram_mapping *mapping);
59
-void etnaviv_iommu_unmap_gem(struct etnaviv_iommu *mmu,
91
+ struct etnaviv_vram_mapping *mapping, u64 va);
92
+void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context,
6093 struct etnaviv_vram_mapping *mapping);
6194
62
-int etnaviv_iommu_get_suballoc_va(struct etnaviv_gpu *gpu, dma_addr_t paddr,
63
- struct drm_mm_node *vram_node, size_t size,
64
- u32 *iova);
65
-void etnaviv_iommu_put_suballoc_va(struct etnaviv_gpu *gpu,
66
- struct drm_mm_node *vram_node, size_t size,
67
- u32 iova);
95
+int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *ctx,
96
+ struct etnaviv_vram_mapping *mapping,
97
+ u32 memory_base, dma_addr_t paddr,
98
+ size_t size);
99
+void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *ctx,
100
+ struct etnaviv_vram_mapping *mapping);
68101
69
-size_t etnaviv_iommu_dump_size(struct etnaviv_iommu *iommu);
70
-void etnaviv_iommu_dump(struct etnaviv_iommu *iommu, void *buf);
102
+size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *ctx);
103
+void etnaviv_iommu_dump(struct etnaviv_iommu_context *ctx, void *buf);
71104
72
-struct etnaviv_iommu *etnaviv_iommu_new(struct etnaviv_gpu *gpu);
73
-void etnaviv_iommu_destroy(struct etnaviv_iommu *iommu);
74
-void etnaviv_iommu_restore(struct etnaviv_gpu *gpu);
105
+struct etnaviv_iommu_context *
106
+etnaviv_iommu_context_init(struct etnaviv_iommu_global *global,
107
+ struct etnaviv_cmdbuf_suballoc *suballoc);
108
+static inline struct etnaviv_iommu_context *
109
+etnaviv_iommu_context_get(struct etnaviv_iommu_context *ctx)
110
+{
111
+ kref_get(&ctx->refcount);
112
+ return ctx;
113
+}
114
+void etnaviv_iommu_context_put(struct etnaviv_iommu_context *ctx);
115
+void etnaviv_iommu_restore(struct etnaviv_gpu *gpu,
116
+ struct etnaviv_iommu_context *ctx);
117
+
118
+struct etnaviv_iommu_context *
119
+etnaviv_iommuv1_context_alloc(struct etnaviv_iommu_global *global);
120
+struct etnaviv_iommu_context *
121
+etnaviv_iommuv2_context_alloc(struct etnaviv_iommu_global *global);
122
+
123
+u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context);
124
+unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context);
75125
76126 #endif /* __ETNAVIV_MMU_H__ */