#ifndef __NOUVEAU_BUFFER_H__
#define __NOUVEAU_BUFFER_H__

#include "util/u_range.h"
#include "util/u_transfer.h"
#include "util/list.h"

struct pipe_resource;
struct nouveau_context;
struct nouveau_bo;

/* GPU_READING / GPU_WRITING: queued GPU work reads from / writes to the
 *  buffer
 *
 * DIRTY: buffer was (or will be after the next flush) written to by the GPU
 *  and resource->data has not been updated to reflect the modified VRAM
 *  contents
 *
 * USER_MEMORY: resource->data is a pointer to client memory and may change
 *  between GL calls
 *
 * USER_PTR: bo is backed by user memory mapped into the GPU's VM
 */
#define NOUVEAU_BUFFER_STATUS_GPU_READING (1 << 0)
#define NOUVEAU_BUFFER_STATUS_GPU_WRITING (1 << 1)
#define NOUVEAU_BUFFER_STATUS_DIRTY       (1 << 2)
#define NOUVEAU_BUFFER_STATUS_USER_PTR    (1 << 6)
#define NOUVEAU_BUFFER_STATUS_USER_MEMORY (1 << 7)

#define NOUVEAU_BUFFER_STATUS_REALLOC_MASK NOUVEAU_BUFFER_STATUS_USER_MEMORY
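
/* Illustrative sketch (not declarations from this header; the wait shown is a
 * placeholder for whatever synchronization the caller uses): callers test and
 * update these bits directly on nv04_resource::status, e.g.
 *
 *    if (res->status & NOUVEAU_BUFFER_STATUS_GPU_WRITING)
 *       wait_for(res->fence_wr);            // hypothetical helper
 *    res->status |= NOUVEAU_BUFFER_STATUS_GPU_READING;
 */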

/* Resources, if mapped into the GPU's address space, are guaranteed to
 * have constant virtual addresses (nv50+).
 *
 * The address of a resource will lie within the nouveau_bo referenced,
 * and this bo should be added to the memory manager's validation list.
 */
struct nv04_resource {
   struct pipe_resource base;

   uint64_t address; /* virtual address (nv50+) */

   uint8_t *data; /* resource's contents, if domain == 0, or cached */
   struct nouveau_bo *bo;
   uint32_t offset; /* offset into the data/bo */

   uint8_t status;
   uint8_t domain;

   uint16_t cb_bindings[6]; /* per-shader per-slot bindings */

   struct nouveau_fence *fence;
   struct nouveau_fence *fence_wr;

   struct nouveau_mm_allocation *mm;

   /* buffer range that has been initialized */
   struct util_range valid_buffer_range;
};
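
/* Illustrative notes (assumptions about the matching .c implementation, not
 * guaranteed by this header):
 *  - for GPU-resident buffers (domain != 0), address is typically maintained
 *    as bo->offset + offset;
 *  - code that writes the buffer is expected to grow valid_buffer_range
 *    (e.g. via util_range_add()) so readers can skip synchronization against
 *    data that was never initialized.
 */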

void
nouveau_buffer_release_gpu_storage(struct nv04_resource *);

void
nouveau_copy_buffer(struct nouveau_context *,
                    struct nv04_resource *dst, unsigned dst_pos,
                    struct nv04_resource *src, unsigned src_pos, unsigned size);

bool
nouveau_buffer_migrate(struct nouveau_context *,
                       struct nv04_resource *, unsigned domain);

void *
nouveau_resource_map_offset(struct nouveau_context *, struct nv04_resource *,
                            uint32_t offset, uint32_t flags);

void
nouveau_buffer_destroy(struct pipe_screen *pscreen,
                       struct pipe_resource *presource);

void
nouveau_buffer_transfer_flush_region(struct pipe_context *pipe,
                                     struct pipe_transfer *transfer,
                                     const struct pipe_box *box);

static inline void
nouveau_resource_unmap(struct nv04_resource *res)
{
   /* no-op */
}
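
/* Illustrative usage sketch (nv, res, byte_offset and the access pattern are
 * assumptions made for the example; NOUVEAU_BO_RD comes from libdrm's
 * nouveau.h):
 *
 *    const uint32_t *p =
 *       nouveau_resource_map_offset(nv, res, byte_offset, NOUVEAU_BO_RD);
 *    if (p) {
 *       ... read through p ...
 *       nouveau_resource_unmap(res); // currently a no-op, kept for symmetry
 *    }
 */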

static inline struct nv04_resource *
nv04_resource(struct pipe_resource *resource)
{
   return (struct nv04_resource *)resource;
}

/* is the resource mapped into the GPU's address space (i.e. VRAM or GART)? */
static inline bool
nouveau_resource_mapped_by_gpu(struct pipe_resource *resource)
{
   return nv04_resource(resource)->domain != 0;
}
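
/* Illustrative use (a sketch, not taken from this header: NOUVEAU_BO_GART is
 * libdrm's domain flag, and migrating before GPU use is one possible policy):
 *
 *    if (!nouveau_resource_mapped_by_gpu(resource))
 *       nouveau_buffer_migrate(nv, nv04_resource(resource), NOUVEAU_BO_GART);
 */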

struct pipe_resource *
nouveau_buffer_create(struct pipe_screen *pscreen,
                      const struct pipe_resource *templ);

struct pipe_resource *
nouveau_buffer_create_from_user(struct pipe_screen *pscreen,
                                const struct pipe_resource *templ,
                                void *user_ptr);

struct pipe_resource *
nouveau_user_buffer_create(struct pipe_screen *screen, void *ptr,
                           unsigned bytes, unsigned usage);

bool
nouveau_user_buffer_upload(struct nouveau_context *, struct nv04_resource *,
                           unsigned base, unsigned size);

void
nouveau_buffer_invalidate(struct pipe_context *pipe,
                          struct pipe_resource *resource);

/* Copy data into a scratch buffer and return the address & bo the data
 * resides in.
 * Returns 0 on failure.
 */
uint64_t
nouveau_scratch_data(struct nouveau_context *,
                     const void *data, unsigned base, unsigned size,
                     struct nouveau_bo **);
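
/* Illustrative usage sketch (nv, data, base, size and the validation step are
 * assumptions made for the example):
 *
 *    struct nouveau_bo *bo;
 *    uint64_t addr = nouveau_scratch_data(nv, data, base, size, &bo);
 *    if (!addr)
 *       return false; // scratch upload failed
 *    // emit addr in the command stream and add bo to the validation list
 */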

void *
nouveau_buffer_transfer_map(struct pipe_context *pipe,
                            struct pipe_resource *resource,
                            unsigned level, unsigned usage,
                            const struct pipe_box *box,
                            struct pipe_transfer **ptransfer);

void
nouveau_buffer_transfer_unmap(struct pipe_context *pipe,
                              struct pipe_transfer *transfer);
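
/* These (together with nouveau_buffer_transfer_flush_region above) implement
 * the Gallium buffer transfer hooks. Illustrative wiring sketch; the exact
 * pipe_context member names depend on the Gallium version and are an
 * assumption here:
 *
 *    pipe->buffer_map   = nouveau_buffer_transfer_map;
 *    pipe->buffer_unmap = nouveau_buffer_transfer_unmap;
 */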

#endif