Lines Matching refs:dirty
72 * vmw_surface_dirty - Surface dirty-tracker
76 * @boxes: Array of SVGA3dBoxes indicating dirty regions. One per subresource.
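Taken together, the hits for cache, size, num_subres and boxes[] suggest the tracker layout. A minimal sketch, with the field ordering assumed and the SVGA3dBox / svga3dsurface_cache types coming from the SVGA headers:

	/* Layout implied by the references below; field ordering is an assumption. */
	struct vmw_surface_dirty {
		struct svga3dsurface_cache cache;	/* Cached surface layout info. */
		size_t size;				/* Accounted allocation size. */
		u32 num_subres;				/* Number of subresources tracked. */
		SVGA3dBox boxes[];			/* One dirty box per subresource. */
	};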
529 * backup buffer is dirty.
669 WARN_ON_ONCE(res->dirty);
1238 if (res->backup->dirty && res->backup_dirty) {
1239 /* We've just made a full upload. Clear dirty regions. */
1711 * vmw_subres_dirty_add - Add a dirty region to a subresource
1712 * @dirty: The surface's dirty tracker.
1722 static void vmw_subres_dirty_add(struct vmw_surface_dirty *dirty,
1726 const struct svga3dsurface_cache *cache = &dirty->cache;
1727 SVGA3dBox *box = &dirty->boxes[loc_start->sub_resource];
1732 if (WARN_ON(loc_start->sub_resource >= dirty->num_subres))
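The add path grows the per-subresource box so it also covers the new extent. A minimal sketch of that union step along one axis, assuming the standard SVGA3dBox fields (x, w) and that empty boxes have been zeroed; dirty_box_extend_1d is a hypothetical helper name, not a function in the file:

	/* Grow box so it also covers [start, end); the real code does this
	 * for x/y/z against the cached mip size. Empty boxes are assumed
	 * zeroed (see the memset in the sync path further down). */
	static void dirty_box_extend_1d(SVGA3dBox *box, u32 start, u32 end)
	{
		u32 box_end = box->x + box->w;	/* old right edge */

		if (box->w == 0 || box->x > start)
			box->x = start;
		if (box_end < end)
			box_end = end;
		box->w = box_end - box->x;
	}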
1766 * vmw_subres_dirty_full - Mark a full subresource as dirty
1767 * @dirty: The surface's dirty tracker.
1770 static void vmw_subres_dirty_full(struct vmw_surface_dirty *dirty, u32 subres)
1772 const struct svga3dsurface_cache *cache = &dirty->cache;
1775 SVGA3dBox *box = &dirty->boxes[subres];
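Marking a whole subresource dirty just resets its box to the full mip-level extent. A sketch under the assumption that the surface cache exposes num_mip_levels and a per-mip size (those field names are not confirmed by the hits):

	const struct svga3dsurface_cache *cache = &dirty->cache;
	u32 mip = subres % cache->num_mip_levels;		/* assumed field */
	const struct drm_vmw_size *size = &cache->mip[mip].size;	/* assumed field */
	SVGA3dBox *box = &dirty->boxes[subres];

	box->x = 0;
	box->y = 0;
	box->z = 0;
	box->w = size->width;
	box->h = size->height;
	box->d = size->depth;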
1792 struct vmw_surface_dirty *dirty =
1793 (struct vmw_surface_dirty *) res->dirty;
1800 cache = &dirty->cache;
1810 * fashion, compute the dirty region for each sheet and the
1811 * resulting union. Since this is not a common case, just dirty
1814 for (sub_res = 0; sub_res < dirty->num_subres; ++sub_res)
1815 vmw_subres_dirty_full(dirty, sub_res);
1820 vmw_subres_dirty_add(dirty, &loc1, &loc2);
1827 vmw_subres_dirty_add(dirty, &loc1, &loc_max);
1829 vmw_subres_dirty_add(dirty, &loc_min, &loc2);
1832 vmw_subres_dirty_full(dirty, sub_res);
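The calls above can be read back into the dispatch the texture range-add performs: a byte range is converted to start/end surface locations, then either a single subresource box grows, or the partial first and last subresources plus every full one in between get dirtied. A sketch reconstructed from those hits (the loop bounds are an assumption):

	if (loc1.sub_resource == loc2.sub_resource) {
		/* Range stays inside one subresource. */
		vmw_subres_dirty_add(dirty, &loc1, &loc2);
	} else {
		/* Partial first and last subresource... */
		vmw_subres_dirty_add(dirty, &loc1, &loc_max);
		vmw_subres_dirty_add(dirty, &loc_min, &loc2);
		/* ...and everything in between is fully dirty. */
		for (sub_res = loc1.sub_resource + 1;
		     sub_res < loc2.sub_resource; ++sub_res)
			vmw_subres_dirty_full(dirty, sub_res);
	}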
1843 struct vmw_surface_dirty *dirty =
1844 (struct vmw_surface_dirty *) res->dirty;
1845 const struct svga3dsurface_cache *cache = &dirty->cache;
1847 SVGA3dBox *box = &dirty->boxes[0];
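For the buffer-backed case only boxes[0] is used: the whole backing store is treated as one subresource and the dirty byte range just widens that single box. A sketch, assuming height and depth are pinned to 1:

	/* Single box for the whole backing buffer; x/w track the byte range. */
	box->h = 1;
	box->d = 1;
	/* grow box->x / box->w as in dirty_box_extend_1d() above */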
1886 struct vmw_surface_dirty *dirty =
1887 (struct vmw_surface_dirty *) res->dirty;
1889 const struct svga3dsurface_cache *cache = &dirty->cache;
1901 for (i = 0; i < dirty->num_subres; ++i) {
1902 const SVGA3dBox *box = &dirty->boxes[i];
1919 for (i = 0; i < dirty->num_subres; ++i) {
1920 const SVGA3dBox *box = &dirty->boxes[i];
1950 memset(&dirty->boxes[0], 0, sizeof(dirty->boxes[0]) *
1951 dirty->num_subres);
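The two loops over num_subres plus the trailing memset suggest the sync step: walk every per-subresource box, emit an update command for each non-empty one, then clear all boxes for the next accumulation round. A sketch; emit_update_for_subres() is a hypothetical placeholder for the command encoding:

	for (i = 0; i < dirty->num_subres; ++i) {
		const SVGA3dBox *box = &dirty->boxes[i];

		if (box->d == 0)	/* empty box, nothing dirtied */
			continue;
		emit_update_for_subres(res, i, box);	/* hypothetical helper */
	}
	/* Start the next accumulation round with clean boxes. */
	memset(&dirty->boxes[0], 0,
	       sizeof(dirty->boxes[0]) * dirty->num_subres);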
1963 struct vmw_surface_dirty *dirty;
1985 dirty_size = struct_size(dirty, boxes, num_subres);
1991 "dirty tracker.\n");
1995 dirty = kvzalloc(dirty_size, GFP_KERNEL);
1996 if (!dirty) {
2004 &dirty->cache);
2008 dirty->num_subres = num_subres;
2009 dirty->size = acc_size;
2010 res->dirty = (struct vmw_resource_dirty *) dirty;
2015 kvfree(dirty);
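The allocation path is visible almost completely in the hits: the tracker is sized with struct_size() so the flexible boxes[] array gets one SVGA3dBox per subresource, zero-allocated with kvzalloc(), and published through the generic res->dirty pointer. A condensed sketch (accounting and error handling abbreviated):

	dirty_size = struct_size(dirty, boxes, num_subres);
	dirty = kvzalloc(dirty_size, GFP_KERNEL);
	if (!dirty)
		return -ENOMEM;		/* real code also undoes the accounting */

	/* The surface layout cache at &dirty->cache is also set up here. */
	dirty->num_subres = num_subres;
	dirty->size = acc_size;
	res->dirty = (struct vmw_resource_dirty *) dirty;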
2026 struct vmw_surface_dirty *dirty =
2027 (struct vmw_surface_dirty *) res->dirty;
2028 size_t acc_size = dirty->size;
2030 kvfree(dirty);
2032 res->dirty = NULL;
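Tear-down mirrors this: the accounted size is read back, the allocation is freed with kvfree() and the generic pointer is cleared. A sketch:

	size_t acc_size = dirty->size;

	kvfree(dirty);
	/* acc_size is presumably returned to the memory accounting here */
	res->dirty = NULL;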