Data Structures

    struct intel_be_batchbuffer

Defines

    #define BATCH_RESERVED 16
    #define INTEL_DEFAULT_RELOCS 100
    #define INTEL_MAX_RELOCS 400
    #define INTEL_BATCH_NO_CLIPRECTS 0x1
    #define INTEL_BATCH_CLIPRECTS 0x2

Functions

    struct intel_be_batchbuffer *intel_be_batchbuffer_alloc(struct intel_be_context *intel)
    void intel_be_batchbuffer_free(struct intel_be_batchbuffer *batch)
    void intel_be_batchbuffer_finish(struct intel_be_batchbuffer *batch)
    struct _DriFenceObject *intel_be_batchbuffer_flush(struct intel_be_batchbuffer *batch)
    void intel_be_batchbuffer_reset(struct intel_be_batchbuffer *batch)
    void intel_be_offset_relocation(struct intel_be_batchbuffer *batch, unsigned pre_add, struct _DriBufferObject *driBO, uint64_t val_flags, uint64_t val_mask)
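Taken together, these entry points cover the life cycle of a backend batchbuffer: allocate it once, fill it with command dwords and relocations, flush (or finish) it to submit, and free it when the context goes away. The sketch below shows one plausible caller, assuming the surrounding driver already has a struct intel_be_context and a buffer object to reference; the helper name, the MI_NOOP placeholder dword, and the buffer-object flag combination are illustrative assumptions, not part of this header.

    /* Minimal usage sketch (hypothetical helper; "intel" and "vbo" come from elsewhere). */
    static void
    submit_example(struct intel_be_context *intel, struct _DriBufferObject *vbo)
    {
       struct intel_be_batchbuffer *batch = intel_be_batchbuffer_alloc(intel);
       struct _DriFenceObject *fence;

       /* Emit command dwords through the embedded i915 batchbuffer. */
       i915_batchbuffer_dword(&batch->base, 0 /* MI_NOOP, stands in for real commands */);

       /* Emit a graphics address; the relocated dword is written by the call itself. */
       intel_be_offset_relocation(batch, 0, vbo,
                                  DRM_BO_FLAG_MEM_TT | DRM_BO_FLAG_READ,
                                  DRM_BO_MASK_MEM);

       /* Submit, then drop our fence reference (or call
        * intel_be_batchbuffer_finish() to flush and block instead). */
       fence = intel_be_batchbuffer_flush(batch);
       driFenceUnReference(&fence);

       intel_be_batchbuffer_free(batch);
    }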
#define BATCH_RESERVED 16
Definition at line 9 of file intel_be_batchbuffer.h.
#define INTEL_BATCH_CLIPRECTS 0x2
Definition at line 15 of file intel_be_batchbuffer.h.
#define INTEL_BATCH_NO_CLIPRECTS 0x1
Definition at line 14 of file intel_be_batchbuffer.h.
#define INTEL_DEFAULT_RELOCS 100
Definition at line 11 of file intel_be_batchbuffer.h.
#define INTEL_MAX_RELOCS 400
Definition at line 12 of file intel_be_batchbuffer.h.
struct intel_be_batchbuffer *intel_be_batchbuffer_alloc(struct intel_be_context *intel)
Definition at line 87 of file intel_be_batchbuffer.c.
References intel_be_device::batchPool, intel_be_batchbuffer::buffer, intel_be_context::device, intel_be_batchbuffer::device, driBOCreateList(), driGenBuffers(), intel_be_batchbuffer::intel, intel_be_batchbuffer_reset(), intel_be_batchbuffer::last_fence, intel_be_batchbuffer::list, and intel_be_batchbuffer::reloc.
    {
       struct intel_be_batchbuffer *batch = calloc(sizeof(*batch), 1);

       batch->intel = intel;
       batch->device = intel->device;

       driGenBuffers(intel->device->batchPool, "batchbuffer", 1,
                     &batch->buffer, 4096,
                     DRM_BO_FLAG_MEM_TT | DRM_BO_FLAG_EXE, 0);
       batch->last_fence = NULL;
       batch->list = driBOCreateList(20);
       batch->reloc = NULL;
       intel_be_batchbuffer_reset(batch);
       return batch;
    }
void intel_be_batchbuffer_finish(struct intel_be_batchbuffer *batch)
Definition at line 412 of file intel_be_batchbuffer.c.
References driFenceFinish(), driFenceType(), driFenceUnReference(), FALSE, and intel_be_batchbuffer_flush().
    {
       struct _DriFenceObject *fence = intel_be_batchbuffer_flush(batch);
       driFenceFinish(fence, driFenceType(fence), FALSE);
       driFenceUnReference(&fence);
    }
struct _DriFenceObject *intel_be_batchbuffer_flush(struct intel_be_batchbuffer *batch)
Definition at line 349 of file intel_be_batchbuffer.c.
References intel_be_batchbuffer::base, intel_be_batchbuffer::buffer, do_flush_locked(), driBOUnmap(), driFenceReference(), FALSE, intel_be_batchbuffer::flags, intel_be_context::hardware_lock, intel_be_context::hardware_locked, intel_be_context::hardware_unlock, intel_be_batchbuffer::intel, INTEL_BATCH_CLIPRECTS, intel_be_batchbuffer_reset(), intel_be_batchbuffer::last_fence, i915_batchbuffer::map, MI_BATCH_BUFFER_END, and i915_batchbuffer::ptr.
    {
       struct intel_be_context *intel = batch->intel;
       unsigned int used = batch->base.ptr - batch->base.map;
       boolean was_locked = batch->intel->hardware_locked(intel);
       struct _DriFenceObject *fence;

       if (used == 0) {
          driFenceReference(batch->last_fence);
          return batch->last_fence;
       }

       /* Add the MI_BATCH_BUFFER_END. Always add an MI_FLUSH - this is a
        * performance drain that we would like to avoid.
        */
    #if 0 /* ZZZ JB: what should we do here? */
       if (used & 4) {
          ((int *) batch->base.ptr)[0] = intel->vtbl.flush_cmd();
          ((int *) batch->base.ptr)[1] = 0;
          ((int *) batch->base.ptr)[2] = MI_BATCH_BUFFER_END;
          used += 12;
       }
       else {
          ((int *) batch->base.ptr)[0] = intel->vtbl.flush_cmd();
          ((int *) batch->base.ptr)[1] = MI_BATCH_BUFFER_END;
          used += 8;
       }
    #else
       if (used & 4) {
          ((int *) batch->base.ptr)[0] = ((0<<29)|(4<<23)); // MI_FLUSH;
          ((int *) batch->base.ptr)[1] = 0;
          ((int *) batch->base.ptr)[2] = (0xA<<23); // MI_BATCH_BUFFER_END;
          used += 12;
       }
       else {
          ((int *) batch->base.ptr)[0] = ((0<<29)|(4<<23)); // MI_FLUSH;
          ((int *) batch->base.ptr)[1] = (0xA<<23); // MI_BATCH_BUFFER_END;
          used += 8;
       }
    #endif
       driBOUnmap(batch->buffer);
       batch->base.ptr = NULL;
       batch->base.map = NULL;

       /* TODO: Just pass the relocation list and dma buffer up to the
        * kernel.
        */
       if (!was_locked)
          intel->hardware_lock(intel);

       fence = do_flush_locked(batch, used, !(batch->flags & INTEL_BATCH_CLIPRECTS),
                               FALSE);

       if (!was_locked)
          intel->hardware_unlock(intel);

       /* Reset the buffer:
        */
       intel_be_batchbuffer_reset(batch);
       return fence;
    }
void intel_be_batchbuffer_free(struct intel_be_batchbuffer *batch)
Definition at line 105 of file intel_be_batchbuffer.c.
References intel_be_batchbuffer::base, intel_be_batchbuffer::buffer, driBOFreeList(), driBOUnmap(), driBOUnReference(), driFenceFinish(), driFenceUnReference(), FALSE, intel_be_batchbuffer::last_fence, intel_be_batchbuffer::list, i915_batchbuffer::map, and intel_be_batchbuffer::reloc.
    {
       if (batch->last_fence) {
          driFenceFinish(batch->last_fence,
                         DRM_FENCE_TYPE_EXE, FALSE);
          driFenceUnReference(&batch->last_fence);
       }
       if (batch->base.map) {
          driBOUnmap(batch->buffer);
          batch->base.map = NULL;
       }
       driBOUnReference(batch->buffer);
       driBOFreeList(batch->list);
       if (batch->reloc)
          free(batch->reloc);
       batch->buffer = NULL;
       free(batch);
    }
void intel_be_batchbuffer_reset(struct intel_be_batchbuffer *batch)
Definition at line 21 of file intel_be_batchbuffer.c.
References i915_batchbuffer::actual_size, assert, intel_be_batchbuffer::base, BATCH_RESERVED, _drmBONode::bo_arg, intel_be_batchbuffer::buffer, intel_be_batchbuffer::dest_location, intel_be_batchbuffer::device, intel_be_batchbuffer::dirty_state, driBOAddListItem(), driBOData(), driBOKernel(), driBOMap(), driBOPoolOffset(), driBOResetList(), driBOUnrefUserList(), driReadLockKernelBO(), driReadUnlockKernelBO(), intel_be_batchbuffer::drmBOVirtual, intel_be_batchbuffer::flags, intel_be_batchbuffer::id, INTEL_DEFAULT_RELOCS, INTEL_MAX_RELOCS, intel_realloc_relocs(), intel_be_batchbuffer::list, i915_batchbuffer::map, intel_be_device::max_batch_size, intel_be_batchbuffer::node, intel_be_batchbuffer::nr_relocs, intel_be_batchbuffer::poolOffset, i915_batchbuffer::ptr, intel_be_batchbuffer::reloc, intel_be_batchbuffer::reloc_size, and i915_batchbuffer::size.
    {
       /*
        * Get a new, free batchbuffer.
        */
       drmBO *bo;
       struct drm_bo_info_req *req;

       driBOUnrefUserList(batch->list);
       driBOResetList(batch->list);

       /* base.size is the size available to the i915simple driver */
       batch->base.size = batch->device->max_batch_size - BATCH_RESERVED;
       batch->base.actual_size = batch->device->max_batch_size;
       driBOData(batch->buffer, batch->base.actual_size, NULL, NULL, 0);

       /*
        * Add the batchbuffer to the validate list.
        */

       driBOAddListItem(batch->list, batch->buffer,
                        DRM_BO_FLAG_EXE | DRM_BO_FLAG_MEM_TT,
                        DRM_BO_FLAG_EXE | DRM_BO_MASK_MEM,
                        &batch->dest_location, &batch->node);

       req = &batch->node->bo_arg.d.req.bo_req;

       /*
        * Set up information needed for us to make relocations
        * relative to the underlying drm buffer objects.
        */

       driReadLockKernelBO();
       bo = driBOKernel(batch->buffer);
       req->presumed_offset = (uint64_t) bo->offset;
       req->hint = DRM_BO_HINT_PRESUMED_OFFSET;
       batch->drmBOVirtual = (uint8_t *) bo->virtual;
       driReadUnlockKernelBO();

       /*
        * Adjust the relocation buffer size.
        */

       if (batch->reloc_size > INTEL_MAX_RELOCS ||
           batch->reloc == NULL)
          intel_realloc_relocs(batch, INTEL_DEFAULT_RELOCS);

       assert(batch->reloc != NULL);
       batch->reloc[0] = 0; /* No relocs yet. */
       batch->reloc[1] = 1; /* Reloc type 1 */
       batch->reloc[2] = 0; /* Only a single relocation list. */
       batch->reloc[3] = 0; /* Only a single relocation list. */

       batch->base.map = driBOMap(batch->buffer, DRM_BO_FLAG_WRITE, 0);
       batch->poolOffset = driBOPoolOffset(batch->buffer);
       batch->base.ptr = batch->base.map;
       batch->dirty_state = ~0;
       batch->nr_relocs = 0;
       batch->flags = 0;
       batch->id = 0; //batch->intel->intelScreen->batch_id++;
    }
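The four dwords written at the end of the reset form the header of the relocation buffer; each relocation appended later by intel_be_offset_relocation() (documented below) occupies another four dwords. The layout sketched here is an inference that assumes I915_RELOC_HEADER == 4 and I915_RELOC0_STRIDE == 4, which is consistent with the indices used in these two functions but should be checked against the driver's headers.

    /*
     * Assumed layout of batch->reloc (header indices from the reset code above,
     * entry indices from intel_be_offset_relocation() below):
     *
     *   reloc[0]             relocation count (0 right after reset)
     *   reloc[1]             relocation type (1)
     *   reloc[2..3]          0 - only a single relocation list
     *
     *   reloc[4 + 4*n + 0]   byte offset of the relocated dword inside the batch
     *   reloc[4 + 4*n + 1]   delta added to the target buffer's offset (pre_add)
     *   reloc[4 + 4*n + 2]   validate-list index of the target buffer (itemLoc)
     *   reloc[4 + 4*n + 3]   validate-list location of the batchbuffer (dest_location)
     */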
void intel_be_offset_relocation(struct intel_be_batchbuffer *batch,
                                unsigned pre_add,
                                struct _DriBufferObject *driBO,
                                uint64_t val_flags,
                                uint64_t val_mask)
Definition at line 125 of file intel_be_batchbuffer.c.
References intel_be_batchbuffer::base, _drmBONode::bo_arg, intel_be_batchbuffer::dest_location, driBOAddListItem(), driBOKernel(), driBOPoolOffset(), driReadLockKernelBO(), driReadUnlockKernelBO(), intel_be_batchbuffer::drmBOVirtual, i915_batchbuffer_dword(), intel_realloc_relocs(), intel_be_batchbuffer::list, intel_be_batchbuffer::nr_relocs, i915_batchbuffer::ptr, intel_be_batchbuffer::reloc, and intel_be_batchbuffer::reloc_size.
    {
       int itemLoc;
       struct _drmBONode *node;
       uint32_t *reloc;
       struct drm_bo_info_req *req;

       driBOAddListItem(batch->list, driBO, val_flags, val_mask,
                        &itemLoc, &node);
       req = &node->bo_arg.d.req.bo_req;

       if (!(req->hint & DRM_BO_HINT_PRESUMED_OFFSET)) {

          /*
           * Stop other threads from tampering with the underlying
           * drmBO while we're reading its offset.
           */

          driReadLockKernelBO();
          req->presumed_offset = (uint64_t) driBOKernel(driBO)->offset;
          driReadUnlockKernelBO();
          req->hint = DRM_BO_HINT_PRESUMED_OFFSET;
       }

       pre_add += driBOPoolOffset(driBO);

       if (batch->nr_relocs == batch->reloc_size)
          intel_realloc_relocs(batch, batch->reloc_size * 2);

       reloc = batch->reloc +
          (I915_RELOC_HEADER + batch->nr_relocs * I915_RELOC0_STRIDE);

       reloc[0] = ((uint8_t *)batch->base.ptr - batch->drmBOVirtual);
       i915_batchbuffer_dword(&batch->base, req->presumed_offset + pre_add);
       reloc[1] = pre_add;
       reloc[2] = itemLoc;
       reloc[3] = batch->dest_location;
       batch->nr_relocs++;
    }
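Note that the relocated dword is emitted by the function itself: the caller writes the command opcode, then calls intel_be_offset_relocation() at the point where the graphics address belongs. A hedged sketch of such a caller follows; the helper name, the placeholder opcode, and the flag combination are assumptions, not part of this API.

    /*
     * Hypothetical wrapper: emit a two-dword packet whose second dword is the
     * graphics address of "target" plus "delta".
     */
    static void
    emit_address_dword(struct intel_be_batchbuffer *batch,
                       struct _DriBufferObject *target,
                       unsigned delta)
    {
       /* First dword: the command itself (MI_NOOP as a placeholder opcode). */
       i915_batchbuffer_dword(&batch->base, 0 /* MI_NOOP */);

       /*
        * Second dword: written by intel_be_offset_relocation() using the
        * buffer's presumed offset; the recorded relocation lets the kernel
        * patch the dword if the buffer ends up elsewhere at validation time.
        */
       intel_be_offset_relocation(batch, delta, target,
                                  DRM_BO_FLAG_MEM_TT | DRM_BO_FLAG_READ,
                                  DRM_BO_MASK_MEM);
    }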