Lines matching refs:ctx — each entry gives the original source line number, the code, and the enclosing function ("local" / "argument" marks how ctx is introduced there).
216 copybit_context_t* ctx = (copybit_context_t*)(ptr); in c2d_wait_loop() local
221 while(ctx->stop_thread == false) { in c2d_wait_loop()
222 pthread_mutex_lock(&ctx->wait_cleanup_lock); in c2d_wait_loop()
223 while(ctx->wait_timestamp == false && !ctx->stop_thread) { in c2d_wait_loop()
224 pthread_cond_wait(&(ctx->wait_cleanup_cond), in c2d_wait_loop()
225 &(ctx->wait_cleanup_lock)); in c2d_wait_loop()
227 if(ctx->wait_timestamp) { in c2d_wait_loop()
228 if(LINK_c2dWaitTimestamp(ctx->time_stamp)) { in c2d_wait_loop()
231 ctx->wait_timestamp = false; in c2d_wait_loop()
234 if (ctx->mapped_gpu_addr[i]) { in c2d_wait_loop()
235 LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[i]); in c2d_wait_loop()
236 ctx->mapped_gpu_addr[i] = 0; in c2d_wait_loop()
240 ctx->blit_rgb_count = 0; in c2d_wait_loop()
241 ctx->blit_yuv_2_plane_count = 0; in c2d_wait_loop()
242 ctx->blit_yuv_3_plane_count = 0; in c2d_wait_loop()
243 ctx->blit_count = 0; in c2d_wait_loop()
244 ctx->dst_surface_mapped = false; in c2d_wait_loop()
245 ctx->dst_surface_base = 0; in c2d_wait_loop()
247 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in c2d_wait_loop()
248 if(ctx->stop_thread) in c2d_wait_loop()
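The lines above are the consumer half of a plain pthreads handshake: c2d_wait_loop() sleeps on wait_cleanup_cond until wait_timestamp or stop_thread is set, then waits on the GPU timestamp, unmaps any cached GPU addresses, and resets the per-frame blit counters, all under wait_cleanup_lock. Below is a minimal, self-contained sketch of the same predicate-protected wait/signal pattern; the struct and field names are stand-ins, not the driver's copybit_context_t.

    #include <pthread.h>
    #include <unistd.h>

    // Illustrative stand-in for the synchronization fields of copybit_context_t.
    struct worker_ctx {
        pthread_mutex_t lock;
        pthread_cond_t  cond;
        bool            work_pending;   // plays the role of wait_timestamp
        bool            stop_thread;
    };

    // Consumer: the pattern used by c2d_wait_loop().
    static void *wait_loop(void *ptr) {
        worker_ctx *ctx = static_cast<worker_ctx *>(ptr);
        while (!ctx->stop_thread) {
            pthread_mutex_lock(&ctx->lock);
            // Re-check the predicate after every wakeup: spurious wakeups are legal.
            while (!ctx->work_pending && !ctx->stop_thread)
                pthread_cond_wait(&ctx->cond, &ctx->lock);
            if (ctx->work_pending) {
                // ... wait for the GPU timestamp and release per-frame resources ...
                ctx->work_pending = false;
            }
            pthread_mutex_unlock(&ctx->lock);
            if (ctx->stop_thread)
                break;
        }
        return nullptr;
    }

    // Producer: the pattern used by flush_get_fence_copybit() and clean_up().
    static void kick(worker_ctx *ctx, bool stop) {
        pthread_mutex_lock(&ctx->lock);
        if (stop)
            ctx->stop_thread = true;
        else
            ctx->work_pending = true;
        pthread_cond_signal(&ctx->cond);
        pthread_mutex_unlock(&ctx->lock);
    }

    int main() {
        worker_ctx ctx = {PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, false, false};
        pthread_t tid;
        pthread_create(&tid, nullptr, wait_loop, &ctx);
        kick(&ctx, false);   // hand one "frame" to the wait thread
        usleep(1000);
        kick(&ctx, true);    // ask it to exit, as clean_up() does
        pthread_join(tid, nullptr);
        return 0;
    }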
342 static size_t c2d_get_gpuaddr(copybit_context_t* ctx, in c2d_get_gpuaddr() argument
363 if (ctx->mapped_gpu_addr[freeindex] == 0) { in c2d_get_gpuaddr()
378 ctx->mapped_gpu_addr[freeindex] = (size_t)gpuaddr; in c2d_get_gpuaddr()
385 static void unmap_gpuaddr(copybit_context_t* ctx, int mapped_idx) in unmap_gpuaddr() argument
387 if (!ctx || (mapped_idx == -1)) in unmap_gpuaddr()
390 if (ctx->mapped_gpu_addr[mapped_idx]) { in unmap_gpuaddr()
391 LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[mapped_idx]); in unmap_gpuaddr()
392 ctx->mapped_gpu_addr[mapped_idx] = 0; in unmap_gpuaddr()
498 static int set_image(copybit_context_t* ctx, uint32 surfaceId, in set_image() argument
526 gpuaddr = c2d_get_gpuaddr(ctx, handle, mapped_idx); in set_image()
559 unmap_gpuaddr(ctx, mapped_idx); in set_image()
577 unmap_gpuaddr(ctx, mapped_idx); in set_image()
598 unmap_gpuaddr(ctx, mapped_idx); in set_image()
603 unmap_gpuaddr(ctx, mapped_idx); in set_image()
611 static int msm_copybit(struct copybit_context_t *ctx, unsigned int target) in msm_copybit() argument
613 if (ctx->blit_count == 0) { in msm_copybit()
617 for (int i = 0; i < ctx->blit_count; i++) in msm_copybit()
619 ctx->blit_list[i].next = &(ctx->blit_list[i+1]); in msm_copybit()
621 ctx->blit_list[ctx->blit_count-1].next = NULL; in msm_copybit()
622 uint32_t target_transform = ctx->trg_transform; in msm_copybit()
623 if (ctx->c2d_driver_info.capabilities_mask & in msm_copybit()
628 if(LINK_c2dDraw(target, target_transform, 0x0, 0, 0, ctx->blit_list, in msm_copybit()
629 ctx->blit_count)) { in msm_copybit()
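msm_copybit() submits the queued surfaces in one call: it stitches the first blit_count entries of blit_list into a NULL-terminated intrusive list, applies the target transform override when the driver capability bit is set, and hands the list head plus the count to c2dDraw(). A small stand-alone sketch of the array-to-linked-list step (the object type and the capacity are assumptions for illustration):

    #include <cstdio>

    #define MAX_BLIT_OBJECT_COUNT 50          // assumed capacity, illustration only

    // Minimal stand-in for C2D_OBJECT: an id plus the intrusive 'next' pointer.
    struct blit_object {
        int          id;
        blit_object *next;
    };

    // Chain the first 'count' slots into a NULL-terminated list, the way
    // msm_copybit() prepares blit_list before calling c2dDraw().
    static blit_object *chain_blit_list(blit_object *list, int count) {
        if (count == 0)
            return nullptr;                   // nothing queued, nothing to draw
        for (int i = 0; i < count - 1; i++)
            list[i].next = &list[i + 1];
        list[count - 1].next = nullptr;       // terminate the batch
        return &list[0];
    }

    int main() {
        blit_object objs[MAX_BLIT_OBJECT_COUNT] = {};
        for (int i = 0; i < 3; i++)
            objs[i].id = i;
        for (blit_object *o = chain_blit_list(objs, 3); o; o = o->next)
            std::printf("blit object %d\n", o->id);
        return 0;
    }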
640 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in flush_get_fence_copybit() local
642 if (!ctx) in flush_get_fence_copybit()
644 pthread_mutex_lock(&ctx->wait_cleanup_lock); in flush_get_fence_copybit()
645 status = msm_copybit(ctx, ctx->dst[ctx->dst_surface_type]); in flush_get_fence_copybit()
647 if(LINK_c2dFlush(ctx->dst[ctx->dst_surface_type], &ctx->time_stamp)) { in flush_get_fence_copybit()
650 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in flush_get_fence_copybit()
653 if(LINK_c2dCreateFenceFD(ctx->dst[ctx->dst_surface_type], ctx->time_stamp, in flush_get_fence_copybit()
660 ctx->wait_timestamp = true; in flush_get_fence_copybit()
661 pthread_cond_signal(&ctx->wait_cleanup_cond); in flush_get_fence_copybit()
663 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in flush_get_fence_copybit()
669 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in finish_copybit() local
670 if (!ctx) in finish_copybit()
673 int status = msm_copybit(ctx, ctx->dst[ctx->dst_surface_type]); in finish_copybit()
675 if(LINK_c2dFinish(ctx->dst[ctx->dst_surface_type])) { in finish_copybit()
682 if (ctx->mapped_gpu_addr[i]) { in finish_copybit()
683 LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[i]); in finish_copybit()
684 ctx->mapped_gpu_addr[i] = 0; in finish_copybit()
689 ctx->blit_rgb_count = 0; in finish_copybit()
690 ctx->blit_yuv_2_plane_count = 0; in finish_copybit()
691 ctx->blit_yuv_3_plane_count = 0; in finish_copybit()
692 ctx->blit_count = 0; in finish_copybit()
693 ctx->dst_surface_mapped = false; in finish_copybit()
694 ctx->dst_surface_base = 0; in finish_copybit()
706 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in clear_copybit() local
707 if (ctx->is_dst_ubwc_format) in clear_copybit()
710 pthread_mutex_lock(&ctx->wait_cleanup_lock); in clear_copybit()
711 if(!ctx->dst_surface_mapped) { in clear_copybit()
712 ret = set_image(ctx, ctx->dst[RGB_SURFACE], buf, in clear_copybit()
716 unmap_gpuaddr(ctx, mapped_dst_idx); in clear_copybit()
717 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in clear_copybit()
722 ctx->dst_surface_mapped = true; in clear_copybit()
723 ctx->dst_surface_base = buf->base; in clear_copybit()
724 ret = LINK_c2dFillSurface(ctx->dst[RGB_SURFACE], 0x0, &c2drect); in clear_copybit()
726 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in clear_copybit()
732 static void set_rects(struct copybit_context_t *ctx, in set_rects() argument
739 if((ctx->trg_transform & C2D_TARGET_ROTATE_90) && in set_rects()
740 (ctx->trg_transform & C2D_TARGET_ROTATE_180)) { in set_rects()
743 c2dObject->target_rect.y = ctx->fb_width? in set_rects()
744 (ALIGN(ctx->fb_width,32)- dst->r):dst->r; in set_rects()
748 } else if(ctx->trg_transform & C2D_TARGET_ROTATE_90) { in set_rects()
749 c2dObject->target_rect.x = ctx->fb_height?(ctx->fb_height - dst->b):dst->b; in set_rects()
754 } else if(ctx->trg_transform & C2D_TARGET_ROTATE_180) { in set_rects()
755 c2dObject->target_rect.y = ctx->fb_height?(ctx->fb_height - dst->b):dst->b; in set_rects()
757 c2dObject->target_rect.x = ctx->fb_width? in set_rects()
758 (ALIGN(ctx->fb_width,32) - dst->r):dst->r; in set_rects()
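set_rects() rewrites the destination rectangle into the rotated target's coordinate space: for a 180-degree target the top edge is reflected against fb_height and the left edge against the 32-aligned fb_width, with a value of 0 meaning "dimension unknown, keep the original coordinate". A simplified sketch of that reflection for the 180-degree case (the rect layout and names are assumptions):

    #include <cstdint>

    // Round x up to the next multiple of a power-of-two alignment, as the
    // driver's ALIGN(fb_width, 32) does for the framebuffer stride.
    static inline uint32_t align_up(uint32_t x, uint32_t align) {
        return (x + align - 1) & ~(align - 1);
    }

    struct rect { int l, t, r, b; };          // left/top/right/bottom, illustrative

    // Reflect a destination rect for a target rotated by 180 degrees.
    static rect rotate_dst_180(const rect &dst, uint32_t fb_width, uint32_t fb_height) {
        rect out;
        out.t = fb_height ? (int)fb_height - dst.b : dst.b;               // new top
        out.l = fb_width  ? (int)align_up(fb_width, 32) - dst.r : dst.r;  // new left
        out.r = out.l + (dst.r - dst.l);      // width is preserved
        out.b = out.t + (dst.b - dst.t);      // height is preserved
        return out;
    }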
793 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in set_parameter_copybit() local
795 if (!ctx) { in set_parameter_copybit()
800 pthread_mutex_lock(&ctx->wait_cleanup_lock); in set_parameter_copybit()
807 ctx->src_global_alpha = value; in set_parameter_copybit()
809 ctx->config_mask |= C2D_GLOBAL_ALPHA_BIT; in set_parameter_copybit()
811 ctx->config_mask &= ~C2D_GLOBAL_ALPHA_BIT; in set_parameter_copybit()
817 ctx->config_mask |= C2D_ALPHA_BLEND_NONE; in set_parameter_copybit()
818 ctx->is_premultiplied_alpha = true; in set_parameter_copybit()
820 ctx->is_premultiplied_alpha = true; in set_parameter_copybit()
822 ctx->config_mask &= ~C2D_ALPHA_BLEND_NONE; in set_parameter_copybit()
849 if (ctx->c2d_driver_info.capabilities_mask & in set_parameter_copybit()
851 ctx->config_mask |= config_mask; in set_parameter_copybit()
859 ctx->trg_transform = transform; in set_parameter_copybit()
863 ctx->fb_width = value; in set_parameter_copybit()
866 ctx->fb_height = value; in set_parameter_copybit()
875 ctx->is_src_ubwc_format = (value == COPYBIT_UBWC_COMPRESSED); in set_parameter_copybit()
878 ctx->is_dst_ubwc_format = (value == COPYBIT_UBWC_COMPRESSED); in set_parameter_copybit()
885 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in set_parameter_copybit()
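set_parameter_copybit() takes wait_cleanup_lock and folds each knob into the context: plane alpha and the blend mode toggle individual bits in config_mask, while the transform, framebuffer size, and UBWC format flags are stored directly. The bit handling reduces to a set/clear helper like the sketch below (the bit values are placeholders, not the C2D constants):

    #include <cstdint>

    enum : uint32_t {                      // placeholder bit positions
        CFG_GLOBAL_ALPHA   = 1u << 0,      // stands in for C2D_GLOBAL_ALPHA_BIT
        CFG_NO_ALPHA_BLEND = 1u << 1,      // stands in for C2D_ALPHA_BLEND_NONE
    };

    static void set_flag(uint32_t &config_mask, uint32_t bit, bool enable) {
        if (enable)
            config_mask |= bit;            // e.g. plane alpha below 0xFF
        else
            config_mask &= ~bit;           // restore the default path
    }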
892 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in get() local
895 if (!ctx) { in get()
915 if (ctx->c2d_driver_info.capabilities_mask & C2D_DRIVER_SUPPORTS_UBWC_COMPRESSED_OP) { in get()
1094 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in stretch_copybit_internal() local
1101 if (!ctx) { in stretch_copybit_internal()
1124 if (ctx->is_dst_ubwc_format) in stretch_copybit_internal()
1148 if (ctx->blit_rgb_count == MAX_RGB_SURFACES || in stretch_copybit_internal()
1149 ctx->blit_yuv_2_plane_count == MAX_YUV_2_PLANE_SURFACES || in stretch_copybit_internal()
1150 ctx->blit_yuv_3_plane_count == MAX_YUV_2_PLANE_SURFACES || in stretch_copybit_internal()
1151 ctx->blit_count == MAX_BLIT_OBJECT_COUNT || in stretch_copybit_internal()
1152 ctx->dst_surface_type != dst_surface_type) { in stretch_copybit_internal()
1160 ctx->dst_surface_type = dst_surface_type; in stretch_copybit_internal()
1181 if (get_size(dst_info) != (int) ctx->temp_dst_buffer.size) { in stretch_copybit_internal()
1182 free_temp_buffer(ctx->temp_dst_buffer); in stretch_copybit_internal()
1184 if (COPYBIT_FAILURE == get_temp_buffer(dst_info, ctx->temp_dst_buffer)) { in stretch_copybit_internal()
1190 dst_hnd->fd = ctx->temp_dst_buffer.fd; in stretch_copybit_internal()
1191 dst_hnd->size = ctx->temp_dst_buffer.size; in stretch_copybit_internal()
1192 dst_hnd->flags = ctx->temp_dst_buffer.allocType; in stretch_copybit_internal()
1193 dst_hnd->base = (uintptr_t)(ctx->temp_dst_buffer.base); in stretch_copybit_internal()
1194 dst_hnd->offset = ctx->temp_dst_buffer.offset; in stretch_copybit_internal()
1198 if(!ctx->dst_surface_mapped) { in stretch_copybit_internal()
1200 status = set_image(ctx, ctx->dst[ctx->dst_surface_type], &dst_image, in stretch_copybit_internal()
1205 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1208 ctx->dst_surface_mapped = true; in stretch_copybit_internal()
1209 ctx->dst_surface_base = dst->base; in stretch_copybit_internal()
1210 } else if(ctx->dst_surface_mapped && ctx->dst_surface_base != dst->base) { in stretch_copybit_internal()
1221 src_surface = ctx->blit_rgb_object[ctx->blit_rgb_count]; in stretch_copybit_internal()
1226 src_surface = ctx->blit_yuv_2_plane_object[ctx->blit_yuv_2_plane_count]; in stretch_copybit_internal()
1229 src_surface = ctx->blit_yuv_3_plane_object[ctx->blit_yuv_2_plane_count]; in stretch_copybit_internal()
1234 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1241 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1259 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1263 if (get_size(src_info) != (int) ctx->temp_src_buffer.size) { in stretch_copybit_internal()
1264 free_temp_buffer(ctx->temp_src_buffer); in stretch_copybit_internal()
1267 ctx->temp_src_buffer)) { in stretch_copybit_internal()
1271 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1275 src_hnd->fd = ctx->temp_src_buffer.fd; in stretch_copybit_internal()
1276 src_hnd->size = ctx->temp_src_buffer.size; in stretch_copybit_internal()
1277 src_hnd->flags = ctx->temp_src_buffer.allocType; in stretch_copybit_internal()
1278 src_hnd->base = (uintptr_t)(ctx->temp_src_buffer.base); in stretch_copybit_internal()
1279 src_hnd->offset = ctx->temp_src_buffer.offset; in stretch_copybit_internal()
1290 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1302 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1307 flags |= (ctx->is_premultiplied_alpha) ? FLAGS_PREMULTIPLIED_ALPHA : 0; in stretch_copybit_internal()
1308 flags |= (ctx->dst_surface_type != RGB_SURFACE) ? FLAGS_YUV_DESTINATION : 0; in stretch_copybit_internal()
1309 flags |= (ctx->is_src_ubwc_format) ? FLAGS_UBWC_FORMAT_MODE : 0; in stretch_copybit_internal()
1310 status = set_image(ctx, src_surface.surface_id, &src_image, in stretch_copybit_internal()
1316 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1317 unmap_gpuaddr(ctx, mapped_src_idx); in stretch_copybit_internal()
1321 src_surface.config_mask = C2D_NO_ANTIALIASING_BIT | ctx->config_mask; in stretch_copybit_internal()
1322 src_surface.global_alpha = ctx->src_global_alpha; in stretch_copybit_internal()
1330 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1331 unmap_gpuaddr(ctx, mapped_src_idx); in stretch_copybit_internal()
1340 ctx->blit_rgb_object[ctx->blit_rgb_count] = src_surface; in stretch_copybit_internal()
1341 ctx->blit_rgb_count++; in stretch_copybit_internal()
1343 ctx->blit_yuv_2_plane_object[ctx->blit_yuv_2_plane_count] = src_surface; in stretch_copybit_internal()
1344 ctx->blit_yuv_2_plane_count++; in stretch_copybit_internal()
1346 ctx->blit_yuv_3_plane_object[ctx->blit_yuv_3_plane_count] = src_surface; in stretch_copybit_internal()
1347 ctx->blit_yuv_3_plane_count++; in stretch_copybit_internal()
1352 set_rects(ctx, &(src_surface), dst_rect, src_rect, &clip); in stretch_copybit_internal()
1353 if (ctx->blit_count == MAX_BLIT_OBJECT_COUNT) { in stretch_copybit_internal()
1357 ctx->blit_list[ctx->blit_count] = src_surface; in stretch_copybit_internal()
1358 ctx->blit_count++; in stretch_copybit_internal()
1376 unmap_gpuaddr(ctx, mapped_dst_idx); in stretch_copybit_internal()
1377 unmap_gpuaddr(ctx, mapped_src_idx); in stretch_copybit_internal()
1389 ctx->is_premultiplied_alpha = false; in stretch_copybit_internal()
1390 ctx->fb_width = 0; in stretch_copybit_internal()
1391 ctx->fb_height = 0; in stretch_copybit_internal()
1392 ctx->config_mask = 0; in stretch_copybit_internal()
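stretch_copybit_internal() is a batching front end: it flushes whenever a per-format pool (RGB, 2-plane YUV, 3-plane YUV), the overall blit list, or the destination surface type would overflow or change, maps the destination only once per batch via dst_surface_mapped/dst_surface_base, configures one source surface per call, appends it to blit_list for the next msm_copybit() submission, and unmaps any GPU addresses it mapped on every error path. A reduced sketch of the once-per-batch destination guard (names assumed; the real code goes through set_image()):

    #include <cstdint>
    #include <cstdio>

    struct batch_state {
        bool      dst_surface_mapped = false;   // destination already set up?
        uintptr_t dst_surface_base   = 0;       // buffer it was set up for
    };

    // Placeholder for the real mapping call (set_image() on ctx->dst[...]).
    static bool map_destination(uintptr_t base) {
        std::printf("mapping destination buffer at 0x%zx\n", (size_t)base);
        return true;
    }

    // Map the destination the first time it is seen in a batch; in this sketch a
    // later call with a different buffer simply fails, mirroring the
    // dst_surface_base comparison visible in the listing.
    static int ensure_destination(batch_state &st, uintptr_t base) {
        if (!st.dst_surface_mapped) {
            if (!map_destination(base))
                return -1;                      // error path: caller unmaps and returns
            st.dst_surface_mapped = true;
            st.dst_surface_base   = base;
            return 0;
        }
        return (st.dst_surface_base == base) ? 0 : -1;
    }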
1413 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in stretch_copybit() local
1415 bool needsBlending = (ctx->src_global_alpha != 0); in stretch_copybit()
1416 pthread_mutex_lock(&ctx->wait_cleanup_lock); in stretch_copybit()
1419 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in stretch_copybit()
1431 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in blit_copybit() local
1434 pthread_mutex_lock(&ctx->wait_cleanup_lock); in blit_copybit()
1436 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in blit_copybit()
1455 static void clean_up(copybit_context_t* ctx) in clean_up() argument
1458 if (!ctx) in clean_up()
1462 pthread_mutex_lock(&ctx->wait_cleanup_lock); in clean_up()
1463 ctx->stop_thread = true; in clean_up()
1465 pthread_cond_signal(&ctx->wait_cleanup_cond); in clean_up()
1466 pthread_mutex_unlock(&ctx->wait_cleanup_lock); in clean_up()
1468 pthread_join(ctx->wait_thread_id, &ret); in clean_up()
1469 pthread_mutex_destroy(&ctx->wait_cleanup_lock); in clean_up()
1470 pthread_cond_destroy (&ctx->wait_cleanup_cond); in clean_up()
1473 if (ctx->dst[i]) in clean_up()
1474 LINK_c2dDestroySurface(ctx->dst[i]); in clean_up()
1478 if (ctx->blit_rgb_object[i].surface_id) in clean_up()
1479 LINK_c2dDestroySurface(ctx->blit_rgb_object[i].surface_id); in clean_up()
1483 if (ctx->blit_yuv_2_plane_object[i].surface_id) in clean_up()
1484 LINK_c2dDestroySurface(ctx->blit_yuv_2_plane_object[i].surface_id); in clean_up()
1488 if (ctx->blit_yuv_3_plane_object[i].surface_id) in clean_up()
1489 LINK_c2dDestroySurface(ctx->blit_yuv_3_plane_object[i].surface_id); in clean_up()
1492 if (ctx->libc2d2) { in clean_up()
1493 ::dlclose(ctx->libc2d2); in clean_up()
1497 free(ctx); in clean_up()
1503 struct copybit_context_t* ctx = (struct copybit_context_t*)dev; in close_copybit() local
1504 if (ctx) { in close_copybit()
1505 free_temp_buffer(ctx->temp_src_buffer); in close_copybit()
1506 free_temp_buffer(ctx->temp_dst_buffer); in close_copybit()
1508 clean_up(ctx); in close_copybit()
1523 struct copybit_context_t *ctx; in open_copybit() local
1525 ctx = (struct copybit_context_t *)malloc(sizeof(struct copybit_context_t)); in open_copybit()
1526 if(!ctx) { in open_copybit()
1532 memset(ctx, 0, sizeof(*ctx)); in open_copybit()
1533 ctx->libc2d2 = ::dlopen("libC2D2.so", RTLD_NOW); in open_copybit()
1534 if (!ctx->libc2d2) { in open_copybit()
1536 clean_up(ctx); in open_copybit()
1541 *(void **)&LINK_c2dCreateSurface = ::dlsym(ctx->libc2d2, in open_copybit()
1543 *(void **)&LINK_c2dUpdateSurface = ::dlsym(ctx->libc2d2, in open_copybit()
1545 *(void **)&LINK_c2dReadSurface = ::dlsym(ctx->libc2d2, in open_copybit()
1547 *(void **)&LINK_c2dDraw = ::dlsym(ctx->libc2d2, "c2dDraw"); in open_copybit()
1548 *(void **)&LINK_c2dFlush = ::dlsym(ctx->libc2d2, "c2dFlush"); in open_copybit()
1549 *(void **)&LINK_c2dFinish = ::dlsym(ctx->libc2d2, "c2dFinish"); in open_copybit()
1550 *(void **)&LINK_c2dWaitTimestamp = ::dlsym(ctx->libc2d2, in open_copybit()
1552 *(void **)&LINK_c2dDestroySurface = ::dlsym(ctx->libc2d2, in open_copybit()
1554 *(void **)&LINK_c2dMapAddr = ::dlsym(ctx->libc2d2, in open_copybit()
1556 *(void **)&LINK_c2dUnMapAddr = ::dlsym(ctx->libc2d2, in open_copybit()
1558 *(void **)&LINK_c2dGetDriverCapabilities = ::dlsym(ctx->libc2d2, in open_copybit()
1560 *(void **)&LINK_c2dCreateFenceFD = ::dlsym(ctx->libc2d2, in open_copybit()
1562 *(void **)&LINK_c2dFillSurface = ::dlsym(ctx->libc2d2, in open_copybit()
1571 clean_up(ctx); in open_copybit()
1577 ctx->device.common.tag = HARDWARE_DEVICE_TAG; in open_copybit()
1578 ctx->device.common.version = 1; in open_copybit()
1579 ctx->device.common.module = (hw_module_t*)(module); in open_copybit()
1580 ctx->device.common.close = close_copybit; in open_copybit()
1581 ctx->device.set_parameter = set_parameter_copybit; in open_copybit()
1582 ctx->device.get = get; in open_copybit()
1583 ctx->device.blit = blit_copybit; in open_copybit()
1584 ctx->device.set_sync = set_sync_copybit; in open_copybit()
1585 ctx->device.stretch = stretch_copybit; in open_copybit()
1586 ctx->device.finish = finish_copybit; in open_copybit()
1587 ctx->device.flush_get_fence = flush_get_fence_copybit; in open_copybit()
1588 ctx->device.clear = clear_copybit; in open_copybit()
1589 ctx->device.fill_color = fill_color; in open_copybit()
1598 if (LINK_c2dCreateSurface(&(ctx->dst[RGB_SURFACE]), C2D_TARGET | C2D_SOURCE, in open_copybit()
1604 ctx->dst[RGB_SURFACE] = 0; in open_copybit()
1605 clean_up(ctx); in open_copybit()
1620 ctx->blit_rgb_object[i].surface_id = 0; in open_copybit()
1624 ctx->blit_rgb_object[i].surface_id = surface_id; in open_copybit()
1626 ctx->blit_rgb_object[i].surface_id); in open_copybit()
1631 clean_up(ctx); in open_copybit()
1648 if (LINK_c2dCreateSurface(&(ctx->dst[YUV_SURFACE_2_PLANES]), in open_copybit()
1655 ctx->dst[YUV_SURFACE_2_PLANES] = 0; in open_copybit()
1656 clean_up(ctx); in open_copybit()
1670 ctx->blit_yuv_2_plane_object[i].surface_id = 0; in open_copybit()
1674 ctx->blit_yuv_2_plane_object[i].surface_id = surface_id; in open_copybit()
1676 ctx->blit_yuv_2_plane_object[i].surface_id); in open_copybit()
1681 clean_up(ctx); in open_copybit()
1693 if (LINK_c2dCreateSurface(&(ctx->dst[YUV_SURFACE_3_PLANES]), in open_copybit()
1700 ctx->dst[YUV_SURFACE_3_PLANES] = 0; in open_copybit()
1701 clean_up(ctx); in open_copybit()
1716 ctx->blit_yuv_3_plane_object[i].surface_id = 0; in open_copybit()
1720 ctx->blit_yuv_3_plane_object[i].surface_id = surface_id; in open_copybit()
1722 ctx->blit_yuv_3_plane_object[i].surface_id); in open_copybit()
1727 clean_up(ctx); in open_copybit()
1733 if (LINK_c2dGetDriverCapabilities(&(ctx->c2d_driver_info))) { in open_copybit()
1735 clean_up(ctx); in open_copybit()
1741 ctx->trg_transform = C2D_TARGET_ROTATE_0; in open_copybit()
1743 ctx->temp_src_buffer.fd = -1; in open_copybit()
1744 ctx->temp_src_buffer.base = 0; in open_copybit()
1745 ctx->temp_src_buffer.size = 0; in open_copybit()
1747 ctx->temp_dst_buffer.fd = -1; in open_copybit()
1748 ctx->temp_dst_buffer.base = 0; in open_copybit()
1749 ctx->temp_dst_buffer.size = 0; in open_copybit()
1751 ctx->fb_width = 0; in open_copybit()
1752 ctx->fb_height = 0; in open_copybit()
1754 ctx->blit_rgb_count = 0; in open_copybit()
1755 ctx->blit_yuv_2_plane_count = 0; in open_copybit()
1756 ctx->blit_yuv_3_plane_count = 0; in open_copybit()
1757 ctx->blit_count = 0; in open_copybit()
1759 ctx->wait_timestamp = false; in open_copybit()
1760 ctx->stop_thread = false; in open_copybit()
1761 pthread_mutex_init(&(ctx->wait_cleanup_lock), NULL); in open_copybit()
1762 pthread_cond_init(&(ctx->wait_cleanup_cond), NULL); in open_copybit()
1768 pthread_create(&ctx->wait_thread_id, &attr, &c2d_wait_loop, in open_copybit()
1769 (void *)ctx); in open_copybit()
1772 *device = &ctx->device.common; in open_copybit()
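open_copybit() allocates and zeroes the context, dlopen()s libC2D2.so, resolves each c2d entry point with dlsym() into the LINK_* pointers, creates the RGB and YUV destination surfaces, queries driver capabilities, initializes the mutex/condition pair, and starts the wait thread before publishing the device. A minimal, self-contained sketch of the symbol-loading step with its bail-out-on-failure handling; only dlopen/dlsym/dlclose are real calls, and since the listing does not show the c2d signatures the resolved pointers stay opaque here.

    #include <dlfcn.h>
    #include <cstdio>

    // Resolve one symbol, reporting failure so the caller can take its cleanup
    // path, as open_copybit() does via clean_up().
    static bool resolve(void *lib, const char *name, void **slot) {
        *slot = ::dlsym(lib, name);
        if (*slot == nullptr) {
            std::fprintf(stderr, "dlsym(%s) failed: %s\n", name, dlerror());
            return false;
        }
        return true;
    }

    int main() {
        void *libc2d2 = ::dlopen("libC2D2.so", RTLD_NOW);
        if (!libc2d2) {
            std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }
        void *c2dDraw = nullptr, *c2dFlush = nullptr, *c2dFinish = nullptr;
        bool ok = resolve(libc2d2, "c2dDraw",   &c2dDraw)
               && resolve(libc2d2, "c2dFlush",  &c2dFlush)
               && resolve(libc2d2, "c2dFinish", &c2dFinish);
        if (!ok) {
            ::dlclose(libc2d2);   // drop the handle on failure, as clean_up() does
            return 1;
        }
        // ... cast the pointers to the proper function types, create surfaces,
        //     query capabilities, and start the wait thread ...
        ::dlclose(libc2d2);
        return 0;
    }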