Lines Matching refs:bb

72 struct backed_block* bb; in sparse_count_chunks() local
76 for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) { in sparse_count_chunks()
77 if (backed_block_block(bb) > last_block) { in sparse_count_chunks()
82 last_block = backed_block_block(bb) + DIV_ROUND_UP(backed_block_len(bb), s->block_size); in sparse_count_chunks()
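
Note: the sparse_count_chunks() lines above are the chunk-counting walk over the backed-block list: a gap before a block adds one don't-care chunk, the block itself adds one chunk, and last_block is advanced past it. A minimal sketch of that walk (the header names, the DIV_ROUND_UP fallback, and the chunk increments are assumptions; the loop shape and the backed_block_* calls come from the lines above):

    /* Minimal sketch, not the real sparse_count_chunks(): the includes, the
     * DIV_ROUND_UP fallback, and the chunk increments are assumptions. */
    #include "backed_block.h"  /* backed_block_iter_new() and friends (assumed header name) */
    #include "sparse_file.h"   /* struct sparse_file: backed_block_list, block_size (assumed header name) */

    #ifndef DIV_ROUND_UP
    #define DIV_ROUND_UP(x, y) (((x) + (y) - 1) / (y))
    #endif

    static unsigned int count_chunks_sketch(struct sparse_file* s) {
      unsigned int last_block = 0;
      unsigned int chunks = 0;
      struct backed_block* bb;

      for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) {
        if (backed_block_block(bb) > last_block) {
          chunks++; /* gap before this block -> one don't-care chunk */
        }
        chunks++;   /* the backed block itself -> one chunk */
        last_block = backed_block_block(bb) + DIV_ROUND_UP(backed_block_len(bb), s->block_size);
      }
      return chunks;
    }
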
91 static int sparse_file_write_block(struct output_file* out, struct backed_block* bb) { in sparse_file_write_block() argument
94 switch (backed_block_type(bb)) { in sparse_file_write_block()
96 ret = write_data_chunk(out, backed_block_len(bb), backed_block_data(bb)); in sparse_file_write_block()
99 ret = write_file_chunk(out, backed_block_len(bb), backed_block_filename(bb), in sparse_file_write_block()
100 backed_block_file_offset(bb)); in sparse_file_write_block()
103 ret = write_fd_chunk(out, backed_block_len(bb), backed_block_fd(bb), in sparse_file_write_block()
104 backed_block_file_offset(bb)); in sparse_file_write_block()
107 ret = write_fill_chunk(out, backed_block_len(bb), backed_block_fill_val(bb)); in sparse_file_write_block()
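
Note: sparse_file_write_block() is a switch on backed_block_type(bb); only the write_*_chunk() calls match refs:bb, so the case labels do not appear above. A sketch of the full dispatch (the BACKED_BLOCK_* labels, the -EINVAL default, and the header names are assumptions; the four calls are the ones listed):

    /* Sketch of the dispatch; case labels and the error default are assumptions. */
    #include <errno.h>
    #include "backed_block.h"
    #include "output_file.h"

    static int write_block_sketch(struct output_file* out, struct backed_block* bb) {
      int ret = -EINVAL;

      switch (backed_block_type(bb)) {
        case BACKED_BLOCK_DATA:
          ret = write_data_chunk(out, backed_block_len(bb), backed_block_data(bb));
          break;
        case BACKED_BLOCK_FILE:
          ret = write_file_chunk(out, backed_block_len(bb), backed_block_filename(bb),
                                 backed_block_file_offset(bb));
          break;
        case BACKED_BLOCK_FD:
          ret = write_fd_chunk(out, backed_block_len(bb), backed_block_fd(bb),
                               backed_block_file_offset(bb));
          break;
        case BACKED_BLOCK_FILL:
          ret = write_fill_chunk(out, backed_block_len(bb), backed_block_fill_val(bb));
          break;
      }
      return ret;
    }
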
115 struct backed_block* bb; in write_all_blocks() local
120 for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) { in write_all_blocks()
121 if (backed_block_block(bb) > last_block) { in write_all_blocks()
122 unsigned int blocks = backed_block_block(bb) - last_block; in write_all_blocks()
125 ret = sparse_file_write_block(out, bb); in write_all_blocks()
127 last_block = backed_block_block(bb) + DIV_ROUND_UP(backed_block_len(bb), s->block_size); in write_all_blocks()
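
Note: write_all_blocks() repeats the same walk, but emits chunks instead of counting them: a skip chunk for any gap, then the block itself through sparse_file_write_block(). A sketch, assuming it sits next to the real static helpers in this file (write_skip_chunk() for the gap is an assumption; the rest mirrors the lines above):

    /* Sketch of the emit loop; assumes the includes already present in this
     * file. write_skip_chunk() is an assumed helper for the gap. */
    static int write_all_blocks_sketch(struct sparse_file* s, struct output_file* out) {
      unsigned int last_block = 0;
      struct backed_block* bb;

      for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) {
        if (backed_block_block(bb) > last_block) {
          unsigned int blocks = backed_block_block(bb) - last_block;
          write_skip_chunk(out, (int64_t)blocks * s->block_size); /* assumed helper */
        }
        int ret = sparse_file_write_block(out, bb);
        if (ret) return ret;
        last_block = backed_block_block(bb) + DIV_ROUND_UP(backed_block_len(bb), s->block_size);
      }
      return 0;
    }
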
146 struct backed_block* bb; in sparse_file_write() local
151 for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) { in sparse_file_write()
152 ret = backed_block_split(s->backed_block_list, bb, MAX_BACKED_BLOCK_SIZE); in sparse_file_write()
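
Note: before writing, sparse_file_write() splits every backed block at MAX_BACKED_BLOCK_SIZE so no single chunk exceeds that cap. A hedged end-to-end usage sketch of the public entry point (the <sparse/sparse.h> signatures used here, sparse_file_new/add_fill/write/destroy, are assumptions, not shown above):

    /* Usage sketch, assuming the public libsparse API in <sparse/sparse.h>. */
    #include <fcntl.h>
    #include <unistd.h>
    #include <sparse/sparse.h>

    int write_example(const char* path) {
      /* 4 KiB blocks, 1 MiB image; values are arbitrary for the example. */
      struct sparse_file* s = sparse_file_new(4096, 1024 * 1024);
      if (!s) return -1;

      sparse_file_add_fill(s, 0xcafecafe, 4096, 16); /* one fill block at block 16 (assumed signature) */

      int fd = open(path, O_WRONLY | O_CREAT | O_TRUNC, 0644);
      if (fd < 0) {
        sparse_file_destroy(s);
        return -1;
      }

      int ret = sparse_file_write(s, fd, false, true, false); /* gz=false, sparse=true, crc=false */

      close(fd);
      sparse_file_destroy(s);
      return ret;
    }
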
207 struct backed_block* bb; in sparse_file_foreach_chunk() local
218 for (bb = backed_block_iter_new(s->backed_block_list); bb; bb = backed_block_iter_next(bb)) { in sparse_file_foreach_chunk()
219 chk.block = backed_block_block(bb); in sparse_file_foreach_chunk()
220 chk.nr_blocks = (backed_block_len(bb) - 1) / s->block_size + 1; in sparse_file_foreach_chunk()
221 ret = sparse_file_write_block(out, bb); in sparse_file_foreach_chunk()
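
Note: sparse_file_foreach_chunk() fills a chunk descriptor per block: chk.block is the starting block and chk.nr_blocks is the byte length rounded up to whole blocks; (len - 1) / block_size + 1 is the same rounding as DIV_ROUND_UP. A small sketch of that computation (the chunk_desc struct here is hypothetical, not the real descriptor):

    /* Rounding sketch; chunk_desc is a hypothetical stand-in. */
    #include "backed_block.h"

    struct chunk_desc {
      unsigned int block;     /* first block covered by the chunk */
      unsigned int nr_blocks; /* byte length rounded up to whole blocks */
    };

    static struct chunk_desc describe(struct backed_block* bb, unsigned int block_size) {
      struct chunk_desc chk;
      chk.block = backed_block_block(bb);
      /* e.g. len = 5000, block_size = 4096  ->  nr_blocks = 2 */
      chk.nr_blocks = (backed_block_len(bb) - 1) / block_size + 1;
      return chk;
    }
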
268 struct backed_block* bb; in move_chunks_up_to_len() local
288 for (bb = start; bb; bb = backed_block_iter_next(bb)) { in move_chunks_up_to_len()
290 if (backed_block_block(bb) > last_block) count += sizeof(chunk_header_t); in move_chunks_up_to_len()
291 last_block = backed_block_block(bb) + DIV_ROUND_UP(backed_block_len(bb), to->block_size); in move_chunks_up_to_len()
294 ret = sparse_file_write_block(out_counter, bb); in move_chunks_up_to_len()
296 bb = nullptr; in move_chunks_up_to_len()
307 backed_block_split(from->backed_block_list, bb, len - file_len); in move_chunks_up_to_len()
308 last_bb = bb; in move_chunks_up_to_len()
313 last_bb = bb; in move_chunks_up_to_len()
322 return bb; in move_chunks_up_to_len()
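
Note: move_chunks_up_to_len() walks the source list, accumulating the output cost of each chunk (one chunk_header_t per gap, plus the block's own cost measured through the counting output_file out_counter) until the next block would push the output past len; that block is cut down with backed_block_split() and a non-null block is returned to signal that more remains. A simplified sketch under the assumption that a block's cost is just its header plus its raw length (the names, bookkeeping, and return convention below are reconstructions, not the real code):

    /* Simplified sketch of the budget-limited move; the real code measures the
     * exact cost through a counting output_file. */
    #include <stdint.h>
    #include "backed_block.h"
    #include "sparse_format.h"  /* sparse_header_t, chunk_header_t (assumed location) */

    static struct backed_block* move_up_to_len_sketch(struct backed_block_list* from_list,
                                                      struct backed_block* start,
                                                      unsigned int block_size, int64_t len) {
      unsigned int last_block = 0;
      int64_t count = sizeof(sparse_header_t); /* assumed fixed per-file overhead */
      struct backed_block* bb;

      for (bb = start; bb; bb = backed_block_iter_next(bb)) {
        if (backed_block_block(bb) > last_block) count += sizeof(chunk_header_t); /* don't-care chunk */
        last_block = backed_block_block(bb) + (backed_block_len(bb) + block_size - 1) / block_size;

        int64_t cost = (int64_t)sizeof(chunk_header_t) + backed_block_len(bb);
        if (count + cost > len) {
          int64_t room = len - count - (int64_t)sizeof(chunk_header_t);
          if (room > 0) {
            /* Keep the part that fits in this pass; the remainder stays behind
             * for the next one. */
            backed_block_split(from_list, bb, (unsigned int)room);
          }
          return bb; /* non-null: more chunks remain */
        }
        count += cost;
      }
      return nullptr; /* everything fit within len */
    }
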
327 struct backed_block* bb; in sparse_file_resparse() local
340 bb = move_chunks_up_to_len(in_s, s, max_len); in sparse_file_resparse()
349 } while (bb); in sparse_file_resparse()
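
Note: sparse_file_resparse() just repeats that move, one output file per pass, while move_chunks_up_to_len() keeps returning a non-null block. A hedged usage sketch of the public call (the sparse_file_resparse()/sparse_file_destroy() signatures and the return convention assumed here are not shown above):

    /* Usage sketch, assuming the public resparse entry point fills an array of
     * output files and returns how many it produced (negative on error). */
    #include <sparse/sparse.h>

    int resparse_example(struct sparse_file* in_s, unsigned int max_len) {
      struct sparse_file* out_s[8]; /* arbitrary capacity for the example */
      int n = sparse_file_resparse(in_s, max_len, out_s, 8);
      if (n < 0) return n;

      for (int i = 0; i < n; i++) {
        /* ... write or inspect out_s[i] ... */
        sparse_file_destroy(out_s[i]);
      }
      return 0;
    }
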