// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/disk_cache/blockfile/in_flight_backend_io.h"

#include <utility>

#include "base/check_op.h"
#include "base/compiler_specific.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/location.h"
#include "base/metrics/histogram_functions.h"
#include "base/notreached.h"
#include "base/task/single_thread_task_runner.h"
#include "net/base/net_errors.h"
#include "net/disk_cache/blockfile/backend_impl.h"
#include "net/disk_cache/blockfile/entry_impl.h"

namespace disk_cache {

namespace {

// Used to leak a strong reference to an EntryImpl to the user of disk_cache.
EntryImpl* LeakEntryImpl(scoped_refptr<EntryImpl> entry) {
  // Balanced on OP_CLOSE_ENTRY handling in BackendIO::ExecuteBackendOperation.
  if (entry)
    entry->AddRef();
  return entry.get();
}

}  // namespace

BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     net::CompletionOnceCallback callback)
    : BackendIO(controller, backend) {
  callback_ = std::move(callback);
}

BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     EntryResultCallback callback)
    : BackendIO(controller, backend) {
  entry_result_callback_ = std::move(callback);
}

BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     RangeResultCallback callback)
    : BackendIO(controller, backend) {
  range_result_callback_ = std::move(callback);
}

BackendIO::BackendIO(InFlightBackendIO* controller, BackendImpl* backend)
    : BackgroundIO(controller),
      backend_(backend),
      background_task_runner_(controller->background_thread()) {
  DCHECK(background_task_runner_);
  start_time_ = base::TimeTicks::Now();
}

// Runs on the background thread.
void BackendIO::ExecuteOperation() {
  if (IsEntryOperation()) {
    ExecuteEntryOperation();
  } else {
    ExecuteBackendOperation();
  }
  // Clear our pointer to entry we operated on. We don't need it any more, and
  // it's possible by the time ~BackendIO gets destroyed on the main thread the
  // entry will have been closed and freed on the cache/background thread.
  entry_ = nullptr;
}

// Runs on the background thread.
void BackendIO::OnIOComplete(int result) {
  DCHECK(IsEntryOperation());
  DCHECK_NE(result, net::ERR_IO_PENDING);
  result_ = result;
  NotifyController();
}

// Runs on the primary thread.
void BackendIO::OnDone(bool cancel) {
  if (IsEntryOperation() && backend_->GetCacheType() == net::DISK_CACHE) {
    switch (operation_) {
      case OP_READ:
        base::UmaHistogramCustomTimes("DiskCache.0.TotalIOTimeRead",
                                      ElapsedTime(), base::Milliseconds(1),
                                      base::Seconds(10), 50);
        break;

      case OP_WRITE:
        base::UmaHistogramCustomTimes("DiskCache.0.TotalIOTimeWrite",
                                      ElapsedTime(), base::Milliseconds(1),
                                      base::Seconds(10), 50);
        break;

      default:
        // Other operations are not recorded.
        break;
    }
  }

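  // If the operation produced an entry, LeakEntryImpl() already handed out a
  // raw reference; notify the backend via OnEntryCreated(), and if the
  // operation was canceled, close that reference here so it is not leaked.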
  if (ReturnsEntry() && result_ == net::OK) {
    static_cast<EntryImpl*>(out_entry_)->OnEntryCreated(backend_);
    if (cancel)
      out_entry_.ExtractAsDangling()->Close();
  }
  ClearController();
}

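// Operations with values above OP_MAX_BACKEND act on a specific entry rather
// than on the backend as a whole.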
bool BackendIO::IsEntryOperation() {
  return operation_ > OP_MAX_BACKEND;
}

void BackendIO::RunCallback(int result) {
  std::move(callback_).Run(result);
}

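// Wraps the leaked EntryImpl reference in the EntryResult handed to the
// caller; on error no entry is attached.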
void BackendIO::RunEntryResultCallback() {
  EntryResult entry_result;
  if (result_ != net::OK) {
    entry_result = EntryResult::MakeError(static_cast<net::Error>(result()));
  } else if (out_entry_opened_) {
    entry_result = EntryResult::MakeOpened(out_entry_.ExtractAsDangling());
  } else {
    entry_result = EntryResult::MakeCreated(out_entry_.ExtractAsDangling());
  }
  std::move(entry_result_callback_).Run(std::move(entry_result));
}

void BackendIO::RunRangeResultCallback() {
  std::move(range_result_callback_).Run(range_result_);
}

void BackendIO::Init() {
  operation_ = OP_INIT;
}

void BackendIO::OpenOrCreateEntry(const std::string& key) {
  operation_ = OP_OPEN_OR_CREATE;
  key_ = key;
}

void BackendIO::OpenEntry(const std::string& key) {
  operation_ = OP_OPEN;
  key_ = key;
}

void BackendIO::CreateEntry(const std::string& key) {
  operation_ = OP_CREATE;
  key_ = key;
}

void BackendIO::DoomEntry(const std::string& key) {
  operation_ = OP_DOOM;
  key_ = key;
}

void BackendIO::DoomAllEntries() {
  operation_ = OP_DOOM_ALL;
}

void BackendIO::DoomEntriesBetween(const base::Time initial_time,
                                   const base::Time end_time) {
  operation_ = OP_DOOM_BETWEEN;
  initial_time_ = initial_time;
  end_time_ = end_time;
}

void BackendIO::DoomEntriesSince(const base::Time initial_time) {
  operation_ = OP_DOOM_SINCE;
  initial_time_ = initial_time;
}

void BackendIO::CalculateSizeOfAllEntries() {
  operation_ = OP_SIZE_ALL;
}

void BackendIO::OpenNextEntry(Rankings::Iterator* iterator) {
  operation_ = OP_OPEN_NEXT;
  iterator_ = iterator;
}

void BackendIO::EndEnumeration(std::unique_ptr<Rankings::Iterator> iterator) {
  operation_ = OP_END_ENUMERATION;
  scoped_iterator_ = std::move(iterator);
}

void BackendIO::OnExternalCacheHit(const std::string& key) {
  operation_ = OP_ON_EXTERNAL_CACHE_HIT;
  key_ = key;
}

void BackendIO::CloseEntryImpl(EntryImpl* entry) {
  operation_ = OP_CLOSE_ENTRY;
  entry_ = entry;
}

void BackendIO::DoomEntryImpl(EntryImpl* entry) {
  operation_ = OP_DOOM_ENTRY;
  entry_ = entry;
}

void BackendIO::FlushQueue() {
  operation_ = OP_FLUSH_QUEUE;
}

void BackendIO::RunTask(base::OnceClosure task) {
  operation_ = OP_RUN_TASK;
  task_ = std::move(task);
}

void BackendIO::ReadData(EntryImpl* entry, int index, int offset,
                         net::IOBuffer* buf, int buf_len) {
  operation_ = OP_READ;
  entry_ = entry;
  index_ = index;
  offset_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::WriteData(EntryImpl* entry, int index, int offset,
                          net::IOBuffer* buf, int buf_len, bool truncate) {
  operation_ = OP_WRITE;
  entry_ = entry;
  index_ = index;
  offset_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
  truncate_ = truncate;
}

void BackendIO::ReadSparseData(EntryImpl* entry,
                               int64_t offset,
                               net::IOBuffer* buf,
                               int buf_len) {
  operation_ = OP_READ_SPARSE;
  entry_ = entry;
  offset64_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::WriteSparseData(EntryImpl* entry,
                                int64_t offset,
                                net::IOBuffer* buf,
                                int buf_len) {
  operation_ = OP_WRITE_SPARSE;
  entry_ = entry;
  offset64_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::GetAvailableRange(EntryImpl* entry, int64_t offset, int len) {
  operation_ = OP_GET_RANGE;
  entry_ = entry;
  offset64_ = offset;
  buf_len_ = len;
}

void BackendIO::CancelSparseIO(EntryImpl* entry) {
  operation_ = OP_CANCEL_IO;
  entry_ = entry;
}

void BackendIO::ReadyForSparseIO(EntryImpl* entry) {
  operation_ = OP_IS_READY;
  entry_ = entry;
}

BackendIO::~BackendIO() {
  if (!did_notify_controller_io_signalled() && out_entry_) {
    // At this point it's very likely the Entry does not have a
    // `background_queue_` so that Close() would do nothing. Post a task to the
    // background task runner to drop the reference, which should effectively
    // destroy if there are no more references. Destruction has to happen
    // on the background task runner.
    background_task_runner_->PostTask(
        FROM_HERE,
        base::BindOnce(&EntryImpl::Release,
                       base::Unretained(out_entry_.ExtractAsDangling())));
  }
}

bool BackendIO::ReturnsEntry() {
  return operation_ == OP_OPEN || operation_ == OP_CREATE ||
         operation_ == OP_OPEN_NEXT || operation_ == OP_OPEN_OR_CREATE;
}

base::TimeDelta BackendIO::ElapsedTime() const {
  return base::TimeTicks::Now() - start_time_;
}

// Runs on the background thread.
void BackendIO::ExecuteBackendOperation() {
  switch (operation_) {
    case OP_INIT:
      result_ = backend_->SyncInit();
      break;
    case OP_OPEN_OR_CREATE: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenEntry(key_, &entry);

      if (result_ == net::OK) {
        out_entry_ = LeakEntryImpl(std::move(entry));
        out_entry_opened_ = true;
        break;
      }

      // Opening failed, create an entry instead.
      result_ = backend_->SyncCreateEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = false;
      break;
    }
    case OP_OPEN: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = true;
      break;
    }
    case OP_CREATE: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncCreateEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = false;
      break;
    }
    case OP_DOOM:
      result_ = backend_->SyncDoomEntry(key_);
      break;
    case OP_DOOM_ALL:
      result_ = backend_->SyncDoomAllEntries();
      break;
    case OP_DOOM_BETWEEN:
      result_ = backend_->SyncDoomEntriesBetween(initial_time_, end_time_);
      break;
    case OP_DOOM_SINCE:
      result_ = backend_->SyncDoomEntriesSince(initial_time_);
      break;
    case OP_SIZE_ALL:
      result_ = backend_->SyncCalculateSizeOfAllEntries();
      break;
    case OP_OPEN_NEXT: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenNextEntry(iterator_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = true;
      // `iterator_` is a proxied argument and not needed beyond this point. Set
      // it to nullptr so as to not leave a dangling pointer around.
      iterator_ = nullptr;
      break;
    }
    case OP_END_ENUMERATION:
      backend_->SyncEndEnumeration(std::move(scoped_iterator_));
      result_ = net::OK;
      break;
    case OP_ON_EXTERNAL_CACHE_HIT:
      backend_->SyncOnExternalCacheHit(key_);
      result_ = net::OK;
      break;
    case OP_CLOSE_ENTRY:
      // Collect the reference to |entry_| to balance with the AddRef() in
      // LeakEntryImpl.
      entry_.ExtractAsDangling()->Release();
      result_ = net::OK;
      break;
    case OP_DOOM_ENTRY:
      entry_->DoomImpl();
      result_ = net::OK;
      break;
    case OP_FLUSH_QUEUE:
      result_ = net::OK;
      break;
    case OP_RUN_TASK:
      std::move(task_).Run();
      result_ = net::OK;
      break;
    default:
      NOTREACHED() << "Invalid Operation";
      result_ = net::ERR_UNEXPECTED;
  }
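  // Backend operations always complete synchronously: report the result to
  // the controller and tell the backend this sync operation is done.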
  DCHECK_NE(net::ERR_IO_PENDING, result_);
  NotifyController();
  backend_->OnSyncBackendOpComplete();
}

// Runs on the background thread.
void BackendIO::ExecuteEntryOperation() {
  switch (operation_) {
    case OP_READ:
      result_ =
          entry_->ReadDataImpl(index_, offset_, buf_.get(), buf_len_,
                               base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_WRITE:
      result_ = entry_->WriteDataImpl(
          index_, offset_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this), truncate_);
      break;
    case OP_READ_SPARSE:
      result_ = entry_->ReadSparseDataImpl(
          offset64_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_WRITE_SPARSE:
      result_ = entry_->WriteSparseDataImpl(
          offset64_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_GET_RANGE:
      range_result_ = entry_->GetAvailableRangeImpl(offset64_, buf_len_);
      result_ = range_result_.net_error;
      break;
    case OP_CANCEL_IO:
      entry_->CancelSparseIOImpl();
      result_ = net::OK;
      break;
    case OP_IS_READY:
      result_ = entry_->ReadyForSparseIOImpl(
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    default:
      NOTREACHED() << "Invalid Operation";
      result_ = net::ERR_UNEXPECTED;
  }
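  // The buffer reference is no longer needed here; if the entry operation is
  // still pending, OnIOComplete() will notify the controller later.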
  buf_ = nullptr;
  if (result_ != net::ERR_IO_PENDING)
    NotifyController();
}

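// Rough lifecycle of a single queued operation (an illustrative sketch only,
// not an exhaustive description of every path):
//
//   InFlightBackendIO::OpenEntry(key, callback)       // primary thread
//     -> creates a refcounted BackendIO and PostOperation() sends
//        BackendIO::ExecuteOperation to the background (cache) thread.
//   BackendIO::ExecuteBackendOperation()              // background thread
//     -> backend_->SyncOpenEntry(), LeakEntryImpl(), NotifyController().
//   InFlightBackendIO::OnOperationComplete()          // primary thread
//     -> BackendIO::OnDone(), then the EntryResultCallback runs with the
//        result wrapped in an EntryResult.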
InFlightBackendIO::InFlightBackendIO(
    BackendImpl* backend,
    const scoped_refptr<base::SingleThreadTaskRunner>& background_thread)
    : backend_(backend), background_thread_(background_thread) {}

InFlightBackendIO::~InFlightBackendIO() = default;

void InFlightBackendIO::Init(net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->Init();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OpenOrCreateEntry(const std::string& key,
                                          EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenOrCreateEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OpenEntry(const std::string& key,
                                  EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CreateEntry(const std::string& key,
                                    EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->CreateEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntry(const std::string& key,
                                  net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomAllEntries(net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomAllEntries();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntriesBetween(
    const base::Time initial_time,
    const base::Time end_time,
    net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntriesBetween(initial_time, end_time);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CalculateSizeOfAllEntries(
    net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->CalculateSizeOfAllEntries();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntriesSince(const base::Time initial_time,
                                         net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntriesSince(initial_time);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OpenNextEntry(Rankings::Iterator* iterator,
                                      EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenNextEntry(iterator);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::EndEnumeration(
    std::unique_ptr<Rankings::Iterator> iterator) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->EndEnumeration(std::move(iterator));
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OnExternalCacheHit(const std::string& key) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->OnExternalCacheHit(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CloseEntryImpl(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->CloseEntryImpl(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntryImpl(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->DoomEntryImpl(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::FlushQueue(net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->FlushQueue();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::RunTask(base::OnceClosure task,
                                net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->RunTask(std::move(task));
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadData(EntryImpl* entry,
                                 int index,
                                 int offset,
                                 net::IOBuffer* buf,
                                 int buf_len,
                                 net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadData(entry, index, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::WriteData(EntryImpl* entry,
                                  int index,
                                  int offset,
                                  net::IOBuffer* buf,
                                  int buf_len,
                                  bool truncate,
                                  net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->WriteData(entry, index, offset, buf, buf_len, truncate);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadSparseData(EntryImpl* entry,
                                       int64_t offset,
                                       net::IOBuffer* buf,
                                       int buf_len,
                                       net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadSparseData(entry, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::WriteSparseData(EntryImpl* entry,
                                        int64_t offset,
                                        net::IOBuffer* buf,
                                        int buf_len,
                                        net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->WriteSparseData(entry, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::GetAvailableRange(EntryImpl* entry,
                                          int64_t offset,
                                          int len,
                                          RangeResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->GetAvailableRange(entry, offset, len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CancelSparseIO(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->CancelSparseIO(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadyForSparseIO(EntryImpl* entry,
                                         net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadyForSparseIO(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::WaitForPendingIO() {
  InFlightIO::WaitForPendingIO();
}

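// Dispatches the finished operation's result to whichever callback flavor it
// was constructed with (plain completion, entry result, or range result).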
void InFlightBackendIO::OnOperationComplete(BackgroundIO* operation,
                                            bool cancel) {
  BackendIO* op = static_cast<BackendIO*>(operation);
  op->OnDone(cancel);

  if (op->has_callback() && (!cancel || op->IsEntryOperation()))
    op->RunCallback(op->result());

  if (op->has_range_result_callback()) {
    DCHECK(op->IsEntryOperation());
    op->RunRangeResultCallback();
  }

  if (op->has_entry_result_callback() && !cancel) {
    DCHECK(!op->IsEntryOperation());
    op->RunEntryResultCallback();
  }
}

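// Sends the operation to the background (cache) thread and records it as
// in flight.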
void InFlightBackendIO::PostOperation(const base::Location& from_here,
                                      BackendIO* operation) {
  background_thread_->PostTask(
      from_here, base::BindOnce(&BackendIO::ExecuteOperation, operation));
  OnOperationPosted(operation);
}

base::WeakPtr<InFlightBackendIO> InFlightBackendIO::GetWeakPtr() {
  return ptr_factory_.GetWeakPtr();
}

}  // namespace disk_cache