// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {defer} from './deferred';
import {Time} from './time';

export const BUCKET_NAME = 'perfetto-ui-data';
export const MIME_JSON = 'application/json; charset=utf-8';
export const MIME_BINARY = 'application/octet-stream';

export interface GcsUploaderArgs {
  /**
   * The mime-type to use for the upload. If undefined,
   * application/octet-stream is used.
   */
  mimeType?: string;

  /**
   * The name to use for the uploaded file. By default a hash of the passed
   * data/blob is used, i.e. the upload is content-addressed.
   */
  fileName?: string;

  /** An optional callback that is invoked upon upload progress (or failure). */
  onProgress?: (uploader: GcsUploader) => void;
}
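
// Illustrative sketch (not part of the original file): a typical args object
// for uploading a JSON payload under an explicit, non-content-addressed name.
//
//   const args: GcsUploaderArgs = {
//     mimeType: MIME_JSON,
//     fileName: 'my-trace.json',
//     onProgress: (u) => console.log(u.state, u.getEtaString()),
//   };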

/**
 * A utility class to handle uploads of possibly large files to
 * Google Cloud Storage. The upload is skipped (and completes immediately)
 * if an object with the same name already exists in the bucket.
 */
export class GcsUploader {
  state: 'UPLOADING' | 'UPLOADED' | 'ERROR' = 'UPLOADING';
  error = '';
  totalSize = 0;
  uploadedSize = 0;
  uploadedUrl = '';
  uploadedFileName = '';

  private args: GcsUploaderArgs;
  private onProgress: (_: GcsUploader) => void;
  private req: XMLHttpRequest;
  private donePromise = defer<void>();
  private startTime = performance.now();

  constructor(data: Blob | ArrayBuffer | string, args: GcsUploaderArgs) {
    this.args = args;
    this.onProgress = args.onProgress ?? ((_: GcsUploader) => {});
    this.req = new XMLHttpRequest();
    this.start(data);
  }

  async start(data: Blob | ArrayBuffer | string) {
    let fname = this.args.fileName;
    if (fname === undefined) {
      // If the file name is unspecified, hash the contents.
      if (data instanceof Blob) {
        fname = await hashFileStreaming(data);
      } else {
        fname = await sha1(data);
      }
    }
    this.uploadedFileName = fname;
    this.uploadedUrl = `https://storage.googleapis.com/${BUCKET_NAME}/${fname}`;

    // Check if the file has been uploaded already. If so, skip.
    const res = await fetch(
      `https://www.googleapis.com/storage/v1/b/${BUCKET_NAME}/o/${fname}`,
    );
    if (res.status === 200) {
      console.log(
        `Skipping upload of ${this.uploadedUrl} because it exists already`,
      );
      this.state = 'UPLOADED';
      this.donePromise.resolve();
      return;
    }

    const reqUrl =
      'https://www.googleapis.com/upload/storage/v1/b/' +
      `${BUCKET_NAME}/o?uploadType=media` +
      `&name=${fname}&predefinedAcl=publicRead`;
    this.req.onabort = (e: ProgressEvent) => this.onRpcEvent(e);
    this.req.onerror = (e: ProgressEvent) => this.onRpcEvent(e);
    this.req.upload.onprogress = (e: ProgressEvent) => this.onRpcEvent(e);
    this.req.onloadend = (e: ProgressEvent) => this.onRpcEvent(e);
    this.req.open('POST', reqUrl, /* async= */ true);
    const mimeType = this.args.mimeType ?? MIME_BINARY;
    this.req.setRequestHeader('Content-Type', mimeType);
    this.req.send(data);
  }
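
  // Note (added for clarity, not in the original file): the request above uses
  // the GCS JSON API single-request "media" upload and marks the object as
  // publicly readable (predefinedAcl=publicRead), so the uploaded artifact is
  // reachable at https://storage.googleapis.com/perfetto-ui-data/<fname>.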

  waitForCompletion(): Promise<void> {
    return this.donePromise;
  }

  abort() {
    if (this.state === 'UPLOADING') {
      this.req.abort();
    }
  }

  getEtaString() {
    let str = `${Math.ceil((100 * this.uploadedSize) / this.totalSize)}%`;
    str += ` (${(this.uploadedSize / 1e6).toFixed(2)} MB)`;
    const elapsed = (performance.now() - this.startTime) / 1000;
    const rate = this.uploadedSize / elapsed;
    const etaSecs = Math.round((this.totalSize - this.uploadedSize) / rate);
    str += ' - ETA: ' + Time.toTimecode(Time.fromSeconds(etaSecs)).dhhmmss;
    return str;
  }

  private onRpcEvent(e: ProgressEvent) {
    let done = false;
    switch (e.type) {
      case 'progress':
        this.uploadedSize = e.loaded;
        this.totalSize = e.total;
        break;
      case 'abort':
        this.state = 'ERROR';
        this.error = 'Upload aborted';
        break;
      case 'error':
        this.state = 'ERROR';
        this.error = `${this.req.status} - ${this.req.statusText}`;
        break;
      case 'loadend':
        done = true;
        if (this.req.status === 200) {
          this.state = 'UPLOADED';
        } else if (this.state === 'UPLOADING') {
          this.state = 'ERROR';
          this.error = `${this.req.status} - ${this.req.statusText}`;
        }
        break;
      default:
        return;
    }
    this.onProgress(this);
    if (done) {
      this.donePromise.resolve();
    }
  }
}
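
// Usage sketch (illustrative, not part of the original file): upload a string
// and wait for the result. No fileName is passed, so the object name is the
// content hash computed below.
//
//   const uploader = new GcsUploader(JSON.stringify({hello: 'world'}), {
//     mimeType: MIME_JSON,
//     onProgress: (u) => {
//       if (u.state === 'UPLOADING') console.log(u.getEtaString());
//       if (u.state === 'ERROR') console.error(u.error);
//     },
//   });
//   await uploader.waitForCompletion();
//   if (uploader.state === 'UPLOADED') console.log(uploader.uploadedUrl);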

/**
 * Computes the SHA-1 of a string or ArrayBuffer.
 * @param data a string or ArrayBuffer to hash.
 */
async function sha1(data: string | ArrayBuffer): Promise<string> {
  let buffer: ArrayBuffer;
  if (typeof data === 'string') {
    buffer = new TextEncoder().encode(data);
  } else {
    buffer = data;
  }
  const digest = await crypto.subtle.digest('SHA-1', buffer);
  return digestToHex(digest);
}
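
// Sanity check (illustrative, not in the original file): the standard "abc"
// test vector for SHA-1.
//
//   await sha1('abc');  // 'a9993e364706816aba3e25717850c26c9cd0d89d'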

/**
 * Computes a hash of the given file in streaming mode, without loading the
 * whole file into memory. The result is "a" SHA-1, but it is not the same as
 * `shasum -a 1 file`. The reason is that the Web Crypto APIs only support
 * one-shot digest computation and lack the usual update() + digest()
 * chunked API. So we end up computing a SHA-1 of the concatenation of the
 * SHA-1s of each chunk.
 * Speed: ~800 MB/s on a M2 Macbook Air 2023.
 * @param file The file to hash.
 * @returns A hex-encoded string containing the hash of the file.
 */
async function hashFileStreaming(file: Blob): Promise<string> {
  const CHUNK_SIZE = 32 * 1024 * 1024; // 32MB
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  let chunkDigests = '';

  for (let i = 0; i < totalChunks; i++) {
    const start = i * CHUNK_SIZE;
    const end = Math.min(start + CHUNK_SIZE, file.size);
    const chunk = await file.slice(start, end).arrayBuffer();
    const digest = await crypto.subtle.digest('SHA-1', chunk);
    chunkDigests += digestToHex(digest);
  }
  return sha1(chunkDigests);
}
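
// Usage sketch (illustrative, not in the original file): hashing a user-picked
// File; `fileInput` is a hypothetical <input type="file"> element. For blobs
// of at most one chunk (<= 32MB), the result is the SHA-1 of the hex digest
// of the contents.
//
//   const file = fileInput.files![0];
//   const name = await hashFileStreaming(file);  // 40-char hex string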

/**
 * Converts the return value of crypto.subtle.digest() to a hex string.
 * @param digest an ArrayBuffer containing the digest bytes.
 * @returns hex-encoded string of the digest.
 */
function digestToHex(digest: ArrayBuffer): string {
  return Array.from(new Uint8Array(digest))
    .map((x) => x.toString(16).padStart(2, '0'))
    .join('');
}
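
// Example (illustrative, not in the original file):
//
//   digestToHex(new Uint8Array([0xde, 0xad, 0xbe, 0xef]).buffer);  // 'deadbeef'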