xref: /aosp_15_r20/external/tensorflow/tensorflow/core/protobuf/tpu/topology.proto (revision b6fb3261f9314811a0f4371741dbb8839866f948)
syntax = "proto3";

package tensorflow.tpu;

option cc_enable_arenas = true;
// Describes hardware features available on a TPU.
message TPUHardwareFeature {
  // Type of embedding-lookup accelerator available on the TPU, if any.
  //
  // NOTE(review): these value names predate the `<ENUM_NAME>_` prefix and
  // `*_UNSPECIFIED` zero-value conventions. They cannot be renamed now
  // without breaking generated code for existing users.
  enum EmbeddingFeature {
    // No embedding lookup accelerator available on the TPU.
    UNSUPPORTED = 0;
    // Embedding lookup accelerator V1. The embedding lookup operation can only
    // be placed at the beginning of the computation. Only one instance of the
    // embedding lookup layer is allowed.
    V1 = 1;
    // Embedding lookup accelerator V2. The embedding lookup operation can be
    // placed anywhere in the computation. Multiple instances of the embedding
    // lookup layer are allowed.
    V2 = 2;
  }

  // Which embedding-lookup accelerator generation this TPU provides.
  EmbeddingFeature embedding_feature = 1;
}
24
// Describes the geometry of a TPU mesh.
message TopologyProto {
  // The dimensions of the TPU topology, in cores. Typically, this is a 4D
  // topology [x, y, z, core], where the major dimensions correspond to TPU
  // chips, and the minor dimension describes the number of cores on a
  // multicore chip.
  repeated int32 mesh_shape = 1;

  // Number of TensorFlow tasks in the cluster.
  int32 num_tasks = 2;

  // Number of TPU devices per task.
  int32 num_tpu_devices_per_task = 3;

  // A flattened rank-3 int32 array with shape
  // [num_tasks, num_tpu_devices_per_task, len(mesh_shape)].
  // `tasks` is the number of tasks in the TPU cluster, `devices` is the number
  // of TPU devices per task, and the minor dimension corresponds to a position
  // in the TPU mesh topology. Each entry [task, device, axis] gives the
  // `axis`-th coordinate in the topology of a task/device pair.
  repeated int32 device_coordinates = 4;

  // Hardware features supported by this TPU (e.g. embedding-lookup
  // accelerator availability).
  TPUHardwareFeature tpu_hardware_feature = 5;
}
50