# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import getpass
from pathlib import Path
from typing import List, Mapping, Optional, Union
import os
import platform
import tempfile

from synthtool import _tracked_paths, metadata, shell
from synthtool.log import logger
from synthtool.sources import git

GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
GENERATOR_VERSION: str = os.environ.get(
    "SYNTHTOOL_GAPIC_GENERATOR_PYTHON_VERSION", "latest"
)


class GAPICMicrogenerator:
    """A synthtool component that can produce libraries using microgenerators.

    A microgenerator is any code generator that follows the code
    generation specification defined at https://aip.dev/client-libraries
    """

    def __init__(self):
        # Docker on macOS by default cannot use the default temp file location;
        # instead, use the more standard *nix /tmp location.
        if platform.system() == "Darwin":
            tempfile.tempdir = "/tmp"
        self._ensure_dependencies_installed()
        self._googleapis = None
        self._googleapis_private = None

    def py_library(self, service: str, version: str, **kwargs) -> Path:
        """
        Generates the Python library files using the GAPIC microgenerator
        and returns a `Path` object.
        service: path to the service protos, e.g. 'google/cloud/speech'
        version: version of the API, e.g. 'v1'
        """
        return self._generate_code(service, version, "python", **kwargs)

    def go_library(self, service: str, version: str, **kwargs) -> Path:
        return self._generate_code(service, version, "go", **kwargs)

    def kotlin_library(self, service: str, version: str, **kwargs) -> Path:
        return self._generate_code(service, version, "kotlin", **kwargs)

    def typescript_library(self, service: str, version: str, **kwargs) -> Path:
        return self._generate_code(service, version, "typescript", **kwargs)

    def ruby_library(self, service: str, version: str, **kwargs) -> Path:
        return self._generate_code(service, version, "ruby", **kwargs)

    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Union[str, Path] = None,
        extra_proto_files: List[str] = [],
        output_dir: Union[str, Path] = None,
        generator_version: str = GENERATOR_VERSION,
        generator_args: Mapping[str, str] = None,
    ):
        # Determine which googleapis repo to use.
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Confidence check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository "
                "is unavailable."
            )

        # Pull the code generator for the requested language.
        # If a code generator version was specified, honor that.
        logger.debug(
            f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
        )
        shell.run(
            [
                "docker",
                "pull",
                f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
            ],
            hide_output=False,
        )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Confidence check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # The time has come, the walrus said, to talk of actually running
        # the code generator.
        sep = os.path.sep

        # Try to figure out the user ID and stay compatible.
        # If there is no `os.getuid()`, fall back to `getpass.getuser()`.
        getuid = getattr(os, "getuid", None)
        if getuid:
            user = str(getuid())
        else:
            user = getpass.getuser()

        docker_run_args = [
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            user,
        ]

        # Process extra proto files, e.g. google/cloud/common_resources.proto,
        # if they are required by this API.
        # First, bind mount all the extra proto files into the container.
        for proto in extra_proto_files:
            source_proto = googleapis / Path(proto)
            if not source_proto.exists():
                raise FileNotFoundError(
                    f"Unable to find extra proto file: {source_proto}."
                )
            docker_run_args.extend(
                [
                    "--mount",
                    f"type=bind,source={source_proto},destination={Path('/in') / proto},readonly",
                ]
            )

        docker_run_args.append(
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}"
        )

        # Populate any additional CLI arguments provided for the generator.
        if generator_args:
            for key, value in generator_args.items():
                docker_run_args.append(f"--{key}")
                docker_run_args.append(value)

        logger.debug(f"Generating code for: {proto_path}.")
        shell.run(docker_run_args, hide_output=False)

        # Confidence check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        logger.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator=f"gapic-generator-{language}",
        )

        _tracked_paths.add(output_dir)
        return output_dir

    def _clone_googleapis(self):
        if self._googleapis is not None:
            return self._googleapis

        if LOCAL_GOOGLEAPIS:
            self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(f"Using local googleapis at {self._googleapis}")

        else:
            logger.debug("Cloning googleapis.")
            self._googleapis = git.clone(GOOGLEAPIS_URL)

        return self._googleapis

    def _clone_googleapis_private(self):
        if self._googleapis_private is not None:
            return self._googleapis_private

        if LOCAL_GOOGLEAPIS:
            self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(
                f"Using local googleapis at {self._googleapis_private} for googleapis-private"
            )

        else:
            logger.debug("Cloning googleapis-private.")
            self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)

        return self._googleapis_private

    def _ensure_dependencies_installed(self):
        logger.debug("Ensuring dependencies.")

        dependencies = ["docker", "git"]
        failed_dependencies = []
        for dependency in dependencies:
            return_code = shell.run(["which", dependency], check=False).returncode
            if return_code:
                failed_dependencies.append(dependency)

        if failed_dependencies:
            raise EnvironmentError(
                f"Dependencies missing: {', '.join(failed_dependencies)}"
            )
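
# A minimal usage sketch (illustrative only, not executed as part of this
# module): in a library's synth.py one might drive this component roughly as
# follows. This assumes the usual synth.py conventions, i.e. that
# `GAPICMicrogenerator` is exported from `synthtool.gcp` and that `s.move`
# is available on the top-level `synthtool` module in that repository.
#
#     import synthtool as s
#     from synthtool import gcp
#
#     gapic = gcp.GAPICMicrogenerator()
#     library = gapic.py_library("google/cloud/speech", "v1")
#     s.move(library)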