forked from googleapis/python-bigquery-storage
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathowlbot.py
128 lines (111 loc) · 4.57 KB
/
owlbot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from pathlib import Path
import shutil
import synthtool as s
import synthtool.gcp as gcp
from synthtool.languages import python
# ----------------------------------------------------------------------------
# Copy the generated client from the owl-bot staging directory
# ----------------------------------------------------------------------------

clean_up_generated_samples = True

# Load the default version defined in .repo-metadata.json.
default_version = json.load(open(".repo-metadata.json", "rt")).get("default_version")

for library in s.get_staging_dirs(default_version):
    if clean_up_generated_samples:
        # Wipe the generated samples only once, before processing the first
        # staged library, so output from later versions is not clobbered.
        shutil.rmtree("samples/generated_samples", ignore_errors=True)
        clean_up_generated_samples = False

    # We don't want the generated client to be accessible through
    # "google.cloud.bigquery_storage", replace it with the hand written client
    # that wraps it.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        f"from google\\.cloud\\.bigquery_storage_{library.name}\\.services.big_query_read.client import",
        f"from google.cloud.bigquery_storage_{library.name} import",
    )

    # We also don't want to expose the async client just yet, at least not
    # until it is wrapped in its own manual client class.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        (
            f"from google\\.cloud\\.bigquery_storage_{library.name}\\.services.big_query_read.async_client "
            r"import BigQueryReadAsyncClient\n"
        ),
        "",
    )
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        r"""["']BigQueryReadAsyncClient["'],\n""",
        "",
    )

    # Prepend "__version__" and "types" to __all__, anchored on the first
    # exported type name. The replacement text carries a four-space indent so
    # the rewritten __all__ entries line up with the surrounding list items.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        r"""["']ArrowRecordBatch["']""",
        ('"__version__",\n' '    "types",\n' "    \\g<0>"),
    )

    # We want types to be accessible through the "main" library
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        f"from google\\.cloud\\.bigquery_storage_{library.name}\\.types\\.arrow import ArrowRecordBatch",
        (
            f"from google.cloud.bigquery_storage_{library.name} import gapic_types as types\n"
            "\\g<0>"
        ),
    )

    # The DataFormat enum is not exposed in bigquery_storage_v1/types, add it
    # there (both to the import block and to __all__), keeping the generated
    # file's four-space indentation.
    s.replace(
        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
        r"from \.stream import \(",
        "\\g<0>\n    DataFormat,",
    )
    s.replace(
        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
        r"""["']ReadSession["']""",
        '"DataFormat",\n    \\g<0>',
    )

    # Copy the staged library into the repo, skipping files that are
    # hand-maintained or templated elsewhere.
    s.move(
        [library],
        excludes=[
            "setup.py",
            f"google/cloud/bigquery_storage_{library.name}/__init__.py",
            # v1beta2 was first generated after the microgenerator migration.
            "scripts/fixup_bigquery_storage_v1beta2_keywords.py",
            "**/gapic_version.py",
            "docs/index.rst",
            "testing/constraints-3.7.txt",
        ],
    )

s.remove_staging_dirs()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------

# Optional install extras: unit tests get the "tests" extra on top of the
# runtime extras; system tests also need a real google-cloud-bigquery.
extras = ["fastavro", "pandas", "pyarrow"]
unit_test_extras = ["tests"] + extras

templated_files = gcp.CommonTemplates().py_library(
    cov_level=98,
    microgenerator=True,
    unit_test_extras=unit_test_extras,
    system_test_extras=extras,
    system_test_external_dependencies=["google-cloud-bigquery"],
    versions=gcp.common.detect_versions(path="./google", default_first=True),
)

# Copy the templates in, keeping this repo's customized versions of the
# excluded files.
s.move(
    templated_files,
    excludes=[".coveragerc", ".github/release-please.yml", "docs/index.rst"],
)
# Generate README files for the code samples (skipping per-sample readmes).
python.py_samples(skip_readmes=True)

# Run the "blacken" format session for every directory that has a noxfile so
# the copied and templated code ends up consistently formatted.
for noxfile in Path(".").glob("**/noxfile.py"):
    s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False)