diff --git a/README.md b/README.md
index 02813f4d224..530ea0935ca 100644
--- a/README.md
+++ b/README.md
@@ -198,6 +198,7 @@ them to fit your particular use case.
approach
* [Dataflow Streaming XML to GCS](examples/dataflow-xml-pubsub-to-gcs) -
Dataflow example to handle streaming of xml encoded messages and write them to Google Cloud Storage
+* [Dataflow – DLP Flex De-ID (CSV from GCS to BigQuery)](examples/dataflow-dlp-flex-deid) - Dataflow Flex Template that batches CSV rows from Cloud Storage, de-identifies with Sensitive Data Protection (DLP), and writes to BigQuery.
* [Dataflow DLP Hashpipeline](examples/dataflow-dlp-hash-pipeline) - Match DLP
Social Security Number findings against a hashed dictionary in Firestore.
Use Secret Manager for the hash key.
diff --git a/examples/dataflow-dlp-flex-deid/.dockerignore b/examples/dataflow-dlp-flex-deid/.dockerignore
new file mode 100644
index 00000000000..a48386599e9
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/.dockerignore
@@ -0,0 +1,10 @@
+**/.git
+**/.github
+**/__pycache__
+**/.pytest_cache
+*.pyc
+*.pyo
+*.pyd
+*.egg-info
+*.log
+.DS_Store
diff --git a/examples/dataflow-dlp-flex-deid/.gcloudignore b/examples/dataflow-dlp-flex-deid/.gcloudignore
new file mode 100644
index 00000000000..7a454b780f6
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/.gcloudignore
@@ -0,0 +1,10 @@
+.git/
+.github/
+__pycache__/
+.pytest_cache/
+*.pyc
+*.pyo
+*.pyd
+*.egg-info/
+*.log
+.DS_Store
diff --git a/examples/dataflow-dlp-flex-deid/.gitignore b/examples/dataflow-dlp-flex-deid/.gitignore
new file mode 100644
index 00000000000..3c0036ed639
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/.gitignore
@@ -0,0 +1,15 @@
+# Python / build
+__pycache__/
+*.py[cod]
+*.egg-info/
+.build/
+dist/
+
+# Local env / IDE
+.venv/
+.env
+.idea/
+.vscode/
+
+# OS / misc
+.DS_Store
diff --git a/examples/dataflow-dlp-flex-deid/Dockerfile b/examples/dataflow-dlp-flex-deid/Dockerfile
new file mode 100644
index 00000000000..fe3ae9f2184
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/Dockerfile
@@ -0,0 +1,25 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM gcr.io/dataflow-templates-base/python3-template-launcher-base:PY311-2024-12-01
+
+ENV FLEX_TEMPLATE_PYTHON_PY_FILE=/app/main.py
+ENV FLEX_TEMPLATE_PYTHON_REQUIREMENTS_FILE=/app/requirements.txt
+
+WORKDIR /app
+COPY . /app
+
+RUN pip install --no-cache-dir -r /app/requirements.txt
+
+ENTRYPOINT ["/opt/google/dataflow/python_template_launcher"]
diff --git a/examples/dataflow-dlp-flex-deid/README.md b/examples/dataflow-dlp-flex-deid/README.md
new file mode 100644
index 00000000000..720d2abb72f
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/README.md
@@ -0,0 +1,144 @@
+# Dataflow Flex Template: De-identify CSVs in GCS (DLP) → BigQuery
+
+A runnable Flex Template that takes CSVs in Cloud Storage, calls DLP to de-identify sensitive fields, and writes sanitized rows to BigQuery.
+
+> **Prerequisites**
+> - Google Cloud project with billing enabled
+> - You can run commands either in **Cloud Shell** or locally with the **gcloud** CLI
+> - A DLP **De-identification Template** (`projects/<PROJECT_ID>/locations/<LOCATION>/deidentifyTemplates/<TEMPLATE_ID>`)
+
+---
+
+## Parameters
+
+| Name | Required | Default | Description |
+|------|:--------:|---------|-------------|
+| `file_pattern` | ✅ | — | GCS glob to input CSVs |
+| `dataset` | ✅ | — | BigQuery dataset (table is created if needed) |
+| `deidentify_template_name` | ✅ | — | `projects/<PROJECT_ID>/locations/<LOCATION>/deidentifyTemplates/<TEMPLATE_ID>` |
+| `csv_headers` **or** `headers_gcs_uri` | ⚙️ | — | Provide headers inline (comma-separated) **or** via a `gs://` file (first line is header) |
+| `batch_size` | ⚙️ | 500 | CSV lines per DLP call |
+| `dlp_api_retry_count` | ⚙️ | 3 | Retries per batch |
+| `skip_header_lines` | ⚙️ | 1 | Header lines to skip in `ReadFromText` |
+| `output_table` | ⚙️ | `output_<TEMPLATE_ID>` | Output table name within `dataset` |
+
+> **CSV & headers:** The CSV **must** have the same column order/count as the header names you supply. The DLP `table` item uses those names when returning de-identified rows.
+
+---
+
+## Quickstart (Cloud Shell or local with gcloud)
+
+### Variables
+```bash
+# Run from: examples/dataflow-dlp-flex-deid/
+export PROJECT_ID=""
+export REGION="us-central1"
+export DATASET="sensitive_data"
+export STAGING_BUCKET_NAME="${PROJECT_ID}-dataflow-assets"
+export AR_REPO_NAME="dataflow-images"
+export IMAGE_TAG="${REGION}-docker.pkg.dev/${PROJECT_ID}/${AR_REPO_NAME}/dlp-csv-deid:latest"
+export TEMPLATE_SPEC="gs://${STAGING_BUCKET_NAME}/templates/dlp-csv-deid.json"
+export DEID_TEMPLATE_NAME="projects/${PROJECT_ID}/locations/global/deidentifyTemplates/<YOUR_TEMPLATE_ID>"
+export SERVICE_ACCOUNT_NAME="dlp-flex-template-runner"
+export SERVICE_ACCOUNT_EMAIL="${SERVICE_ACCOUNT_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"
+```
+
+### One-time resources
+```bash
+# Run from: examples/dataflow-dlp-flex-deid/
+gcloud config set project "$PROJECT_ID"
+gcloud services enable \
+ dataflow.googleapis.com dlp.googleapis.com cloudbuild.googleapis.com \
+ artifactregistry.googleapis.com bigquery.googleapis.com compute.googleapis.com
+
+gcloud storage buckets create "gs://${STAGING_BUCKET_NAME}" \
+ --location="$REGION" --uniform-bucket-level-access || true
+
+bq --location="$REGION" mk --dataset "${PROJECT_ID}:${DATASET}" || true
+
+gcloud artifacts repositories create "${AR_REPO_NAME}" \
+ --repository-format=docker --location="${REGION}" || true
+
+gcloud iam service-accounts create "${SERVICE_ACCOUNT_NAME}" \
+ --display-name="DLP Flex Template Runner" || true
+
+for ROLE in roles/dataflow.worker roles/storage.objectAdmin \
+ roles/bigquery.jobUser roles/bigquery.dataEditor \
+ roles/artifactregistry.reader roles/dlp.user
+do
+ gcloud projects add-iam-policy-binding "$PROJECT_ID" \
+ --member="serviceAccount:${SERVICE_ACCOUNT_EMAIL}" \
+ --role="$ROLE" --condition=None
+done
+```
+
+### Build and push the template
+```bash
+# Run from: examples/dataflow-dlp-flex-deid/
+gcloud builds submit \
+ --config cloudbuild.yaml \
+ --substitutions=_IMAGE_TAG="${IMAGE_TAG}" \
+ --project="${PROJECT_ID}" .
+```
+
+### Build the Flex Template spec
+```bash
+# Run from: examples/dataflow-dlp-flex-deid/
+gcloud dataflow flex-template build "${TEMPLATE_SPEC}" \
+ --image "${IMAGE_TAG}" \
+ --sdk-language "PYTHON" \
+ --metadata-file "metadata.json" \
+ --project "${PROJECT_ID}"
+```
+
+### Run the job
+```bash
+# Run from: anywhere
+JOB_NAME="dlp-deid-csv-$(date +%Y%m%d-%H%M%S)"
+
+# Option A — inline headers:
+CSV_HEADERS="name,email,phone"
+
+gcloud dataflow flex-template run "${JOB_NAME}" \
+ --template-file-gcs-location "${TEMPLATE_SPEC}" \
+ --region "${REGION}" \
+ --service-account-email "${SERVICE_ACCOUNT_EMAIL}" \
+ --staging-location "gs://${STAGING_BUCKET_NAME}/staging" \
+ --temp-location "gs://${STAGING_BUCKET_NAME}/temp" \
+  --parameters file_pattern="gs://<BUCKET>/<PATH>/*.csv" \
+ --parameters dataset="${DATASET}" \
+ --parameters deidentify_template_name="${DEID_TEMPLATE_NAME}" \
+ --parameters csv_headers="${CSV_HEADERS}" \
+ --parameters output_table="output_example"
+
+# Option B — headers file in GCS (first line is the header row):
+HEADERS_GCS_URI="gs://<BUCKET>/headers.txt"
+
+gcloud dataflow flex-template run "${JOB_NAME}" \
+ --template-file-gcs-location "${TEMPLATE_SPEC}" \
+ --region "${REGION}" \
+ --service-account-email "${SERVICE_ACCOUNT_EMAIL}" \
+ --staging-location "gs://${STAGING_BUCKET_NAME}/staging" \
+ --temp-location "gs://${STAGING_BUCKET_NAME}/temp" \
+  --parameters file_pattern="gs://<BUCKET>/<PATH>/*.csv" \
+ --parameters dataset="${DATASET}" \
+ --parameters deidentify_template_name="${DEID_TEMPLATE_NAME}" \
+ --parameters headers_gcs_uri="${HEADERS_GCS_URI}" \
+ --parameters output_table="output_example"
+```
+
+> **Output table name:** Defaults to `output_<TEMPLATE_ID>` (derived from the last segment of `deidentify_template_name`) unless you set `output_table`.
+
+---
+
+**Optional Private IPs:** add `--network`, `--subnetwork`, and `--disable-public-ips` to the run command and ensure Private Google Access (or Cloud NAT) so workers can reach Google APIs.
+
+---
+
+## Troubleshooting
+
+- **Header mismatch** → ensure headers match CSV columns and your DLP template.
+- **Permission denied** → verify runner service account roles listed in “One-time resources”.
+- **Template not found** → check `deidentify_template_name` and its location (`global` or regional).
+
+---
\ No newline at end of file
diff --git a/examples/dataflow-dlp-flex-deid/cloudbuild.yaml b/examples/dataflow-dlp-flex-deid/cloudbuild.yaml
new file mode 100644
index 00000000000..284b21fdb4b
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/cloudbuild.yaml
@@ -0,0 +1,28 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+steps:
+- name: gcr.io/cloud-builders/docker
+ args: ['build','-t','${_IMAGE_TAG}','.']
+- name: gcr.io/cloud-builders/docker
+ args: ['push','${_IMAGE_TAG}']
+
+images:
+- '${_IMAGE_TAG}'
+
+substitutions:
+ _IMAGE_TAG: 'REGION-docker.pkg.dev/PROJECT/REPO/dlp-csv-deid:latest'
+
+options:
+ logging: CLOUD_LOGGING_ONLY
diff --git a/examples/dataflow-dlp-flex-deid/main.py b/examples/dataflow-dlp-flex-deid/main.py
new file mode 100644
index 00000000000..3cd347653b2
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/main.py
@@ -0,0 +1,173 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import csv
+import io
+import logging
+import time
+from typing import Iterable, List, Dict, Optional
+
+import apache_beam as beam
+from apache_beam.options.pipeline_options import PipelineOptions
+from apache_beam.options.pipeline_options import GoogleCloudOptions
+
+
+class DlpDeidOptions(PipelineOptions):
+    """Custom options for the DLP de-identification Flex Template."""
+    @classmethod
+    def _add_argparse_args(cls, parser):
+        parser.add_argument("--file_pattern", required=True)
+        parser.add_argument("--dataset", required=True)
+        parser.add_argument("--deidentify_template_name", required=True)
+        parser.add_argument("--csv_headers", default=None,
+                            help="Comma-separated header names matching the CSV")
+        parser.add_argument("--headers_gcs_uri", default=None,
+                            help="gs:// path to a text file whose first line is the CSV header")
+        parser.add_argument("--batch_size", type=int, default=500)
+        parser.add_argument("--dlp_api_retry_count", type=int, default=3)
+        parser.add_argument("--skip_header_lines", type=int, default=1)
+        parser.add_argument("--output_table", default=None,
+                            help="Output table name (defaults to output_<template id>)")
+
+
+def build_table_schema(headers: List[str]) -> Dict[str, List[Dict[str, str]]]:
+ """Build a BigQuery schema with STRING fields for each header."""
+ fields = [{"name": h, "type": "STRING", "mode": "NULLABLE"} for h in headers]
+ return {"fields": fields}
+
+
+def parse_headers_from_csv_line(line: str) -> List[str]:
+ return next(csv.reader(io.StringIO(line)))
+
+
+class DeidentifyWithDLP(beam.DoFn):
+    """Batch DoFn that calls DLP deidentifyContent on a table-shaped ContentItem."""
+    def __init__(self, *, headers: List[str], template_name: str, retry_count: int):
+        self._headers = headers
+        self._template_name = template_name
+        self._retry_count = retry_count
+        self._dlp_parent = "/".join(template_name.split("/")[:4])  # projects/<p>/locations/<loc>
+        self._dlp_client = None
+
+    def setup(self):
+        from google.cloud import dlp_v2
+        self._dlp_client = dlp_v2.DlpServiceClient()
+
+    def process(self, batch: List[str]) -> Iterable[List[Dict[str, str]]]:
+        rows = []
+        for line in batch:
+            try:
+                values = next(csv.reader(io.StringIO(line)))
+            except StopIteration:
+                logging.warning("Skipping empty or malformed line: %r", line)
+                continue
+            if len(values) != len(self._headers):
+                logging.warning(
+                    "Skipping row with column mismatch. expected=%d actual=%d line=%r",
+                    len(self._headers), len(values), line,
+                )
+                continue
+            rows.append({"values": [{"string_value": v} for v in values]})
+
+        if not rows:
+            return
+
+        dlp_table = {"headers": [{"name": h} for h in self._headers], "rows": rows}
+        request = {
+            "parent": self._dlp_parent,
+            "deidentify_template_name": self._template_name,
+            "item": {"table": dlp_table},
+        }
+
+        for attempt in range(self._retry_count):
+            try:
+                response = self._dlp_client.deidentify_content(request=request)
+                output_rows: List[Dict[str, str]] = []
+                for row in response.item.table.rows:
+                    output_rows.append({
+                        self._headers[i]: val.string_value
+                        for i, val in enumerate(row.values)
+                    })
+                yield output_rows
+                break
+            except Exception as e:
+                logging.warning("DLP API call failed (attempt %d/%d): %s",
+                                attempt + 1, self._retry_count, e)
+                if attempt < self._retry_count - 1:
+                    time.sleep(2 ** attempt)
+                else:
+                    logging.error("All retries failed for batch (first row shown): %r",
+                                  batch[0] if batch else None)
+
+
+def run():
+ options = PipelineOptions(save_main_session=True, streaming=False)
+ opts = options.view_as(DlpDeidOptions)
+ gcp = options.view_as(GoogleCloudOptions)
+
+ # Resolve headers from parameters
+ headers: Optional[List[str]] = None
+ if opts.csv_headers:
+ headers = [h.strip() for h in opts.csv_headers.split(",") if h.strip()]
+ elif opts.headers_gcs_uri:
+ from google.cloud import storage
+ if not opts.headers_gcs_uri.startswith("gs://"):
+ raise ValueError("headers_gcs_uri must be a gs:// path")
+ _, path = opts.headers_gcs_uri.split("gs://", 1)
+ bucket_name, _, object_path = path.partition("/")
+ client = storage.Client()
+ data = client.bucket(bucket_name).blob(object_path).download_as_text()
+ first_line = data.splitlines()[0] if data else ""
+ headers = parse_headers_from_csv_line(first_line)
+ else:
+ raise ValueError("Provide either --csv_headers or --headers_gcs_uri")
+
+ if not headers:
+ raise ValueError("No CSV headers resolved")
+
+ # Default output table naming: output_
+ template_suffix = opts.deidentify_template_name.split("/")[-1]
+ output_table = opts.output_table or f"output_{template_suffix}"
+
+ # Determine project for BigQuery sink
+ bq_project = gcp.project or opts.deidentify_template_name.split("/")[1]
+ table_spec = f"{bq_project}:{opts.dataset}.{output_table}"
+
+ table_schema = build_table_schema(headers)
+
+ with beam.Pipeline(options=options) as p:
+ (
+ p
+ | "ReadFromGCS" >> beam.io.ReadFromText(
+ opts.file_pattern, skip_header_lines=opts.skip_header_lines)
+ | "BatchForDLP" >> beam.BatchElements(
+ min_batch_size=opts.batch_size, max_batch_size=opts.batch_size)
+ | "CallDLP" >> beam.ParDo(DeidentifyWithDLP(
+ headers=headers,
+ template_name=opts.deidentify_template_name,
+ retry_count=opts.dlp_api_retry_count,
+ ))
+ | "FlattenBatches" >> beam.FlatMap(lambda rows: rows)
+ | "WriteToBQ" >> beam.io.WriteToBigQuery(
+ table=table_spec,
+ schema=table_schema,
+ write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE,
+ create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
+ )
+ )
+
+
+if __name__ == "__main__":
+ logging.getLogger().setLevel(logging.INFO)
+ run()
diff --git a/examples/dataflow-dlp-flex-deid/metadata.json b/examples/dataflow-dlp-flex-deid/metadata.json
new file mode 100644
index 00000000000..781368c13e4
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/metadata.json
@@ -0,0 +1,14 @@
+{
+ "name": "deidentify-csv-gcs-to-bq",
+ "description": "De-identifies structured CSV data from GCS using a DLP template and writes it to BigQuery.",
+ "parameters": [
+ { "name": "file_pattern", "label": "Input GCS file pattern", "helpText": "e.g. gs:///*.csv", "regexes": ["^gs://.*$"] },
+ { "name": "dataset", "label": "Output BigQuery Dataset", "helpText": "BigQuery dataset where the output table will be created." },
+ { "name": "deidentify_template_name", "label": "DLP De-identification Template", "helpText": "Full resource name, e.g. projects//locations//deidentifyTemplates/" },
+ { "name": "csv_headers", "label": "CSV headers (comma-separated)", "helpText": "Provide column names matching the CSV and DLP template (alternative to headers_gcs_uri)", "optional": true },
+ { "name": "headers_gcs_uri", "label": "Headers file (gs://)", "helpText": "GCS path to a text file whose first line is the header row (alternative to csv_headers)", "regexes": ["^gs://.*$"], "optional": true },
+ { "name": "batch_size", "label": "Batch size", "helpText": "CSV lines per DLP call (keep within DLP request limits)", "optional": true },
+ { "name": "skip_header_lines", "label": "Skip header lines", "helpText": "Number of header lines to skip when reading CSV", "optional": true },
+ { "name": "output_table", "label": "Output table name", "helpText": "Defaults to output_", "optional": true }
+ ]
+}
diff --git a/examples/dataflow-dlp-flex-deid/requirements.txt b/examples/dataflow-dlp-flex-deid/requirements.txt
new file mode 100644
index 00000000000..6be47a13284
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/requirements.txt
@@ -0,0 +1,3 @@
+google-cloud-dlp>=3.14.0,<4.0.0
+# Only needed if you use --headers_gcs_uri
+google-cloud-storage>=2.14.0,<3.0.0
diff --git a/examples/dataflow-dlp-flex-deid/tests/test_csv_to_dlp_rows.py b/examples/dataflow-dlp-flex-deid/tests/test_csv_to_dlp_rows.py
new file mode 100644
index 00000000000..c32192ab8ea
--- /dev/null
+++ b/examples/dataflow-dlp-flex-deid/tests/test_csv_to_dlp_rows.py
@@ -0,0 +1,35 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib.util
+import pathlib
+
+# Load main.py without installing as a module
+_spec = importlib.util.spec_from_file_location(
+ "example_main", pathlib.Path(__file__).resolve().parents[1] / "main.py"
+)
+_main = importlib.util.module_from_spec(_spec)
+assert _spec.loader is not None
+_spec.loader.exec_module(_main) # type: ignore
+
+def test_build_table_schema():
+ headers = ["name", "email", "phone"]
+ schema = _main.build_table_schema(headers)
+ assert "fields" in schema
+ assert [f["name"] for f in schema["fields"]] == headers
+ assert all(f["type"] == "STRING" for f in schema["fields"])
+
+def test_parse_headers_from_csv_line():
+ headers = _main.parse_headers_from_csv_line("a,b,c\n")
+ assert headers == ["a","b","c"]
diff --git a/examples/iap-user-profile/package-lock.json b/examples/iap-user-profile/package-lock.json
index 0d52c2b524f..a72fa8f9eec 100644
--- a/examples/iap-user-profile/package-lock.json
+++ b/examples/iap-user-profile/package-lock.json
@@ -1502,25 +1502,6 @@
"node": ">= 0.6"
}
},
- "node_modules/accepts/node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/accepts/node_modules/mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "dependencies": {
- "mime-db": "1.52.0"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
"node_modules/agent-base": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz",
@@ -2010,9 +1991,10 @@
}
},
"node_modules/call-bind-apply-helpers": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz",
- "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+ "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
+ "license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
@@ -2454,11 +2436,12 @@
}
},
"node_modules/dunder-proto": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.0.tgz",
- "integrity": "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A==",
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
+ "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
+ "license": "MIT",
"dependencies": {
- "call-bind-apply-helpers": "^1.0.0",
+ "call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
@@ -2587,6 +2570,34 @@
"node": ">= 0.4"
}
},
+ "node_modules/es-object-atoms": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+ "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-set-tostringtag": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
+ "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.6",
+ "has-tostringtag": "^1.0.2",
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/escalade": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz",
@@ -2943,19 +2954,44 @@
}
},
"node_modules/form-data": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
- "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
+ "version": "2.5.5",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
+ "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
- "combined-stream": "^1.0.6",
- "mime-types": "^2.1.12"
+ "combined-stream": "^1.0.8",
+ "es-set-tostringtag": "^2.1.0",
+ "hasown": "^2.0.2",
+ "mime-types": "^2.1.35",
+ "safe-buffer": "^5.2.1"
},
"engines": {
"node": ">= 0.12"
}
},
+ "node_modules/form-data/node_modules/safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ],
+ "license": "MIT"
+ },
"node_modules/formidable": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.2.tgz",
@@ -3056,18 +3092,21 @@
}
},
"node_modules/get-intrinsic": {
- "version": "1.2.5",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.5.tgz",
- "integrity": "sha512-Y4+pKa7XeRUPWFNvOOYHkRYrfzW07oraURSvjDmRVOJ748OrVmeXtpE4+GCEHncjCjkTxPNRt8kEbxDhsn6VTg==",
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+ "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
+ "license": "MIT",
"dependencies": {
- "call-bind-apply-helpers": "^1.0.0",
- "dunder-proto": "^1.0.0",
+ "call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
+ "es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
+ "get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
- "hasown": "^2.0.2"
+ "hasown": "^2.0.2",
+ "math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -3085,6 +3124,19 @@
"node": ">=8.0.0"
}
},
+ "node_modules/get-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
+ "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
+ "license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.1",
+ "es-object-atoms": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/get-stream": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
@@ -3481,6 +3533,22 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/has-tostringtag": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+ "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "has-symbols": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
@@ -4679,6 +4747,15 @@
"node": ">=8"
}
},
+ "node_modules/math-intrinsics": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+ "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@@ -4732,19 +4809,21 @@
}
},
"node_modules/mime-db": {
- "version": "1.44.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
- "integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==",
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
- "version": "2.1.27",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz",
- "integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==",
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "license": "MIT",
"dependencies": {
- "mime-db": "1.44.0"
+ "mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
diff --git a/examples/ml-audio-content-profiling/app/angular/package-lock.json b/examples/ml-audio-content-profiling/app/angular/package-lock.json
index 3fcb27a6daf..89508312a17 100644
--- a/examples/ml-audio-content-profiling/app/angular/package-lock.json
+++ b/examples/ml-audio-content-profiling/app/angular/package-lock.json
@@ -21434,17 +21434,51 @@
"license": "ISC"
},
"node_modules/sha.js": {
- "version": "2.4.11",
- "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz",
- "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==",
+ "version": "2.4.12",
+ "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.12.tgz",
+ "integrity": "sha512-8LzC5+bvI45BjpfXU8V5fdU2mfeKiQe1D1gIMn7XUlF3OTUrpdJpPPH4EMAnF0DsHHdSZqCdSss5qCmJKuiO3w==",
+ "license": "(MIT AND BSD-3-Clause)",
"dependencies": {
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
+ "inherits": "^2.0.4",
+ "safe-buffer": "^5.2.1",
+ "to-buffer": "^1.2.0"
},
"bin": {
"sha.js": "bin.js"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/sha.js/node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "license": "ISC"
+ },
+ "node_modules/sha.js/node_modules/safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ],
+ "license": "MIT"
+ },
"node_modules/shallow-clone": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
diff --git a/tools/gcpviz/go.mod b/tools/gcpviz/go.mod
index f86aaa60399..5c3d45333f4 100644
--- a/tools/gcpviz/go.mod
+++ b/tools/gcpviz/go.mod
@@ -16,5 +16,5 @@ require (
github.com/willf/bitset v1.1.10 // indirect
github.com/willf/bloom v2.0.3+incompatible
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0
- gopkg.in/yaml.v3 v3.0.0
+ gopkg.in/yaml.v3 v3.0.1
)
diff --git a/tools/gcpviz/go.sum b/tools/gcpviz/go.sum
index c521219658b..0d56fce0fd6 100644
--- a/tools/gcpviz/go.sum
+++ b/tools/gcpviz/go.sum
@@ -712,8 +712,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20190905181640-827449938966/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA=
-gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/tools/lambda-compat/go.mod b/tools/lambda-compat/go.mod
index 2f325644ae9..1011e206e21 100644
--- a/tools/lambda-compat/go.mod
+++ b/tools/lambda-compat/go.mod
@@ -3,7 +3,7 @@ module github.com/GoogleCloudPlatform/professional-services/tools/lambda-compat
go 1.23.0
require (
- cloud.google.com/go/compute/metadata v0.2.3
+ cloud.google.com/go/compute/metadata v0.3.0
github.com/aws/aws-sdk-go-v2 v1.13.0
github.com/aws/aws-sdk-go-v2/config v1.13.0
github.com/aws/aws-sdk-go-v2/service/sts v1.14.0
@@ -14,7 +14,6 @@ require (
)
require (
- cloud.google.com/go/compute v1.19.1 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.10.0 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.4 // indirect
@@ -29,7 +28,7 @@ require (
github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect
go.opencensus.io v0.24.0 // indirect
golang.org/x/net v0.38.0 // indirect
- golang.org/x/oauth2 v0.7.0 // indirect
+ golang.org/x/oauth2 v0.27.0 // indirect
golang.org/x/sys v0.31.0 // indirect
golang.org/x/text v0.23.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
diff --git a/tools/lambda-compat/go.sum b/tools/lambda-compat/go.sum
index 38d4b70ea5e..8db7aff324d 100644
--- a/tools/lambda-compat/go.sum
+++ b/tools/lambda-compat/go.sum
@@ -1,8 +1,6 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go/compute v1.19.1 h1:am86mquDUgjGNWxiGn+5PGLbmgiWXlE/yNWpIpNvuXY=
-cloud.google.com/go/compute v1.19.1/go.mod h1:6ylj3a05WF8leseCdIf77NK0g1ey+nj5IKd5/kvShxE=
-cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
-cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
+cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc=
+cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/aws/aws-sdk-go-v2 v1.13.0 h1:1XIXAfxsEmbhbj5ry3D3vX+6ZcUYvIqSm4CWWEuGZCA=
github.com/aws/aws-sdk-go-v2 v1.13.0/go.mod h1:L6+ZpqHaLbAaxsqV0L4cvxZY7QupWJB4fhkf8LXvC7w=
@@ -111,8 +109,8 @@ golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.7.0 h1:qe6s0zUXlPX80/dITx3440hWZ7GwMwgDDyrSGTPJG/g=
-golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4=
+golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
+golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=