If you want to use a TPM with GCP mTLS Workload Federation, where the private key is embedded in a TPM: GCP's native workload federation support in the SDK uses the Python requests library, which will automatically load and use a TPM-based private key through OpenSSL's provider interface.

For details, see:
- Python mTLS client/server with TPM based key
- GCP Workload Identity Federation using x509 certificates
- https://github.com/salrashid123/cloud_auth_tpm
If workload-adc-1.crt and workload-adc-1.key are the raw certificate and private key used for mTLS, embed the private key into a TPM for this use case as follows:
```bash
export TPM2TOOLS_TCTI="swtpm:port=2321"

printf '\x00\x00' > unique.dat

tpm2_createprimary -C o -G ecc -g sha256 \
   -c primary.ctx \
   -a "fixedtpm|fixedparent|sensitivedataorigin|userwithauth|noda|restricted|decrypt" -u unique.dat

tpm2_flushcontext -t && tpm2_flushcontext -s && tpm2_flushcontext -l

## embed workload-adc-1.key into the TPM as gcp_wif.pem
tpm2_import -C primary.ctx -G rsa2048:rsapss:null -g sha256 -i workload-adc-1.key -u gcp_wif.pub -r gcp_wif.prv
tpm2_load -C primary.ctx -u gcp_wif.pub -r gcp_wif.prv -c gcp_wif.ctx

tpm2_flushcontext -t && tpm2_flushcontext -s && tpm2_flushcontext -l

tpm2_encodeobject -C primary.ctx -u gcp_wif.pub -r gcp_wif.prv -o gcp_wif.pem
```
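Note that gcp_wif.pem is not a regular RSA key: tpm2_encodeobject writes a TSS2 PRIVATE KEY PEM (the TPM public/private blobs wrapped under the primary), which is meant to be loaded through the tpm2 OpenSSL provider. The body will differ on every system, but the header should look like:

```bash
$ head -1 gcp_wif.pem
-----BEGIN TSS2 PRIVATE KEY-----
```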
```bash
export TPM2OPENSSL_TCTI="swtpm:port=2321"
export OPENSSL_MODULES=/usr/lib/x86_64-linux-gnu/ossl-modules/
export OPENSSL_CONF=`pwd`/openssl.cnf

cat `pwd`/openssl.cnf

openssl_conf = openssl_init

[openssl_init]
providers = provider_sect

[provider_sect]
default = default_sect
tpm2 = tpm2_sect

[default_sect]
activate = 1

[tpm2_sect]
activate = 1
```
## make sure openssl is set up

```bash
$ openssl list --providers -provider tpm2
Providers:
  default
    name: OpenSSL Default Provider
    version: 3.5.4
    status: active
  tpm2
    name: TPM 2.0 Provider
    version: 1.3.0
    status: active
```
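With the providers active, you can also confirm that OpenSSL can decode the TPM-wrapped key before wiring it into any credential config. A minimal check, given the tpm2 provider shown above (the explicit -provider flags may be redundant since OPENSSL_CONF already activates both providers):

```bash
## should print the RSA public key if the tpm2 provider can load gcp_wif.pem
openssl rsa -provider tpm2 -provider default -in gcp_wif.pem -pubout
```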
## then create a config
```bash
gcloud iam workload-identity-pools create-cred-config \
   projects/995081019036/locations/global/workloadIdentityPools/cert-pool-1/providers/cert-provider-adc \
   --credential-cert-path=/path/to/workload-adc-1.crt \
   --credential-cert-private-key-path=/path/to/gcp_wif.pem \
   --output-file=mtls-wif.json
```
## which will show
```bash
$ cat mtls-wif.json
{
  "universe_domain": "googleapis.com",
  "type": "external_account",
  "audience": "//iam.googleapis.com/projects/995081019036/locations/global/workloadIdentityPools/cert-pool-1/providers/cert-provider-adc",
  "subject_token_type": "urn:ietf:params:oauth:token-type:mtls",
  "token_url": "https://sts.mtls.googleapis.com/v1/token",
  "credential_source": {
    "certificate": {
      "use_default_certificate_config": true
    }
  },
  "token_info_url": "https://sts.mtls.googleapis.com/v1/introspect"
}
```
### critically the certificate_config.json points to the TPM PEM key and certificate
```bash
$ cat $HOME/.config/gcloud/certificate_config.json
{
  "cert_configs": {
    "workload": {
      "cert_path": "/path/to/workload-adc-1.crt",
      "key_path": "/path/to/gcp_wif.pem"
    }
  }
}
```
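Before going through the SDK, you can optionally exercise the same mTLS token exchange by hand. This is just a sketch: it assumes your curl is linked against OpenSSL 3.x so that the tpm2 provider configured above can load the TSS2 key, and it mirrors the parameters the SDK sends to the STS endpoint:

```bash
curl -s https://sts.mtls.googleapis.com/v1/token \
  --cert workload-adc-1.crt --key gcp_wif.pem \
  --data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:token-exchange" \
  --data-urlencode "subject_token_type=urn:ietf:params:oauth:token-type:mtls" \
  --data-urlencode "requested_token_type=urn:ietf:params:oauth:token-type:access_token" \
  --data-urlencode "scope=https://www.googleapis.com/auth/cloud-platform" \
  --data-urlencode "audience=//iam.googleapis.com/projects/995081019036/locations/global/workloadIdentityPools/cert-pool-1/providers/cert-provider-adc"
```

A successful exchange returns a JSON body containing an access_token.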
## then just point to the workload federation file as the ADC cred and use as normal
```bash
export GOOGLE_APPLICATION_CREDENTIALS=`pwd`/mtls-wif.json

python3 main_gcp_wif.py
```

main_gcp_wif.py:
```python
from tpm2_pytss import *
from google.cloud import storage
import requests
import argparse

parser = argparse.ArgumentParser(description='GCP Auth using TPM')
parser.add_argument("--bucket_name", default='')
parser.add_argument("--project_id", default='')
args = parser.parse_args()

# optional: perform the STS mTLS token exchange directly with the TPM-backed key
# payload = {'subject_token_type': 'urn:ietf:params:oauth:token-type:mtls', 'grant_type': 'urn:ietf:params:oauth:grant-type:token-exchange',
#            'audience': '//iam.googleapis.com/projects/99508101redacted/locations/global/workloadIdentityPools/cert-pool-1/providers/cert-provider-adc',
#            'requested_token_type': 'urn:ietf:params:oauth:token-type:access_token', 'scope': 'https://www.googleapis.com/auth/cloud-platform'}
# response = requests.post('https://sts.mtls.googleapis.com/v1/token', data=payload, verify=True, cert=('workload-adc-1.crt', 'gcp_wif.pem'))
# print("Status Code: %s" % response.status_code)
# print(response.text)

# ADC (mtls-wif.json + certificate_config.json) drives the mTLS token exchange
# transparently, so the storage client just works with the TPM-resident key
storage_client = storage.Client(project=args.project_id)
blobs = storage_client.list_blobs(args.bucket_name)
for blob in blobs:
    print(blob.name)
```
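Run it against your own project and bucket (the variables here are placeholders):

```bash
python3 main_gcp_wif.py --project_id=$PROJECT_ID --bucket_name=$BUCKET_NAME
```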