diff --git a/cloud-info/ams-wrapper.sh b/cloud-info/ams-wrapper.sh
index 9900e9dc..202c9fc9 100755
--- a/cloud-info/ams-wrapper.sh
+++ b/cloud-info/ams-wrapper.sh
@@ -41,7 +41,6 @@ AUTO_CONFIG_PATH="$(mktemp -d)"
 export CHECKIN_SECRETS_FILE="$CHECKIN_SECRETS_PATH/secrets.yaml"
 # TODO(enolfc): avoid creating new tokens for every provider
 export ACCESS_TOKEN_FILE="$AUTO_CONFIG_PATH/token.yaml"
-USE_ACCESS_TOKEN=0
 if token-generator; then
   # TODO(enolfc): even if this belows fails, we should use access token as it will provide
   # access to more projects
@@ -49,10 +48,10 @@ if token-generator; then
     # this worked, let's update the env
     export CHECKIN_SECRETS_PATH="$AUTO_CONFIG_PATH/vos"
     export CLOUD_INFO_CONFIG="$AUTO_CONFIG_PATH/site.yaml"
-    USE_ACCESS_TOKEN=1
   fi
 fi
 
+
 # Any OS related parameter should be available as env variables
 if test "$CHECKIN_SECRETS_PATH" = ""; then
   # Case 1: manual config
@@ -62,25 +61,22 @@ if test "$CHECKIN_SECRETS_PATH" = ""; then
     --format glue21 >cloud-info.out
 else
   # use service account for everyone
-  CHECKIN_DISCOVERY="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration"
-  CLIENT_ID="$(yq -r '.fedcloudops.client_id' <"$CHECKIN_SECRETS_FILE")"
-  CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' <"$CHECKIN_SECRETS_FILE")"
+  export OS_DISCOVERY_ENDPOINT="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration"
+  export OS_CLIENT_ID="$(yq -r '.fedcloudops.client_id' <"$CHECKIN_SECRETS_FILE")"
+  export OS_CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' <"$CHECKIN_SECRETS_FILE")"
+  export OS_ACCESS_TOKEN_TYPE="access_token"
+  export OS_AUTH_TYPE="v3oidcclientcredentials"
+  export OS_OPENID_SCOPE="openid profile eduperson_entitlement email"
   cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \
     --middleware "$CLOUD_INFO_MIDDLEWARE" \
     --ignore-share-errors \
-    --os-auth-type v3oidcclientcredentials \
-    --os-discovery-endpoint "$CHECKIN_DISCOVERY" \
-    --os-client-id "$CLIENT_ID" \
-    --os-client-secret "$CLIENT_SECRET" \
-    --os-access-token-type access_token \
-    --os-openid-scope "openid profile eduperson_entitlement email" \
     --format glue21 >cloud-info.out
   # Produce the json output also
-  if test "$RCLONE_CONFIG_S3_TYPE" != ""; then
+  RCLONE_CONFIG_S3="$(yq -r '.s3' <"$CHECKIN_SECRETS_FILE")"
+  if test "$RCLONE_CONFIG_S3" != "null"; then
     cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \
       --middleware "$CLOUD_INFO_MIDDLEWARE" \
       --ignore-share-errors \
-      --auth-refresher accesstoken \
       --format glue21json >site.json
   fi
 fi
@@ -102,8 +98,15 @@ printf '"}]}' >>ams-payload
 curl -X POST "$ARGO_URL" -H "content-type: application/json" -d @ams-payload
 
 if [ -f site.json ]; then
-  # Put this info into S3, assume the rclone env config has
-  # a provider named "s3"
+  # Put this info into S3, configure rclone config with
+  # a provider named "s3" using env variables
+  export RCLONE_CONFIG_S3_TYPE=s3
+  export RCLONE_CONFIG_S3_ACCESS_KEY_ID="$(yq -r '.s3.access_key_id' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="$(yq -r '.s3.secret_access_key' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_ENDPOINT="$(yq -r '.s3.endpoint' <"$CHECKIN_SECRETS_FILE")"
+  export S3_BUCKET_NAME="$(yq -r '.s3.bucket' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_ACL=private
+  export RCLONE_CONFIG_S3_NO_CHECK_BUCKET=true
   rclone copy site.json "s3:$S3_BUCKET_NAME/$SITE_NAME"
 fi
 
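For reference, the yq lookups in ams-wrapper.sh above imply a secrets.yaml laid out roughly as in the sketch below. Only the fedcloudops.* and s3.* key names come from the yq paths in the diff; all values are placeholders, and the per-VO entry is hypothetical, modelled on the client_id/client_secret checks that token_generator.py makes further down.

# Sketch only: a secrets.yaml shaped to match the yq paths used by ams-wrapper.sh;
# values are placeholders and the VO entry name is made up.
cat >secrets.yaml <<'EOF'
fedcloudops:
  client_id: "<Check-in client id>"
  client_secret: "<Check-in client secret>"
s3:
  access_key_id: "<access key id>"
  secret_access_key: "<secret access key>"
  endpoint: "https://s3.example.org"
  bucket: "<bucket name>"
vo.example.org:
  client_id: "<per-VO client id>"
  client_secret: "<per-VO client secret>"
EOF
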
diff --git a/cloud-info/cloud_info_catchall/config_generator.py b/cloud-info/cloud_info_catchall/config_generator.py
index f9c3d071..a7aa79b0 100755
--- a/cloud-info/cloud_info_catchall/config_generator.py
+++ b/cloud-info/cloud_info_catchall/config_generator.py
@@ -49,6 +49,8 @@ def generate_shares(config, secrets):
             discoverer = RefresherShareDiscovery(config, secrets[s])
         elif "access_token" in secrets[s]:
             discoverer = AccessTokenShareDiscovery(config, secrets[s])
+        else:
+            continue
         token_shares = discoverer.get_token_shares()
         shares.update(token_shares)
     if not shares:
diff --git a/cloud-info/cloud_info_catchall/token_generator.py b/cloud-info/cloud_info_catchall/token_generator.py
index aa56a0ca..c79ec964 100755
--- a/cloud-info/cloud_info_catchall/token_generator.py
+++ b/cloud-info/cloud_info_catchall/token_generator.py
@@ -71,6 +71,9 @@ def generate_tokens(oidc_config, scopes, tokens, token_ttl, secrets):
         # not our thing
         if not isinstance(secrets[s], dict):
             continue
+        if "client_id" not in secrets[s] or "client_secret" not in secrets[s]:
+            # not suitable for us
+            continue
         if "refresh_token" in secrets[s]:
             # ignore those that have refresh token
             continue
diff --git a/deploy/deploy.sh b/deploy/deploy.sh
index 1f0b7087..bd030f50 100755
--- a/deploy/deploy.sh
+++ b/deploy/deploy.sh
@@ -31,6 +31,10 @@ fi
 
 # copy the secrets to the /etc/egi/vos dir which is readable from the containers
 cp secrets.yaml /etc/egi/vos/secrets.yaml
 
+# make sure the container user (999) can access the files
+chown -R 999:999 /etc/egi/
+
+
 GITHUB_COMMIT_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/commits/$COMMIT_SHA/pulls"
 # Find out PR we need to update
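The RCLONE_CONFIG_S3_* variables exported in ams-wrapper.sh rely on rclone's standard RCLONE_CONFIG_<REMOTE>_<OPTION> environment convention, so no rclone.conf is needed inside the container. A minimal stand-alone sketch follows; the endpoint, credentials, bucket and site name are placeholders, not values from the patch.

# Sketch: reproduce the env-defined "s3" remote outside the wrapper.
export RCLONE_CONFIG_S3_TYPE=s3
export RCLONE_CONFIG_S3_ENDPOINT="https://s3.example.org"
export RCLONE_CONFIG_S3_ACCESS_KEY_ID="<access key id>"
export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="<secret access key>"
export RCLONE_CONFIG_S3_ACL=private
export RCLONE_CONFIG_S3_NO_CHECK_BUCKET=true
# behaves like an "[s3]" section in rclone.conf; copy as the wrapper does
rclone copy site.json "s3:<bucket>/<site>"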