update stdlib tests that use secrets

Signed-off-by: Tom Chauveau <tom.chauveau@epitech.eu>
Tom Chauveau 2021-05-27 18:22:41 +02:00 committed by Andrea Luzzardi
parent 40d4c95bff
commit 5fb7ded2d5
18 changed files with 442 additions and 219 deletions

View File

@@ -46,7 +46,7 @@ import (
 aws configure set default.region "$AWS_DEFAULT_REGION"
 aws configure set default.cli_pager ""
 aws configure set default.output "json"
-"""#
+"""#,
 ]
 mount: "/run/secrets/access_key": secret: config.accessKey
 mount: "/run/secrets/secret_key": secret: config.secretKey

View File

@@ -39,14 +39,14 @@ import (
 "-c",
 #"""
 aws ecr get-login-password > /out
-"""#
+"""#,
 ]
 },
 op.#Export & {
 source: "/out"
 format: "string"
-}
+},
 ]
 }
 }

View File

@@ -18,12 +18,14 @@ import (
 // kubeconfig is the generated kube configuration file
 kubeconfig: {
+// FIXME There is a problem with dagger.#Secret type @dagger(output)
 string
 #up: [
 op.#Load & {
-from: aws.#CLI
+from: aws.#CLI & {
+"config": config
+}
 },
 op.#WriteFile & {
@@ -42,13 +44,6 @@ import (
 "/entrypoint.sh",
 ]
 env: {
-AWS_CONFIG_FILE: "/cache/aws/config"
-AWS_ACCESS_KEY_ID: config.accessKey
-AWS_SECRET_ACCESS_KEY: config.secretKey
-AWS_DEFAULT_REGION: config.region
-AWS_REGION: config.region
-AWS_DEFAULT_OUTPUT: "json"
-AWS_PAGER: ""
 EKS_CLUSTER: clusterName
 KUBECTL_VERSION: version
 }
@@ -62,5 +57,5 @@ import (
 format: "string"
 },
 ]
-} @dagger(output)
+}
 }

View File

@@ -18,30 +18,29 @@ import (
 // exported priority
 priority: out @dagger(output)
-out: string
-aws.#Script & {
-always: true
-files: {
-"/inputs/listenerArn": listenerArn
-if vhost != _|_ {
-"/inputs/vhost": vhost
-}
-}
-export: "/priority"
-//FIXME: The code below can end up not finding an available prio
-// Better to exclude the existing allocated priorities from the random sequence
-code: #"""
-if [ -s /inputs/vhost ]; then
+out: {
+string
+#up: [
+op.#Load & {
+from: aws.#CLI & {
+"config": config
+}
+},
+op.#Exec & {
+args: [
+"/bin/bash",
+"--noprofile",
+"--norc",
+"-eo",
+"pipefail",
+#"""
+if [ -s "$VHOST" ]; then
 # We passed a vhost as input, try to recycle priority from previously allocated vhost
-vhost="$(cat /inputs/vhost)"
 priority=$(aws elbv2 describe-rules \
---listener-arn "$(cat /inputs/listenerArn)" | \
-jq -r --arg vhost "$vhost" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $vhost) | .Priority')
+--listener-arn "$LISTENER_ARN" | \
+jq -r --arg vhost "$VHOST" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $vhost) | .Priority')
 if [ -n "${priority}" ]; then
 echo -n "${priority}" > /priority
@@ -56,7 +55,7 @@ import (
 p=$(shuf -i 1-50000 -n 1)
 # Find the next priority available that we can allocate
 aws elbv2 describe-rules \
---listener-arn "$(cat /inputs/listenerArn)" \
+--listener-arn "$LISTENER_ARN" \
 | jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
 priority="${p}"
 break
@@ -66,6 +65,18 @@ import (
 exit 1
 fi
 echo -n "${priority}" > /priority
-"""#
+"""#,
+]
+env: {
+LISTENER_ARN: listenerArn
+VHOST: vhost
+}
+},
+op.#Export & {
+source: "/priority"
+format: "string"
+},
+]
 }
 }

View File

@@ -22,45 +22,60 @@ import (
 dbType: "mysql" | "postgres" @dagger(input)
 // Name of the DB created
-out: string @dagger(output)
-aws.#Script & {
+out: {
+@dagger(output)
+string
+#up: [
+op.#Load & {
+from: aws.#CLI & {
 "config": config
-files: {
-"/inputs/name": name
-"/inputs/db_arn": dbArn
-"/inputs/secret_arn": secretArn
-"/inputs/db_type": dbType
 }
-export: "/db_created"
-code: #"""
-set +o pipefail
-dbType="$(cat /inputs/db_type)"
-echo "dbType: $dbType"
-sql="CREATE DATABASE \`$(cat /inputs/name)\`"
-if [ "$dbType" = postgres ]; then
-sql="CREATE DATABASE \"$(cat /inputs/name)\""
+},
+op.#Exec & {
+args: [
+"/bin/bash",
+"--noprofile",
+"--norc",
+"-eo",
+"pipefail",
+#"""
+echo "dbType: $DB_TYPE"
+sql="CREATE DATABASE \`"$NAME"\`"
+if [ "$DB_TYPE" = postgres ]; then
+sql="CREATE DATABASE \""$NAME"\""
 fi
-cp /inputs/name /db_created
+echo "$NAME" >> /db_created
 aws rds-data execute-statement \
---resource-arn "$(cat /inputs/db_arn)" \
---secret-arn "$(cat /inputs/secret_arn)" \
+--resource-arn "$DB_ARN" \
+--secret-arn "$SECRET_ARN" \
 --sql "$sql" \
---database "$dbType" \
+--database "$DB_TYPE" \
 --no-include-result-metadata \
 |& tee /tmp/out
 exit_code=${PIPESTATUS[0]}
 if [ $exit_code -ne 0 ]; then
 grep -q "database exists\|already exists" /tmp/out || exit $exit_code
 fi
-"""#
+"""#,
+]
+env: {
+NAME: name
+DB_ARN: dbArn
+SECRET_ARN: secretArn
+DB_TYPE: dbType
+}
+},
+op.#Export & {
+source: "/db_created"
+format: "string"
+},
+]
 }
 }
@@ -69,56 +84,55 @@ import (
 config: aws.#Config
 // Username
-username: dagger.#Secret
+username: dagger.#Secret @dagger(input)
 // Password
-password: dagger.#Secret
+password: dagger.#Secret @dagger(input)
 // ARN of the database instance
-dbArn: string
+dbArn: string @dagger(input)
 // ARN of the database secret (for connecting via rds api)
-secretArn: string
-grantDatabase: string | *""
-dbType: "mysql" | "postgres"
+secretArn: string @dagger(input)
+grantDatabase: string | *"" @dagger(input)
+dbType: "mysql" | "postgres" @dagger(input)
 // Outputed username
-out: string
-aws.#Script & {
+out: {
+@dagger(output)
+string
+#up: [
+op.#Load & {
+from: aws.#CLI & {
 "config": config
-files: {
-"/inputs/username": username
-"/inputs/password": password
-"/inputs/db_arn": dbArn
-"/inputs/secret_arn": secretArn
-"/inputs/grant_database": grantDatabase
-"/inputs/db_type": dbType
 }
-export: "/username"
-code: #"""
-set +o pipefail
-dbType="$(cat /inputs/db_type)"
-echo "dbType: $dbType"
-sql="CREATE USER '$(cat /inputs/username)'@'%' IDENTIFIED BY '$(cat /inputs/password)'"
-if [ "$dbType" = postgres ]; then
-sql="CREATE USER \"$(cat /inputs/username)\" WITH PASSWORD '$(cat /inputs/password)'"
+},
+op.#Exec & {
+args: [
+"/bin/bash",
+"--noprofile",
+"--norc",
+"-eo",
+"pipefail",
+#"""
+echo "dbType: $DB_TYPE"
+sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
+if [ "$DB_TYPE" = postgres ]; then
+sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 fi
-cp /inputs/username /username
+echo "$USERNAME" >> /username
 aws rds-data execute-statement \
---resource-arn "$(cat /inputs/db_arn)" \
---secret-arn "$(cat /inputs/secret_arn)" \
+--resource-arn "$DB_ARN" \
+--secret-arn "$SECRET_ARN" \
 --sql "$sql" \
---database "$dbType" \
+--database "$DB_TYPE" \
 --no-include-result-metadata \
 |& tee tmp/out
 exit_code=${PIPESTATUS[0]}
@@ -126,32 +140,48 @@ import (
 grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
 fi
-sql="SET PASSWORD FOR '$(cat /inputs/username)'@'%' = PASSWORD('$(cat /inputs/password)')"
-if [ "$dbType" = postgres ]; then
-sql="ALTER ROLE \"$(cat /inputs/username)\" WITH PASSWORD '$(cat /inputs/password)'"
+sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
+if [ "$DB_TYPE" = postgres ]; then
+sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 fi
 aws rds-data execute-statement \
---resource-arn "$(cat /inputs/db_arn)" \
---secret-arn "$(cat /inputs/secret_arn)" \
+--resource-arn "$DB_ARN" \
+--secret-arn "$SECRET_ARN" \
 --sql "$sql" \
---database "$dbType" \
+--database "$DB_TYPE" \
 --no-include-result-metadata
-sql="GRANT ALL ON \`$(cat /inputs/grant_database)\`.* to '$(cat /inputs/username)'@'%'"
-if [ "$dbType" = postgres ]; then
-sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"$(cat /inputs/username)\"; GRANT ALL PRIVILEGES ON DATABASE \"$(cat /inputs/grant_database)\" to \"$(cat /inputs/username)\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"$(cat /inputs/username)\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \"$(cat /inputs/username)\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \"$(cat /inputs/username)\"; GRANT USAGE ON SCHEMA public TO \"$(cat /inputs/username)\";"
+sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
+if [ "$DB_TYPE" = postgres ]; then
+sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
 fi
-if [ -s /inputs/grant_database ]; then
+if [ -s "$GRAND_DATABASE" ]; then
 aws rds-data execute-statement \
---resource-arn "$(cat /inputs/db_arn)" \
---secret-arn "$(cat /inputs/secret_arn)" \
+--resource-arn "$DB_ARN" \
+--secret-arn "$SECRET_ARN" \
 --sql "$sql" \
---database "$dbType" \
+--database "$DB_TYPE" \
 --no-include-result-metadata
 fi
-"""#
+"""#,
+]
+env: {
+USERNAME: username
+PASSWORD: password
+DB_ARN: dbArn
+SECRET_ARN: secretArn
+GRAND_DATABASE: grantDatabase
+DB_TYPE: dbType
+}
+},
+op.#Export & {
+source: "/username"
+format: "string"
+},
+]
 }
 }
@@ -160,35 +190,51 @@ import (
 config: aws.#Config
 // ARN of the database instance
-dbArn: string
+dbArn: string @dagger(input)
 // DB hostname
-hostname: info.hostname
+hostname: info.hostname @dagger(output)
 // DB port
-port: info.port
+port: info.port @dagger(output)
 info: {
 hostname: string
 port: int
 }
-info: json.Unmarshal(out)
-out: string
-aws.#Script & {
+info: json.Unmarshal(out) @dagger(output)
+out: {
+string
+#up: [
+op.#Load & {
+from: aws.#CLI & {
 "config": config
-files: "/inputs/db_arn": dbArn
-export: "/out"
-code: #"""
-db_arn="$(cat /inputs/db_arn)"
-data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$db_arn" )
+}
+},
+op.#Exec & {
+args: [
+"/bin/bash",
+"--noprofile",
+"--norc",
+"-eo",
+"pipefail",
+#"""
+data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$DB_ARN" )
 echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out
 echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out
 cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out
-"""#
+"""#,
+]
+env: DB_ARN: dbArn
+},
+op.#Export & {
+source: "/out"
+format: "json"
+},
+]
 }
 }

View File

@@ -44,7 +44,7 @@ import (
 dest: "/source"
 content: sourceInline
 }
-}
+},
 op.#Exec & {
 if always != _|_ {
@@ -79,14 +79,14 @@ import (
 echo "$TARGET" \
 | sed -E 's=^s3://([^/]*)/=https://\1.s3.amazonaws.com/=' \
 > /url
-"""#
+"""#,
 ]
 },
 op.#Export & {
 source: "/url"
 format: "string"
-}
+},
 ]
 }
 }

View File

@@ -21,9 +21,7 @@ setup() {
 }

 @test "stdlib: netlify" {
-skip_unless_secrets_available "$TESTDIR"/stdlib/netlify/inputs.yaml
-"$DAGGER" compute "$TESTDIR"/stdlib/netlify --input-yaml "$TESTDIR"/stdlib/netlify/inputs.yaml
+"$DAGGER" up -w "$TESTDIR"/stdlib/netlify/
 }

 @test "stdlib: kubernetes" {
@@ -47,9 +45,7 @@ setup() {
 }

 @test "stdlib: aws: eks" {
-skip_unless_secrets_available "$TESTDIR"/stdlib/aws/inputs.yaml
-"$DAGGER" compute "$TESTDIR"/stdlib/aws/eks --input-yaml "$TESTDIR"/stdlib/aws/inputs.yaml
+"$DAGGER" up -w "$TESTDIR"/stdlib/aws/eks
 }

 @test "stdlib: aws: ecr" {

View File

@@ -0,0 +1,2 @@
# dagger state
state/**

View File

@@ -0,0 +1,53 @@
package eks
import (
"dagger.io/aws"
"dagger.io/aws/eks"
"dagger.io/kubernetes"
"dagger.io/dagger/op"
)
TestConfig: awsConfig: aws.#Config & {
region: "us-east-2"
}
TestCluster: eks.#KubeConfig & {
config: TestConfig.awsConfig
clusterName: *"dagger-example-eks-cluster" | string
}
TestEks: {
#GetPods:
"""
kubectl get pods -A
"""
#up: [
op.#Load & {
from: kubernetes.#Kubectl
},
op.#WriteFile & {
dest: "/kubeconfig"
content: TestCluster.kubeconfig
},
op.#WriteFile & {
dest: "/getPods.sh"
content: #GetPods
},
op.#Exec & {
always: true
args: [
"/bin/bash",
"--noprofile",
"--norc",
"-eo",
"pipefail",
"/getPods.sh",
]
env: KUBECONFIG: "/kubeconfig"
},
]
}

View File

@@ -0,0 +1,26 @@
name: default
inputs:
TestConfig.awsConfig.accessKey:
secret: ENC[AES256_GCM,data:dzhlip9kKU8mMEycFjq6MobD5BA=,iv:LKeYUbXpnWIZneGs7DCLVKxv1W2aa/3EVGO4jnDlOgc=,tag:+TcxQahxFTweyoPaROTJSQ==,type:str]
TestConfig.awsConfig.secretKey:
secret: ENC[AES256_GCM,data:bu3AI5jODWv4ePvRKw2l/1UOuH07Z0/oB2hiY4QqrhTcfjdSbr6kBg==,iv:BqddzzXqvAv0cAj2SVhoFx/kUOnRsoevqMRujCINVv0=,tag:u0KjVnbN8h54CLFARJmJ0g==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAzV0ZXNW5qaGNJMjF5bnBO
d1Z1RXFhSnNRM1Vwa3lyWFJ6VVFDZTQ3cUhZClh0N1lxZ3dwSFhHTjRyS092OVVj
Tkw4ZlU4S3g0T1VGS1RYYnB1dGlzbVkKLS0tIEc4T1Z3SEU2NUNhd2FkSXlIUERM
UE5Cd2VwYkd1MHlTOXNJVEU3RVpqU2sK86kXU6ZaaVHTg9BuCEcOxnDrrW00+bwu
AHttbzqYVuC3YxXjOTzAZL8aYTStk14wGdI6TirZ9pX0fyaKAfzBUQ==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2021-05-27T16:01:59Z"
mac: ENC[AES256_GCM,data:T+0rcT9Xi/kJ8+EzCd7ewenDmc1cH/t2MxCpf+QXkILUC/uE8OgROizDMAiUYI2HpeBfZrmUgLMVzlTZirIbC51eWLAf6itbSIGKkVuz0uSNwhRpKGAROg6U1h39Scg6RpAvpzSTZvYOx5SwP78Uc6NQdp5yTDEb+0e9Wqzu+jU=,iv:INAN+EPwBv5dWWHQnaMr4QOBQWx3WCcohORvIPrBZN8=,tag:N4vtDowFKTDSHmMob5HgCw==,type:str]
pgp: []
encrypted_suffix: secret
version: 3.7.1

View File

@@ -33,14 +33,14 @@ import (
 "-c",
 #"""
 aws s3 ls --recursive \#(target) > /contents
-"""#
+"""#,
 ]
 },
 op.#Export & {
 source: "/contents"
 format: "string"
-}
+},
 ]
 }
 }
@@ -76,7 +76,7 @@ import (
 "-eo",
 "pipefail",
 "-c",
-"grep -q \(file) /test"
+"grep -q \(file) /test",
 ]
 },
 ]

View File

@@ -11,7 +11,7 @@ source: dagger.#Artifact
 registry: {
 username: string
-secret: dagger.#Secret
+secret: string
 }

 TestPushAndPull: {

View File

@@ -0,0 +1,2 @@
# dagger state
state/**

View File

@@ -0,0 +1,46 @@
package netlify
import (
"dagger.io/dagger/op"
"dagger.io/alpine"
"dagger.io/netlify"
)
TestNetlify: {
// Generate a website containing the random number
html: #up: [
op.#WriteFile & {
content: random
dest: "index.html"
},
]
// Deploy to netlify
deploy: netlify.#Site & {
contents: html
name: "dagger-test"
}
// Check if the deployed site has the random marker
check: #up: [
op.#Load & {
from: alpine.#Image & {
package: bash: "=~5.1"
package: curl: true
}
},
op.#Exec & {
args: [
"/bin/bash",
"--noprofile",
"--norc",
"-eo",
"pipefail",
"-c",
#"""
test "$(curl \#(deploy.deployUrl))" = "\#(random)"
"""#,
]
},
]
}

View File

@@ -0,0 +1,20 @@
package netlify
import (
"dagger.io/alpine"
"dagger.io/dagger/op"
)
// Generate a random number
random: {
string
#up: [
op.#Load & {from: alpine.#Image},
op.#Exec & {
args: ["sh", "-c", "cat /dev/urandom | tr -dc 'a-z' | fold -w 10 | head -n 1 | tr -d '\n' > /rand"]
},
op.#Export & {
source: "/rand"
},
]
}

View File

@@ -0,0 +1,26 @@
name: net
inputs:
TestNetlify.deploy.account.name:
text: blocklayer
TestNetlify.deploy.account.token:
secret: ENC[AES256_GCM,data:oWKi8eqTUEs+YClokLKeAKsEj3qae4yQTn/67u6ga4Ptcq4+MyYS/6wAUg==,iv:Xfw+L/4p7vO+jb/EVyYOvsIZ9KxZbi30ms2Ckg4E8cE=,tag:G4EBBer04D6FHFP9e+feTw==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBwWHFLRUtscWVma0lQM3Qv
M2czUFJhSEpnczdJTWhadnJHOWowaXd1dWtJCmk3aU15NDJYcmtUOE5pZ0lIQzRu
dTYvRFdsM0ZoUjFWSG91UnZRVWdvZjgKLS0tIENhK2VWNHByY3hYNUVmWDRmOUFM
SEdUK2RsaUxuVWg2aXUwdVJ0eUtrWWMKWkQDBuL5e4QDx5Wy6+fHiD+J4fp7QdMm
lsqgmxRvJMWgEvm1U+hDAo/Pkn8PFUFJf0KxEvkdF4qGuguQePgzFQ==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2021-05-27T14:36:27Z"
mac: ENC[AES256_GCM,data:S3l8tVat/Yp7fH5feeL4JxL+uQwZ0zwv8/LPsOoBebfDFWuE/j9sFZD304OT7XNCsfG8R/lqdpoxYmiyH6ToHeZyktXalpk0tAkwFXUV4VUZKyIn81UirbtWx4OT6fW7jusqqg2uX3nhvjGd+QerhEC4Qu4o8lQCKCMzLuQjmVw=,iv:4ucl0O+VgdK/SwtEad1jXIWJ4pQSxlWCCUzFbqNLDgg=,tag:xno2U/FIVW6KgSXW5RWDsw==,type:str]
pgp: []
encrypted_suffix: secret
version: 3.7.1