Merge pull request #564 from samalba/cmd-doc-gen

cmd/doc: generate whole stdlib documentation

commit a002d2e02e
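The change turns `dagger doc` from a single-package printer into a generator for the whole stdlib: `--format`/`-f` selects txt, md or json, and a new `--output`/`-o` directory switches the command into "walk every stdlib package and write one file each" mode. A minimal sketch of that command surface using plain cobra/viper; `printOne` and `generateAll` are hypothetical stand-ins for the real `PrintDoc` and `walkStdlib` shown in the hunks below:

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

// printOne and generateAll are hypothetical stand-ins for the real
// PrintDoc and walkStdlib; they only illustrate the dispatch.
func printOne(pkg, format string) {
	fmt.Printf("would document %s as %s\n", pkg, format)
}

func generateAll(outDir, format string) {
	fmt.Printf("would write docs for every package to %s as %s\n", outDir, format)
}

var docCmd = &cobra.Command{
	Use:  "doc [PACKAGE | PATH]",
	Args: cobra.MaximumNArgs(1), // the package argument is now optional
	Run: func(cmd *cobra.Command, args []string) {
		format := viper.GetString("format")
		output := viper.GetString("output")
		if output != "" {
			// --output switches to "generate the whole stdlib" mode.
			generateAll(output, format)
			return
		}
		if len(args) < 1 {
			fmt.Fprintln(os.Stderr, "need to specify package name in command argument")
			os.Exit(1)
		}
		printOne(args[0], format)
	},
}

func main() {
	docCmd.Flags().StringP("format", "f", "txt", "Output format (txt|md|json)")
	docCmd.Flags().StringP("output", "o", "", "Output directory")
	if err := viper.BindPFlags(docCmd.Flags()); err != nil {
		panic(err)
	}
	if err := docCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```

With the real command this corresponds to an invocation shaped like `dagger doc -o ./docs -f md` (shape inferred from the flags above, not stated in the PR).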
@@ -4,8 +4,10 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"io"
 	"io/fs"
 	"os"
+	"path"
 	"strings"
 	"text/tabwriter"
 	"unicode/utf8"
@@ -53,7 +55,7 @@ type PackageJSON struct {
 var docCmd = &cobra.Command{
 	Use:   "doc [PACKAGE | PATH]",
 	Short: "document a package",
-	Args:  cobra.ExactArgs(1),
+	Args:  cobra.MaximumNArgs(1),
 	PreRun: func(cmd *cobra.Command, args []string) {
 		// Fix Viper bug for duplicate flags:
 		// https://github.com/spf13/viper/issues/233
@@ -65,11 +67,24 @@ var docCmd = &cobra.Command{
 		lg := logger.New()
 		ctx := lg.WithContext(cmd.Context())

-		format := viper.GetString("output")
+		format := viper.GetString("format")
 		if format != textFormat &&
 			format != markdownFormat &&
 			format != jsonFormat {
-			lg.Fatal().Msg("output must be either `txt`, `md` or `json`")
+			lg.Fatal().Msg("format must be either `txt`, `md` or `json`")
 		}

+		output := viper.GetString("output")
+		if output != "" {
+			if len(args) > 0 {
+				lg.Warn().Str("packageName", args[0]).Msg("arg is ignored when --output is set")
+			}
+			walkStdlib(ctx, output, format)
+			return
+		}
+
+		if len(args) < 1 {
+			lg.Fatal().Msg("need to specify package name in command argument")
+		}
+
 		packageName := args[0]
@@ -78,12 +93,13 @@ var docCmd = &cobra.Command{
 		if err != nil {
 			lg.Fatal().Err(err).Msg("cannot compile code")
 		}
-		PrintDoc(ctx, packageName, val, format)
+		PrintDoc(ctx, os.Stdout, packageName, val, format)
 	},
 }

 func init() {
-	docCmd.Flags().StringP("output", "o", textFormat, "Output format (txt|md)")
+	docCmd.Flags().StringP("format", "f", textFormat, "Output format (txt|md)")
+	docCmd.Flags().StringP("output", "o", "", "Output directory")

 	if err := viper.BindPFlags(docCmd.Flags()); err != nil {
 		panic(err)
@@ -132,9 +148,9 @@ func loadCode(packageName string) (*compiler.Value, error) {
 }

 // printValuesText (text) formats an array of Values on stdout
-func printValuesText(libName string, values []*compiler.Value) {
-	w := tabwriter.NewWriter(os.Stdout, 0, 4, len(textPadding), ' ', 0)
-	fmt.Printf("\n%sInputs:\n", textPadding)
+func printValuesText(iw io.Writer, libName string, values []*compiler.Value) {
+	fmt.Fprintf(iw, "\n%sInputs:\n", textPadding)
+	w := tabwriter.NewWriter(iw, 0, 4, len(textPadding), ' ', 0)
 	for _, i := range values {
 		docStr := terminalTrim(common.ValueDocString(i))
 		fmt.Fprintf(w, "\t\t%s\t%s\t%s\n",
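The refactor above is the usual io.Writer injection: the printers stop writing to os.Stdout directly and take a writer, so the same formatting code can render to the terminal or to a generated file. A small self-contained sketch of that pattern with text/tabwriter (generic example, not dagger's actual helpers):

```go
package main

import (
	"fmt"
	"io"
	"os"
	"text/tabwriter"
)

// printColumns writes name/type/description rows to any io.Writer,
// so the caller decides whether that is os.Stdout or a file on disk.
func printColumns(w io.Writer, rows [][3]string) {
	tw := tabwriter.NewWriter(w, 0, 4, 2, ' ', 0)
	for _, r := range rows {
		fmt.Fprintf(tw, "%s\t%s\t%s\n", r[0], r[1], r[2])
	}
	tw.Flush() // tabwriter buffers until Flush
}

func main() {
	rows := [][3]string{
		{"source", "dagger.#Artifact", "Source code"},
		{"image", "string", "Image name"},
	}

	// Same formatting code, two destinations.
	printColumns(os.Stdout, rows)

	f, err := os.Create("doc.txt")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	printColumns(f, rows)
}
```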
@@ -144,8 +160,8 @@ func printValuesText(libName string, values []*compiler.Value) {
 }

 // printValuesMarkdown (markdown) formats an array of Values on stdout
-func printValuesMarkdown(libName string, values []*compiler.Value) {
-	w := tabwriter.NewWriter(os.Stdout, 0, 4, len(textPadding), ' ', 0)
+func printValuesMarkdown(iw io.Writer, libName string, values []*compiler.Value) {
+	w := tabwriter.NewWriter(iw, 0, 4, len(textPadding), ' ', 0)
 	fmt.Fprintf(w, "| Name\t| Type\t| Description \t|\n")
 	fmt.Fprintf(w, "| -------------\t|:-------------:\t|:-------------:\t|\n")
 	for _, i := range values {
@@ -173,7 +189,7 @@ func valuesToJSON(libName string, values []*compiler.Value) []ValueJSON {
 	return val
 }

-func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, format string) {
+func PrintDoc(ctx context.Context, w io.Writer, packageName string, val *compiler.Value, format string) {
 	lg := log.Ctx(ctx)

 	fields, err := val.Fields(cue.Definitions(true))
@@ -185,20 +201,20 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
 	// Package Name + Description
 	switch format {
 	case textFormat:
-		fmt.Printf("Package %s\n", packageName)
-		fmt.Printf("\n%s\n", common.ValueDocString(val))
+		fmt.Fprintf(w, "Package %s\n", packageName)
+		fmt.Fprintf(w, "\n%s\n", common.ValueDocString(val))
 	case markdownFormat:
-		fmt.Printf("## Package %s\n", mdEscape(packageName))
+		fmt.Fprintf(w, "## Package %s\n", mdEscape(packageName))
 		comment := common.ValueDocString(val)
 		if comment == "-" {
 			fmt.Println()
 			break
 		}
-		fmt.Printf("\n%s\n\n", mdEscape(comment))
+		fmt.Fprintf(w, "\n%s\n\n", mdEscape(comment))
 	case jsonFormat:
 		packageJSON.Name = packageName
 		comment := common.ValueDocString(val)
-		if comment != "" {
+		if comment != "-" {
 			packageJSON.Description = comment
 		}
 	}
@@ -223,16 +239,16 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
 		comment := common.ValueDocString(v)
 		switch format {
 		case textFormat:
-			fmt.Printf("\n%s\n\n%s%s\n", name, textPadding, comment)
+			fmt.Fprintf(w, "\n%s\n\n%s%s\n", name, textPadding, comment)
 		case markdownFormat:
-			fmt.Printf("### %s\n\n", name)
+			fmt.Fprintf(w, "### %s\n\n", name)
 			if comment != "-" {
-				fmt.Printf("%s\n\n", mdEscape(comment))
+				fmt.Fprintf(w, "%s\n\n", mdEscape(comment))
 			}
 		case jsonFormat:
 			fieldJSON.Name = name
 			comment := common.ValueDocString(val)
-			if comment != "" {
+			if comment != "-" {
 				fieldJSON.Description = comment
 			}
 		}
@@ -242,17 +258,17 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
 		switch format {
 		case textFormat:
 			if len(inp) == 0 {
-				fmt.Printf("\n%sInputs: none\n", textPadding)
+				fmt.Fprintf(w, "\n%sInputs: none\n", textPadding)
 				break
 			}
-			printValuesText(name, inp)
+			printValuesText(w, name, inp)
 		case markdownFormat:
-			fmt.Printf("#### %s Inputs\n\n", mdEscape(name))
+			fmt.Fprintf(w, "#### %s Inputs\n\n", mdEscape(name))
 			if len(inp) == 0 {
-				fmt.Printf("_No input._\n\n")
+				fmt.Fprintf(w, "_No input._\n\n")
 				break
 			}
-			printValuesMarkdown(name, inp)
+			printValuesMarkdown(w, name, inp)
 		case jsonFormat:
 			fieldJSON.Inputs = valuesToJSON(name, inp)
 		}
@@ -262,17 +278,17 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
 		switch format {
 		case textFormat:
 			if len(out) == 0 {
-				fmt.Printf("\n%sOutputs: none\n", textPadding)
+				fmt.Fprintf(w, "\n%sOutputs: none\n", textPadding)
 				break
 			}
-			printValuesText(name, out)
+			printValuesText(w, name, out)
 		case markdownFormat:
-			fmt.Printf("#### %s Outputs\n\n", mdEscape(name))
+			fmt.Fprintf(w, "#### %s Outputs\n\n", mdEscape(name))
 			if len(out) == 0 {
-				fmt.Printf("_No output._\n\n")
+				fmt.Fprintf(w, "_No output._\n\n")
 				break
 			}
-			printValuesMarkdown(name, out)
+			printValuesMarkdown(w, name, out)
 		case jsonFormat:
 			fieldJSON.Outputs = valuesToJSON(name, out)
 			packageJSON.Fields = append(packageJSON.Fields, fieldJSON)
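For the json format, the hunks above fill packageJSON / fieldJSON values field by field and only serialize at the end. A rough, self-contained approximation of that shape; the struct fields mirror the names referenced in the diff (Name, Description, Inputs, Outputs, Fields), but the real PackageJSON/ValueJSON definitions live earlier in doc.go and may differ:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// These mirror the field names referenced in the diff; the real structs
// in doc.go may carry more.
type ValueJSON struct {
	Name        string `json:"name"`
	Type        string `json:"type"`
	Description string `json:"description"`
}

type FieldJSON struct {
	Name        string      `json:"name"`
	Description string      `json:"description,omitempty"`
	Inputs      []ValueJSON `json:"inputs"`
	Outputs     []ValueJSON `json:"outputs"`
}

type PackageJSON struct {
	Name        string      `json:"name"`
	Description string      `json:"description,omitempty"`
	Fields      []FieldJSON `json:"fields"`
}

func main() {
	// Sample data only; real values come from the compiled CUE package.
	pkg := PackageJSON{
		Name:        "dagger.io/example",
		Description: "An example package",
		Fields: []FieldJSON{{
			Name: "#Deploy",
			Inputs: []ValueJSON{
				{Name: "config", Type: "aws.#Config", Description: "AWS Config"},
			},
		}},
	}

	// Marshal once and print; the real command writes the result to w
	// (see the hunk below). MarshalIndent here is the sketch's choice.
	data, err := json.MarshalIndent(pkg, "", "    ")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", data)
}
```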
@@ -284,6 +300,52 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
 		if err != nil {
 			lg.Fatal().Err(err).Msg("json marshal")
 		}
-		fmt.Printf("%s\n", data)
+		fmt.Fprintf(w, "%s\n", data)
+	}
+}
+
+// walkStdlib generate whole docs from stdlib walk
+func walkStdlib(ctx context.Context, output, format string) {
+	lg := log.Ctx(ctx)
+
+	lg.Info().Str("output", output).Msg("generating stdlib")
+	err := fs.WalkDir(stdlib.FS, ".", func(p string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		if p == "." || !d.IsDir() {
+			return nil
+		}
+
+		filename := fmt.Sprintf("%s.%s", p, format)
+		filepath := path.Join(output, filename)
+
+		if err := os.MkdirAll(path.Dir(filepath), 0755); err != nil {
+			return err
+		}
+
+		f, err := os.Create(filepath)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+
+		pkg := fmt.Sprintf("dagger.io/%s", p)
+		lg.Info().Str("package", pkg).Str("format", format).Msg("generating doc")
+		val, err := loadCode(fmt.Sprintf("dagger.io/%s", p))
+		if err != nil {
+			if strings.Contains(err.Error(), "no CUE files") {
+				lg.Warn().Str("package", p).Err(err).Msg("ignoring")
+				return nil
+			}
+			return err
+		}
+
+		PrintDoc(ctx, f, p, val, format)
+		return nil
+	})
+
+	if err != nil {
+		lg.Fatal().Err(err).Msg("cannot generate stdlib doc")
 	}
 }
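walkStdlib is a standard io/fs walk: visit every directory of the embedded stdlib, skip files and the root, and create one `<package>.<format>` file under the output directory. A stripped-down sketch of the same pattern; it walks a local directory via os.DirFS instead of dagger's embedded stdlib.FS, and `render` is a hypothetical stand-in for loadCode + PrintDoc:

```go
package main

import (
	"fmt"
	"io"
	"io/fs"
	"log"
	"os"
	"path"
)

// render is a hypothetical stand-in for loadCode + PrintDoc: it writes a
// trivial doc for one package to the given writer.
func render(w io.Writer, pkg string) error {
	_, err := fmt.Fprintf(w, "Package %s\n", pkg)
	return err
}

func main() {
	output, format := "./docs", "md"

	// The real walkStdlib walks the embedded stdlib.FS; fs.WalkDir does not
	// care whether the fs.FS comes from embed or from a directory on disk.
	root := os.DirFS("./stdlib")

	err := fs.WalkDir(root, ".", func(p string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if p == "." || !d.IsDir() {
			return nil // one doc file per package directory
		}

		filename := path.Join(output, fmt.Sprintf("%s.%s", p, format))
		if err := os.MkdirAll(path.Dir(filename), 0755); err != nil {
			return err
		}

		f, err := os.Create(filename)
		if err != nil {
			return err
		}
		defer f.Close()

		return render(f, path.Join("dagger.io", p))
	})
	if err != nil {
		log.Fatal(err)
	}
}
```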
@@ -1,6 +1,7 @@
 package elb

 import (
+	"dagger.io/dagger/op"
 	"dagger.io/aws"
 )

@@ -37,34 +38,34 @@ import (
 	"pipefail",
 	#"""
 		if [ -s "$VHOST" ]; then
 			# We passed a vhost as input, try to recycle priority from previously allocated vhost
 			priority=$(aws elbv2 describe-rules \
 				--listener-arn "$LISTENER_ARN" | \
 				jq -r --arg vhost "$VHOST" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $VHOST) | .Priority')

-
			if [ -n "${priority}" ]; then
				echo -n "${priority}" > /priority
				exit 0
			fi
		fi
+
		# Grab a priority random from 1-50k and check if available, retry 10 times if none available
		priority=0
		for i in {1..10}
		do
			p=$(shuf -i 1-50000 -n 1)
			# Find the next priority available that we can allocate
			aws elbv2 describe-rules \
				--listener-arn "$LISTENER_ARN" \
				| jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
			priority="${p}"
			break
		done
		if [ "${priority}" -lt 1 ]; then
			echo "Error: cannot determine a Rule priority"
			exit 1
		fi
		echo -n "${priority}" > /priority
	"""#,
 ]
 env: {
@@ -1,8 +1,8 @@
 package rds

 import (
+	"dagger.io/dagger/op"
 	"encoding/json"
-	"dagger.io/dagger"
 	"dagger.io/aws"
 )

@@ -41,26 +41,26 @@ import (
 	"-eo",
 	"pipefail",
 	#"""
 		echo "dbType: $DB_TYPE"

 		sql="CREATE DATABASE \`"$NAME" \`"
 		if [ "$DB_TYPE" = postgres ]; then
 			sql="CREATE DATABASE \""$NAME"\""
 		fi

 		echo "$NAME" >> /db_created

 		aws rds-data execute-statement \
 			--resource-arn "$DB_ARN" \
 			--secret-arn "$SECRET_ARN" \
 			--sql "$sql" \
 			--database "$DB_TYPE" \
 			--no-include-result-metadata \
 			|& tee /tmp/out
 		exit_code=${PIPESTATUS[0]}
 		if [ $exit_code -ne 0 ]; then
 			grep -q "database exists\|already exists" /tmp/out || exit $exit_code
 		fi
 	"""#,
 ]
 env: {
@@ -84,10 +84,10 @@ import (
 	config: aws.#Config

 	// Username
-	username: dagger.#Secret @dagger(input)
+	username: string @dagger(input)

 	// Password
-	password: dagger.#Secret @dagger(input)
+	password: string @dagger(input)

 	// ARN of the database instance
 	dbArn: string @dagger(input)
@@ -119,60 +119,60 @@ import (
 	"-eo",
 	"pipefail",
 	#"""
 		echo "dbType: $DB_TYPE"

 		sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
 		if [ "$DB_TYPE" = postgres ]; then
 			sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 		fi

 		echo "$USERNAME" >> /username

 		aws rds-data execute-statement \
 			--resource-arn "$DB_ARN" \
 			--secret-arn "$SECRET_ARN" \
 			--sql "$sql" \
 			--database "$DB_TYPE" \
 			--no-include-result-metadata \
 			|& tee tmp/out
 		exit_code=${PIPESTATUS[0]}
 		if [ $exit_code -ne 0 ]; then
 			grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
 		fi

 		sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
 		if [ "$DB_TYPE" = postgres ]; then
 			sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 		fi

 		aws rds-data execute-statement \
 			--resource-arn "$DB_ARN" \
 			--secret-arn "$SECRET_ARN" \
 			--sql "$sql" \
 			--database "$DB_TYPE" \
 			--no-include-result-metadata

 		sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
 		if [ "$DB_TYPE" = postgres ]; then
 			sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
 		fi

 		if [ -s "$GRAND_DATABASE ]; then
 			aws rds-data execute-statement \
 				--resource-arn "$DB_ARN" \
 				--secret-arn "$SECRET_ARN" \
 				--sql "$sql" \
 				--database "$DB_TYPE" \
 				--no-include-result-metadata
 		fi
 	"""#,
 ]
 env: {
-	USERNAME: unsername
+	USERNAME: username
 	PASSWORD: password
 	DB_ARN: dbArn
 	SECRET_ARN: secretArn
-	GRAND_DATABASE: grandDatabase
+	GRAND_DATABASE: grantDatabase
 	DB_TYPE: dbType
 }
},
@@ -222,10 +222,10 @@ import (
 	"-eo",
 	"pipefail",
 	#"""
 		data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$DB_URN" )
 		echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out
 		echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out
 		cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out
 	"""#,
 ]
 env: DB_ARN: dbArn