Merge pull request #564 from samalba/cmd-doc-gen

cmd/doc: generate whole stdlib documentation
Sam Alba 2021-06-07 10:27:49 +02:00 committed by GitHub
commit a002d2e02e
3 changed files with 196 additions and 133 deletions
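In practice the two flags now split cleanly: --format (-f) selects txt, md or json, while --output (-o) switches the command from documenting a single package on stdout to walking the whole stdlib and writing one file per package under the given directory (any package argument is then ignored). Assuming the usual dagger doc entrypoint, and with an illustrative output path, that is roughly: dagger doc -f md dagger.io/aws for one package, or dagger doc -f md -o ./docs for the full reference.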


@@ -4,8 +4,10 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"io/fs"
"os"
"path"
"strings"
"text/tabwriter"
"unicode/utf8"
@@ -53,7 +55,7 @@ type PackageJSON struct {
var docCmd = &cobra.Command{
Use: "doc [PACKAGE | PATH]",
Short: "document a package",
Args: cobra.ExactArgs(1),
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
@@ -65,11 +67,24 @@ var docCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
format := viper.GetString("output")
format := viper.GetString("format")
if format != textFormat &&
format != markdownFormat &&
format != jsonFormat {
lg.Fatal().Msg("output must be either `txt`, `md` or `json`")
lg.Fatal().Msg("format must be either `txt`, `md` or `json`")
}
output := viper.GetString("output")
if output != "" {
if len(args) > 0 {
lg.Warn().Str("packageName", args[0]).Msg("arg is ignored when --output is set")
}
walkStdlib(ctx, output, format)
return
}
if len(args) < 1 {
lg.Fatal().Msg("need to specify package name in command argument")
}
packageName := args[0]
@@ -78,12 +93,13 @@ var docCmd = &cobra.Command{
if err != nil {
lg.Fatal().Err(err).Msg("cannot compile code")
}
PrintDoc(ctx, packageName, val, format)
PrintDoc(ctx, os.Stdout, packageName, val, format)
},
}
func init() {
docCmd.Flags().StringP("output", "o", textFormat, "Output format (txt|md)")
docCmd.Flags().StringP("format", "f", textFormat, "Output format (txt|md)")
docCmd.Flags().StringP("output", "o", "", "Output directory")
if err := viper.BindPFlags(docCmd.Flags()); err != nil {
panic(err)
@@ -132,9 +148,9 @@ func loadCode(packageName string) (*compiler.Value, error) {
}
// printValuesText (text) formats an array of Values on stdout
func printValuesText(libName string, values []*compiler.Value) {
w := tabwriter.NewWriter(os.Stdout, 0, 4, len(textPadding), ' ', 0)
fmt.Printf("\n%sInputs:\n", textPadding)
func printValuesText(iw io.Writer, libName string, values []*compiler.Value) {
fmt.Fprintf(iw, "\n%sInputs:\n", textPadding)
w := tabwriter.NewWriter(iw, 0, 4, len(textPadding), ' ', 0)
for _, i := range values {
docStr := terminalTrim(common.ValueDocString(i))
fmt.Fprintf(w, "\t\t%s\t%s\t%s\n",
@@ -144,8 +160,8 @@ func printValuesText(libName string, values []*compiler.Value) {
}
// printValuesMarkdown (markdown) formats an array of Values on stdout
func printValuesMarkdown(libName string, values []*compiler.Value) {
w := tabwriter.NewWriter(os.Stdout, 0, 4, len(textPadding), ' ', 0)
func printValuesMarkdown(iw io.Writer, libName string, values []*compiler.Value) {
w := tabwriter.NewWriter(iw, 0, 4, len(textPadding), ' ', 0)
fmt.Fprintf(w, "| Name\t| Type\t| Description \t|\n")
fmt.Fprintf(w, "| -------------\t|:-------------:\t|:-------------:\t|\n")
for _, i := range values {
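Both printers lean on text/tabwriter for column alignment; a minimal standalone sketch of the same pattern, with illustrative row values rather than a real package:

package main

import (
	"fmt"
	"os"
	"text/tabwriter"
)

func main() {
	// Same shape as printValuesMarkdown above: minwidth 0, tabwidth 4,
	// a fixed padding, ' ' as the pad character, no flags.
	w := tabwriter.NewWriter(os.Stdout, 0, 4, 4, ' ', 0)
	fmt.Fprintf(w, "| Name\t| Type\t| Description\t|\n")
	fmt.Fprintf(w, "| -------------\t|:-------------:\t|:-------------:\t|\n")
	fmt.Fprintf(w, "| region\t| string\t| AWS region to use\t|\n") // illustrative row
	w.Flush() // columns only line up once the writer is flushed
}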
@@ -173,7 +189,7 @@ func valuesToJSON(libName string, values []*compiler.Value) []ValueJSON {
return val
}
func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, format string) {
func PrintDoc(ctx context.Context, w io.Writer, packageName string, val *compiler.Value, format string) {
lg := log.Ctx(ctx)
fields, err := val.Fields(cue.Definitions(true))
@@ -185,20 +201,20 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
// Package Name + Description
switch format {
case textFormat:
fmt.Printf("Package %s\n", packageName)
fmt.Printf("\n%s\n", common.ValueDocString(val))
fmt.Fprintf(w, "Package %s\n", packageName)
fmt.Fprintf(w, "\n%s\n", common.ValueDocString(val))
case markdownFormat:
fmt.Printf("## Package %s\n", mdEscape(packageName))
fmt.Fprintf(w, "## Package %s\n", mdEscape(packageName))
comment := common.ValueDocString(val)
if comment == "-" {
fmt.Println()
break
}
fmt.Printf("\n%s\n\n", mdEscape(comment))
fmt.Fprintf(w, "\n%s\n\n", mdEscape(comment))
case jsonFormat:
packageJSON.Name = packageName
comment := common.ValueDocString(val)
if comment != "" {
if comment != "-" {
packageJSON.Description = comment
}
}
@@ -223,16 +239,16 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
comment := common.ValueDocString(v)
switch format {
case textFormat:
fmt.Printf("\n%s\n\n%s%s\n", name, textPadding, comment)
fmt.Fprintf(w, "\n%s\n\n%s%s\n", name, textPadding, comment)
case markdownFormat:
fmt.Printf("### %s\n\n", name)
fmt.Fprintf(w, "### %s\n\n", name)
if comment != "-" {
fmt.Printf("%s\n\n", mdEscape(comment))
fmt.Fprintf(w, "%s\n\n", mdEscape(comment))
}
case jsonFormat:
fieldJSON.Name = name
comment := common.ValueDocString(val)
if comment != "" {
if comment != "-" {
fieldJSON.Description = comment
}
}
@@ -242,17 +258,17 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
switch format {
case textFormat:
if len(inp) == 0 {
fmt.Printf("\n%sInputs: none\n", textPadding)
fmt.Fprintf(w, "\n%sInputs: none\n", textPadding)
break
}
printValuesText(name, inp)
printValuesText(w, name, inp)
case markdownFormat:
fmt.Printf("#### %s Inputs\n\n", mdEscape(name))
fmt.Fprintf(w, "#### %s Inputs\n\n", mdEscape(name))
if len(inp) == 0 {
fmt.Printf("_No input._\n\n")
fmt.Fprintf(w, "_No input._\n\n")
break
}
printValuesMarkdown(name, inp)
printValuesMarkdown(w, name, inp)
case jsonFormat:
fieldJSON.Inputs = valuesToJSON(name, inp)
}
@@ -262,17 +278,17 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
switch format {
case textFormat:
if len(out) == 0 {
fmt.Printf("\n%sOutputs: none\n", textPadding)
fmt.Fprintf(w, "\n%sOutputs: none\n", textPadding)
break
}
printValuesText(name, out)
printValuesText(w, name, out)
case markdownFormat:
fmt.Printf("#### %s Outputs\n\n", mdEscape(name))
fmt.Fprintf(w, "#### %s Outputs\n\n", mdEscape(name))
if len(out) == 0 {
fmt.Printf("_No output._\n\n")
fmt.Fprintf(w, "_No output._\n\n")
break
}
printValuesMarkdown(name, out)
printValuesMarkdown(w, name, out)
case jsonFormat:
fieldJSON.Outputs = valuesToJSON(name, out)
packageJSON.Fields = append(packageJSON.Fields, fieldJSON)
@@ -284,6 +300,52 @@ func PrintDoc(ctx context.Context, packageName string, val *compiler.Value, form
if err != nil {
lg.Fatal().Err(err).Msg("json marshal")
}
fmt.Printf("%s\n", data)
fmt.Fprintf(w, "%s\n", data)
}
}
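Threading an io.Writer through PrintDoc is what lets walkStdlib below send each package's documentation to its own file while the interactive path keeps writing to os.Stdout. A minimal sketch of that injection pattern with a stand-in renderer (render is illustrative, not the real PrintDoc):

package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

// render stands in for PrintDoc: it only knows about the writer it is handed.
func render(w io.Writer, pkg string) {
	fmt.Fprintf(w, "Package %s\n", pkg)
}

func main() {
	render(os.Stdout, "dagger.io/aws") // CLI path: straight to stdout

	var buf bytes.Buffer // file or test path: capture the output instead
	render(&buf, "dagger.io/aws")
	fmt.Print(buf.String())
}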
// walkStdlib generates docs for the whole stdlib by walking its directory tree
func walkStdlib(ctx context.Context, output, format string) {
lg := log.Ctx(ctx)
lg.Info().Str("output", output).Msg("generating stdlib")
err := fs.WalkDir(stdlib.FS, ".", func(p string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if p == "." || !d.IsDir() {
return nil
}
filename := fmt.Sprintf("%s.%s", p, format)
filepath := path.Join(output, filename)
if err := os.MkdirAll(path.Dir(filepath), 0755); err != nil {
return err
}
f, err := os.Create(filepath)
if err != nil {
return err
}
defer f.Close()
pkg := fmt.Sprintf("dagger.io/%s", p)
lg.Info().Str("package", pkg).Str("format", format).Msg("generating doc")
val, err := loadCode(pkg)
if err != nil {
if strings.Contains(err.Error(), "no CUE files") {
lg.Warn().Str("package", p).Err(err).Msg("ignoring")
return nil
}
return err
}
PrintDoc(ctx, f, p, val, format)
return nil
})
if err != nil {
lg.Fatal().Err(err).Msg("cannot generate stdlib doc")
}
}
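The traversal itself is plain fs.WalkDir over stdlib.FS: skip the root, skip plain files, and turn every package directory into an output path. A standalone sketch of the same walk over an in-memory fs.FS (the package names and the docs/ prefix are illustrative):

package main

import (
	"fmt"
	"io/fs"
	"path"
	"testing/fstest"
)

func main() {
	// An in-memory FS stands in for the stdlib.FS used above.
	stdFS := fstest.MapFS{
		"aws/ecs/ecs.cue":   {Data: []byte("package ecs\n")},
		"docker/docker.cue": {Data: []byte("package docker\n")},
	}
	err := fs.WalkDir(stdFS, ".", func(p string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Same filter as walkStdlib: only package directories get a doc file.
		if p == "." || !d.IsDir() {
			return nil
		}
		fmt.Println(path.Join("docs", p+".md")) // where the generated page would land
		return nil
	})
	if err != nil {
		panic(err)
	}
}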


@@ -1,6 +1,7 @@
package elb
import (
"dagger.io/dagger/op"
"dagger.io/aws"
)
@@ -37,34 +38,34 @@ import (
"pipefail",
#"""
if [ -s "$VHOST" ]; then
# We passed a vhost as input, try to recycle priority from previously allocated vhost
priority=$(aws elbv2 describe-rules \
--listener-arn "$LISTENER_ARN" | \
jq -r --arg vhost "$VHOST" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $VHOST) | .Priority')
# We passed a vhost as input, try to recycle priority from previously allocated vhost
priority=$(aws elbv2 describe-rules \
--listener-arn "$LISTENER_ARN" | \
jq -r --arg vhost "$VHOST" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $VHOST) | .Priority')
if [ -n "${priority}" ]; then
echo -n "${priority}" > /priority
exit 0
fi
fi
if [ -n "${priority}" ]; then
echo -n "${priority}" > /priority
exit 0
fi
fi
# Grab a random priority from 1-50k and check if available, retry 10 times if none available
priority=0
for i in {1..10}
do
p=$(shuf -i 1-50000 -n 1)
# Find the next priority available that we can allocate
aws elbv2 describe-rules \
--listener-arn "$LISTENER_ARN" \
| jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
priority="${p}"
break
done
if [ "${priority}" -lt 1 ]; then
echo "Error: cannot determine a Rule priority"
exit 1
fi
echo -n "${priority}" > /priority
# Grab a random priority from 1-50k and check if available, retry 10 times if none available
priority=0
for i in {1..10}
do
p=$(shuf -i 1-50000 -n 1)
# Find the next priority available that we can allocate
aws elbv2 describe-rules \
--listener-arn "$LISTENER_ARN" \
| jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
priority="${p}"
break
done
if [ "${priority}" -lt 1 ]; then
echo "Error: cannot determine a Rule priority"
exit 1
fi
echo -n "${priority}" > /priority
"""#,
]
env: {


@@ -1,8 +1,8 @@
package rds
import (
"dagger.io/dagger/op"
"encoding/json"
"dagger.io/dagger"
"dagger.io/aws"
)
@@ -41,26 +41,26 @@ import (
"-eo",
"pipefail",
#"""
echo "dbType: $DB_TYPE"
echo "dbType: $DB_TYPE"
sql="CREATE DATABASE \`"$NAME" \`"
if [ "$DB_TYPE" = postgres ]; then
sql="CREATE DATABASE \""$NAME"\""
fi
sql="CREATE DATABASE \`"$NAME" \`"
if [ "$DB_TYPE" = postgres ]; then
sql="CREATE DATABASE \""$NAME"\""
fi
echo "$NAME" >> /db_created
echo "$NAME" >> /db_created
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata \
|& tee /tmp/out
exit_code=${PIPESTATUS[0]}
if [ $exit_code -ne 0 ]; then
grep -q "database exists\|already exists" /tmp/out || exit $exit_code
fi
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata \
|& tee /tmp/out
exit_code=${PIPESTATUS[0]}
if [ $exit_code -ne 0 ]; then
grep -q "database exists\|already exists" /tmp/out || exit $exit_code
fi
"""#,
]
env: {
@@ -84,10 +84,10 @@ import (
config: aws.#Config
// Username
username: dagger.#Secret @dagger(input)
username: string @dagger(input)
// Password
password: dagger.#Secret @dagger(input)
password: string @dagger(input)
// ARN of the database instance
dbArn: string @dagger(input)
@@ -119,60 +119,60 @@ import (
"-eo",
"pipefail",
#"""
echo "dbType: $DB_TYPE"
echo "dbType: $DB_TYPE"
sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
if [ "$DB_TYPE" = postgres ]; then
sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi
sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
if [ "$DB_TYPE" = postgres ]; then
sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi
echo "$USERNAME" >> /username
echo "$USERNAME" >> /username
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata \
|& tee tmp/out
exit_code=${PIPESTATUS[0]}
if [ $exit_code -ne 0 ]; then
grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
fi
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata \
|& tee tmp/out
exit_code=${PIPESTATUS[0]}
if [ $exit_code -ne 0 ]; then
grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
fi
sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
if [ "$DB_TYPE" = postgres ]; then
sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi
sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
if [ "$DB_TYPE" = postgres ]; then
sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata
sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
if [ "$DB_TYPE" = postgres ]; then
sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
fi
sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
if [ "$DB_TYPE" = postgres ]; then
sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
fi
if [ -s "$GRAND_DATABASE ]; then
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata
fi
if [ -s "$GRAND_DATABASE ]; then
aws rds-data execute-statement \
--resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \
--sql "$sql" \
--database "$DB_TYPE" \
--no-include-result-metadata
fi
"""#,
]
env: {
USERNAME: unsername
USERNAME: username
PASSWORD: password
DB_ARN: dbArn
SECRET_ARN: secretArn
GRAND_DATABASE: grandDatabase
GRAND_DATABASE: grantDatabase
DB_TYPE: dbType
}
},
@@ -222,10 +222,10 @@ import (
"-eo",
"pipefail",
#"""
data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$DB_URN" )
echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out
echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out
cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out
data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$DB_URN" )
echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out
echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out
cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out
"""#,
]
env: DB_ARN: dbArn