docs: include required tools in source tree
In order to be able to build the documentation without internet access (as is required by some distribution build systems), all of the source code needed for the build needs to be available in the source tarball. This used to be possible with the docker-cli sources, but was accidentally broken by CI changes that switched to downloading the tools (by modifying go.mod as part of the docs build script). That pattern also made documentation builds less reproducible, since the tool version used was not based on the source code version.

Fixes: 7dc35c03fca5 ("validate manpages target")
Fixes: a650f4ddd008 ("switch to cli-docs-tool for yaml docs generation")

Signed-off-by: Aleksa Sarai <cyphar@cyphar.com>
Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
This commit is contained in:
parent b199ece92a
commit 47775a8fa0
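For context, the substance of the change in the scripts below is that the documentation generators are now built from vendored sources with `-mod=vendor`, rather than fetched from the network by editing a dummy go.mod. A minimal sketch of the new offline flow, using the commands visible in the updated scripts of this diff (`scripts/vendor init` is the repository's existing helper for setting up the dummy go.mod; paths and flags are copied from the scripts, not invented):

```sh
#!/usr/bin/env sh
set -eu

# Set up the dummy go.mod so the repository root builds as a module
# (helper already provided by this repository).
./scripts/vendor init

# Build the manpage generator and go-md2man purely from the vendored
# sources; -mod=vendor means no network access is required.
go build -mod=vendor -modfile=vendor.mod -tags manpages -o /tmp/gen-manpages ./man/generate.go
go build -mod=vendor -modfile=vendor.mod -o /tmp/go-md2man ./vendor/github.com/cpuguy83/go-md2man/v2

# Build the markdown/yaml docs generator the same way.
go build -mod=vendor -modfile=vendor.mod -tags docsgen -o /tmp/docsgen ./docs/generate/generate.go
```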
@ -1,13 +0,0 @@
|
||||
module github.com/docker/cli/docs/generate
|
||||
|
||||
// dummy go.mod to avoid dealing with dependencies specific
|
||||
// to docs generation and not really part of the project.
|
||||
|
||||
go 1.22.0
|
||||
|
||||
//require (
|
||||
// github.com/docker/cli v0.0.0+incompatible
|
||||
// github.com/docker/cli-docs-tool v0.8.0
|
||||
//)
|
||||
//
|
||||
//replace github.com/docker/cli v0.0.0+incompatible => ../../
|
@ -1,7 +0,0 @@
|
||||
//go:build tools
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "github.com/docker/cli-docs-tool"
|
||||
)
|
15
man/go.mod
@ -1,15 +0,0 @@
|
||||
module github.com/docker/cli/man
|
||||
|
||||
// dummy go.mod to avoid dealing with dependencies specific
|
||||
// to manpages generation and not really part of the project.
|
||||
|
||||
go 1.12.0
|
||||
|
||||
//require (
|
||||
// github.com/docker/cli v0.0.0+incompatible
|
||||
// github.com/cpuguy83/go-md2man/v2 v2.0.6
|
||||
// github.com/spf13/cobra v1.2.1
|
||||
// github.com/spf13/pflag v1.0.5
|
||||
//)
|
||||
//
|
||||
//replace github.com/docker/cli v0.0.0+incompatible => ../
|
@ -2,9 +2,4 @@
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "github.com/cpuguy83/go-md2man/v2"
|
||||
_ "github.com/spf13/cobra"
|
||||
_ "github.com/spf13/cobra/doc"
|
||||
_ "github.com/spf13/pflag"
|
||||
)
|
||||
import _ "github.com/cpuguy83/go-md2man/v2"
|
||||
|
@ -4,32 +4,18 @@ set -eu
|
||||
|
||||
: "${MD2MAN_VERSION=v2.0.6}"
|
||||
|
||||
export GO111MODULE=auto
|
||||
|
||||
function clean {
|
||||
rm -rf "$buildir"
|
||||
function clean() {
|
||||
rm -f go.mod
|
||||
}
|
||||
|
||||
buildir=$(mktemp -d -t docker-cli-docsgen.XXXXXXXXXX)
|
||||
export GO111MODULE=auto
|
||||
trap clean EXIT
|
||||
|
||||
(
|
||||
set -x
|
||||
cp -r . "$buildir/"
|
||||
cd "$buildir"
|
||||
# init dummy go.mod
|
||||
./scripts/vendor init
|
||||
# install go-md2man and copy man/tools.go in root folder
|
||||
# to be able to fetch the required dependencies
|
||||
go mod edit -modfile=vendor.mod -require=github.com/cpuguy83/go-md2man/v2@${MD2MAN_VERSION}
|
||||
cp man/tools.go .
|
||||
# update vendor
|
||||
./scripts/vendor update
|
||||
# build gen-manpages
|
||||
go build -mod=vendor -modfile=vendor.mod -tags manpages -o /tmp/gen-manpages ./man/generate.go
|
||||
# build go-md2man
|
||||
go build -mod=vendor -modfile=vendor.mod -o /tmp/go-md2man ./vendor/github.com/cpuguy83/go-md2man/v2
|
||||
)
|
||||
./scripts/vendor init
|
||||
# build gen-manpages
|
||||
go build -mod=vendor -modfile=vendor.mod -tags manpages -o /tmp/gen-manpages ./man/generate.go
|
||||
# build go-md2man
|
||||
go build -mod=vendor -modfile=vendor.mod -o /tmp/go-md2man ./vendor/github.com/cpuguy83/go-md2man/v2
|
||||
|
||||
mkdir -p man/man1
|
||||
(set -x ; /tmp/gen-manpages --root "." --target "$(pwd)/man/man1")
|
||||
|
@ -4,30 +4,16 @@ set -eu
|
||||
|
||||
: "${CLI_DOCS_TOOL_VERSION=v0.9.0}"
|
||||
|
||||
export GO111MODULE=auto
|
||||
|
||||
function clean {
|
||||
rm -rf "$buildir"
|
||||
function clean() {
|
||||
rm -f go.mod
|
||||
}
|
||||
|
||||
buildir=$(mktemp -d -t docker-cli-docsgen.XXXXXXXXXX)
|
||||
export GO111MODULE=auto
|
||||
trap clean EXIT
|
||||
|
||||
(
|
||||
set -x
|
||||
cp -r . "$buildir/"
|
||||
cd "$buildir"
|
||||
# init dummy go.mod
|
||||
./scripts/vendor init
|
||||
# install cli-docs-tool and copy docs/tools.go in root folder
|
||||
# to be able to fetch the required dependencies
|
||||
go mod edit -modfile=vendor.mod -require=github.com/docker/cli-docs-tool@${CLI_DOCS_TOOL_VERSION}
|
||||
cp docs/generate/tools.go .
|
||||
# update vendor
|
||||
./scripts/vendor update
|
||||
# build docsgen
|
||||
go build -mod=vendor -modfile=vendor.mod -tags docsgen -o /tmp/docsgen ./docs/generate/generate.go
|
||||
)
|
||||
./scripts/vendor init
|
||||
# build docsgen
|
||||
go build -mod=vendor -modfile=vendor.mod -tags docsgen -o /tmp/docsgen ./docs/generate/generate.go
|
||||
|
||||
(
|
||||
set -x
|
||||
|
@ -4,30 +4,16 @@ set -eu
|
||||
|
||||
: "${CLI_DOCS_TOOL_VERSION=v0.9.0}"
|
||||
|
||||
export GO111MODULE=auto
|
||||
|
||||
function clean {
|
||||
rm -rf "$buildir"
|
||||
function clean() {
|
||||
rm -f go.mod
|
||||
}
|
||||
|
||||
buildir=$(mktemp -d -t docker-cli-docsgen.XXXXXXXXXX)
|
||||
export GO111MODULE=auto
|
||||
trap clean EXIT
|
||||
|
||||
(
|
||||
set -x
|
||||
cp -r . "$buildir/"
|
||||
cd "$buildir"
|
||||
# init dummy go.mod
|
||||
./scripts/vendor init
|
||||
# install cli-docs-tool and copy docs/tools.go in root folder
|
||||
# to be able to fetch the required dependencies
|
||||
go mod edit -modfile=vendor.mod -require=github.com/docker/cli-docs-tool@${CLI_DOCS_TOOL_VERSION}
|
||||
cp docs/generate/tools.go .
|
||||
# update vendor
|
||||
./scripts/vendor update
|
||||
# build docsgen
|
||||
go build -mod=vendor -modfile=vendor.mod -tags docsgen -o /tmp/docsgen ./docs/generate/generate.go
|
||||
)
|
||||
./scripts/vendor init
|
||||
# build docsgen
|
||||
go build -mod=vendor -modfile=vendor.mod -tags docsgen -o /tmp/docsgen ./docs/generate/generate.go
|
||||
|
||||
mkdir -p docs/yaml
|
||||
set -x
|
||||
|
@ -9,6 +9,7 @@ go 1.23.0
|
||||
require (
|
||||
dario.cat/mergo v1.0.1
|
||||
github.com/containerd/platforms v1.0.0-rc.1
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6
|
||||
github.com/creack/pty v1.1.24
|
||||
github.com/distribution/reference v0.6.0
|
||||
github.com/docker/cli-docs-tool v0.9.0
|
||||
@ -88,6 +89,7 @@ require (
|
||||
github.com/prometheus/common v0.55.0 // indirect
|
||||
github.com/prometheus/procfs v0.15.1 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
go.etcd.io/etcd/raft/v3 v3.5.16 // indirect
|
||||
|
@ -37,6 +37,8 @@ github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3
|
||||
github.com/containerd/platforms v1.0.0-rc.1 h1:83KIq4yy1erSRgOVHNk1HYdPvzdJ5CnsWaRoJX4C41E=
|
||||
github.com/containerd/platforms v1.0.0-rc.1/go.mod h1:J71L7B+aiM5SdIEqmd9wp6THLVRzJGXfNuWCZCllLA4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
|
||||
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
|
||||
@ -236,6 +238,7 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
|
||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
|
2
vendor/github.com/cpuguy83/go-md2man/v2/.gitignore
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
go-md2man
|
||||
bin
|
6
vendor/github.com/cpuguy83/go-md2man/v2/.golangci.yml
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
# For documentation, see https://golangci-lint.run/usage/configuration/
|
||||
|
||||
linters:
|
||||
enable:
|
||||
- gofumpt
|
||||
|
14
vendor/github.com/cpuguy83/go-md2man/v2/Dockerfile
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
ARG GO_VERSION=1.21
|
||||
|
||||
FROM --platform=${BUILDPLATFORM} golang:${GO_VERSION} AS build
|
||||
COPY . /go/src/github.com/cpuguy83/go-md2man
|
||||
WORKDIR /go/src/github.com/cpuguy83/go-md2man
|
||||
ARG TARGETOS TARGETARCH TARGETVARIANT
|
||||
RUN \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
--mount=type=cache,target=/root/.cache/go-build \
|
||||
make build
|
||||
|
||||
FROM scratch
|
||||
COPY --from=build /go/src/github.com/cpuguy83/go-md2man/bin/go-md2man /go-md2man
|
||||
ENTRYPOINT ["/go-md2man"]
|
21
vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Brian Goff
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
49
vendor/github.com/cpuguy83/go-md2man/v2/Makefile
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
GO111MODULE ?= on
|
||||
|
||||
export GO111MODULE
|
||||
|
||||
GOOS ?= $(if $(TARGETOS),$(TARGETOS),)
|
||||
GOARCH ?= $(if $(TARGETARCH),$(TARGETARCH),)
|
||||
|
||||
ifeq ($(TARGETARCH),amd64)
|
||||
GOAMD64 ?= $(TARGETVARIANT)
|
||||
endif
|
||||
|
||||
ifeq ($(TARGETARCH),arm)
|
||||
GOARM ?= $(TARGETVARIANT:v%=%)
|
||||
endif
|
||||
|
||||
ifneq ($(GOOS),)
|
||||
export GOOS
|
||||
endif
|
||||
|
||||
ifneq ($(GOARCH),)
|
||||
export GOARCH
|
||||
endif
|
||||
|
||||
ifneq ($(GOAMD64),)
|
||||
export GOAMD64
|
||||
endif
|
||||
|
||||
ifneq ($(GOARM),)
|
||||
export GOARM
|
||||
endif
|
||||
|
||||
.PHONY:
|
||||
build: bin/go-md2man
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
@rm -rf bin/*
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
@go test $(TEST_FLAGS) ./...
|
||||
|
||||
bin/go-md2man: go.mod go.sum md2man/* *.go
|
||||
@mkdir -p bin
|
||||
CGO_ENABLED=0 go build $(BUILD_FLAGS) -o $@
|
||||
|
||||
.PHONY: mod
|
||||
mod:
|
||||
@go mod tidy
|
14
vendor/github.com/cpuguy83/go-md2man/v2/README.md
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
go-md2man
|
||||
=========
|
||||
|
||||
Converts markdown into roff (man pages).
|
||||
|
||||
Uses [blackfriday](https://github.com/russross/blackfriday) to process markdown into man pages.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
go install github.com/cpuguy83/go-md2man@latest
|
||||
|
||||
go-md2man -in /path/to/markdownfile.md -out /manfile/output/path
|
||||
```
|
40
vendor/github.com/cpuguy83/go-md2man/v2/go-md2man.1.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
go-md2man 1 "January 2015" go-md2man "User Manual"
|
||||
==================================================
|
||||
|
||||
# NAME
|
||||
go-md2man - Convert markdown files into manpages
|
||||
|
||||
# SYNOPSIS
|
||||
**go-md2man** [**-in**=*/path/to/md/file*] [**-out**=*/path/to/output*]
|
||||
|
||||
# DESCRIPTION
|
||||
**go-md2man** converts standard markdown formatted documents into manpages. It is
|
||||
written purely in Go so as to reduce dependencies on 3rd party libs.
|
||||
|
||||
By default, the input is stdin and the output is stdout.
|
||||
|
||||
# OPTIONS
|
||||
|
||||
**-in=**_file_
|
||||
: Path to markdown file to be processed.
|
||||
|
||||
Defaults to stdin.
|
||||
|
||||
**-out=**_file_
|
||||
: Path to output processed file.
|
||||
|
||||
Defaults to stdout.
|
||||
|
||||
# EXAMPLES
|
||||
Convert the markdown file *go-md2man.1.md* into a manpage:
|
||||
```
|
||||
go-md2man < go-md2man.1.md > go-md2man.1
|
||||
```
|
||||
|
||||
Same, but using command line arguments instead of shell redirection:
|
||||
```
|
||||
go-md2man -in=go-md2man.1.md -out=go-md2man.1
|
||||
```
|
||||
|
||||
# HISTORY
|
||||
January 2015, Originally compiled by Brian Goff (cpuguy83@gmail.com).
|
53
vendor/github.com/cpuguy83/go-md2man/v2/md2man.go
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
|
||||
"github.com/cpuguy83/go-md2man/v2/md2man"
|
||||
)
|
||||
|
||||
var (
|
||||
inFilePath = flag.String("in", "", "Path to file to be processed (default: stdin)")
|
||||
outFilePath = flag.String("out", "", "Path to output processed file (default: stdout)")
|
||||
)
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
flag.Parse()
|
||||
|
||||
inFile := os.Stdin
|
||||
if *inFilePath != "" {
|
||||
inFile, err = os.Open(*inFilePath)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
defer inFile.Close() // nolint: errcheck
|
||||
|
||||
doc, err := ioutil.ReadAll(inFile)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
out := md2man.Render(doc)
|
||||
|
||||
outFile := os.Stdout
|
||||
if *outFilePath != "" {
|
||||
outFile, err = os.Create(*outFilePath)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer outFile.Close() // nolint: errcheck
|
||||
}
|
||||
_, err = outFile.Write(out)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
62
vendor/github.com/cpuguy83/go-md2man/v2/md2man/debug.go
generated
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
package md2man
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
func fmtListFlags(flags blackfriday.ListType) string {
|
||||
knownFlags := []struct {
|
||||
name string
|
||||
flag blackfriday.ListType
|
||||
}{
|
||||
{"ListTypeOrdered", blackfriday.ListTypeOrdered},
|
||||
{"ListTypeDefinition", blackfriday.ListTypeDefinition},
|
||||
{"ListTypeTerm", blackfriday.ListTypeTerm},
|
||||
{"ListItemContainsBlock", blackfriday.ListItemContainsBlock},
|
||||
{"ListItemBeginningOfList", blackfriday.ListItemBeginningOfList},
|
||||
{"ListItemEndOfList", blackfriday.ListItemEndOfList},
|
||||
}
|
||||
|
||||
var f []string
|
||||
for _, kf := range knownFlags {
|
||||
if flags&kf.flag != 0 {
|
||||
f = append(f, kf.name)
|
||||
flags &^= kf.flag
|
||||
}
|
||||
}
|
||||
if flags != 0 {
|
||||
f = append(f, fmt.Sprintf("Unknown(%#x)", flags))
|
||||
}
|
||||
return strings.Join(f, "|")
|
||||
}
|
||||
|
||||
type debugDecorator struct {
|
||||
blackfriday.Renderer
|
||||
}
|
||||
|
||||
func depth(node *blackfriday.Node) int {
|
||||
d := 0
|
||||
for n := node.Parent; n != nil; n = n.Parent {
|
||||
d++
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
func (d *debugDecorator) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
fmt.Fprintf(os.Stderr, "%s%s %v %v\n",
|
||||
strings.Repeat(" ", depth(node)),
|
||||
map[bool]string{true: "+", false: "-"}[entering],
|
||||
node,
|
||||
fmtListFlags(node.ListFlags))
|
||||
var b strings.Builder
|
||||
status := d.Renderer.RenderNode(io.MultiWriter(&b, w), node, entering)
|
||||
if b.Len() > 0 {
|
||||
fmt.Fprintf(os.Stderr, ">> %q\n", b.String())
|
||||
}
|
||||
return status
|
||||
}
|
23
vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
package md2man
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// Render converts a markdown document into a roff formatted document.
|
||||
func Render(doc []byte) []byte {
|
||||
renderer := NewRoffRenderer()
|
||||
var r blackfriday.Renderer = renderer
|
||||
if v, _ := strconv.ParseBool(os.Getenv("MD2MAN_DEBUG")); v {
|
||||
r = &debugDecorator{Renderer: r}
|
||||
}
|
||||
|
||||
return blackfriday.Run(doc,
|
||||
[]blackfriday.Option{
|
||||
blackfriday.WithRenderer(r),
|
||||
blackfriday.WithExtensions(renderer.GetExtensions()),
|
||||
}...)
|
||||
}
|
417
vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
generated
vendored
Normal file
@ -0,0 +1,417 @@
|
||||
package md2man
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// roffRenderer implements the blackfriday.Renderer interface for creating
|
||||
// roff format (manpages) from markdown text
|
||||
type roffRenderer struct {
|
||||
listCounters []int
|
||||
firstHeader bool
|
||||
listDepth int
|
||||
}
|
||||
|
||||
const (
|
||||
titleHeader = ".TH "
|
||||
topLevelHeader = "\n\n.SH "
|
||||
secondLevelHdr = "\n.SH "
|
||||
otherHeader = "\n.SS "
|
||||
crTag = "\n"
|
||||
emphTag = "\\fI"
|
||||
emphCloseTag = "\\fP"
|
||||
strongTag = "\\fB"
|
||||
strongCloseTag = "\\fP"
|
||||
breakTag = "\n.br\n"
|
||||
paraTag = "\n.PP\n"
|
||||
hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n"
|
||||
linkTag = "\n\\[la]"
|
||||
linkCloseTag = "\\[ra]"
|
||||
codespanTag = "\\fB"
|
||||
codespanCloseTag = "\\fR"
|
||||
codeTag = "\n.EX\n"
|
||||
codeCloseTag = ".EE\n" // Do not prepend a newline character since code blocks, by definition, include a newline already (or at least as how blackfriday gives us on).
|
||||
quoteTag = "\n.PP\n.RS\n"
|
||||
quoteCloseTag = "\n.RE\n"
|
||||
listTag = "\n.RS\n"
|
||||
listCloseTag = ".RE\n"
|
||||
dtTag = "\n.TP\n"
|
||||
dd2Tag = "\n"
|
||||
tableStart = "\n.TS\nallbox;\n"
|
||||
tableEnd = ".TE\n"
|
||||
tableCellStart = "T{\n"
|
||||
tableCellEnd = "\nT}\n"
|
||||
tablePreprocessor = `'\" t`
|
||||
)
|
||||
|
||||
// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents
|
||||
// from markdown
|
||||
func NewRoffRenderer() *roffRenderer { // nolint: golint
|
||||
return &roffRenderer{}
|
||||
}
|
||||
|
||||
// GetExtensions returns the list of extensions used by this renderer implementation
|
||||
func (*roffRenderer) GetExtensions() blackfriday.Extensions {
|
||||
return blackfriday.NoIntraEmphasis |
|
||||
blackfriday.Tables |
|
||||
blackfriday.FencedCode |
|
||||
blackfriday.SpaceHeadings |
|
||||
blackfriday.Footnotes |
|
||||
blackfriday.Titleblock |
|
||||
blackfriday.DefinitionLists
|
||||
}
|
||||
|
||||
// RenderHeader handles outputting the header at document start
|
||||
func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {
|
||||
// We need to walk the tree to check if there are any tables.
|
||||
// If there are, we need to enable the roff table preprocessor.
|
||||
ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
if node.Type == blackfriday.Table {
|
||||
out(w, tablePreprocessor+"\n")
|
||||
return blackfriday.Terminate
|
||||
}
|
||||
return blackfriday.GoToNext
|
||||
})
|
||||
|
||||
// disable hyphenation
|
||||
out(w, ".nh\n")
|
||||
}
|
||||
|
||||
// RenderFooter handles outputting the footer at the document end; the roff
|
||||
// renderer has no footer information
|
||||
func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {
|
||||
}
|
||||
|
||||
// RenderNode is called for each node in a markdown document; based on the node
|
||||
// type the equivalent roff output is sent to the writer
|
||||
func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
walkAction := blackfriday.GoToNext
|
||||
|
||||
switch node.Type {
|
||||
case blackfriday.Text:
|
||||
// Special case: format the NAME section as required for proper whatis parsing.
|
||||
// Refer to the lexgrog(1) and groff_man(7) manual pages for details.
|
||||
if node.Parent != nil &&
|
||||
node.Parent.Type == blackfriday.Paragraph &&
|
||||
node.Parent.Prev != nil &&
|
||||
node.Parent.Prev.Type == blackfriday.Heading &&
|
||||
node.Parent.Prev.FirstChild != nil &&
|
||||
bytes.EqualFold(node.Parent.Prev.FirstChild.Literal, []byte("NAME")) {
|
||||
before, after, found := bytesCut(node.Literal, []byte(" - "))
|
||||
escapeSpecialChars(w, before)
|
||||
if found {
|
||||
out(w, ` \- `)
|
||||
escapeSpecialChars(w, after)
|
||||
}
|
||||
} else {
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
}
|
||||
case blackfriday.Softbreak:
|
||||
out(w, crTag)
|
||||
case blackfriday.Hardbreak:
|
||||
out(w, breakTag)
|
||||
case blackfriday.Emph:
|
||||
if entering {
|
||||
out(w, emphTag)
|
||||
} else {
|
||||
out(w, emphCloseTag)
|
||||
}
|
||||
case blackfriday.Strong:
|
||||
if entering {
|
||||
out(w, strongTag)
|
||||
} else {
|
||||
out(w, strongCloseTag)
|
||||
}
|
||||
case blackfriday.Link:
|
||||
// Don't render the link text for automatic links, because this
|
||||
// will only duplicate the URL in the roff output.
|
||||
// See https://daringfireball.net/projects/markdown/syntax#autolink
|
||||
if !bytes.Equal(node.LinkData.Destination, node.FirstChild.Literal) {
|
||||
out(w, string(node.FirstChild.Literal))
|
||||
}
|
||||
// Hyphens in a link must be escaped to avoid word-wrap in the rendered man page.
|
||||
escapedLink := strings.ReplaceAll(string(node.LinkData.Destination), "-", "\\-")
|
||||
out(w, linkTag+escapedLink+linkCloseTag)
|
||||
walkAction = blackfriday.SkipChildren
|
||||
case blackfriday.Image:
|
||||
// ignore images
|
||||
walkAction = blackfriday.SkipChildren
|
||||
case blackfriday.Code:
|
||||
out(w, codespanTag)
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
out(w, codespanCloseTag)
|
||||
case blackfriday.Document:
|
||||
break
|
||||
case blackfriday.Paragraph:
|
||||
if entering {
|
||||
if r.listDepth > 0 {
|
||||
// roff .PP markers break lists
|
||||
if node.Prev != nil { // continued paragraph
|
||||
if node.Prev.Type == blackfriday.List && node.Prev.ListFlags&blackfriday.ListTypeDefinition == 0 {
|
||||
out(w, ".IP\n")
|
||||
} else {
|
||||
out(w, crTag)
|
||||
}
|
||||
}
|
||||
} else if node.Prev != nil && node.Prev.Type == blackfriday.Heading {
|
||||
out(w, crTag)
|
||||
} else {
|
||||
out(w, paraTag)
|
||||
}
|
||||
} else {
|
||||
if node.Next == nil || node.Next.Type != blackfriday.List {
|
||||
out(w, crTag)
|
||||
}
|
||||
}
|
||||
case blackfriday.BlockQuote:
|
||||
if entering {
|
||||
out(w, quoteTag)
|
||||
} else {
|
||||
out(w, quoteCloseTag)
|
||||
}
|
||||
case blackfriday.Heading:
|
||||
r.handleHeading(w, node, entering)
|
||||
case blackfriday.HorizontalRule:
|
||||
out(w, hruleTag)
|
||||
case blackfriday.List:
|
||||
r.handleList(w, node, entering)
|
||||
case blackfriday.Item:
|
||||
r.handleItem(w, node, entering)
|
||||
case blackfriday.CodeBlock:
|
||||
out(w, codeTag)
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
out(w, codeCloseTag)
|
||||
case blackfriday.Table:
|
||||
r.handleTable(w, node, entering)
|
||||
case blackfriday.TableHead:
|
||||
case blackfriday.TableBody:
|
||||
case blackfriday.TableRow:
|
||||
// no action as cell entries do all the nroff formatting
|
||||
return blackfriday.GoToNext
|
||||
case blackfriday.TableCell:
|
||||
r.handleTableCell(w, node, entering)
|
||||
case blackfriday.HTMLSpan:
|
||||
// ignore other HTML tags
|
||||
case blackfriday.HTMLBlock:
|
||||
if bytes.HasPrefix(node.Literal, []byte("<!--")) {
|
||||
break // ignore comments, no warning
|
||||
}
|
||||
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
|
||||
default:
|
||||
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
|
||||
}
|
||||
return walkAction
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
switch node.Level {
|
||||
case 1:
|
||||
if !r.firstHeader {
|
||||
out(w, titleHeader)
|
||||
r.firstHeader = true
|
||||
break
|
||||
}
|
||||
out(w, topLevelHeader)
|
||||
case 2:
|
||||
out(w, secondLevelHdr)
|
||||
default:
|
||||
out(w, otherHeader)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
openTag := listTag
|
||||
closeTag := listCloseTag
|
||||
if (entering && r.listDepth == 0) || (!entering && r.listDepth == 1) {
|
||||
openTag = crTag
|
||||
closeTag = ""
|
||||
}
|
||||
if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||
// tags for definition lists handled within Item node
|
||||
openTag = ""
|
||||
closeTag = ""
|
||||
}
|
||||
if entering {
|
||||
r.listDepth++
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
r.listCounters = append(r.listCounters, 1)
|
||||
}
|
||||
out(w, openTag)
|
||||
} else {
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
r.listCounters = r.listCounters[:len(r.listCounters)-1]
|
||||
}
|
||||
out(w, closeTag)
|
||||
r.listDepth--
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1]))
|
||||
r.listCounters[len(r.listCounters)-1]++
|
||||
} else if node.ListFlags&blackfriday.ListTypeTerm != 0 {
|
||||
// DT (definition term): line just before DD (see below).
|
||||
out(w, dtTag)
|
||||
} else if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||
// DD (definition description): line that starts with ": ".
|
||||
//
|
||||
// We have to distinguish between the first DD and the
|
||||
// subsequent ones, as there should be no vertical
|
||||
// whitespace between the DT and the first DD.
|
||||
if node.Prev != nil && node.Prev.ListFlags&(blackfriday.ListTypeTerm|blackfriday.ListTypeDefinition) == blackfriday.ListTypeDefinition {
|
||||
if node.Prev.Type == blackfriday.Item &&
|
||||
node.Prev.LastChild != nil &&
|
||||
node.Prev.LastChild.Type == blackfriday.List &&
|
||||
node.Prev.LastChild.ListFlags&blackfriday.ListTypeDefinition == 0 {
|
||||
out(w, ".IP\n")
|
||||
} else {
|
||||
out(w, dd2Tag)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
out(w, ".IP \\(bu 2\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
out(w, tableStart)
|
||||
// call walker to count cells (and rows?) so format section can be produced
|
||||
columns := countColumns(node)
|
||||
out(w, strings.Repeat("l ", columns)+"\n")
|
||||
out(w, strings.Repeat("l ", columns)+".\n")
|
||||
} else {
|
||||
out(w, tableEnd)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
var start string
|
||||
if node.Prev != nil && node.Prev.Type == blackfriday.TableCell {
|
||||
start = "\t"
|
||||
}
|
||||
if node.IsHeader {
|
||||
start += strongTag
|
||||
} else if nodeLiteralSize(node) > 30 {
|
||||
start += tableCellStart
|
||||
}
|
||||
out(w, start)
|
||||
} else {
|
||||
var end string
|
||||
if node.IsHeader {
|
||||
end = strongCloseTag
|
||||
} else if nodeLiteralSize(node) > 30 {
|
||||
end = tableCellEnd
|
||||
}
|
||||
if node.Next == nil && end != tableCellEnd {
|
||||
// Last cell: need to carriage return if we are at the end of the
|
||||
// header row and content isn't wrapped in a "tablecell"
|
||||
end += crTag
|
||||
}
|
||||
out(w, end)
|
||||
}
|
||||
}
|
||||
|
||||
func nodeLiteralSize(node *blackfriday.Node) int {
|
||||
total := 0
|
||||
for n := node.FirstChild; n != nil; n = n.FirstChild {
|
||||
total += len(n.Literal)
|
||||
}
|
||||
return total
|
||||
}
|
||||
|
||||
// because roff format requires knowing the column count before outputting any table
|
||||
// data we need to walk a table tree and count the columns
|
||||
func countColumns(node *blackfriday.Node) int {
|
||||
var columns int
|
||||
|
||||
node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
switch node.Type {
|
||||
case blackfriday.TableRow:
|
||||
if !entering {
|
||||
return blackfriday.Terminate
|
||||
}
|
||||
case blackfriday.TableCell:
|
||||
if entering {
|
||||
columns++
|
||||
}
|
||||
default:
|
||||
}
|
||||
return blackfriday.GoToNext
|
||||
})
|
||||
return columns
|
||||
}
|
||||
|
||||
func out(w io.Writer, output string) {
|
||||
io.WriteString(w, output) // nolint: errcheck
|
||||
}
|
||||
|
||||
func escapeSpecialChars(w io.Writer, text []byte) {
|
||||
scanner := bufio.NewScanner(bytes.NewReader(text))
|
||||
|
||||
// count the number of lines in the text
|
||||
// we need to know this to avoid adding a newline after the last line
|
||||
n := bytes.Count(text, []byte{'\n'})
|
||||
idx := 0
|
||||
|
||||
for scanner.Scan() {
|
||||
dt := scanner.Bytes()
|
||||
if idx < n {
|
||||
idx++
|
||||
dt = append(dt, '\n')
|
||||
}
|
||||
escapeSpecialCharsLine(w, dt)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func escapeSpecialCharsLine(w io.Writer, text []byte) {
|
||||
for i := 0; i < len(text); i++ {
|
||||
// escape initial apostrophe or period
|
||||
if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
|
||||
out(w, "\\&")
|
||||
}
|
||||
|
||||
// directly copy normal characters
|
||||
org := i
|
||||
|
||||
for i < len(text) && text[i] != '\\' {
|
||||
i++
|
||||
}
|
||||
if i > org {
|
||||
w.Write(text[org:i]) // nolint: errcheck
|
||||
}
|
||||
|
||||
// escape a character
|
||||
if i >= len(text) {
|
||||
break
|
||||
}
|
||||
|
||||
w.Write([]byte{'\\', text[i]}) // nolint: errcheck
|
||||
}
|
||||
}
|
||||
|
||||
// bytesCut is a copy of [bytes.Cut] to provide compatibility with go1.17
|
||||
// and older. We can remove this once we drop support for go1.17 and older.
|
||||
func bytesCut(s, sep []byte) (before, after []byte, found bool) {
|
||||
if i := bytes.Index(s, sep); i >= 0 {
|
||||
return s[:i], s[i+len(sep):], true
|
||||
}
|
||||
return s, nil, false
|
||||
}
|
2
vendor/github.com/docker/cli-docs-tool/.dockerignore
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
/coverage.txt
|
||||
/example/docs
|
2
vendor/github.com/docker/cli-docs-tool/.gitignore
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
/coverage.txt
|
||||
/example/docs
|
34
vendor/github.com/docker/cli-docs-tool/.golangci.yml
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
run:
|
||||
timeout: 10m
|
||||
|
||||
linters:
|
||||
enable:
|
||||
- depguard
|
||||
- gofmt
|
||||
- goimports
|
||||
- revive
|
||||
- govet
|
||||
- importas
|
||||
- ineffassign
|
||||
- misspell
|
||||
- typecheck
|
||||
- errname
|
||||
- makezero
|
||||
- whitespace
|
||||
disable-all: true
|
||||
|
||||
linters-settings:
|
||||
depguard:
|
||||
rules:
|
||||
main:
|
||||
deny:
|
||||
- pkg: io/ioutil
|
||||
desc: The io/ioutil package has been deprecated, see https://go.dev/doc/go1.16#ioutil
|
||||
importas:
|
||||
no-unaliased: true
|
||||
|
||||
issues:
|
||||
exclude-rules:
|
||||
- linters:
|
||||
- revive
|
||||
text: "stutters"
|
103
vendor/github.com/docker/cli-docs-tool/Dockerfile
generated
vendored
Normal file
@ -0,0 +1,103 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
# Copyright 2021 cli-docs-tool authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
ARG GO_VERSION="1.23"
|
||||
ARG XX_VERSION="1.6.1"
|
||||
ARG GOLANGCI_LINT_VERSION="v1.62"
|
||||
ARG ADDLICENSE_VERSION="v1.1.1"
|
||||
|
||||
ARG LICENSE_ARGS="-c cli-docs-tool -l apache"
|
||||
ARG LICENSE_FILES=".*\(Dockerfile\|\.go\|\.hcl\|\.sh\)"
|
||||
|
||||
FROM golangci/golangci-lint:${GOLANGCI_LINT_VERSION}-alpine AS golangci-lint
|
||||
FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx
|
||||
|
||||
FROM --platform=$BUILDPLATFORM golang:${GO_VERSION}-alpine AS base
|
||||
RUN apk add --no-cache cpio findutils git linux-headers
|
||||
ENV CGO_ENABLED=0
|
||||
WORKDIR /src
|
||||
COPY --link --from=xx / /
|
||||
|
||||
FROM base AS addlicense
|
||||
ARG ADDLICENSE_VERSION
|
||||
ARG TARGETPLATFORM
|
||||
RUN --mount=target=/root/.cache,type=cache \
|
||||
--mount=type=cache,target=/go/pkg/mod <<EOT
|
||||
set -ex
|
||||
xx-go install "github.com/google/addlicense@${ADDLICENSE_VERSION}"
|
||||
mkdir /out
|
||||
if ! xx-info is-cross; then
|
||||
mv /go/bin/addlicense /out
|
||||
else
|
||||
mv /go/bin/*/addlicense* /out
|
||||
fi
|
||||
EOT
|
||||
|
||||
FROM base AS vendored
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
go mod tidy && go mod download && \
|
||||
mkdir /out && cp go.mod go.sum /out
|
||||
|
||||
FROM scratch AS vendor-update
|
||||
COPY --from=vendored /out /
|
||||
|
||||
FROM vendored AS vendor-validate
|
||||
RUN --mount=type=bind,target=.,rw <<EOT
|
||||
set -e
|
||||
git add -A
|
||||
cp -rf /out/* .
|
||||
diff=$(git status --porcelain -- go.mod go.sum)
|
||||
if [ -n "$diff" ]; then
|
||||
echo >&2 'ERROR: Vendor result differs. Please vendor your package with "docker buildx bake vendor"'
|
||||
echo "$diff"
|
||||
exit 1
|
||||
fi
|
||||
EOT
|
||||
|
||||
FROM base AS lint
|
||||
RUN --mount=type=bind,target=. \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=from=golangci-lint,source=/usr/bin/golangci-lint,target=/usr/bin/golangci-lint \
|
||||
golangci-lint run ./...
|
||||
|
||||
FROM base AS license-set
|
||||
ARG LICENSE_ARGS
|
||||
ARG LICENSE_FILES
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=from=addlicense,source=/out/addlicense,target=/usr/bin/addlicense \
|
||||
find . -regex "${LICENSE_FILES}" | xargs addlicense ${LICENSE_ARGS} \
|
||||
&& mkdir /out \
|
||||
&& find . -regex "${LICENSE_FILES}" | cpio -pdm /out
|
||||
|
||||
FROM scratch AS license-update
|
||||
COPY --from=set /out /
|
||||
|
||||
FROM base AS license-validate
|
||||
ARG LICENSE_ARGS
|
||||
ARG LICENSE_FILES
|
||||
RUN --mount=type=bind,target=. \
|
||||
--mount=from=addlicense,source=/out/addlicense,target=/usr/bin/addlicense \
|
||||
find . -regex "${LICENSE_FILES}" | xargs addlicense -check ${LICENSE_ARGS}
|
||||
|
||||
FROM vendored AS test
|
||||
RUN --mount=type=bind,target=. \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
go test -v -coverprofile=/tmp/coverage.txt -covermode=atomic ./...
|
||||
|
||||
FROM scratch AS test-coverage
|
||||
COPY --from=test /tmp/coverage.txt /coverage.txt
|
67
vendor/github.com/docker/cli-docs-tool/README.md
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
[](https://pkg.go.dev/github.com/docker/cli-docs-tool)
|
||||
[](https://github.com/docker/cli-docs-tool/actions?query=workflow%3Atest)
|
||||
[](https://goreportcard.com/report/github.com/docker/cli-docs-tool)
|
||||
|
||||
## About
|
||||
|
||||
This is a library containing utilities to generate (reference) documentation
|
||||
for the [`docker` CLI](https://github.com/docker/cli) on [docs.docker.com](https://docs.docker.com/reference/).
|
||||
|
||||
## Disclaimer
|
||||
|
||||
This library is intended for use by Docker's CLIs, and is not intended to be a
|
||||
general-purpose utility. Various bits are hard-coded or make assumptions that
|
||||
are very specific to our use-case. Contributions are welcome, but we will not
|
||||
accept contributions to make this a general-purpose module.
|
||||
|
||||
## Usage
|
||||
|
||||
To generate the documentation it's recommended to do so using a Go submodule
|
||||
in your repository.
|
||||
|
||||
We will use the example of `docker/buildx` and create a Go submodule in a
|
||||
`docs` folder (recommended):
|
||||
|
||||
```console
|
||||
$ mkdir docs
|
||||
$ cd ./docs
|
||||
$ go mod init github.com/docker/buildx/docs
|
||||
$ go get github.com/docker/cli-docs-tool
|
||||
```
|
||||
|
||||
Your `go.mod` should look like this:
|
||||
|
||||
```text
|
||||
module github.com/docker/buildx/docs
|
||||
|
||||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/docker/cli-docs-tool v0.0.0
|
||||
)
|
||||
```
|
||||
|
||||
Next, create a file named `main.go` inside that directory containing the
|
||||
following Go code from [`example/main.go`](example/main.go).
|
||||
|
||||
Running this example should produce the following output:
|
||||
|
||||
```console
|
||||
$ go run main.go
|
||||
INFO: Generating Markdown for "docker buildx bake"
|
||||
INFO: Generating Markdown for "docker buildx build"
|
||||
INFO: Generating Markdown for "docker buildx create"
|
||||
INFO: Generating Markdown for "docker buildx du"
|
||||
...
|
||||
INFO: Generating YAML for "docker buildx uninstall"
|
||||
INFO: Generating YAML for "docker buildx use"
|
||||
INFO: Generating YAML for "docker buildx version"
|
||||
INFO: Generating YAML for "docker buildx"
|
||||
```
|
||||
|
||||
Generated docs will be available in the `./docs` folder of the project.
|
||||
|
||||
## Contributing
|
||||
|
||||
Want to contribute? Awesome! You can find information about contributing to
|
||||
this project in the [CONTRIBUTING.md](/.github/CONTRIBUTING.md)
|
191
vendor/github.com/docker/cli-docs-tool/clidocstool.go
generated
vendored
Normal file
@ -0,0 +1,191 @@
|
||||
// Copyright 2017 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package clidocstool
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
)
|
||||
|
||||
// Options defines options for cli-docs-tool
|
||||
type Options struct {
|
||||
Root *cobra.Command
|
||||
SourceDir string
|
||||
TargetDir string
|
||||
Plugin bool
|
||||
|
||||
ManHeader *doc.GenManHeader
|
||||
}
|
||||
|
||||
// Client represents an active cli-docs-tool object
|
||||
type Client struct {
|
||||
root *cobra.Command
|
||||
source string
|
||||
target string
|
||||
plugin bool
|
||||
|
||||
manHeader *doc.GenManHeader
|
||||
}
|
||||
|
||||
// New initializes a new cli-docs-tool client
|
||||
func New(opts Options) (*Client, error) {
|
||||
if opts.Root == nil {
|
||||
return nil, errors.New("root cmd required")
|
||||
}
|
||||
if len(opts.SourceDir) == 0 {
|
||||
return nil, errors.New("source dir required")
|
||||
}
|
||||
c := &Client{
|
||||
root: opts.Root,
|
||||
source: opts.SourceDir,
|
||||
plugin: opts.Plugin,
|
||||
manHeader: opts.ManHeader,
|
||||
}
|
||||
if len(opts.TargetDir) == 0 {
|
||||
c.target = c.source
|
||||
} else {
|
||||
c.target = opts.TargetDir
|
||||
}
|
||||
if err := os.MkdirAll(c.target, 0o755); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c, nil
|
||||
}
|
||||
|
||||
// GenAllTree creates all structured ref files for this command and
|
||||
// all descendants in the directory given.
|
||||
func (c *Client) GenAllTree() error {
|
||||
var err error
|
||||
if err = c.GenMarkdownTree(c.root); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = c.GenYamlTree(c.root); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = c.GenManTree(c.root); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadLongDescription gets long descriptions and examples from markdown.
|
||||
func (c *Client) loadLongDescription(cmd *cobra.Command, generator string) error {
|
||||
if cmd.HasSubCommands() {
|
||||
for _, sub := range cmd.Commands() {
|
||||
if err := c.loadLongDescription(sub, generator); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
name := cmd.CommandPath()
|
||||
if i := strings.Index(name, " "); i >= 0 {
|
||||
// remove root command / binary name
|
||||
name = name[i+1:]
|
||||
}
|
||||
if name == "" {
|
||||
return nil
|
||||
}
|
||||
mdFile := strings.ReplaceAll(name, " ", "_") + ".md"
|
||||
sourcePath := filepath.Join(c.source, mdFile)
|
||||
content, err := os.ReadFile(sourcePath)
|
||||
if os.IsNotExist(err) {
|
||||
log.Printf("WARN: %s does not exist, skipping Markdown examples for %s docs\n", mdFile, generator)
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
applyDescriptionAndExamples(cmd, string(content))
|
||||
return nil
|
||||
}
|
||||
|
||||
// applyDescriptionAndExamples fills in cmd.Long and cmd.Example with the
|
||||
// "Description" and "Examples" H2 sections in mdString (if present).
|
||||
func applyDescriptionAndExamples(cmd *cobra.Command, mdString string) {
|
||||
sections := getSections(mdString)
|
||||
var (
|
||||
anchors []string
|
||||
md string
|
||||
)
|
||||
if sections["description"] != "" {
|
||||
md, anchors = cleanupMarkDown(sections["description"])
|
||||
cmd.Long = md
|
||||
anchors = append(anchors, md)
|
||||
}
|
||||
if sections["examples"] != "" {
|
||||
md, anchors = cleanupMarkDown(sections["examples"])
|
||||
cmd.Example = md
|
||||
anchors = append(anchors, md)
|
||||
}
|
||||
if len(anchors) > 0 {
|
||||
if cmd.Annotations == nil {
|
||||
cmd.Annotations = make(map[string]string)
|
||||
}
|
||||
cmd.Annotations["anchors"] = strings.Join(anchors, ",")
|
||||
}
|
||||
}
|
||||
|
||||
func fileExists(f string) bool {
|
||||
info, err := os.Stat(f)
|
||||
if os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
return !info.IsDir()
|
||||
}
|
||||
|
||||
func copyFile(src string, dst string) error {
|
||||
sf, err := os.Open(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer sf.Close()
|
||||
df, err := os.OpenFile(dst, os.O_CREATE|os.O_WRONLY, 0o600)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer df.Close()
|
||||
_, err = io.Copy(df, sf)
|
||||
return err
|
||||
}
|
||||
|
||||
func getAliases(cmd *cobra.Command) []string {
|
||||
if a := cmd.Annotations["aliases"]; a != "" {
|
||||
aliases := strings.Split(a, ",")
|
||||
for i := 0; i < len(aliases); i++ {
|
||||
aliases[i] = strings.TrimSpace(aliases[i])
|
||||
}
|
||||
return aliases
|
||||
}
|
||||
if len(cmd.Aliases) == 0 {
|
||||
return cmd.Aliases
|
||||
}
|
||||
|
||||
var parentPath string
|
||||
if cmd.HasParent() {
|
||||
parentPath = cmd.Parent().CommandPath() + " "
|
||||
}
|
||||
aliases := []string{cmd.CommandPath()}
|
||||
for _, a := range cmd.Aliases {
|
||||
aliases = append(aliases, parentPath+a)
|
||||
}
|
||||
return aliases
|
||||
}
|
74
vendor/github.com/docker/cli-docs-tool/clidocstool_man.go
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
// Copyright 2016 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package clidocstool
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
)
|
||||
|
||||
// GenManTree generates a man page for the command and all descendants.
|
||||
// If SOURCE_DATE_EPOCH is set, in order to allow reproducible package
|
||||
// builds, we explicitly set the build time to SOURCE_DATE_EPOCH.
|
||||
func (c *Client) GenManTree(cmd *cobra.Command) error {
|
||||
if err := c.loadLongDescription(cmd, "man"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if epoch := os.Getenv("SOURCE_DATE_EPOCH"); c.manHeader != nil && epoch != "" {
|
||||
unixEpoch, err := strconv.ParseInt(epoch, 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid SOURCE_DATE_EPOCH: %v", err)
|
||||
}
|
||||
now := time.Unix(unixEpoch, 0)
|
||||
c.manHeader.Date = &now
|
||||
}
|
||||
|
||||
return c.genManTreeCustom(cmd)
|
||||
}
|
||||
|
||||
func (c *Client) genManTreeCustom(cmd *cobra.Command) error {
|
||||
for _, sc := range cmd.Commands() {
|
||||
if err := c.genManTreeCustom(sc); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// always disable the addition of [flags] to the usage
|
||||
cmd.DisableFlagsInUseLine = true
|
||||
|
||||
// always disable "spf13/cobra" auto gen tag
|
||||
cmd.DisableAutoGenTag = true
|
||||
|
||||
// Skip the root command altogether, to prevent generating a useless
|
||||
// md file for plugins.
|
||||
if c.plugin && !cmd.HasParent() {
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("INFO: Generating Man for %q", cmd.CommandPath())
|
||||
|
||||
return doc.GenManTreeFromOpts(cmd, doc.GenManTreeOptions{
|
||||
Header: c.manHeader,
|
||||
Path: c.target,
|
||||
CommandSeparator: "-",
|
||||
})
|
||||
}
|
277
vendor/github.com/docker/cli-docs-tool/clidocstool_md.go
generated
vendored
Normal file
@ -0,0 +1,277 @@
|
||||
// Copyright 2021 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package clidocstool
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"text/template"
|
||||
|
||||
"github.com/docker/cli-docs-tool/annotation"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
)
|
||||
|
||||
var (
|
||||
nlRegexp = regexp.MustCompile(`\r?\n`)
|
||||
adjustSep = regexp.MustCompile(`\|:---(\s+)`)
|
||||
)
|
||||
|
||||
// GenMarkdownTree will generate a markdown page for this command and all
|
||||
// descendants in the directory given.
|
||||
func (c *Client) GenMarkdownTree(cmd *cobra.Command) error {
|
||||
for _, sc := range cmd.Commands() {
|
||||
if err := c.GenMarkdownTree(sc); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// always disable the addition of [flags] to the usage
|
||||
cmd.DisableFlagsInUseLine = true
|
||||
|
||||
// Skip the root command altogether, to prevent generating a useless
|
||||
// md file for plugins.
|
||||
if c.plugin && !cmd.HasParent() {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Skip hidden command
|
||||
if cmd.Hidden {
|
||||
log.Printf("INFO: Skipping Markdown for %q (hidden command)", cmd.CommandPath())
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("INFO: Generating Markdown for %q", cmd.CommandPath())
|
||||
mdFile := mdFilename(cmd)
|
||||
sourcePath := filepath.Join(c.source, mdFile)
|
||||
targetPath := filepath.Join(c.target, mdFile)
|
||||
|
||||
// check recursively to handle inherited annotations
|
||||
for curr := cmd; curr != nil; curr = curr.Parent() {
|
||||
if _, ok := cmd.Annotations[annotation.CodeDelimiter]; !ok {
|
||||
if cd, cok := curr.Annotations[annotation.CodeDelimiter]; cok {
|
||||
if cmd.Annotations == nil {
|
||||
cmd.Annotations = map[string]string{}
|
||||
}
|
||||
cmd.Annotations[annotation.CodeDelimiter] = cd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !fileExists(sourcePath) {
|
||||
var icBuf bytes.Buffer
|
||||
icTpl, err := template.New("ic").Option("missingkey=error").Parse(`# {{ .Command }}
|
||||
|
||||
<!---MARKER_GEN_START-->
|
||||
<!---MARKER_GEN_END-->
|
||||
|
||||
`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = icTpl.Execute(&icBuf, struct {
|
||||
Command string
|
||||
}{
|
||||
Command: cmd.CommandPath(),
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = os.WriteFile(targetPath, icBuf.Bytes(), 0o644); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if err := copyFile(sourcePath, targetPath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(targetPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cs := string(content)
|
||||
|
||||
start := strings.Index(cs, "<!---MARKER_GEN_START-->")
|
||||
end := strings.Index(cs, "<!---MARKER_GEN_END-->")
|
||||
|
||||
if start == -1 {
|
||||
return fmt.Errorf("no start marker in %s", mdFile)
|
||||
}
|
||||
if end == -1 {
|
||||
return fmt.Errorf("no end marker in %s", mdFile)
|
||||
}
|
||||
|
||||
out, err := mdCmdOutput(cmd, cs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cont := cs[:start] + "<!---MARKER_GEN_START-->" + "\n" + out + "\n" + cs[end:]
|
||||
|
||||
fi, err := os.Stat(targetPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = os.WriteFile(targetPath, []byte(cont), fi.Mode()); err != nil {
|
||||
return fmt.Errorf("failed to write %s: %w", targetPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func mdFilename(cmd *cobra.Command) string {
|
||||
name := cmd.CommandPath()
|
||||
if i := strings.Index(name, " "); i >= 0 {
|
||||
name = name[i+1:]
|
||||
}
|
||||
return strings.ReplaceAll(name, " ", "_") + ".md"
|
||||
}
|
||||
|
||||
func mdMakeLink(txt, link string, f *pflag.Flag, isAnchor bool) string {
|
||||
link = "#" + link
|
||||
annotations, ok := f.Annotations[annotation.ExternalURL]
|
||||
if ok && len(annotations) > 0 {
|
||||
link = annotations[0]
|
||||
} else {
|
||||
if !isAnchor {
|
||||
return txt
|
||||
}
|
||||
}
|
||||
|
||||
return "[" + txt + "](" + link + ")"
|
||||
}
|
||||
|
||||
type mdTable struct {
|
||||
out *strings.Builder
|
||||
tabWriter *tabwriter.Writer
|
||||
}
|
||||
|
||||
func newMdTable(headers ...string) *mdTable {
|
||||
w := &strings.Builder{}
|
||||
t := &mdTable{
|
||||
out: w,
|
||||
// Using tabwriter.Debug, which uses "|" as separator instead of tabs,
|
||||
// which is what we want. It's a bit of a hack, but does the job :)
|
||||
tabWriter: tabwriter.NewWriter(w, 5, 5, 1, ' ', tabwriter.Debug),
|
||||
}
|
||||
t.addHeader(headers...)
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *mdTable) addHeader(cols ...string) {
|
||||
t.AddRow(cols...)
|
||||
_, _ = t.tabWriter.Write([]byte("|" + strings.Repeat(":---\t", len(cols)) + "\n"))
|
||||
}
|
||||
|
||||
func (t *mdTable) AddRow(cols ...string) {
|
||||
for i := range cols {
|
||||
cols[i] = mdEscapePipe(cols[i])
|
||||
}
|
||||
_, _ = t.tabWriter.Write([]byte("| " + strings.Join(cols, "\t ") + "\t\n"))
|
||||
}
|
||||
|
||||
func (t *mdTable) String() string {
|
||||
_ = t.tabWriter.Flush()
|
||||
return adjustSep.ReplaceAllStringFunc(t.out.String()+"\n", func(in string) string {
|
||||
return strings.ReplaceAll(in, " ", "-")
|
||||
})
|
||||
}
|
||||
|
||||
func mdCmdOutput(cmd *cobra.Command, old string) (string, error) {
|
||||
b := &strings.Builder{}
|
||||
|
||||
desc := cmd.Short
|
||||
if cmd.Long != "" {
|
||||
desc = cmd.Long
|
||||
}
|
||||
if desc != "" {
|
||||
b.WriteString(desc + "\n\n")
|
||||
}
|
||||
|
||||
if aliases := getAliases(cmd); len(aliases) != 0 {
|
||||
b.WriteString("### Aliases\n\n")
|
||||
b.WriteString("`" + strings.Join(aliases, "`, `") + "`")
|
||||
b.WriteString("\n\n")
|
||||
}
|
||||
|
||||
if len(cmd.Commands()) != 0 {
|
||||
b.WriteString("### Subcommands\n\n")
|
||||
table := newMdTable("Name", "Description")
|
||||
for _, c := range cmd.Commands() {
|
||||
if c.Hidden {
|
||||
continue
|
||||
}
|
||||
table.AddRow(fmt.Sprintf("[`%s`](%s)", c.Name(), mdFilename(c)), c.Short)
|
||||
}
|
||||
b.WriteString(table.String() + "\n")
|
||||
}
|
||||
|
||||
// add inherited flags before checking for flags availability
|
||||
cmd.Flags().AddFlagSet(cmd.InheritedFlags())
|
||||
|
||||
if cmd.Flags().HasAvailableFlags() {
|
||||
b.WriteString("### Options\n\n")
|
||||
table := newMdTable("Name", "Type", "Default", "Description")
|
||||
cmd.Flags().VisitAll(func(f *pflag.Flag) {
|
||||
if f.Hidden {
|
||||
return
|
||||
}
|
||||
isLink := strings.Contains(old, "<a name=\""+f.Name+"\"></a>")
|
||||
var name string
|
||||
if f.Shorthand != "" {
|
||||
name = mdMakeLink("`-"+f.Shorthand+"`", f.Name, f, isLink)
|
||||
name += ", "
|
||||
}
|
||||
name += mdMakeLink("`--"+f.Name+"`", f.Name, f, isLink)
|
||||
|
||||
ftype := "`" + f.Value.Type() + "`"
|
||||
|
||||
var defval string
|
||||
if v, ok := f.Annotations[annotation.DefaultValue]; ok && len(v) > 0 {
|
||||
defval = v[0]
|
||||
if cd, ok := f.Annotations[annotation.CodeDelimiter]; ok {
|
||||
defval = strings.ReplaceAll(defval, cd[0], "`")
|
||||
} else if cd, ok := cmd.Annotations[annotation.CodeDelimiter]; ok {
|
||||
defval = strings.ReplaceAll(defval, cd, "`")
|
||||
}
|
||||
} else if f.DefValue != "" && ((f.Value.Type() != "bool" && f.DefValue != "true") || (f.Value.Type() == "bool" && f.DefValue == "true")) && f.DefValue != "[]" {
|
||||
defval = "`" + f.DefValue + "`"
|
||||
}
|
||||
|
||||
usage := f.Usage
|
||||
if cd, ok := f.Annotations[annotation.CodeDelimiter]; ok {
|
||||
usage = strings.ReplaceAll(usage, cd[0], "`")
|
||||
} else if cd, ok := cmd.Annotations[annotation.CodeDelimiter]; ok {
|
||||
usage = strings.ReplaceAll(usage, cd, "`")
|
||||
}
|
||||
table.AddRow(name, ftype, defval, mdReplaceNewline(usage))
|
||||
})
|
||||
b.WriteString(table.String())
|
||||
}
|
||||
|
||||
return b.String(), nil
|
||||
}
|
||||
|
||||
func mdEscapePipe(s string) string {
|
||||
return strings.ReplaceAll(s, `|`, `\|`)
|
||||
}
|
||||
|
||||
func mdReplaceNewline(s string) string {
|
||||
return nlRegexp.ReplaceAllString(s, "<br>")
|
||||
}
|
vendor/github.com/docker/cli-docs-tool/clidocstool_yaml.go (generated, vendored, new file, 378 lines)
@@ -0,0 +1,378 @@
|
||||
// Copyright 2017 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package clidocstool
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/docker/cli-docs-tool/annotation"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type cmdOption struct {
|
||||
Option string
|
||||
Shorthand string `yaml:",omitempty"`
|
||||
ValueType string `yaml:"value_type,omitempty"`
|
||||
DefaultValue string `yaml:"default_value,omitempty"`
|
||||
Description string `yaml:",omitempty"`
|
||||
DetailsURL string `yaml:"details_url,omitempty"` // DetailsURL contains an anchor-id or link for more information on this flag
|
||||
Deprecated bool
|
||||
Hidden bool
|
||||
MinAPIVersion string `yaml:"min_api_version,omitempty"`
|
||||
Experimental bool
|
||||
ExperimentalCLI bool
|
||||
Kubernetes bool
|
||||
Swarm bool
|
||||
OSType string `yaml:"os_type,omitempty"`
|
||||
}
|
||||
|
||||
type cmdDoc struct {
|
||||
Name string `yaml:"command"`
|
||||
SeeAlso []string `yaml:"parent,omitempty"`
|
||||
Version string `yaml:"engine_version,omitempty"`
|
||||
Aliases string `yaml:",omitempty"`
|
||||
Short string `yaml:",omitempty"`
|
||||
Long string `yaml:",omitempty"`
|
||||
Usage string `yaml:",omitempty"`
|
||||
Pname string `yaml:",omitempty"`
|
||||
Plink string `yaml:",omitempty"`
|
||||
Cname []string `yaml:",omitempty"`
|
||||
Clink []string `yaml:",omitempty"`
|
||||
Options []cmdOption `yaml:",omitempty"`
|
||||
InheritedOptions []cmdOption `yaml:"inherited_options,omitempty"`
|
||||
Example string `yaml:"examples,omitempty"`
|
||||
Deprecated bool
|
||||
Hidden bool
|
||||
MinAPIVersion string `yaml:"min_api_version,omitempty"`
|
||||
Experimental bool
|
||||
ExperimentalCLI bool
|
||||
Kubernetes bool
|
||||
Swarm bool
|
||||
OSType string `yaml:"os_type,omitempty"`
|
||||
}
|
||||
|
||||
// GenYamlTree creates yaml structured ref files for this command and all descendants
|
||||
// in the directory given. This function may not work
|
||||
// correctly if your command names have `-` in them. If you have `cmd` with two
|
||||
// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third`
|
||||
// it is undefined which help output will be in the file `cmd-sub-third.yaml`.
|
||||
func (c *Client) GenYamlTree(cmd *cobra.Command) error {
|
||||
emptyStr := func(string) string { return "" }
|
||||
if err := c.loadLongDescription(cmd, "yaml"); err != nil {
|
||||
return err
|
||||
}
|
||||
return c.genYamlTreeCustom(cmd, emptyStr)
|
||||
}
|
||||
|
||||
// genYamlTreeCustom creates yaml structured ref files.
|
||||
func (c *Client) genYamlTreeCustom(cmd *cobra.Command, filePrepender func(string) string) error {
|
||||
for _, sc := range cmd.Commands() {
|
||||
if !sc.Runnable() && !sc.HasAvailableSubCommands() {
|
||||
// skip non-runnable commands without subcommands
|
||||
// but *do* generate YAML for hidden and deprecated commands
|
||||
// the YAML will have those included as metadata, so that the
|
||||
// documentation repository can decide whether or not to present them
|
||||
continue
|
||||
}
|
||||
if err := c.genYamlTreeCustom(sc, filePrepender); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// always disable the addition of [flags] to the usage
|
||||
cmd.DisableFlagsInUseLine = true
|
||||
|
||||
// The "root" command used in the generator is just a "stub", and only has a
|
||||
// list of subcommands, but not (e.g.) global options/flags. We should fix
|
||||
// that, so that the YAML file for the docker "root" command contains the
|
||||
// global flags.
|
||||
|
||||
// Skip the root command altogether, to prevent generating a useless
|
||||
// YAML file for plugins.
|
||||
if c.plugin && !cmd.HasParent() {
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("INFO: Generating YAML for %q", cmd.CommandPath())
|
||||
basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".yaml"
|
||||
target := filepath.Join(c.target, basename)
|
||||
f, err := os.Create(target)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if _, err := io.WriteString(f, filePrepender(target)); err != nil {
|
||||
return err
|
||||
}
|
||||
return c.genYamlCustom(cmd, f)
|
||||
}
|
||||
|
||||
// genYamlCustom creates custom yaml output.
|
||||
// nolint: gocyclo
|
||||
func (c *Client) genYamlCustom(cmd *cobra.Command, w io.Writer) error {
|
||||
const (
|
||||
// shortMaxWidth is the maximum width for the "Short" description before
|
||||
// we force YAML to use multi-line syntax. The goal is to make the total
|
||||
// width fit within 80 characters. This value is based on 80 characters
|
||||
// minus the width of the field, colon, and whitespace ('short: ').
|
||||
shortMaxWidth = 73
|
||||
|
||||
// longMaxWidth is the maximum width for the "Long" description before
|
||||
// we force YAML to use multi-line syntax. The goal is to make the total
|
||||
// width fit within 80 characters. This value is based on 80 characters
|
||||
// minus the width of the field, colon, and whitespace ('long: ').
|
||||
longMaxWidth = 74
|
||||
)
|
||||
|
||||
// necessary to add inherited flags otherwise some
|
||||
// fields are not properly declared like usage
|
||||
cmd.Flags().AddFlagSet(cmd.InheritedFlags())
|
||||
|
||||
cliDoc := cmdDoc{
|
||||
Name: cmd.CommandPath(),
|
||||
Aliases: strings.Join(getAliases(cmd), ", "),
|
||||
Short: forceMultiLine(cmd.Short, shortMaxWidth),
|
||||
Long: forceMultiLine(cmd.Long, longMaxWidth),
|
||||
Example: cmd.Example,
|
||||
Deprecated: len(cmd.Deprecated) > 0,
|
||||
Hidden: cmd.Hidden,
|
||||
}
|
||||
|
||||
if len(cliDoc.Long) == 0 {
|
||||
cliDoc.Long = cliDoc.Short
|
||||
}
|
||||
|
||||
if cmd.Runnable() {
|
||||
cliDoc.Usage = cmd.UseLine()
|
||||
}
|
||||
|
||||
// check recursively to handle inherited annotations
|
||||
for curr := cmd; curr != nil; curr = curr.Parent() {
|
||||
if v, ok := curr.Annotations["version"]; ok && cliDoc.MinAPIVersion == "" {
|
||||
cliDoc.MinAPIVersion = v
|
||||
}
|
||||
if _, ok := curr.Annotations["experimental"]; ok && !cliDoc.Experimental {
|
||||
cliDoc.Experimental = true
|
||||
}
|
||||
if _, ok := curr.Annotations["experimentalCLI"]; ok && !cliDoc.ExperimentalCLI {
|
||||
cliDoc.ExperimentalCLI = true
|
||||
}
|
||||
if _, ok := curr.Annotations["kubernetes"]; ok && !cliDoc.Kubernetes {
|
||||
cliDoc.Kubernetes = true
|
||||
}
|
||||
if _, ok := curr.Annotations["swarm"]; ok && !cliDoc.Swarm {
|
||||
cliDoc.Swarm = true
|
||||
}
|
||||
if o, ok := curr.Annotations["ostype"]; ok && cliDoc.OSType == "" {
|
||||
cliDoc.OSType = o
|
||||
}
|
||||
if _, ok := cmd.Annotations[annotation.CodeDelimiter]; !ok {
|
||||
if cd, cok := curr.Annotations[annotation.CodeDelimiter]; cok {
|
||||
if cmd.Annotations == nil {
|
||||
cmd.Annotations = map[string]string{}
|
||||
}
|
||||
cmd.Annotations[annotation.CodeDelimiter] = cd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
anchors := make(map[string]struct{})
|
||||
if a, ok := cmd.Annotations["anchors"]; ok && a != "" {
|
||||
for _, anchor := range strings.Split(a, ",") {
|
||||
anchors[anchor] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
flags := cmd.NonInheritedFlags()
|
||||
if flags.HasFlags() {
|
||||
cliDoc.Options = genFlagResult(cmd, flags, anchors)
|
||||
}
|
||||
flags = cmd.InheritedFlags()
|
||||
if flags.HasFlags() {
|
||||
cliDoc.InheritedOptions = genFlagResult(cmd, flags, anchors)
|
||||
}
|
||||
|
||||
if hasSeeAlso(cmd) {
|
||||
if cmd.HasParent() {
|
||||
parent := cmd.Parent()
|
||||
cliDoc.Pname = parent.CommandPath()
|
||||
cliDoc.Plink = strings.Replace(cliDoc.Pname, " ", "_", -1) + ".yaml"
|
||||
cmd.VisitParents(func(c *cobra.Command) {
|
||||
if c.DisableAutoGenTag {
|
||||
cmd.DisableAutoGenTag = c.DisableAutoGenTag
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
children := cmd.Commands()
|
||||
sort.Sort(byName(children))
|
||||
|
||||
for _, child := range children {
|
||||
if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
cliDoc.Cname = append(cliDoc.Cname, cliDoc.Name+" "+child.Name())
|
||||
cliDoc.Clink = append(cliDoc.Clink, strings.Replace(cliDoc.Name+"_"+child.Name(), " ", "_", -1)+".yaml")
|
||||
}
|
||||
}
|
||||
|
||||
final, err := yaml.Marshal(&cliDoc)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if _, err := fmt.Fprintln(w, string(final)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func genFlagResult(cmd *cobra.Command, flags *pflag.FlagSet, anchors map[string]struct{}) []cmdOption {
|
||||
var (
|
||||
result []cmdOption
|
||||
opt cmdOption
|
||||
)
|
||||
|
||||
const (
|
||||
// defaultValueMaxWidth is the maximum width for the "default_value" field before
|
||||
// we force YAML to use multi-line syntax. The goal is to make the total
|
||||
// width fit within 80 characters. This value is based on 80 characters
|
||||
// minus the width of the field, colon, and whitespace (' default_value: ').
|
||||
defaultValueMaxWidth = 64
|
||||
|
||||
// descriptionMaxWidth is the maximum width for the "description" field before
|
||||
// we force YAML to use multi-line syntax. The goal is to make the total
|
||||
// width fit within 80 characters. This value is based on 80 characters
|
||||
// minus the width of the field, colon, and whitespace (' description: ').
|
||||
descriptionMaxWidth = 66
|
||||
)
|
||||
|
||||
flags.VisitAll(func(flag *pflag.Flag) {
|
||||
opt = cmdOption{
|
||||
Option: flag.Name,
|
||||
ValueType: flag.Value.Type(),
|
||||
Deprecated: len(flag.Deprecated) > 0,
|
||||
Hidden: flag.Hidden,
|
||||
}
|
||||
|
||||
var defval string
|
||||
if v, ok := flag.Annotations[annotation.DefaultValue]; ok && len(v) > 0 {
|
||||
defval = v[0]
|
||||
if cd, ok := flag.Annotations[annotation.CodeDelimiter]; ok {
|
||||
defval = strings.ReplaceAll(defval, cd[0], "`")
|
||||
} else if cd, ok := cmd.Annotations[annotation.CodeDelimiter]; ok {
|
||||
defval = strings.ReplaceAll(defval, cd, "`")
|
||||
}
|
||||
} else {
|
||||
defval = flag.DefValue
|
||||
}
|
||||
opt.DefaultValue = forceMultiLine(defval, defaultValueMaxWidth)
|
||||
|
||||
usage := flag.Usage
|
||||
if cd, ok := flag.Annotations[annotation.CodeDelimiter]; ok {
|
||||
usage = strings.ReplaceAll(usage, cd[0], "`")
|
||||
} else if cd, ok := cmd.Annotations[annotation.CodeDelimiter]; ok {
|
||||
usage = strings.ReplaceAll(usage, cd, "`")
|
||||
}
|
||||
opt.Description = forceMultiLine(usage, descriptionMaxWidth)
|
||||
|
||||
if v, ok := flag.Annotations[annotation.ExternalURL]; ok && len(v) > 0 {
|
||||
opt.DetailsURL = strings.TrimPrefix(v[0], "https://docs.docker.com")
|
||||
} else if _, ok = anchors[flag.Name]; ok {
|
||||
opt.DetailsURL = "#" + flag.Name
|
||||
}
|
||||
|
||||
// TODO: handle shorthands that are marked deprecated with an empty message.
|
||||
// In that case flag.ShorthandDeprecated is empty even though the shorthand is
|
||||
// deprecated, so checking len(flag.ShorthandDeprecated) > 0 alone misses it.
|
||||
if !(len(flag.ShorthandDeprecated) > 0) && len(flag.Shorthand) > 0 {
|
||||
opt.Shorthand = flag.Shorthand
|
||||
}
|
||||
if _, ok := flag.Annotations["experimental"]; ok {
|
||||
opt.Experimental = true
|
||||
}
|
||||
if _, ok := flag.Annotations["deprecated"]; ok {
|
||||
opt.Deprecated = true
|
||||
}
|
||||
if v, ok := flag.Annotations["version"]; ok {
|
||||
opt.MinAPIVersion = v[0]
|
||||
}
|
||||
if _, ok := flag.Annotations["experimentalCLI"]; ok {
|
||||
opt.ExperimentalCLI = true
|
||||
}
|
||||
if _, ok := flag.Annotations["kubernetes"]; ok {
|
||||
opt.Kubernetes = true
|
||||
}
|
||||
if _, ok := flag.Annotations["swarm"]; ok {
|
||||
opt.Swarm = true
|
||||
}
|
||||
|
||||
// Note that the annotation can have multiple ostypes set, however, multiple
|
||||
// values are currently not used (and likely never will be).
|
||||
//
|
||||
// To simplify usage of the os_type property in the YAML, and for consistency
|
||||
// with the same property for commands, we're only using the first ostype that's set.
|
||||
if ostypes, ok := flag.Annotations["ostype"]; ok && len(opt.OSType) == 0 && len(ostypes) > 0 {
|
||||
opt.OSType = ostypes[0]
|
||||
}
|
||||
|
||||
result = append(result, opt)
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// forceMultiLine appends a newline (\n) to strings that are longer than max
|
||||
// to force the yaml lib to use block notation (https://yaml.org/spec/1.2/spec.html#Block)
|
||||
// instead of a single-line string with newlines and tabs encoded("string\nline1\nline2").
|
||||
//
|
||||
// This makes the generated YAML more readable, and easier to review changes.
|
||||
// max can be used to customize the width to keep the whole line < 80 chars.
|
||||
func forceMultiLine(s string, max int) string {
|
||||
s = strings.TrimSpace(s)
|
||||
if len(s) > max && !strings.Contains(s, "\n") {
|
||||
s = s + "\n"
|
||||
}
|
||||
return s
|
||||
}
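// Illustrative example (not part of the original source): calling
//
//	forceMultiLine("Select the container runtime to use and configure its options", 40)
//
// returns the same text with a trailing "\n"; per the comment above, that
// trailing newline makes the YAML library emit the field in block notation
// rather than as one long single-line string.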
|
||||
|
||||
// Small duplication for cobra utils
|
||||
func hasSeeAlso(cmd *cobra.Command) bool {
|
||||
if cmd.HasParent() {
|
||||
return true
|
||||
}
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type byName []*cobra.Command
|
||||
|
||||
func (s byName) Len() int { return len(s) }
|
||||
func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
|
vendor/github.com/docker/cli-docs-tool/docker-bake.hcl (generated, vendored, new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
// Copyright 2021 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
target "_common" {
|
||||
args = {
|
||||
BUILDKIT_CONTEXT_KEEP_GIT_DIR = 1
|
||||
}
|
||||
}
|
||||
|
||||
group "default" {
|
||||
targets = ["test"]
|
||||
}
|
||||
|
||||
group "validate" {
|
||||
targets = ["lint", "vendor-validate", "license-validate"]
|
||||
}
|
||||
|
||||
target "lint" {
|
||||
inherits = ["_common"]
|
||||
target = "lint"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "vendor-validate" {
|
||||
inherits = ["_common"]
|
||||
target = "vendor-validate"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "vendor-update" {
|
||||
inherits = ["_common"]
|
||||
target = "vendor-update"
|
||||
output = ["."]
|
||||
}
|
||||
|
||||
target "test" {
|
||||
inherits = ["_common"]
|
||||
target = "test-coverage"
|
||||
output = ["."]
|
||||
}
|
||||
|
||||
target "license-validate" {
|
||||
inherits = ["_common"]
|
||||
target = "license-validate"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "license-update" {
|
||||
inherits = ["_common"]
|
||||
target = "license-update"
|
||||
output = ["."]
|
||||
}
|
vendor/github.com/docker/cli-docs-tool/markdown.go (generated, vendored, new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
// Copyright 2017 cli-docs-tool authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package clidocstool
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
var (
|
||||
// mdHeading matches MarkDown H1..h6 headings. Note that this regex may produce
|
||||
// false positives for (e.g.) comments in code-blocks (# this is a comment),
|
||||
// so should not be used as a generic regex for other purposes.
|
||||
mdHeading = regexp.MustCompile(`^([#]{1,6})\s(.*)$`)
|
||||
// htmlAnchor matches inline HTML anchors. This is intended to only match anchors
|
||||
// for our use-case; DO NOT consider using this as a generic regex, or at least
|
||||
// not before reading https://stackoverflow.com/a/1732454/1811501.
|
||||
htmlAnchor = regexp.MustCompile(`<a\s+(?:name|id)="?([^"]+)"?\s*></a>\s*`)
|
||||
// relativeLink matches parts of internal links between .md documents
|
||||
// e.g. "](buildx_build.md)"
|
||||
relativeLink = regexp.MustCompile(`\]\((\.\/)?[a-z-_]+\.md(#.*)?\)`)
|
||||
)
|
||||
|
||||
// getSections returns all H2 sections by title (lowercase)
|
||||
func getSections(mdString string) map[string]string {
|
||||
parsedContent := strings.Split("\n"+mdString, "\n## ")
|
||||
sections := make(map[string]string, len(parsedContent))
|
||||
for _, s := range parsedContent {
|
||||
if strings.HasPrefix(s, "#") {
|
||||
// not a H2 Section
|
||||
continue
|
||||
}
|
||||
parts := strings.SplitN(s, "\n", 2)
|
||||
if len(parts) == 2 {
|
||||
sections[strings.ToLower(parts[0])] = parts[1]
|
||||
}
|
||||
}
|
||||
return sections
|
||||
}
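// Illustrative example (not part of the original source): for the input
//
//	"## Description\nBuild an image\n## Examples\nSee below\n"
//
// the returned map uses the lower-cased titles "description" and "examples" as
// keys, each mapped to the text that follows its heading.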
|
||||
|
||||
// cleanupMarkDown cleans up the MarkDown passed in mdString for inclusion in
|
||||
// YAML. It removes trailing whitespace and substitutes tabs for four spaces
|
||||
// to prevent YAML switching to use "compact" form; ("line1 \nline\t2\n")
|
||||
// which, although equivalent, is hard to read.
|
||||
func cleanupMarkDown(mdString string) (md string, anchors []string) {
|
||||
// remove leading/trailing whitespace, and replace tabs in the whole content
|
||||
mdString = strings.TrimSpace(mdString)
|
||||
mdString = strings.ReplaceAll(mdString, "\t", " ")
|
||||
mdString = strings.ReplaceAll(mdString, "https://docs.docker.com", "")
|
||||
|
||||
// Rewrite internal links, replacing relative paths with absolute path
|
||||
// e.g. from [docker buildx build](buildx_build.md#build-arg)
|
||||
// to [docker buildx build](/reference/cli/docker/buildx/build/#build-arg)
|
||||
mdString = relativeLink.ReplaceAllStringFunc(mdString, func(link string) string {
|
||||
link = strings.TrimLeft(link, "](./")
|
||||
link = strings.ReplaceAll(link, "_", "/")
|
||||
link = strings.ReplaceAll(link, ".md", "/")
|
||||
return "](/reference/cli/docker/" + link
|
||||
})
|
||||
|
||||
var id string
|
||||
// replace trailing whitespace per line, and handle custom anchors
|
||||
lines := strings.Split(mdString, "\n")
|
||||
for i := 0; i < len(lines); i++ {
|
||||
lines[i] = strings.TrimRightFunc(lines[i], unicode.IsSpace)
|
||||
lines[i], id = convertHTMLAnchor(lines[i])
|
||||
if id != "" {
|
||||
anchors = append(anchors, id)
|
||||
}
|
||||
}
|
||||
return strings.Join(lines, "\n"), anchors
|
||||
}
|
||||
|
||||
// convertHTMLAnchor converts inline anchor-tags in headings (<a name=myanchor></a>)
|
||||
// to an extended-markdown property ({#myanchor}). Extended Markdown properties
|
||||
// are not supported in GitHub Flavored Markdown, but are supported by Jekyll,
|
||||
// and lead to cleaner HTML in our docs, and prevents duplicate anchors.
|
||||
// It returns the converted MarkDown heading and the custom ID (if present)
|
||||
func convertHTMLAnchor(mdLine string) (md string, customID string) {
|
||||
if m := mdHeading.FindStringSubmatch(mdLine); len(m) > 0 {
|
||||
if a := htmlAnchor.FindStringSubmatch(m[2]); len(a) > 0 {
|
||||
customID = a[1]
|
||||
mdLine = m[1] + " " + htmlAnchor.ReplaceAllString(m[2], "") + " {#" + customID + "}"
|
||||
}
|
||||
}
|
||||
return mdLine, customID
|
||||
}
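// Illustrative example (not part of the original source):
//
//	convertHTMLAnchor(`## <a name="build-arg"></a> Set build-time variables (--build-arg)`)
//
// returns the heading rewritten as
// "## Set build-time variables (--build-arg) {#build-arg}" and the custom ID
// "build-arg".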
|
vendor/github.com/russross/blackfriday/v2/.gitignore (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
*.out
|
||||
*.swp
|
||||
*.8
|
||||
*.6
|
||||
_obj
|
||||
_test*
|
||||
markdown
|
||||
tags
|
vendor/github.com/russross/blackfriday/v2/.travis.yml (generated, vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
sudo: false
|
||||
language: go
|
||||
go:
|
||||
- "1.10.x"
|
||||
- "1.11.x"
|
||||
- tip
|
||||
matrix:
|
||||
fast_finish: true
|
||||
allow_failures:
|
||||
- go: tip
|
||||
install:
|
||||
- # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
|
||||
script:
|
||||
- go get -t -v ./...
|
||||
- diff -u <(echo -n) <(gofmt -d -s .)
|
||||
- go tool vet .
|
||||
- go test -v ./...
|
vendor/github.com/russross/blackfriday/v2/LICENSE.txt (generated, vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
Blackfriday is distributed under the Simplified BSD License:
|
||||
|
||||
> Copyright © 2011 Russ Ross
|
||||
> All rights reserved.
|
||||
>
|
||||
> Redistribution and use in source and binary forms, with or without
|
||||
> modification, are permitted provided that the following conditions
|
||||
> are met:
|
||||
>
|
||||
> 1. Redistributions of source code must retain the above copyright
|
||||
> notice, this list of conditions and the following disclaimer.
|
||||
>
|
||||
> 2. Redistributions in binary form must reproduce the above
|
||||
> copyright notice, this list of conditions and the following
|
||||
> disclaimer in the documentation and/or other materials provided with
|
||||
> the distribution.
|
||||
>
|
||||
> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
> POSSIBILITY OF SUCH DAMAGE.
|
vendor/github.com/russross/blackfriday/v2/README.md (generated, vendored, new file, 335 lines)
@@ -0,0 +1,335 @@
|
||||
Blackfriday
|
||||
[![Build Status][BuildV2SVG]][BuildV2URL]
|
||||
[![PkgGoDev][PkgGoDevV2SVG]][PkgGoDevV2URL]
|
||||
===========
|
||||
|
||||
Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It
|
||||
is paranoid about its input (so you can safely feed it user-supplied
|
||||
data), it is fast, it supports common extensions (tables, smart
|
||||
punctuation substitutions, etc.), and it is safe for all utf-8
|
||||
(unicode) input.
|
||||
|
||||
HTML output is currently supported, along with Smartypants
|
||||
extensions.
|
||||
|
||||
It started as a translation from C of [Sundown][3].
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Blackfriday is compatible with modern Go releases in module mode.
|
||||
With Go installed:
|
||||
|
||||
go get github.com/russross/blackfriday/v2
|
||||
|
||||
will resolve and add the package to the current development module,
|
||||
then build and install it. Alternatively, you can achieve the same
|
||||
if you import it in a package:
|
||||
|
||||
import "github.com/russross/blackfriday/v2"
|
||||
|
||||
and `go get` without parameters.
|
||||
|
||||
Legacy GOPATH mode is unsupported.
|
||||
|
||||
|
||||
Versions
|
||||
--------
|
||||
|
||||
Currently maintained and recommended version of Blackfriday is `v2`. It's being
|
||||
developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the
|
||||
documentation is available at
|
||||
https://pkg.go.dev/github.com/russross/blackfriday/v2.
|
||||
|
||||
It is `go get`-able in module mode at `github.com/russross/blackfriday/v2`.
|
||||
|
||||
Version 2 offers a number of improvements over v1:
|
||||
|
||||
* Cleaned up API
|
||||
* A separate call to [`Parse`][4], which produces an abstract syntax tree for
|
||||
the document
|
||||
* Latest bug fixes
|
||||
* Flexibility to easily add your own rendering extensions
|
||||
|
||||
Potential drawbacks:
|
||||
|
||||
* Our benchmarks show v2 to be slightly slower than v1. Currently in the
|
||||
ballpark of around 15%.
|
||||
* API breakage. If you can't afford modifying your code to adhere to the new API
|
||||
and don't care too much about the new features, v2 is probably not for you.
|
||||
* Several bug fixes are trailing behind and still need to be forward-ported to
|
||||
v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for
|
||||
tracking.
|
||||
|
||||
If you are still interested in the legacy `v1`, you can import it from
|
||||
`github.com/russross/blackfriday`. Documentation for the legacy v1 can be found
|
||||
here: https://pkg.go.dev/github.com/russross/blackfriday.
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
For the most sensible markdown processing, it is as simple as getting your input
|
||||
into a byte slice and calling:
|
||||
|
||||
```go
|
||||
output := blackfriday.Run(input)
|
||||
```
|
||||
|
||||
Your input will be parsed and the output rendered with a set of most popular
|
||||
extensions enabled. If you want the most basic feature set, corresponding with
|
||||
the bare Markdown specification, use:
|
||||
|
||||
```go
|
||||
output := blackfriday.Run(input, blackfriday.WithNoExtensions())
|
||||
```
|
||||
|
||||
### Sanitize untrusted content
|
||||
|
||||
Blackfriday itself does nothing to protect against malicious content. If you are
|
||||
dealing with user-supplied markdown, we recommend running Blackfriday's output
|
||||
through HTML sanitizer such as [Bluemonday][5].
|
||||
|
||||
Here's an example of simple usage of Blackfriday together with Bluemonday:
|
||||
|
||||
```go
|
||||
import (
|
||||
"github.com/microcosm-cc/bluemonday"
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// ...
|
||||
unsafe := blackfriday.Run(input)
|
||||
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
|
||||
```
|
||||
|
||||
### Custom options
|
||||
|
||||
If you want to customize the set of options, use `blackfriday.WithExtensions`,
|
||||
`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`.
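
For instance (an illustrative sketch, not part of the original README), a custom
renderer and a specific set of extensions can be combined like this:

```go
renderer := blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
	Flags: blackfriday.CommonHTMLFlags,
})
output := blackfriday.Run(input,
	blackfriday.WithExtensions(blackfriday.CommonExtensions|blackfriday.Footnotes),
	blackfriday.WithRenderer(renderer))
```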
|
||||
|
||||
### `blackfriday-tool`
|
||||
|
||||
You can also check out `blackfriday-tool` for a more complete example
|
||||
of how to use it. Download and install it using:
|
||||
|
||||
go get github.com/russross/blackfriday-tool
|
||||
|
||||
This is a simple command-line tool that allows you to process a
|
||||
markdown file using a standalone program. You can also browse the
|
||||
source directly on github if you are just looking for some example
|
||||
code:
|
||||
|
||||
* <https://github.com/russross/blackfriday-tool>
|
||||
|
||||
Note that if you have not already done so, installing
|
||||
`blackfriday-tool` will be sufficient to download and install
|
||||
blackfriday in addition to the tool itself. The tool binary will be
|
||||
installed in `$GOPATH/bin`. This is a statically-linked binary that
|
||||
can be copied to wherever you need it without worrying about
|
||||
dependencies and library versions.
|
||||
|
||||
### Sanitized anchor names
|
||||
|
||||
Blackfriday includes an algorithm for creating sanitized anchor names
|
||||
corresponding to a given input text. This algorithm is used to create
|
||||
anchors for headings when `AutoHeadingIDs` extension is enabled. The
|
||||
algorithm has a specification, so that other packages can create
|
||||
compatible anchor names and links to those anchors.
|
||||
|
||||
The specification is located at https://pkg.go.dev/github.com/russross/blackfriday/v2#hdr-Sanitized_Anchor_Names.
|
||||
|
||||
[`SanitizedAnchorName`](https://pkg.go.dev/github.com/russross/blackfriday/v2#SanitizedAnchorName) exposes this functionality, and can be used to
|
||||
create compatible links to the anchor names generated by blackfriday.
|
||||
This algorithm is also implemented in a small standalone package at
|
||||
[`github.com/shurcooL/sanitized_anchor_name`](https://pkg.go.dev/github.com/shurcooL/sanitized_anchor_name). It can be useful for clients
|
||||
that want a small package and don't need full functionality of blackfriday.
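
As a quick, illustrative example (not part of the original README):

```go
id := blackfriday.SanitizedAnchorName("This is a header")
// id == "this-is-a-header"
```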
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
All features of Sundown are supported, including:
|
||||
|
||||
* **Compatibility**. The Markdown v1.0.3 test suite passes with
|
||||
the `--tidy` option. Without `--tidy`, the differences are
|
||||
mostly in whitespace and entity escaping, where blackfriday is
|
||||
more consistent and cleaner.
|
||||
|
||||
* **Common extensions**, including table support, fenced code
|
||||
blocks, autolinks, strikethroughs, non-strict emphasis, etc.
|
||||
|
||||
* **Safety**. Blackfriday is paranoid when parsing, making it safe
|
||||
to feed untrusted user input without fear of bad things
|
||||
happening. The test suite stress tests this and there are no
|
||||
known inputs that make it crash. If you find one, please let me
|
||||
know and send me the input that does it.
|
||||
|
||||
NOTE: "safety" in this context means *runtime safety only*. In order to
|
||||
protect yourself against JavaScript injection in untrusted content, see
|
||||
[this example](https://github.com/russross/blackfriday#sanitize-untrusted-content).
|
||||
|
||||
* **Fast processing**. It is fast enough to render on-demand in
|
||||
most web applications without having to cache the output.
|
||||
|
||||
* **Thread safety**. You can run multiple parsers in different
|
||||
goroutines without ill effect. There is no dependence on global
|
||||
shared state.
|
||||
|
||||
* **Minimal dependencies**. Blackfriday only depends on standard
|
||||
library packages in Go. The source code is pretty
|
||||
self-contained, so it is easy to add to any project, including
|
||||
Google App Engine projects.
|
||||
|
||||
* **Standards compliant**. Output successfully validates using the
|
||||
W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional.
|
||||
|
||||
|
||||
Extensions
|
||||
----------
|
||||
|
||||
In addition to the standard markdown syntax, this package
|
||||
implements the following extensions:
|
||||
|
||||
* **Intra-word emphasis supression**. The `_` character is
|
||||
commonly used inside words when discussing code, so having
|
||||
markdown interpret it as an emphasis command is usually the
|
||||
wrong thing. Blackfriday lets you treat all emphasis markers as
|
||||
normal characters when they occur inside a word.
|
||||
|
||||
* **Tables**. Tables can be created by drawing them in the input
|
||||
using a simple syntax:
|
||||
|
||||
```
|
||||
Name | Age
|
||||
--------|------
|
||||
Bob | 27
|
||||
Alice | 23
|
||||
```
|
||||
|
||||
* **Fenced code blocks**. In addition to the normal 4-space
|
||||
indentation to mark code blocks, you can explicitly mark them
|
||||
and supply a language (to make syntax highlighting simple). Just
|
||||
mark it like this:
|
||||
|
||||
```go
|
||||
func getTrue() bool {
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
You can use 3 or more backticks to mark the beginning of the
|
||||
block, and the same number to mark the end of the block.
|
||||
|
||||
To preserve classes of fenced code blocks while using the bluemonday
|
||||
HTML sanitizer, use the following policy:
|
||||
|
||||
```go
|
||||
p := bluemonday.UGCPolicy()
|
||||
p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code")
|
||||
html := p.SanitizeBytes(unsafe)
|
||||
```
|
||||
|
||||
* **Definition lists**. A simple definition list is made of a single-line
|
||||
term followed by a colon and the definition for that term.
|
||||
|
||||
Cat
|
||||
: Fluffy animal everyone likes
|
||||
|
||||
Internet
|
||||
: Vector of transmission for pictures of cats
|
||||
|
||||
Terms must be separated from the previous definition by a blank line.
|
||||
|
||||
* **Footnotes**. A marker in the text that will become a superscript number;
|
||||
a footnote definition that will be placed in a list of footnotes at the
|
||||
end of the document. A footnote looks like this:
|
||||
|
||||
This is a footnote.[^1]
|
||||
|
||||
[^1]: the footnote text.
|
||||
|
||||
* **Autolinking**. Blackfriday can find URLs that have not been
|
||||
explicitly marked as links and turn them into links.
|
||||
|
||||
* **Strikethrough**. Use two tildes (`~~`) to mark text that
|
||||
should be crossed out.
|
||||
|
||||
* **Hard line breaks**. With this extension enabled newlines in the input
|
||||
translate into line breaks in the output. This extension is off by default.
|
||||
|
||||
* **Smart quotes**. Smartypants-style punctuation substitution is
|
||||
supported, turning normal double- and single-quote marks into
|
||||
curly quotes, etc.
|
||||
|
||||
* **LaTeX-style dash parsing** is an additional option, where `--`
|
||||
is translated into `–`, and `---` is translated into
|
||||
`—`. This differs from most smartypants processors, which
|
||||
turn a single hyphen into an ndash and a double hyphen into an
|
||||
mdash.
|
||||
|
||||
* **Smart fractions**, where anything that looks like a fraction
|
||||
is translated into suitable HTML (instead of just a few special
|
||||
cases like most smartypant processors). For example, `4/5`
|
||||
becomes `<sup>4</sup>⁄<sub>5</sub>`, which renders as
|
||||
<sup>4</sup>⁄<sub>5</sub>.
|
||||
|
||||
|
||||
Other renderers
|
||||
---------------
|
||||
|
||||
Blackfriday is structured to allow alternative rendering engines. Here
|
||||
are a few of note:
|
||||
|
||||
* [github_flavored_markdown](https://pkg.go.dev/github.com/shurcooL/github_flavored_markdown):
|
||||
provides a GitHub Flavored Markdown renderer with fenced code block
|
||||
highlighting, clickable heading anchor links.
|
||||
|
||||
It's not customizable, and its goal is to produce HTML output
|
||||
equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode),
|
||||
except the rendering is performed locally.
|
||||
|
||||
* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt,
|
||||
but for markdown.
|
||||
|
||||
* [LaTeX output](https://gitlab.com/ambrevar/blackfriday-latex):
|
||||
renders output as LaTeX.
|
||||
|
||||
* [bfchroma](https://github.com/Depado/bfchroma/): provides convenience
|
||||
integration with the [Chroma](https://github.com/alecthomas/chroma) code
|
||||
highlighting library. bfchroma is only compatible with v2 of Blackfriday and
|
||||
provides a drop-in renderer ready to use with Blackfriday, as well as
|
||||
options and means for further customization.
|
||||
|
||||
* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer.
|
||||
|
||||
* [Blackfriday-Slack](https://github.com/karriereat/blackfriday-slack): converts markdown to slack message style
|
||||
|
||||
|
||||
TODO
|
||||
----
|
||||
|
||||
* More unit testing
|
||||
* Improve Unicode support. It does not understand all Unicode
|
||||
rules (about what constitutes a letter, a punctuation symbol,
|
||||
etc.), so it may fail to detect word boundaries correctly in
|
||||
some instances. It is safe on all UTF-8 input.
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt)
|
||||
|
||||
|
||||
[1]: https://daringfireball.net/projects/markdown/ "Markdown"
|
||||
[2]: https://golang.org/ "Go Language"
|
||||
[3]: https://github.com/vmg/sundown "Sundown"
|
||||
[4]: https://pkg.go.dev/github.com/russross/blackfriday/v2#Parse "Parse func"
|
||||
[5]: https://github.com/microcosm-cc/bluemonday "Bluemonday"
|
||||
|
||||
[BuildV2SVG]: https://travis-ci.org/russross/blackfriday.svg?branch=v2
|
||||
[BuildV2URL]: https://travis-ci.org/russross/blackfriday
|
||||
[PkgGoDevV2SVG]: https://pkg.go.dev/badge/github.com/russross/blackfriday/v2
|
||||
[PkgGoDevV2URL]: https://pkg.go.dev/github.com/russross/blackfriday/v2
|
vendor/github.com/russross/blackfriday/v2/block.go (generated, vendored, new file, 1612 lines)
File diff suppressed because it is too large
vendor/github.com/russross/blackfriday/v2/doc.go (generated, vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
// Package blackfriday is a markdown processor.
|
||||
//
|
||||
// It translates plain text with simple formatting rules into an AST, which can
|
||||
// then be further processed to HTML (provided by Blackfriday itself) or other
|
||||
// formats (provided by the community).
|
||||
//
|
||||
// The simplest way to invoke Blackfriday is to call the Run function. It will
|
||||
// take a text input and produce a text output in HTML (or other format).
|
||||
//
|
||||
// A slightly more sophisticated way to use Blackfriday is to create a Markdown
|
||||
// processor and to call Parse, which returns a syntax tree for the input
|
||||
// document. You can leverage Blackfriday's parsing for content extraction from
|
||||
// markdown documents. You can assign a custom renderer and set various options
|
||||
// to the Markdown processor.
|
||||
//
|
||||
// If you're interested in calling Blackfriday from command line, see
|
||||
// https://github.com/russross/blackfriday-tool.
|
||||
//
|
||||
// Sanitized Anchor Names
|
||||
//
|
||||
// Blackfriday includes an algorithm for creating sanitized anchor names
|
||||
// corresponding to a given input text. This algorithm is used to create
|
||||
// anchors for headings when AutoHeadingIDs extension is enabled. The
|
||||
// algorithm is specified below, so that other packages can create
|
||||
// compatible anchor names and links to those anchors.
|
||||
//
|
||||
// The algorithm iterates over the input text, interpreted as UTF-8,
|
||||
// one Unicode code point (rune) at a time. All runes that are letters (category L)
|
||||
// or numbers (category N) are considered valid characters. They are mapped to
|
||||
// lower case, and included in the output. All other runes are considered
|
||||
// invalid characters. Invalid characters that precede the first valid character,
|
||||
// as well as invalid characters that follow the last valid character,
|
||||
// are dropped completely. All other sequences of invalid characters
|
||||
// between two valid characters are replaced with a single dash character '-'.
|
||||
//
|
||||
// SanitizedAnchorName exposes this functionality, and can be used to
|
||||
// create compatible links to the anchor names generated by blackfriday.
|
||||
// This algorithm is also implemented in a small standalone package at
|
||||
// github.com/shurcooL/sanitized_anchor_name. It can be useful for clients
|
||||
// that want a small package and don't need full functionality of blackfriday.
|
||||
package blackfriday
|
||||
|
||||
// NOTE: Keep Sanitized Anchor Name algorithm in sync with package
|
||||
// github.com/shurcooL/sanitized_anchor_name.
|
||||
// Otherwise, users of sanitized_anchor_name will get anchor names
|
||||
// that are incompatible with those generated by blackfriday.
|
vendor/github.com/russross/blackfriday/v2/entities.go (generated, vendored, new file, 2236 lines)
File diff suppressed because it is too large
vendor/github.com/russross/blackfriday/v2/esc.go (generated, vendored, new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
package blackfriday
|
||||
|
||||
import (
|
||||
"html"
|
||||
"io"
|
||||
)
|
||||
|
||||
var htmlEscaper = [256][]byte{
|
||||
'&': []byte("&"),
|
||||
'<': []byte("<"),
|
||||
'>': []byte(">"),
|
||||
'"': []byte("""),
|
||||
}
|
||||
|
||||
func escapeHTML(w io.Writer, s []byte) {
|
||||
escapeEntities(w, s, false)
|
||||
}
|
||||
|
||||
func escapeAllHTML(w io.Writer, s []byte) {
|
||||
escapeEntities(w, s, true)
|
||||
}
|
||||
|
||||
func escapeEntities(w io.Writer, s []byte, escapeValidEntities bool) {
|
||||
var start, end int
|
||||
for end < len(s) {
|
||||
escSeq := htmlEscaper[s[end]]
|
||||
if escSeq != nil {
|
||||
isEntity, entityEnd := nodeIsEntity(s, end)
|
||||
if isEntity && !escapeValidEntities {
|
||||
w.Write(s[start : entityEnd+1])
|
||||
start = entityEnd + 1
|
||||
} else {
|
||||
w.Write(s[start:end])
|
||||
w.Write(escSeq)
|
||||
start = end + 1
|
||||
}
|
||||
}
|
||||
end++
|
||||
}
|
||||
if start < len(s) && end <= len(s) {
|
||||
w.Write(s[start:end])
|
||||
}
|
||||
}
|
||||
|
||||
func nodeIsEntity(s []byte, end int) (isEntity bool, endEntityPos int) {
|
||||
isEntity = false
|
||||
endEntityPos = end + 1
|
||||
|
||||
if s[end] == '&' {
|
||||
for endEntityPos < len(s) {
|
||||
if s[endEntityPos] == ';' {
|
||||
if entities[string(s[end:endEntityPos+1])] {
|
||||
isEntity = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !isalnum(s[endEntityPos]) && s[endEntityPos] != '&' && s[endEntityPos] != '#' {
|
||||
break
|
||||
}
|
||||
endEntityPos++
|
||||
}
|
||||
}
|
||||
|
||||
return isEntity, endEntityPos
|
||||
}
|
||||
|
||||
func escLink(w io.Writer, text []byte) {
|
||||
unesc := html.UnescapeString(string(text))
|
||||
escapeHTML(w, []byte(unesc))
|
||||
}
|
vendor/github.com/russross/blackfriday/v2/html.go (generated, vendored, new file, 952 lines)
@@ -0,0 +1,952 @@
|
||||
//
|
||||
// Blackfriday Markdown Processor
|
||||
// Available at http://github.com/russross/blackfriday
|
||||
//
|
||||
// Copyright © 2011 Russ Ross <russ@russross.com>.
|
||||
// Distributed under the Simplified BSD License.
|
||||
// See README.md for details.
|
||||
//
|
||||
|
||||
//
|
||||
//
|
||||
// HTML rendering backend
|
||||
//
|
||||
//
|
||||
|
||||
package blackfriday
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// HTMLFlags control optional behavior of HTML renderer.
|
||||
type HTMLFlags int
|
||||
|
||||
// HTML renderer configuration options.
|
||||
const (
|
||||
HTMLFlagsNone HTMLFlags = 0
|
||||
SkipHTML HTMLFlags = 1 << iota // Skip preformatted HTML blocks
|
||||
SkipImages // Skip embedded images
|
||||
SkipLinks // Skip all links
|
||||
Safelink // Only link to trusted protocols
|
||||
NofollowLinks // Only link with rel="nofollow"
|
||||
NoreferrerLinks // Only link with rel="noreferrer"
|
||||
NoopenerLinks // Only link with rel="noopener"
|
||||
HrefTargetBlank // Add a blank target
|
||||
CompletePage // Generate a complete HTML page
|
||||
UseXHTML // Generate XHTML output instead of HTML
|
||||
FootnoteReturnLinks // Generate a link at the end of a footnote to return to the source
|
||||
Smartypants // Enable smart punctuation substitutions
|
||||
SmartypantsFractions // Enable smart fractions (with Smartypants)
|
||||
SmartypantsDashes // Enable smart dashes (with Smartypants)
|
||||
SmartypantsLatexDashes // Enable LaTeX-style dashes (with Smartypants)
|
||||
SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering
|
||||
SmartypantsQuotesNBSP // Enable « French guillemets » (with Smartypants)
|
||||
TOC // Generate a table of contents
|
||||
)
|
||||
|
||||
var (
|
||||
htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag)
|
||||
)
|
||||
|
||||
const (
|
||||
htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" +
|
||||
processingInstruction + "|" + declaration + "|" + cdata + ")"
|
||||
closeTag = "</" + tagName + "\\s*[>]"
|
||||
openTag = "<" + tagName + attribute + "*" + "\\s*/?>"
|
||||
attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)"
|
||||
attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")"
|
||||
attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")"
|
||||
attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*"
|
||||
cdata = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>"
|
||||
declaration = "<![A-Z]+" + "\\s+[^>]*>"
|
||||
doubleQuotedValue = "\"[^\"]*\""
|
||||
htmlComment = "<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->"
|
||||
processingInstruction = "[<][?].*?[?][>]"
|
||||
singleQuotedValue = "'[^']*'"
|
||||
tagName = "[A-Za-z][A-Za-z0-9-]*"
|
||||
unquotedValue = "[^\"'=<>`\\x00-\\x20]+"
|
||||
)
|
||||
|
||||
// HTMLRendererParameters is a collection of supplementary parameters tweaking
|
||||
// the behavior of various parts of HTML renderer.
|
||||
type HTMLRendererParameters struct {
|
||||
// Prepend this text to each relative URL.
|
||||
AbsolutePrefix string
|
||||
// Add this text to each footnote anchor, to ensure uniqueness.
|
||||
FootnoteAnchorPrefix string
|
||||
// Show this text inside the <a> tag for a footnote return link, if the
|
||||
// HTML_FOOTNOTE_RETURN_LINKS flag is enabled. If blank, the string
|
||||
// <sup>[return]</sup> is used.
|
||||
FootnoteReturnLinkContents string
|
||||
// If set, add this text to the front of each Heading ID, to ensure
|
||||
// uniqueness.
|
||||
HeadingIDPrefix string
|
||||
// If set, add this text to the back of each Heading ID, to ensure uniqueness.
|
||||
HeadingIDSuffix string
|
||||
// Increase heading levels: if the offset is 1, <h1> becomes <h2> etc.
|
||||
// Negative offset is also valid.
|
||||
// Resulting levels are clipped between 1 and 6.
|
||||
HeadingLevelOffset int
|
||||
|
||||
Title string // Document title (used if CompletePage is set)
|
||||
CSS string // Optional CSS file URL (used if CompletePage is set)
|
||||
Icon string // Optional icon file URL (used if CompletePage is set)
|
||||
|
||||
Flags HTMLFlags // Flags allow customizing this renderer's behavior
|
||||
}
|
||||
|
||||
// HTMLRenderer is a type that implements the Renderer interface for HTML output.
|
||||
//
|
||||
// Do not create this directly, instead use the NewHTMLRenderer function.
|
||||
type HTMLRenderer struct {
|
||||
HTMLRendererParameters
|
||||
|
||||
closeTag string // how to end singleton tags: either " />" or ">"
|
||||
|
||||
// Track heading IDs to prevent ID collision in a single generation.
|
||||
headingIDs map[string]int
|
||||
|
||||
lastOutputLen int
|
||||
disableTags int
|
||||
|
||||
sr *SPRenderer
|
||||
}
|
||||
|
||||
const (
|
||||
xhtmlClose = " />"
|
||||
htmlClose = ">"
|
||||
)
|
||||
|
||||
// NewHTMLRenderer creates and configures an HTMLRenderer object, which
|
||||
// satisfies the Renderer interface.
|
||||
func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer {
|
||||
// configure the rendering engine
|
||||
closeTag := htmlClose
|
||||
if params.Flags&UseXHTML != 0 {
|
||||
closeTag = xhtmlClose
|
||||
}
|
||||
|
||||
if params.FootnoteReturnLinkContents == "" {
|
||||
// U+FE0E is VARIATION SELECTOR-15.
|
||||
// It suppresses automatic emoji presentation of the preceding
|
||||
// U+21A9 LEFTWARDS ARROW WITH HOOK on iOS and iPadOS.
|
||||
params.FootnoteReturnLinkContents = "<span aria-label='Return'>↩\ufe0e</span>"
|
||||
}
|
||||
|
||||
return &HTMLRenderer{
|
||||
HTMLRendererParameters: params,
|
||||
|
||||
closeTag: closeTag,
|
||||
headingIDs: make(map[string]int),
|
||||
|
||||
sr: NewSmartypantsRenderer(params.Flags),
|
||||
}
|
||||
}
|
||||
|
||||
func isHTMLTag(tag []byte, tagname string) bool {
|
||||
found, _ := findHTMLTagPos(tag, tagname)
|
||||
return found
|
||||
}
|
||||
|
||||
// Look for a character, but ignore it when it's in any kind of quotes, it
|
||||
// might be JavaScript
|
||||
func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int {
|
||||
inSingleQuote := false
|
||||
inDoubleQuote := false
|
||||
inGraveQuote := false
|
||||
i := start
|
||||
for i < len(html) {
|
||||
switch {
|
||||
case html[i] == char && !inSingleQuote && !inDoubleQuote && !inGraveQuote:
|
||||
return i
|
||||
case html[i] == '\'':
|
||||
inSingleQuote = !inSingleQuote
|
||||
case html[i] == '"':
|
||||
inDoubleQuote = !inDoubleQuote
|
||||
case html[i] == '`':
|
||||
inGraveQuote = !inGraveQuote
|
||||
}
|
||||
i++
|
||||
}
|
||||
return start
|
||||
}
|
||||
|
||||
func findHTMLTagPos(tag []byte, tagname string) (bool, int) {
|
||||
i := 0
|
||||
if i < len(tag) && tag[0] != '<' {
|
||||
return false, -1
|
||||
}
|
||||
i++
|
||||
i = skipSpace(tag, i)
|
||||
|
||||
if i < len(tag) && tag[i] == '/' {
|
||||
i++
|
||||
}
|
||||
|
||||
i = skipSpace(tag, i)
|
||||
j := 0
|
||||
for ; i < len(tag); i, j = i+1, j+1 {
|
||||
if j >= len(tagname) {
|
||||
break
|
||||
}
|
||||
|
||||
if strings.ToLower(string(tag[i]))[0] != tagname[j] {
|
||||
return false, -1
|
||||
}
|
||||
}
|
||||
|
||||
if i == len(tag) {
|
||||
return false, -1
|
||||
}
|
||||
|
||||
rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>')
|
||||
if rightAngle >= i {
|
||||
return true, rightAngle
|
||||
}
|
||||
|
||||
return false, -1
|
||||
}
|
||||
|
||||
func skipSpace(tag []byte, i int) int {
|
||||
for i < len(tag) && isspace(tag[i]) {
|
||||
i++
|
||||
}
|
||||
return i
|
||||
}
|
||||
|
||||
func isRelativeLink(link []byte) (yes bool) {
|
||||
// a tag begin with '#'
|
||||
if link[0] == '#' {
|
||||
return true
|
||||
}
|
||||
|
||||
// link begin with '/' but not '//', the second maybe a protocol relative link
|
||||
if len(link) >= 2 && link[0] == '/' && link[1] != '/' {
|
||||
return true
|
||||
}
|
||||
|
||||
// only the root '/'
|
||||
if len(link) == 1 && link[0] == '/' {
|
||||
return true
|
||||
}
|
||||
|
||||
// current directory : begin with "./"
|
||||
if bytes.HasPrefix(link, []byte("./")) {
|
||||
return true
|
||||
}
|
||||
|
||||
// parent directory : begin with "../"
|
||||
if bytes.HasPrefix(link, []byte("../")) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string {
|
||||
for count, found := r.headingIDs[id]; found; count, found = r.headingIDs[id] {
|
||||
tmp := fmt.Sprintf("%s-%d", id, count+1)
|
||||
|
||||
if _, tmpFound := r.headingIDs[tmp]; !tmpFound {
|
||||
r.headingIDs[id] = count + 1
|
||||
id = tmp
|
||||
} else {
|
||||
id = id + "-1"
|
||||
}
|
||||
}
|
||||
|
||||
if _, found := r.headingIDs[id]; !found {
|
||||
r.headingIDs[id] = 0
|
||||
}
|
||||
|
||||
return id
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte {
|
||||
if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' {
|
||||
newDest := r.AbsolutePrefix
|
||||
if link[0] != '/' {
|
||||
newDest += "/"
|
||||
}
|
||||
newDest += string(link)
|
||||
return []byte(newDest)
|
||||
}
|
||||
return link
|
||||
}
|
||||
|
||||
func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string {
|
||||
if isRelativeLink(link) {
|
||||
return attrs
|
||||
}
|
||||
val := []string{}
|
||||
if flags&NofollowLinks != 0 {
|
||||
val = append(val, "nofollow")
|
||||
}
|
||||
if flags&NoreferrerLinks != 0 {
|
||||
val = append(val, "noreferrer")
|
||||
}
|
||||
if flags&NoopenerLinks != 0 {
|
||||
val = append(val, "noopener")
|
||||
}
|
||||
if flags&HrefTargetBlank != 0 {
|
||||
attrs = append(attrs, "target=\"_blank\"")
|
||||
}
|
||||
if len(val) == 0 {
|
||||
return attrs
|
||||
}
|
||||
attr := fmt.Sprintf("rel=%q", strings.Join(val, " "))
|
||||
return append(attrs, attr)
|
||||
}
|
||||
|
||||
func isMailto(link []byte) bool {
|
||||
return bytes.HasPrefix(link, []byte("mailto:"))
|
||||
}
|
||||
|
||||
func needSkipLink(flags HTMLFlags, dest []byte) bool {
|
||||
if flags&SkipLinks != 0 {
|
||||
return true
|
||||
}
|
||||
return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest)
|
||||
}
|
||||
|
||||
func isSmartypantable(node *Node) bool {
|
||||
pt := node.Parent.Type
|
||||
return pt != Link && pt != CodeBlock && pt != Code
|
||||
}
|
||||
|
||||
func appendLanguageAttr(attrs []string, info []byte) []string {
|
||||
if len(info) == 0 {
|
||||
return attrs
|
||||
}
|
||||
endOfLang := bytes.IndexAny(info, "\t ")
|
||||
if endOfLang < 0 {
|
||||
endOfLang = len(info)
|
||||
}
|
||||
return append(attrs, fmt.Sprintf("class=\"language-%s\"", info[:endOfLang]))
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) {
|
||||
w.Write(name)
|
||||
if len(attrs) > 0 {
|
||||
w.Write(spaceBytes)
|
||||
w.Write([]byte(strings.Join(attrs, " ")))
|
||||
}
|
||||
w.Write(gtBytes)
|
||||
r.lastOutputLen = 1
|
||||
}
|
||||
|
||||
func footnoteRef(prefix string, node *Node) []byte {
|
||||
urlFrag := prefix + string(slugify(node.Destination))
|
||||
anchor := fmt.Sprintf(`<a href="#fn:%s">%d</a>`, urlFrag, node.NoteID)
|
||||
return []byte(fmt.Sprintf(`<sup class="footnote-ref" id="fnref:%s">%s</sup>`, urlFrag, anchor))
|
||||
}
|
||||
|
||||
func footnoteItem(prefix string, slug []byte) []byte {
|
||||
return []byte(fmt.Sprintf(`<li id="fn:%s%s">`, prefix, slug))
|
||||
}
|
||||
|
||||
func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
|
||||
const format = ` <a class="footnote-return" href="#fnref:%s%s">%s</a>`
|
||||
return []byte(fmt.Sprintf(format, prefix, slug, returnLink))
|
||||
}
|
||||
|
||||
func itemOpenCR(node *Node) bool {
|
||||
if node.Prev == nil {
|
||||
return false
|
||||
}
|
||||
ld := node.Parent.ListData
|
||||
return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0
|
||||
}
|
||||
|
||||
func skipParagraphTags(node *Node) bool {
|
||||
grandparent := node.Parent.Parent
|
||||
if grandparent == nil || grandparent.Type != List {
|
||||
return false
|
||||
}
|
||||
tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0
|
||||
return grandparent.Type == List && tightOrTerm
|
||||
}
|
||||
|
||||
func cellAlignment(align CellAlignFlags) string {
|
||||
switch align {
|
||||
case TableAlignmentLeft:
|
||||
return "left"
|
||||
case TableAlignmentRight:
|
||||
return "right"
|
||||
case TableAlignmentCenter:
|
||||
return "center"
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) out(w io.Writer, text []byte) {
|
||||
if r.disableTags > 0 {
|
||||
w.Write(htmlTagRe.ReplaceAll(text, []byte{}))
|
||||
} else {
|
||||
w.Write(text)
|
||||
}
|
||||
r.lastOutputLen = len(text)
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) cr(w io.Writer) {
|
||||
if r.lastOutputLen > 0 {
|
||||
r.out(w, nlBytes)
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
nlBytes = []byte{'\n'}
|
||||
gtBytes = []byte{'>'}
|
||||
spaceBytes = []byte{' '}
|
||||
)
|
||||
|
||||
var (
|
||||
brTag = []byte("<br>")
|
||||
brXHTMLTag = []byte("<br />")
|
||||
emTag = []byte("<em>")
|
||||
emCloseTag = []byte("</em>")
|
||||
strongTag = []byte("<strong>")
|
||||
strongCloseTag = []byte("</strong>")
|
||||
delTag = []byte("<del>")
|
||||
delCloseTag = []byte("</del>")
|
||||
ttTag = []byte("<tt>")
|
||||
ttCloseTag = []byte("</tt>")
|
||||
aTag = []byte("<a")
|
||||
aCloseTag = []byte("</a>")
|
||||
preTag = []byte("<pre>")
|
||||
preCloseTag = []byte("</pre>")
|
||||
codeTag = []byte("<code>")
|
||||
codeCloseTag = []byte("</code>")
|
||||
pTag = []byte("<p>")
|
||||
pCloseTag = []byte("</p>")
|
||||
blockquoteTag = []byte("<blockquote>")
|
||||
blockquoteCloseTag = []byte("</blockquote>")
|
||||
hrTag = []byte("<hr>")
|
||||
hrXHTMLTag = []byte("<hr />")
|
||||
ulTag = []byte("<ul>")
|
||||
ulCloseTag = []byte("</ul>")
|
||||
olTag = []byte("<ol>")
|
||||
olCloseTag = []byte("</ol>")
|
||||
dlTag = []byte("<dl>")
|
||||
dlCloseTag = []byte("</dl>")
|
||||
liTag = []byte("<li>")
|
||||
liCloseTag = []byte("</li>")
|
||||
ddTag = []byte("<dd>")
|
||||
ddCloseTag = []byte("</dd>")
|
||||
dtTag = []byte("<dt>")
|
||||
dtCloseTag = []byte("</dt>")
|
||||
tableTag = []byte("<table>")
|
||||
tableCloseTag = []byte("</table>")
|
||||
tdTag = []byte("<td")
|
||||
tdCloseTag = []byte("</td>")
|
||||
thTag = []byte("<th")
|
||||
thCloseTag = []byte("</th>")
|
||||
theadTag = []byte("<thead>")
|
||||
theadCloseTag = []byte("</thead>")
|
||||
tbodyTag = []byte("<tbody>")
|
||||
tbodyCloseTag = []byte("</tbody>")
|
||||
trTag = []byte("<tr>")
|
||||
trCloseTag = []byte("</tr>")
|
||||
h1Tag = []byte("<h1")
|
||||
h1CloseTag = []byte("</h1>")
|
||||
h2Tag = []byte("<h2")
|
||||
h2CloseTag = []byte("</h2>")
|
||||
h3Tag = []byte("<h3")
|
||||
h3CloseTag = []byte("</h3>")
|
||||
h4Tag = []byte("<h4")
|
||||
h4CloseTag = []byte("</h4>")
|
||||
h5Tag = []byte("<h5")
|
||||
h5CloseTag = []byte("</h5>")
|
||||
h6Tag = []byte("<h6")
|
||||
h6CloseTag = []byte("</h6>")
|
||||
|
||||
footnotesDivBytes = []byte("\n<div class=\"footnotes\">\n\n")
|
||||
footnotesCloseDivBytes = []byte("\n</div>\n")
|
||||
)
|
||||
|
||||
func headingTagsFromLevel(level int) ([]byte, []byte) {
|
||||
if level <= 1 {
|
||||
return h1Tag, h1CloseTag
|
||||
}
|
||||
switch level {
|
||||
case 2:
|
||||
return h2Tag, h2CloseTag
|
||||
case 3:
|
||||
return h3Tag, h3CloseTag
|
||||
case 4:
|
||||
return h4Tag, h4CloseTag
|
||||
case 5:
|
||||
return h5Tag, h5CloseTag
|
||||
}
|
||||
return h6Tag, h6CloseTag
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) outHRTag(w io.Writer) {
|
||||
if r.Flags&UseXHTML == 0 {
|
||||
r.out(w, hrTag)
|
||||
} else {
|
||||
r.out(w, hrXHTMLTag)
|
||||
}
|
||||
}
|
||||
|
||||
// RenderNode is a default renderer of a single node of a syntax tree. For
|
||||
// block nodes it will be called twice: first time with entering=true, second
|
||||
// time with entering=false, so that it could know when it's working on an open
|
||||
// tag and when on close. It writes the result to w.
|
||||
//
|
||||
// The return value is a way to tell the calling walker to adjust its walk
|
||||
// pattern: e.g. it can terminate the traversal by returning Terminate. Or it
|
||||
// can ask the walker to skip a subtree of this node by returning SkipChildren.
|
||||
// The typical behavior is to return GoToNext, which asks for the usual
|
||||
// traversal to the next node.
|
||||
func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus {
|
||||
attrs := []string{}
|
||||
switch node.Type {
|
||||
case Text:
|
||||
if r.Flags&Smartypants != 0 {
|
||||
var tmp bytes.Buffer
|
||||
escapeHTML(&tmp, node.Literal)
|
||||
r.sr.Process(w, tmp.Bytes())
|
||||
} else {
|
||||
if node.Parent.Type == Link {
|
||||
escLink(w, node.Literal)
|
||||
} else {
|
||||
escapeHTML(w, node.Literal)
|
||||
}
|
||||
}
|
||||
case Softbreak:
|
||||
r.cr(w)
|
||||
// TODO: make it configurable via out(renderer.softbreak)
|
||||
case Hardbreak:
|
||||
if r.Flags&UseXHTML == 0 {
|
||||
r.out(w, brTag)
|
||||
} else {
|
||||
r.out(w, brXHTMLTag)
|
||||
}
|
||||
r.cr(w)
|
||||
case Emph:
|
||||
if entering {
|
||||
r.out(w, emTag)
|
||||
} else {
|
||||
r.out(w, emCloseTag)
|
||||
}
|
||||
case Strong:
|
||||
if entering {
|
||||
r.out(w, strongTag)
|
||||
} else {
|
||||
r.out(w, strongCloseTag)
|
||||
}
|
||||
case Del:
|
||||
if entering {
|
||||
r.out(w, delTag)
|
||||
} else {
|
||||
r.out(w, delCloseTag)
|
||||
}
|
||||
case HTMLSpan:
|
||||
if r.Flags&SkipHTML != 0 {
|
||||
break
|
||||
}
|
||||
r.out(w, node.Literal)
|
||||
case Link:
|
||||
// mark it but don't link it if it is not a safe link: no smartypants
|
||||
dest := node.LinkData.Destination
|
||||
if needSkipLink(r.Flags, dest) {
|
||||
if entering {
|
||||
r.out(w, ttTag)
|
||||
} else {
|
||||
r.out(w, ttCloseTag)
|
||||
}
|
||||
} else {
|
||||
if entering {
|
||||
dest = r.addAbsPrefix(dest)
|
||||
var hrefBuf bytes.Buffer
|
||||
hrefBuf.WriteString("href=\"")
|
||||
escLink(&hrefBuf, dest)
|
||||
hrefBuf.WriteByte('"')
|
||||
attrs = append(attrs, hrefBuf.String())
|
||||
if node.NoteID != 0 {
|
||||
r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node))
|
||||
break
|
||||
}
|
||||
attrs = appendLinkAttrs(attrs, r.Flags, dest)
|
||||
if len(node.LinkData.Title) > 0 {
|
||||
var titleBuff bytes.Buffer
|
||||
titleBuff.WriteString("title=\"")
|
||||
escapeHTML(&titleBuff, node.LinkData.Title)
|
||||
titleBuff.WriteByte('"')
|
||||
attrs = append(attrs, titleBuff.String())
|
||||
}
|
||||
r.tag(w, aTag, attrs)
|
||||
} else {
|
||||
if node.NoteID != 0 {
|
||||
break
|
||||
}
|
||||
r.out(w, aCloseTag)
|
||||
}
|
||||
}
|
||||
case Image:
|
||||
if r.Flags&SkipImages != 0 {
|
||||
return SkipChildren
|
||||
}
|
||||
if entering {
|
||||
dest := node.LinkData.Destination
|
||||
dest = r.addAbsPrefix(dest)
|
||||
if r.disableTags == 0 {
|
||||
//if options.safe && potentiallyUnsafe(dest) {
|
||||
//out(w, `<img src="" alt="`)
|
||||
//} else {
|
||||
r.out(w, []byte(`<img src="`))
|
||||
escLink(w, dest)
|
||||
r.out(w, []byte(`" alt="`))
|
||||
//}
|
||||
}
|
||||
r.disableTags++
|
||||
} else {
|
||||
r.disableTags--
|
||||
if r.disableTags == 0 {
|
||||
if node.LinkData.Title != nil {
|
||||
r.out(w, []byte(`" title="`))
|
||||
escapeHTML(w, node.LinkData.Title)
|
||||
}
|
||||
r.out(w, []byte(`" />`))
|
||||
}
|
||||
}
|
||||
case Code:
|
||||
r.out(w, codeTag)
|
||||
escapeAllHTML(w, node.Literal)
|
||||
r.out(w, codeCloseTag)
|
||||
case Document:
|
||||
break
|
||||
case Paragraph:
|
||||
if skipParagraphTags(node) {
|
||||
break
|
||||
}
|
||||
if entering {
|
||||
// TODO: untangle this clusterfuck about when the newlines need
|
||||
// to be added and when not.
|
||||
if node.Prev != nil {
|
||||
switch node.Prev.Type {
|
||||
case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule:
|
||||
r.cr(w)
|
||||
}
|
||||
}
|
||||
if node.Parent.Type == BlockQuote && node.Prev == nil {
|
||||
r.cr(w)
|
||||
}
|
||||
r.out(w, pTag)
|
||||
} else {
|
||||
r.out(w, pCloseTag)
|
||||
if !(node.Parent.Type == Item && node.Next == nil) {
|
||||
r.cr(w)
|
||||
}
|
||||
}
|
||||
case BlockQuote:
|
||||
if entering {
|
||||
r.cr(w)
|
||||
r.out(w, blockquoteTag)
|
||||
} else {
|
||||
r.out(w, blockquoteCloseTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case HTMLBlock:
|
||||
if r.Flags&SkipHTML != 0 {
|
||||
break
|
||||
}
|
||||
r.cr(w)
|
||||
r.out(w, node.Literal)
|
||||
r.cr(w)
|
||||
case Heading:
|
||||
headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level
|
||||
openTag, closeTag := headingTagsFromLevel(headingLevel)
|
||||
if entering {
|
||||
if node.IsTitleblock {
|
||||
attrs = append(attrs, `class="title"`)
|
||||
}
|
||||
if node.HeadingID != "" {
|
||||
id := r.ensureUniqueHeadingID(node.HeadingID)
|
||||
if r.HeadingIDPrefix != "" {
|
||||
id = r.HeadingIDPrefix + id
|
||||
}
|
||||
if r.HeadingIDSuffix != "" {
|
||||
id = id + r.HeadingIDSuffix
|
||||
}
|
||||
attrs = append(attrs, fmt.Sprintf(`id="%s"`, id))
|
||||
}
|
||||
r.cr(w)
|
||||
r.tag(w, openTag, attrs)
|
||||
} else {
|
||||
r.out(w, closeTag)
|
||||
if !(node.Parent.Type == Item && node.Next == nil) {
|
||||
r.cr(w)
|
||||
}
|
||||
}
|
||||
case HorizontalRule:
|
||||
r.cr(w)
|
||||
r.outHRTag(w)
|
||||
r.cr(w)
|
||||
case List:
|
||||
openTag := ulTag
|
||||
closeTag := ulCloseTag
|
||||
if node.ListFlags&ListTypeOrdered != 0 {
|
||||
openTag = olTag
|
||||
closeTag = olCloseTag
|
||||
}
|
||||
if node.ListFlags&ListTypeDefinition != 0 {
|
||||
openTag = dlTag
|
||||
closeTag = dlCloseTag
|
||||
}
|
||||
if entering {
|
||||
if node.IsFootnotesList {
|
||||
r.out(w, footnotesDivBytes)
|
||||
r.outHRTag(w)
|
||||
r.cr(w)
|
||||
}
|
||||
r.cr(w)
|
||||
if node.Parent.Type == Item && node.Parent.Parent.Tight {
|
||||
r.cr(w)
|
||||
}
|
||||
r.tag(w, openTag[:len(openTag)-1], attrs)
|
||||
r.cr(w)
|
||||
} else {
|
||||
r.out(w, closeTag)
|
||||
//cr(w)
|
||||
//if node.parent.Type != Item {
|
||||
// cr(w)
|
||||
//}
|
||||
if node.Parent.Type == Item && node.Next != nil {
|
||||
r.cr(w)
|
||||
}
|
||||
if node.Parent.Type == Document || node.Parent.Type == BlockQuote {
|
||||
r.cr(w)
|
||||
}
|
||||
if node.IsFootnotesList {
|
||||
r.out(w, footnotesCloseDivBytes)
|
||||
}
|
||||
}
|
||||
case Item:
|
||||
openTag := liTag
|
||||
closeTag := liCloseTag
|
||||
if node.ListFlags&ListTypeDefinition != 0 {
|
||||
openTag = ddTag
|
||||
closeTag = ddCloseTag
|
||||
}
|
||||
if node.ListFlags&ListTypeTerm != 0 {
|
||||
openTag = dtTag
|
||||
closeTag = dtCloseTag
|
||||
}
|
||||
if entering {
|
||||
if itemOpenCR(node) {
|
||||
r.cr(w)
|
||||
}
|
||||
if node.ListData.RefLink != nil {
|
||||
slug := slugify(node.ListData.RefLink)
|
||||
r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug))
|
||||
break
|
||||
}
|
||||
r.out(w, openTag)
|
||||
} else {
|
||||
if node.ListData.RefLink != nil {
|
||||
slug := slugify(node.ListData.RefLink)
|
||||
if r.Flags&FootnoteReturnLinks != 0 {
|
||||
r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug))
|
||||
}
|
||||
}
|
||||
r.out(w, closeTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case CodeBlock:
|
||||
attrs = appendLanguageAttr(attrs, node.Info)
|
||||
r.cr(w)
|
||||
r.out(w, preTag)
|
||||
r.tag(w, codeTag[:len(codeTag)-1], attrs)
|
||||
escapeAllHTML(w, node.Literal)
|
||||
r.out(w, codeCloseTag)
|
||||
r.out(w, preCloseTag)
|
||||
if node.Parent.Type != Item {
|
||||
r.cr(w)
|
||||
}
|
||||
case Table:
|
||||
if entering {
|
||||
r.cr(w)
|
||||
r.out(w, tableTag)
|
||||
} else {
|
||||
r.out(w, tableCloseTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case TableCell:
|
||||
openTag := tdTag
|
||||
closeTag := tdCloseTag
|
||||
if node.IsHeader {
|
||||
openTag = thTag
|
||||
closeTag = thCloseTag
|
||||
}
|
||||
if entering {
|
||||
align := cellAlignment(node.Align)
|
||||
if align != "" {
|
||||
attrs = append(attrs, fmt.Sprintf(`align="%s"`, align))
|
||||
}
|
||||
if node.Prev == nil {
|
||||
r.cr(w)
|
||||
}
|
||||
r.tag(w, openTag, attrs)
|
||||
} else {
|
||||
r.out(w, closeTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case TableHead:
|
||||
if entering {
|
||||
r.cr(w)
|
||||
r.out(w, theadTag)
|
||||
} else {
|
||||
r.out(w, theadCloseTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case TableBody:
|
||||
if entering {
|
||||
r.cr(w)
|
||||
r.out(w, tbodyTag)
|
||||
// XXX: this is to adhere to a rather silly test. Should fix test.
|
||||
if node.FirstChild == nil {
|
||||
r.cr(w)
|
||||
}
|
||||
} else {
|
||||
r.out(w, tbodyCloseTag)
|
||||
r.cr(w)
|
||||
}
|
||||
case TableRow:
|
||||
if entering {
|
||||
r.cr(w)
|
||||
r.out(w, trTag)
|
||||
} else {
|
||||
r.out(w, trCloseTag)
|
||||
r.cr(w)
|
||||
}
|
||||
default:
|
||||
panic("Unknown node type " + node.Type.String())
|
||||
}
|
||||
return GoToNext
|
||||
}
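// Example (illustrative sketch, not part of the upstream blackfriday sources;
// written from the point of view of a package that imports blackfriday): a
// custom renderer can special-case just the node types it cares about and
// delegate everything else to HTMLRenderer.RenderNode, here emitting code
// spans without the usual <code> wrapper or escaping:
//
//	type wrapped struct{ *blackfriday.HTMLRenderer }
//
//	func (r wrapped) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
//		if node.Type == blackfriday.Code {
//			w.Write(node.Literal) // raw literal, no escaping
//			return blackfriday.GoToNext
//		}
//		return r.HTMLRenderer.RenderNode(w, node, entering)
//	}
//
//	r := wrapped{blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{})}
//	output := blackfriday.Run(input, blackfriday.WithRenderer(r))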
|
||||
|
||||
// RenderHeader writes HTML document preamble and TOC if requested.
|
||||
func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) {
|
||||
r.writeDocumentHeader(w)
|
||||
if r.Flags&TOC != 0 {
|
||||
r.writeTOC(w, ast)
|
||||
}
|
||||
}
|
||||
|
||||
// RenderFooter writes HTML document footer.
|
||||
func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) {
|
||||
if r.Flags&CompletePage == 0 {
|
||||
return
|
||||
}
|
||||
io.WriteString(w, "\n</body>\n</html>\n")
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) {
|
||||
if r.Flags&CompletePage == 0 {
|
||||
return
|
||||
}
|
||||
ending := ""
|
||||
if r.Flags&UseXHTML != 0 {
|
||||
io.WriteString(w, "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" ")
|
||||
io.WriteString(w, "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n")
|
||||
io.WriteString(w, "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n")
|
||||
ending = " /"
|
||||
} else {
|
||||
io.WriteString(w, "<!DOCTYPE html>\n")
|
||||
io.WriteString(w, "<html>\n")
|
||||
}
|
||||
io.WriteString(w, "<head>\n")
|
||||
io.WriteString(w, " <title>")
|
||||
if r.Flags&Smartypants != 0 {
|
||||
r.sr.Process(w, []byte(r.Title))
|
||||
} else {
|
||||
escapeHTML(w, []byte(r.Title))
|
||||
}
|
||||
io.WriteString(w, "</title>\n")
|
||||
io.WriteString(w, " <meta name=\"GENERATOR\" content=\"Blackfriday Markdown Processor v")
|
||||
io.WriteString(w, Version)
|
||||
io.WriteString(w, "\"")
|
||||
io.WriteString(w, ending)
|
||||
io.WriteString(w, ">\n")
|
||||
io.WriteString(w, " <meta charset=\"utf-8\"")
|
||||
io.WriteString(w, ending)
|
||||
io.WriteString(w, ">\n")
|
||||
if r.CSS != "" {
|
||||
io.WriteString(w, " <link rel=\"stylesheet\" type=\"text/css\" href=\"")
|
||||
escapeHTML(w, []byte(r.CSS))
|
||||
io.WriteString(w, "\"")
|
||||
io.WriteString(w, ending)
|
||||
io.WriteString(w, ">\n")
|
||||
}
|
||||
if r.Icon != "" {
|
||||
io.WriteString(w, " <link rel=\"icon\" type=\"image/x-icon\" href=\"")
|
||||
escapeHTML(w, []byte(r.Icon))
|
||||
io.WriteString(w, "\"")
|
||||
io.WriteString(w, ending)
|
||||
io.WriteString(w, ">\n")
|
||||
}
|
||||
io.WriteString(w, "</head>\n")
|
||||
io.WriteString(w, "<body>\n\n")
|
||||
}
|
||||
|
||||
func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) {
|
||||
buf := bytes.Buffer{}
|
||||
|
||||
inHeading := false
|
||||
tocLevel := 0
|
||||
headingCount := 0
|
||||
|
||||
ast.Walk(func(node *Node, entering bool) WalkStatus {
|
||||
if node.Type == Heading && !node.HeadingData.IsTitleblock {
|
||||
inHeading = entering
|
||||
if entering {
|
||||
node.HeadingID = fmt.Sprintf("toc_%d", headingCount)
|
||||
if node.Level == tocLevel {
|
||||
buf.WriteString("</li>\n\n<li>")
|
||||
} else if node.Level < tocLevel {
|
||||
for node.Level < tocLevel {
|
||||
tocLevel--
|
||||
buf.WriteString("</li>\n</ul>")
|
||||
}
|
||||
buf.WriteString("</li>\n\n<li>")
|
||||
} else {
|
||||
for node.Level > tocLevel {
|
||||
tocLevel++
|
||||
buf.WriteString("\n<ul>\n<li>")
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Fprintf(&buf, `<a href="#toc_%d">`, headingCount)
|
||||
headingCount++
|
||||
} else {
|
||||
buf.WriteString("</a>")
|
||||
}
|
||||
return GoToNext
|
||||
}
|
||||
|
||||
if inHeading {
|
||||
return r.RenderNode(&buf, node, entering)
|
||||
}
|
||||
|
||||
return GoToNext
|
||||
})
|
||||
|
||||
for ; tocLevel > 0; tocLevel-- {
|
||||
buf.WriteString("</li>\n</ul>")
|
||||
}
|
||||
|
||||
if buf.Len() > 0 {
|
||||
io.WriteString(w, "<nav>\n")
|
||||
w.Write(buf.Bytes())
|
||||
io.WriteString(w, "\n\n</nav>\n")
|
||||
}
|
||||
r.lastOutputLen = buf.Len()
|
||||
}
|
1228 vendor/github.com/russross/blackfriday/v2/inline.go (generated, vendored, new file)
File diff suppressed because it is too large

950 vendor/github.com/russross/blackfriday/v2/markdown.go (generated, vendored, new file)
@ -0,0 +1,950 @@
|
||||
// Blackfriday Markdown Processor
|
||||
// Available at http://github.com/russross/blackfriday
|
||||
//
|
||||
// Copyright © 2011 Russ Ross <russ@russross.com>.
|
||||
// Distributed under the Simplified BSD License.
|
||||
// See README.md for details.
|
||||
|
||||
package blackfriday
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
//
|
||||
// Markdown parsing and processing
|
||||
//
|
||||
|
||||
// Version string of the package. Appears in the rendered document when
|
||||
// CompletePage flag is on.
|
||||
const Version = "2.0"
|
||||
|
||||
// Extensions is a bitwise or'ed collection of enabled Blackfriday's
|
||||
// extensions.
|
||||
type Extensions int
|
||||
|
||||
// These are the supported markdown parsing extensions.
|
||||
// OR these values together to select multiple extensions.
|
||||
const (
|
||||
NoExtensions Extensions = 0
|
||||
NoIntraEmphasis Extensions = 1 << iota // Ignore emphasis markers inside words
|
||||
Tables // Render tables
|
||||
FencedCode // Render fenced code blocks
|
||||
Autolink // Detect embedded URLs that are not explicitly marked
|
||||
Strikethrough // Strikethrough text using ~~text~~
|
||||
LaxHTMLBlocks // Loosen up HTML block parsing rules
|
||||
SpaceHeadings // Be strict about prefix heading rules
|
||||
HardLineBreak // Translate newlines into line breaks
|
||||
TabSizeEight // Expand tabs to eight spaces instead of four
|
||||
Footnotes // Pandoc-style footnotes
|
||||
NoEmptyLineBeforeBlock // No need to insert an empty line to start a (code, quote, ordered list, unordered list) block
|
||||
HeadingIDs // specify heading IDs with {#id}
|
||||
Titleblock // Titleblock ala pandoc
|
||||
AutoHeadingIDs // Create the heading ID from the text
|
||||
BackslashLineBreak // Translate trailing backslashes into line breaks
|
||||
DefinitionLists // Render definition lists
|
||||
|
||||
CommonHTMLFlags HTMLFlags = UseXHTML | Smartypants |
|
||||
SmartypantsFractions | SmartypantsDashes | SmartypantsLatexDashes
|
||||
|
||||
CommonExtensions Extensions = NoIntraEmphasis | Tables | FencedCode |
|
||||
Autolink | Strikethrough | SpaceHeadings | HeadingIDs |
|
||||
BackslashLineBreak | DefinitionLists
|
||||
)
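// Example (illustrative sketch, not part of the upstream blackfriday sources;
// assumes the package is imported as blackfriday): extensions are plain bit
// flags, so a custom set is built by OR-ing the constants together and
// passing the result to WithExtensions:
//
//	exts := blackfriday.NoIntraEmphasis | blackfriday.Tables | blackfriday.FencedCode
//	output := blackfriday.Run(input, blackfriday.WithExtensions(exts))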
|
||||
|
||||
// ListType contains bitwise or'ed flags for list and list item objects.
|
||||
type ListType int
|
||||
|
||||
// These are the possible flag values for the ListItem renderer.
|
||||
// Multiple flag values may be ORed together.
|
||||
// These are mostly of interest if you are writing a new output format.
|
||||
const (
|
||||
ListTypeOrdered ListType = 1 << iota
|
||||
ListTypeDefinition
|
||||
ListTypeTerm
|
||||
|
||||
ListItemContainsBlock
|
||||
ListItemBeginningOfList // TODO: figure out if this is of any use now
|
||||
ListItemEndOfList
|
||||
)
|
||||
|
||||
// CellAlignFlags holds a type of alignment in a table cell.
|
||||
type CellAlignFlags int
|
||||
|
||||
// These are the possible flag values for the table cell renderer.
|
||||
// Only a single one of these values will be used; they are not ORed together.
|
||||
// These are mostly of interest if you are writing a new output format.
|
||||
const (
|
||||
TableAlignmentLeft CellAlignFlags = 1 << iota
|
||||
TableAlignmentRight
|
||||
TableAlignmentCenter = (TableAlignmentLeft | TableAlignmentRight)
|
||||
)
|
||||
|
||||
// The size of a tab stop.
|
||||
const (
|
||||
TabSizeDefault = 4
|
||||
TabSizeDouble = 8
|
||||
)
|
||||
|
||||
// blockTags is a set of tags that are recognized as HTML block tags.
|
||||
// Any of these can be included in markdown text without special escaping.
|
||||
var blockTags = map[string]struct{}{
|
||||
"blockquote": {},
|
||||
"del": {},
|
||||
"div": {},
|
||||
"dl": {},
|
||||
"fieldset": {},
|
||||
"form": {},
|
||||
"h1": {},
|
||||
"h2": {},
|
||||
"h3": {},
|
||||
"h4": {},
|
||||
"h5": {},
|
||||
"h6": {},
|
||||
"iframe": {},
|
||||
"ins": {},
|
||||
"math": {},
|
||||
"noscript": {},
|
||||
"ol": {},
|
||||
"pre": {},
|
||||
"p": {},
|
||||
"script": {},
|
||||
"style": {},
|
||||
"table": {},
|
||||
"ul": {},
|
||||
|
||||
// HTML5
|
||||
"address": {},
|
||||
"article": {},
|
||||
"aside": {},
|
||||
"canvas": {},
|
||||
"figcaption": {},
|
||||
"figure": {},
|
||||
"footer": {},
|
||||
"header": {},
|
||||
"hgroup": {},
|
||||
"main": {},
|
||||
"nav": {},
|
||||
"output": {},
|
||||
"progress": {},
|
||||
"section": {},
|
||||
"video": {},
|
||||
}
|
||||
|
||||
// Renderer is the rendering interface. This is mostly of interest if you are
|
||||
// implementing a new rendering format.
|
||||
//
|
||||
// Only an HTML implementation is provided in this repository, see the README
|
||||
// for external implementations.
|
||||
type Renderer interface {
|
||||
// RenderNode is the main rendering method. It will be called once for
|
||||
// every leaf node and twice for every non-leaf node (first with
|
||||
// entering=true, then with entering=false). The method should write its
|
||||
// rendition of the node to the supplied writer w.
|
||||
RenderNode(w io.Writer, node *Node, entering bool) WalkStatus
|
||||
|
||||
// RenderHeader is a method that allows the renderer to produce some
|
||||
// content preceding the main body of the output document. The header is
|
||||
// understood in the broad sense here. For example, the default HTML
|
||||
// renderer will write not only the HTML document preamble, but also the
|
||||
// table of contents if it was requested.
|
||||
//
|
||||
// The method will be passed an entire document tree, in case a particular
|
||||
// implementation needs to inspect it to produce output.
|
||||
//
|
||||
// The output should be written to the supplied writer w. If your
|
||||
// implementation has no header to write, supply an empty implementation.
|
||||
RenderHeader(w io.Writer, ast *Node)
|
||||
|
||||
// RenderFooter is a symmetric counterpart of RenderHeader.
|
||||
RenderFooter(w io.Writer, ast *Node)
|
||||
}
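// Example (illustrative sketch, not part of the upstream blackfriday sources;
// assumes the package is imported as blackfriday): the smallest useful
// Renderer implements all three methods, even if two of them do nothing. This
// one writes only the raw text of leaf Text nodes:
//
//	type textRenderer struct{}
//
//	func (textRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
//		if entering && node.Type == blackfriday.Text {
//			w.Write(node.Literal)
//		}
//		return blackfriday.GoToNext
//	}
//
//	func (textRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {}
//	func (textRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {}
//
//	output := blackfriday.Run(input, blackfriday.WithRenderer(textRenderer{}))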
|
||||
|
||||
// Callback functions for inline parsing. One such function is defined
|
||||
// for each character that triggers a response when parsing inline data.
|
||||
type inlineParser func(p *Markdown, data []byte, offset int) (int, *Node)
|
||||
|
||||
// Markdown is a type that holds extensions and the runtime state used by
|
||||
// Parse, and the renderer. You can not use it directly, construct it with New.
|
||||
type Markdown struct {
|
||||
renderer Renderer
|
||||
referenceOverride ReferenceOverrideFunc
|
||||
refs map[string]*reference
|
||||
inlineCallback [256]inlineParser
|
||||
extensions Extensions
|
||||
nesting int
|
||||
maxNesting int
|
||||
insideLink bool
|
||||
|
||||
// Footnotes need to be ordered as well as available to quickly check for
|
||||
// presence. If a ref is also a footnote, it's stored both in refs and here
|
||||
// in notes. Slice is nil if footnotes not enabled.
|
||||
notes []*reference
|
||||
|
||||
doc *Node
|
||||
tip *Node // = doc
|
||||
oldTip *Node
|
||||
lastMatchedContainer *Node // = doc
|
||||
allClosed bool
|
||||
}
|
||||
|
||||
func (p *Markdown) getRef(refid string) (ref *reference, found bool) {
|
||||
if p.referenceOverride != nil {
|
||||
r, overridden := p.referenceOverride(refid)
|
||||
if overridden {
|
||||
if r == nil {
|
||||
return nil, false
|
||||
}
|
||||
return &reference{
|
||||
link: []byte(r.Link),
|
||||
title: []byte(r.Title),
|
||||
noteID: 0,
|
||||
hasBlock: false,
|
||||
text: []byte(r.Text)}, true
|
||||
}
|
||||
}
|
||||
// refs are case insensitive
|
||||
ref, found = p.refs[strings.ToLower(refid)]
|
||||
return ref, found
|
||||
}
|
||||
|
||||
func (p *Markdown) finalize(block *Node) {
|
||||
above := block.Parent
|
||||
block.open = false
|
||||
p.tip = above
|
||||
}
|
||||
|
||||
func (p *Markdown) addChild(node NodeType, offset uint32) *Node {
|
||||
return p.addExistingChild(NewNode(node), offset)
|
||||
}
|
||||
|
||||
func (p *Markdown) addExistingChild(node *Node, offset uint32) *Node {
|
||||
for !p.tip.canContain(node.Type) {
|
||||
p.finalize(p.tip)
|
||||
}
|
||||
p.tip.AppendChild(node)
|
||||
p.tip = node
|
||||
return node
|
||||
}
|
||||
|
||||
func (p *Markdown) closeUnmatchedBlocks() {
|
||||
if !p.allClosed {
|
||||
for p.oldTip != p.lastMatchedContainer {
|
||||
parent := p.oldTip.Parent
|
||||
p.finalize(p.oldTip)
|
||||
p.oldTip = parent
|
||||
}
|
||||
p.allClosed = true
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
//
|
||||
// Public interface
|
||||
//
|
||||
//
|
||||
|
||||
// Reference represents the details of a link.
|
||||
// See the documentation in Options for more details on use-case.
|
||||
type Reference struct {
|
||||
// Link is usually the URL the reference points to.
|
||||
Link string
|
||||
// Title is the alternate text describing the link in more detail.
|
||||
Title string
|
||||
// Text is the optional text to override the ref with if the syntax used was
|
||||
// [refid][]
|
||||
Text string
|
||||
}
|
||||
|
||||
// ReferenceOverrideFunc is expected to be called with a reference string and
|
||||
// return either a valid Reference type that the reference string maps to or
|
||||
// nil. If overridden is false, the default reference logic will be executed.
|
||||
// See the documentation in Options for more details on use-case.
|
||||
type ReferenceOverrideFunc func(reference string) (ref *Reference, overridden bool)
|
||||
|
||||
// New constructs a Markdown processor. You can use the same With* functions as
|
||||
// for Run() to customize parser's behavior and the renderer.
|
||||
func New(opts ...Option) *Markdown {
|
||||
var p Markdown
|
||||
for _, opt := range opts {
|
||||
opt(&p)
|
||||
}
|
||||
p.refs = make(map[string]*reference)
|
||||
p.maxNesting = 16
|
||||
p.insideLink = false
|
||||
docNode := NewNode(Document)
|
||||
p.doc = docNode
|
||||
p.tip = docNode
|
||||
p.oldTip = docNode
|
||||
p.lastMatchedContainer = docNode
|
||||
p.allClosed = true
|
||||
// register inline parsers
|
||||
p.inlineCallback[' '] = maybeLineBreak
|
||||
p.inlineCallback['*'] = emphasis
|
||||
p.inlineCallback['_'] = emphasis
|
||||
if p.extensions&Strikethrough != 0 {
|
||||
p.inlineCallback['~'] = emphasis
|
||||
}
|
||||
p.inlineCallback['`'] = codeSpan
|
||||
p.inlineCallback['\n'] = lineBreak
|
||||
p.inlineCallback['['] = link
|
||||
p.inlineCallback['<'] = leftAngle
|
||||
p.inlineCallback['\\'] = escape
|
||||
p.inlineCallback['&'] = entity
|
||||
p.inlineCallback['!'] = maybeImage
|
||||
p.inlineCallback['^'] = maybeInlineFootnote
|
||||
if p.extensions&Autolink != 0 {
|
||||
p.inlineCallback['h'] = maybeAutoLink
|
||||
p.inlineCallback['m'] = maybeAutoLink
|
||||
p.inlineCallback['f'] = maybeAutoLink
|
||||
p.inlineCallback['H'] = maybeAutoLink
|
||||
p.inlineCallback['M'] = maybeAutoLink
|
||||
p.inlineCallback['F'] = maybeAutoLink
|
||||
}
|
||||
if p.extensions&Footnotes != 0 {
|
||||
p.notes = make([]*reference, 0)
|
||||
}
|
||||
return &p
|
||||
}
|
||||
|
||||
// Option customizes the Markdown processor's default behavior.
|
||||
type Option func(*Markdown)
|
||||
|
||||
// WithRenderer allows you to override the default renderer.
|
||||
func WithRenderer(r Renderer) Option {
|
||||
return func(p *Markdown) {
|
||||
p.renderer = r
|
||||
}
|
||||
}
|
||||
|
||||
// WithExtensions allows you to pick some of the many extensions provided by
|
||||
// Blackfriday. You can bitwise OR them.
|
||||
func WithExtensions(e Extensions) Option {
|
||||
return func(p *Markdown) {
|
||||
p.extensions = e
|
||||
}
|
||||
}
|
||||
|
||||
// WithNoExtensions turns off all extensions and custom behavior.
|
||||
func WithNoExtensions() Option {
|
||||
return func(p *Markdown) {
|
||||
p.extensions = NoExtensions
|
||||
p.renderer = NewHTMLRenderer(HTMLRendererParameters{
|
||||
Flags: HTMLFlagsNone,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// WithRefOverride sets an optional function callback that is called every
|
||||
// time a reference is resolved.
|
||||
//
|
||||
// In Markdown, the link reference syntax can be made to resolve a link to
|
||||
// a reference instead of an inline URL, in one of the following ways:
|
||||
//
|
||||
// * [link text][refid]
|
||||
// * [refid][]
|
||||
//
|
||||
// Usually, the refid is defined at the bottom of the Markdown document. If
|
||||
// this override function is provided, the refid is passed to the override
|
||||
// function first, before consulting the defined refids at the bottom. If
|
||||
// the override function indicates an override did not occur, the refids at
|
||||
// the bottom will be used to fill in the link details.
|
||||
func WithRefOverride(o ReferenceOverrideFunc) Option {
|
||||
return func(p *Markdown) {
|
||||
p.referenceOverride = o
|
||||
}
|
||||
}
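// Example (illustrative sketch, not part of the upstream blackfriday sources;
// assumes the package is imported as blackfriday): resolve the hypothetical
// refid "home" to a fixed URL and let every other refid fall through to the
// definitions at the bottom of the document:
//
//	override := func(refid string) (*blackfriday.Reference, bool) {
//		if refid == "home" {
//			return &blackfriday.Reference{Link: "https://example.com/", Title: "Home"}, true
//		}
//		return nil, false // not overridden, use the document's own definitions
//	}
//	output := blackfriday.Run(input, blackfriday.WithRefOverride(override))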
|
||||
|
||||
// Run is the main entry point to Blackfriday. It parses and renders a
|
||||
// block of markdown-encoded text.
|
||||
//
|
||||
// The simplest invocation of Run takes one argument, input:
|
||||
// output := Run(input)
|
||||
// This will parse the input with CommonExtensions enabled and render it with
|
||||
// the default HTMLRenderer (with CommonHTMLFlags).
|
||||
//
|
||||
// Variadic arguments opts can customize the default behavior. Since Markdown
|
||||
// type does not contain exported fields, you can not use it directly. Instead,
|
||||
// use the With* functions. For example, this will call the most basic
|
||||
// functionality, with no extensions:
|
||||
// output := Run(input, WithNoExtensions())
|
||||
//
|
||||
// You can use any number of With* arguments, even contradicting ones. They
|
||||
// will be applied in order of appearance and the latter will override the
|
||||
// former:
|
||||
// output := Run(input, WithNoExtensions(), WithExtensions(exts),
|
||||
// WithRenderer(yourRenderer))
|
||||
func Run(input []byte, opts ...Option) []byte {
|
||||
r := NewHTMLRenderer(HTMLRendererParameters{
|
||||
Flags: CommonHTMLFlags,
|
||||
})
|
||||
optList := []Option{WithRenderer(r), WithExtensions(CommonExtensions)}
|
||||
optList = append(optList, opts...)
|
||||
parser := New(optList...)
|
||||
ast := parser.Parse(input)
|
||||
var buf bytes.Buffer
|
||||
parser.renderer.RenderHeader(&buf, ast)
|
||||
ast.Walk(func(node *Node, entering bool) WalkStatus {
|
||||
return parser.renderer.RenderNode(&buf, node, entering)
|
||||
})
|
||||
parser.renderer.RenderFooter(&buf, ast)
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// Parse is an entry point to the parsing part of Blackfriday. It takes an
|
||||
// input markdown document and produces a syntax tree for its contents. This
|
||||
// tree can then be rendered with a default or custom renderer, or
|
||||
// analyzed/transformed by the caller to whatever non-standard needs they have.
|
||||
// The return value is the root node of the syntax tree.
|
||||
func (p *Markdown) Parse(input []byte) *Node {
|
||||
p.block(input)
|
||||
// Walk the tree and finish up some of unfinished blocks
|
||||
for p.tip != nil {
|
||||
p.finalize(p.tip)
|
||||
}
|
||||
// Walk the tree again and process inline markdown in each block
|
||||
p.doc.Walk(func(node *Node, entering bool) WalkStatus {
|
||||
if node.Type == Paragraph || node.Type == Heading || node.Type == TableCell {
|
||||
p.inline(node, node.content)
|
||||
node.content = nil
|
||||
}
|
||||
return GoToNext
|
||||
})
|
||||
p.parseRefsToAST()
|
||||
return p.doc
|
||||
}
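// Example (illustrative sketch, not part of the upstream blackfriday sources;
// assumes the package is imported as blackfriday): Parse can be used instead
// of Run when the caller wants access to the syntax tree. Rendering the tree
// afterwards mirrors what Run does internally:
//
//	renderer := blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
//		Flags: blackfriday.CommonHTMLFlags,
//	})
//	parser := blackfriday.New(
//		blackfriday.WithRenderer(renderer),
//		blackfriday.WithExtensions(blackfriday.CommonExtensions),
//	)
//	ast := parser.Parse(input)
//
//	var buf bytes.Buffer
//	renderer.RenderHeader(&buf, ast)
//	ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
//		return renderer.RenderNode(&buf, node, entering)
//	})
//	renderer.RenderFooter(&buf, ast)
//	html := buf.Bytes()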
|
||||
|
||||
func (p *Markdown) parseRefsToAST() {
|
||||
if p.extensions&Footnotes == 0 || len(p.notes) == 0 {
|
||||
return
|
||||
}
|
||||
p.tip = p.doc
|
||||
block := p.addBlock(List, nil)
|
||||
block.IsFootnotesList = true
|
||||
block.ListFlags = ListTypeOrdered
|
||||
flags := ListItemBeginningOfList
|
||||
// Note: this loop is intentionally explicit, not range-form. This is
|
||||
// because the body of the loop will append nested footnotes to p.notes and
|
||||
// we need to process those late additions. Range form would only walk over
|
||||
// the fixed initial set.
|
||||
for i := 0; i < len(p.notes); i++ {
|
||||
ref := p.notes[i]
|
||||
p.addExistingChild(ref.footnote, 0)
|
||||
block := ref.footnote
|
||||
block.ListFlags = flags | ListTypeOrdered
|
||||
block.RefLink = ref.link
|
||||
if ref.hasBlock {
|
||||
flags |= ListItemContainsBlock
|
||||
p.block(ref.title)
|
||||
} else {
|
||||
p.inline(block, ref.title)
|
||||
}
|
||||
flags &^= ListItemBeginningOfList | ListItemContainsBlock
|
||||
}
|
||||
above := block.Parent
|
||||
finalizeList(block)
|
||||
p.tip = above
|
||||
block.Walk(func(node *Node, entering bool) WalkStatus {
|
||||
if node.Type == Paragraph || node.Type == Heading {
|
||||
p.inline(node, node.content)
|
||||
node.content = nil
|
||||
}
|
||||
return GoToNext
|
||||
})
|
||||
}
|
||||
|
||||
//
|
||||
// Link references
|
||||
//
|
||||
// This section implements support for references that (usually) appear
|
||||
// as footnotes in a document, and can be referenced anywhere in the document.
|
||||
// The basic format is:
|
||||
//
|
||||
// [1]: http://www.google.com/ "Google"
|
||||
// [2]: http://www.github.com/ "Github"
|
||||
//
|
||||
// Anywhere in the document, the reference can be linked by referring to its
|
||||
// label, i.e., 1 and 2 in this example, as in:
|
||||
//
|
||||
// This library is hosted on [Github][2], a git hosting site.
|
||||
//
|
||||
// Actual footnotes as specified in Pandoc and supported by some other Markdown
|
||||
// libraries such as php-markdown are also taken care of. They look like this:
|
||||
//
|
||||
// This sentence needs a bit of further explanation.[^note]
|
||||
//
|
||||
// [^note]: This is the explanation.
|
||||
//
|
||||
// Footnotes should be placed at the end of the document in an ordered list.
|
||||
// Finally, there are inline footnotes such as:
|
||||
//
|
||||
// Inline footnotes^[Also supported.] provide a quick inline explanation,
|
||||
// but are rendered at the bottom of the document.
|
||||
//
|
||||
|
||||
// reference holds all information necessary for reference-style links or
|
||||
// footnotes.
|
||||
//
|
||||
// Consider this markdown with reference-style links:
|
||||
//
|
||||
// [link][ref]
|
||||
//
|
||||
// [ref]: /url/ "tooltip title"
|
||||
//
|
||||
// It will be ultimately converted to this HTML:
|
||||
//
|
||||
// <p><a href=\"/url/\" title=\"title\">link</a></p>
|
||||
//
|
||||
// And a reference structure will be populated as follows:
|
||||
//
|
||||
// p.refs["ref"] = &reference{
|
||||
// link: "/url/",
|
||||
// title: "tooltip title",
|
||||
// }
|
||||
//
|
||||
// Alternatively, reference can contain information about a footnote. Consider
|
||||
// this markdown:
|
||||
//
|
||||
// Text needing a footnote.[^a]
|
||||
//
|
||||
// [^a]: This is the note
|
||||
//
|
||||
// A reference structure will be populated as follows:
|
||||
//
|
||||
// p.refs["a"] = &reference{
|
||||
// link: "a",
|
||||
// title: "This is the note",
|
||||
// noteID: <some positive int>,
|
||||
// }
|
||||
//
|
||||
// TODO: As you can see, it begs for splitting into two dedicated structures
|
||||
// for refs and for footnotes.
|
||||
type reference struct {
|
||||
link []byte
|
||||
title []byte
|
||||
noteID int // 0 if not a footnote ref
|
||||
hasBlock bool
|
||||
footnote *Node // a link to the Item node within a list of footnotes
|
||||
|
||||
text []byte // only gets populated by refOverride feature with Reference.Text
|
||||
}
|
||||
|
||||
func (r *reference) String() string {
|
||||
return fmt.Sprintf("{link: %q, title: %q, text: %q, noteID: %d, hasBlock: %v}",
|
||||
r.link, r.title, r.text, r.noteID, r.hasBlock)
|
||||
}
|
||||
|
||||
// Check whether or not data starts with a reference link.
|
||||
// If so, it is parsed and stored in the list of references
|
||||
// (in the render struct).
|
||||
// Returns the number of bytes to skip to move past it,
|
||||
// or zero if the first line is not a reference.
|
||||
func isReference(p *Markdown, data []byte, tabSize int) int {
|
||||
// up to 3 optional leading spaces
|
||||
if len(data) < 4 {
|
||||
return 0
|
||||
}
|
||||
i := 0
|
||||
for i < 3 && data[i] == ' ' {
|
||||
i++
|
||||
}
|
||||
|
||||
noteID := 0
|
||||
|
||||
// id part: anything but a newline between brackets
|
||||
if data[i] != '[' {
|
||||
return 0
|
||||
}
|
||||
i++
|
||||
if p.extensions&Footnotes != 0 {
|
||||
if i < len(data) && data[i] == '^' {
|
||||
// we can set it to anything here because the proper noteIds will
|
||||
// be assigned later during the second pass. It just has to be != 0
|
||||
noteID = 1
|
||||
i++
|
||||
}
|
||||
}
|
||||
idOffset := i
|
||||
for i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != ']' {
|
||||
i++
|
||||
}
|
||||
if i >= len(data) || data[i] != ']' {
|
||||
return 0
|
||||
}
|
||||
idEnd := i
|
||||
// footnotes can have empty ID, like this: [^], but a reference can not be
|
||||
// empty like this: []. Break early if it's not a footnote and there's no ID
|
||||
if noteID == 0 && idOffset == idEnd {
|
||||
return 0
|
||||
}
|
||||
// spacer: colon (space | tab)* newline? (space | tab)*
|
||||
i++
|
||||
if i >= len(data) || data[i] != ':' {
|
||||
return 0
|
||||
}
|
||||
i++
|
||||
for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
|
||||
i++
|
||||
}
|
||||
if i < len(data) && (data[i] == '\n' || data[i] == '\r') {
|
||||
i++
|
||||
if i < len(data) && data[i] == '\n' && data[i-1] == '\r' {
|
||||
i++
|
||||
}
|
||||
}
|
||||
for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
|
||||
i++
|
||||
}
|
||||
if i >= len(data) {
|
||||
return 0
|
||||
}
|
||||
|
||||
var (
|
||||
linkOffset, linkEnd int
|
||||
titleOffset, titleEnd int
|
||||
lineEnd int
|
||||
raw []byte
|
||||
hasBlock bool
|
||||
)
|
||||
|
||||
if p.extensions&Footnotes != 0 && noteID != 0 {
|
||||
linkOffset, linkEnd, raw, hasBlock = scanFootnote(p, data, i, tabSize)
|
||||
lineEnd = linkEnd
|
||||
} else {
|
||||
linkOffset, linkEnd, titleOffset, titleEnd, lineEnd = scanLinkRef(p, data, i)
|
||||
}
|
||||
if lineEnd == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
// a valid ref has been found
|
||||
|
||||
ref := &reference{
|
||||
noteID: noteID,
|
||||
hasBlock: hasBlock,
|
||||
}
|
||||
|
||||
if noteID > 0 {
|
||||
// reusing the link field for the id since footnotes don't have links
|
||||
ref.link = data[idOffset:idEnd]
|
||||
// if footnote, it's not really a title, it's the contained text
|
||||
ref.title = raw
|
||||
} else {
|
||||
ref.link = data[linkOffset:linkEnd]
|
||||
ref.title = data[titleOffset:titleEnd]
|
||||
}
|
||||
|
||||
// id matches are case-insensitive
|
||||
id := string(bytes.ToLower(data[idOffset:idEnd]))
|
||||
|
||||
p.refs[id] = ref
|
||||
|
||||
return lineEnd
|
||||
}
|
||||
|
||||
func scanLinkRef(p *Markdown, data []byte, i int) (linkOffset, linkEnd, titleOffset, titleEnd, lineEnd int) {
|
||||
// link: whitespace-free sequence, optionally between angle brackets
|
||||
if data[i] == '<' {
|
||||
i++
|
||||
}
|
||||
linkOffset = i
|
||||
for i < len(data) && data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' {
|
||||
i++
|
||||
}
|
||||
linkEnd = i
|
||||
if data[linkOffset] == '<' && data[linkEnd-1] == '>' {
|
||||
linkOffset++
|
||||
linkEnd--
|
||||
}
|
||||
|
||||
// optional spacer: (space | tab)* (newline | '\'' | '"' | '(' )
|
||||
for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
|
||||
i++
|
||||
}
|
||||
if i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != '\'' && data[i] != '"' && data[i] != '(' {
|
||||
return
|
||||
}
|
||||
|
||||
// compute end-of-line
|
||||
if i >= len(data) || data[i] == '\r' || data[i] == '\n' {
|
||||
lineEnd = i
|
||||
}
|
||||
if i+1 < len(data) && data[i] == '\r' && data[i+1] == '\n' {
|
||||
lineEnd++
|
||||
}
|
||||
|
||||
// optional (space|tab)* spacer after a newline
|
||||
if lineEnd > 0 {
|
||||
i = lineEnd + 1
|
||||
for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
// optional title: any non-newline sequence enclosed in '"() alone on its line
|
||||
if i+1 < len(data) && (data[i] == '\'' || data[i] == '"' || data[i] == '(') {
|
||||
i++
|
||||
titleOffset = i
|
||||
|
||||
// look for EOL
|
||||
for i < len(data) && data[i] != '\n' && data[i] != '\r' {
|
||||
i++
|
||||
}
|
||||
if i+1 < len(data) && data[i] == '\n' && data[i+1] == '\r' {
|
||||
titleEnd = i + 1
|
||||
} else {
|
||||
titleEnd = i
|
||||
}
|
||||
|
||||
// step back
|
||||
i--
|
||||
for i > titleOffset && (data[i] == ' ' || data[i] == '\t') {
|
||||
i--
|
||||
}
|
||||
if i > titleOffset && (data[i] == '\'' || data[i] == '"' || data[i] == ')') {
|
||||
lineEnd = titleEnd
|
||||
titleEnd = i
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// The first bit of this logic is the same as Parser.listItem, but the rest
|
||||
// is much simpler. This function simply finds the entire block and shifts it
|
||||
// over by one tab if it is indeed a block (just returns the line if it's not).
|
||||
// blockEnd is the end of the section in the input buffer, and contents is the
|
||||
// extracted text that was shifted over one tab. It will need to be rendered at
|
||||
// the end of the document.
|
||||
func scanFootnote(p *Markdown, data []byte, i, indentSize int) (blockStart, blockEnd int, contents []byte, hasBlock bool) {
|
||||
if i == 0 || len(data) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// skip leading whitespace on first line
|
||||
for i < len(data) && data[i] == ' ' {
|
||||
i++
|
||||
}
|
||||
|
||||
blockStart = i
|
||||
|
||||
// find the end of the line
|
||||
blockEnd = i
|
||||
for i < len(data) && data[i-1] != '\n' {
|
||||
i++
|
||||
}
|
||||
|
||||
// get working buffer
|
||||
var raw bytes.Buffer
|
||||
|
||||
// put the first line into the working buffer
|
||||
raw.Write(data[blockEnd:i])
|
||||
blockEnd = i
|
||||
|
||||
// process the following lines
|
||||
containsBlankLine := false
|
||||
|
||||
gatherLines:
|
||||
for blockEnd < len(data) {
|
||||
i++
|
||||
|
||||
// find the end of this line
|
||||
for i < len(data) && data[i-1] != '\n' {
|
||||
i++
|
||||
}
|
||||
|
||||
// if it is an empty line, guess that it is part of this item
|
||||
// and move on to the next line
|
||||
if p.isEmpty(data[blockEnd:i]) > 0 {
|
||||
containsBlankLine = true
|
||||
blockEnd = i
|
||||
continue
|
||||
}
|
||||
|
||||
n := 0
|
||||
if n = isIndented(data[blockEnd:i], indentSize); n == 0 {
|
||||
// this is the end of the block.
|
||||
// we don't want to include this last line in the index.
|
||||
break gatherLines
|
||||
}
|
||||
|
||||
// if there were blank lines before this one, insert a new one now
|
||||
if containsBlankLine {
|
||||
raw.WriteByte('\n')
|
||||
containsBlankLine = false
|
||||
}
|
||||
|
||||
// get rid of that first tab, write to buffer
|
||||
raw.Write(data[blockEnd+n : i])
|
||||
hasBlock = true
|
||||
|
||||
blockEnd = i
|
||||
}
|
||||
|
||||
if data[blockEnd-1] != '\n' {
|
||||
raw.WriteByte('\n')
|
||||
}
|
||||
|
||||
contents = raw.Bytes()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
//
|
||||
//
|
||||
// Miscellaneous helper functions
|
||||
//
|
||||
//
|
||||
|
||||
// Test if a character is a punctuation symbol.
|
||||
// Taken from a private function in regexp in the stdlib.
|
||||
func ispunct(c byte) bool {
|
||||
for _, r := range []byte("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") {
|
||||
if c == r {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Test if a character is a whitespace character.
|
||||
func isspace(c byte) bool {
|
||||
return ishorizontalspace(c) || isverticalspace(c)
|
||||
}
|
||||
|
||||
// Test if a character is a horizontal whitespace character.
|
||||
func ishorizontalspace(c byte) bool {
|
||||
return c == ' ' || c == '\t'
|
||||
}
|
||||
|
||||
// Test if a character is a vertical whitespace character.
|
||||
func isverticalspace(c byte) bool {
|
||||
return c == '\n' || c == '\r' || c == '\f' || c == '\v'
|
||||
}
|
||||
|
||||
// Test if a character is a letter.
|
||||
func isletter(c byte) bool {
|
||||
return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
|
||||
}
|
||||
|
||||
// Test if a character is a letter or a digit.
|
||||
// TODO: check when this is looking for ASCII alnum and when it should use unicode
|
||||
func isalnum(c byte) bool {
|
||||
return (c >= '0' && c <= '9') || isletter(c)
|
||||
}
|
||||
|
||||
// Replace tab characters with spaces, aligning to the next TAB_SIZE column.
|
||||
// always ends output with a newline
|
||||
func expandTabs(out *bytes.Buffer, line []byte, tabSize int) {
|
||||
// first, check for common cases: no tabs, or only tabs at beginning of line
|
||||
i, prefix := 0, 0
|
||||
slowcase := false
|
||||
for i = 0; i < len(line); i++ {
|
||||
if line[i] == '\t' {
|
||||
if prefix == i {
|
||||
prefix++
|
||||
} else {
|
||||
slowcase = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// no need to decode runes if all tabs are at the beginning of the line
|
||||
if !slowcase {
|
||||
for i = 0; i < prefix*tabSize; i++ {
|
||||
out.WriteByte(' ')
|
||||
}
|
||||
out.Write(line[prefix:])
|
||||
return
|
||||
}
|
||||
|
||||
// the slow case: we need to count runes to figure out how
|
||||
// many spaces to insert for each tab
|
||||
column := 0
|
||||
i = 0
|
||||
for i < len(line) {
|
||||
start := i
|
||||
for i < len(line) && line[i] != '\t' {
|
||||
_, size := utf8.DecodeRune(line[i:])
|
||||
i += size
|
||||
column++
|
||||
}
|
||||
|
||||
if i > start {
|
||||
out.Write(line[start:i])
|
||||
}
|
||||
|
||||
if i >= len(line) {
|
||||
break
|
||||
}
|
||||
|
||||
for {
|
||||
out.WriteByte(' ')
|
||||
column++
|
||||
if column%tabSize == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
// Find if a line counts as indented or not.
|
||||
// Returns number of characters the indent is (0 = not indented).
|
||||
func isIndented(data []byte, indentSize int) int {
|
||||
if len(data) == 0 {
|
||||
return 0
|
||||
}
|
||||
if data[0] == '\t' {
|
||||
return 1
|
||||
}
|
||||
if len(data) < indentSize {
|
||||
return 0
|
||||
}
|
||||
for i := 0; i < indentSize; i++ {
|
||||
if data[i] != ' ' {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
return indentSize
|
||||
}
|
||||
|
||||
// Create a url-safe slug for fragments
|
||||
func slugify(in []byte) []byte {
|
||||
if len(in) == 0 {
|
||||
return in
|
||||
}
|
||||
out := make([]byte, 0, len(in))
|
||||
sym := false
|
||||
|
||||
for _, ch := range in {
|
||||
if isalnum(ch) {
|
||||
sym = false
|
||||
out = append(out, ch)
|
||||
} else if sym {
|
||||
continue
|
||||
} else {
|
||||
out = append(out, '-')
|
||||
sym = true
|
||||
}
|
||||
}
|
||||
var a, b int
|
||||
var ch byte
|
||||
for a, ch = range out {
|
||||
if ch != '-' {
|
||||
break
|
||||
}
|
||||
}
|
||||
for b = len(out) - 1; b > 0; b-- {
|
||||
if out[b] != '-' {
|
||||
break
|
||||
}
|
||||
}
|
||||
return out[a : b+1]
|
||||
}
|
360 vendor/github.com/russross/blackfriday/v2/node.go (generated, vendored, new file)
@ -0,0 +1,360 @@
|
||||
package blackfriday
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// NodeType specifies a type of a single node of a syntax tree. Usually one
|
||||
// node (and its type) corresponds to a single markdown feature, e.g. emphasis
|
||||
// or code block.
|
||||
type NodeType int
|
||||
|
||||
// Constants for identifying different types of nodes. See NodeType.
|
||||
const (
|
||||
Document NodeType = iota
|
||||
BlockQuote
|
||||
List
|
||||
Item
|
||||
Paragraph
|
||||
Heading
|
||||
HorizontalRule
|
||||
Emph
|
||||
Strong
|
||||
Del
|
||||
Link
|
||||
Image
|
||||
Text
|
||||
HTMLBlock
|
||||
CodeBlock
|
||||
Softbreak
|
||||
Hardbreak
|
||||
Code
|
||||
HTMLSpan
|
||||
Table
|
||||
TableCell
|
||||
TableHead
|
||||
TableBody
|
||||
TableRow
|
||||
)
|
||||
|
||||
var nodeTypeNames = []string{
|
||||
Document: "Document",
|
||||
BlockQuote: "BlockQuote",
|
||||
List: "List",
|
||||
Item: "Item",
|
||||
Paragraph: "Paragraph",
|
||||
Heading: "Heading",
|
||||
HorizontalRule: "HorizontalRule",
|
||||
Emph: "Emph",
|
||||
Strong: "Strong",
|
||||
Del: "Del",
|
||||
Link: "Link",
|
||||
Image: "Image",
|
||||
Text: "Text",
|
||||
HTMLBlock: "HTMLBlock",
|
||||
CodeBlock: "CodeBlock",
|
||||
Softbreak: "Softbreak",
|
||||
Hardbreak: "Hardbreak",
|
||||
Code: "Code",
|
||||
HTMLSpan: "HTMLSpan",
|
||||
Table: "Table",
|
||||
TableCell: "TableCell",
|
||||
TableHead: "TableHead",
|
||||
TableBody: "TableBody",
|
||||
TableRow: "TableRow",
|
||||
}
|
||||
|
||||
func (t NodeType) String() string {
|
||||
return nodeTypeNames[t]
|
||||
}
|
||||
|
||||
// ListData contains fields relevant to a List and Item node type.
|
||||
type ListData struct {
|
||||
ListFlags ListType
|
||||
Tight bool // Skip <p>s around list item data if true
|
||||
BulletChar byte // '*', '+' or '-' in bullet lists
|
||||
Delimiter byte // '.' or ')' after the number in ordered lists
|
||||
RefLink []byte // If not nil, turns this list item into a footnote item and triggers different rendering
|
||||
IsFootnotesList bool // This is a list of footnotes
|
||||
}
|
||||
|
||||
// LinkData contains fields relevant to a Link node type.
|
||||
type LinkData struct {
|
||||
Destination []byte // Destination is what goes into a href
|
||||
Title []byte // Title is the tooltip thing that goes in a title attribute
|
||||
NoteID int // NoteID contains a serial number of a footnote, zero if it's not a footnote
|
||||
Footnote *Node // If it's a footnote, this is a direct link to the footnote Node. Otherwise nil.
|
||||
}
|
||||
|
||||
// CodeBlockData contains fields relevant to a CodeBlock node type.
|
||||
type CodeBlockData struct {
|
||||
IsFenced bool // Specifies whether it's a fenced code block or an indented one
|
||||
Info []byte // This holds the info string
|
||||
FenceChar byte
|
||||
FenceLength int
|
||||
FenceOffset int
|
||||
}
|
||||
|
||||
// TableCellData contains fields relevant to a TableCell node type.
|
||||
type TableCellData struct {
|
||||
IsHeader bool // This tells if it's under the header row
|
||||
Align CellAlignFlags // This holds the value for align attribute
|
||||
}
|
||||
|
||||
// HeadingData contains fields relevant to a Heading node type.
|
||||
type HeadingData struct {
|
||||
Level int // This holds the heading level number
|
||||
HeadingID string // This might hold heading ID, if present
|
||||
IsTitleblock bool // Specifies whether it's a title block
|
||||
}
|
||||
|
||||
// Node is a single element in the abstract syntax tree of the parsed document.
|
||||
// It holds connections to the structurally neighboring nodes and, for certain
|
||||
// types of nodes, additional information that might be needed when rendering.
|
||||
type Node struct {
|
||||
Type NodeType // Determines the type of the node
|
||||
Parent *Node // Points to the parent
|
||||
FirstChild *Node // Points to the first child, if any
|
||||
LastChild *Node // Points to the last child, if any
|
||||
Prev *Node // Previous sibling; nil if it's the first child
|
||||
Next *Node // Next sibling; nil if it's the last child
|
||||
|
||||
Literal []byte // Text contents of the leaf nodes
|
||||
|
||||
HeadingData // Populated if Type is Heading
|
||||
ListData // Populated if Type is List
|
||||
CodeBlockData // Populated if Type is CodeBlock
|
||||
LinkData // Populated if Type is Link
|
||||
TableCellData // Populated if Type is TableCell
|
||||
|
||||
content []byte // Markdown content of the block nodes
|
||||
open bool // Specifies an open block node that has not been finished to process yet
|
||||
}
|
||||
|
||||
// NewNode allocates a node of a specified type.
|
||||
func NewNode(typ NodeType) *Node {
|
||||
return &Node{
|
||||
Type: typ,
|
||||
open: true,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *Node) String() string {
|
||||
ellipsis := ""
|
||||
snippet := n.Literal
|
||||
if len(snippet) > 16 {
|
||||
snippet = snippet[:16]
|
||||
ellipsis = "..."
|
||||
}
|
||||
return fmt.Sprintf("%s: '%s%s'", n.Type, snippet, ellipsis)
|
||||
}
|
||||
|
||||
// Unlink removes node 'n' from the tree.
|
||||
// It panics if the node is nil.
|
||||
func (n *Node) Unlink() {
|
||||
if n.Prev != nil {
|
||||
n.Prev.Next = n.Next
|
||||
} else if n.Parent != nil {
|
||||
n.Parent.FirstChild = n.Next
|
||||
}
|
||||
if n.Next != nil {
|
||||
n.Next.Prev = n.Prev
|
||||
} else if n.Parent != nil {
|
||||
n.Parent.LastChild = n.Prev
|
||||
}
|
||||
n.Parent = nil
|
||||
n.Next = nil
|
||||
n.Prev = nil
|
||||
}
|
||||
|
||||
// AppendChild adds a node 'child' as a child of 'n'.
|
||||
// It panics if either node is nil.
|
||||
func (n *Node) AppendChild(child *Node) {
|
||||
child.Unlink()
|
||||
child.Parent = n
|
||||
if n.LastChild != nil {
|
||||
n.LastChild.Next = child
|
||||
child.Prev = n.LastChild
|
||||
n.LastChild = child
|
||||
} else {
|
||||
n.FirstChild = child
|
||||
n.LastChild = child
|
||||
}
|
||||
}
|
||||
|
||||
// InsertBefore inserts 'sibling' immediately before 'n'.
|
||||
// It panics if either node is nil.
|
||||
func (n *Node) InsertBefore(sibling *Node) {
|
||||
sibling.Unlink()
|
||||
sibling.Prev = n.Prev
|
||||
if sibling.Prev != nil {
|
||||
sibling.Prev.Next = sibling
|
||||
}
|
||||
sibling.Next = n
|
||||
n.Prev = sibling
|
||||
sibling.Parent = n.Parent
|
||||
if sibling.Prev == nil {
|
||||
sibling.Parent.FirstChild = sibling
|
||||
}
|
||||
}
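// Example (illustrative sketch, not part of the upstream blackfriday sources;
// assumes the package is imported as blackfriday): building a tiny tree by
// hand with NewNode and AppendChild, e.g. when post-processing a parsed
// document:
//
//	doc := blackfriday.NewNode(blackfriday.Document)
//	para := blackfriday.NewNode(blackfriday.Paragraph)
//	txt := blackfriday.NewNode(blackfriday.Text)
//	txt.Literal = []byte("hello")
//	para.AppendChild(txt)
//	doc.AppendChild(para)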
|
||||
|
||||
// IsContainer returns true if 'n' can contain children.
|
||||
func (n *Node) IsContainer() bool {
|
||||
switch n.Type {
|
||||
case Document:
|
||||
fallthrough
|
||||
case BlockQuote:
|
||||
fallthrough
|
||||
case List:
|
||||
fallthrough
|
||||
case Item:
|
||||
fallthrough
|
||||
case Paragraph:
|
||||
fallthrough
|
||||
case Heading:
|
||||
fallthrough
|
||||
case Emph:
|
||||
fallthrough
|
||||
case Strong:
|
||||
fallthrough
|
||||
case Del:
|
||||
fallthrough
|
||||
case Link:
|
||||
fallthrough
|
||||
case Image:
|
||||
fallthrough
|
||||
case Table:
|
||||
fallthrough
|
||||
case TableHead:
|
||||
fallthrough
|
||||
case TableBody:
|
||||
fallthrough
|
||||
case TableRow:
|
||||
fallthrough
|
||||
case TableCell:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// IsLeaf returns true if 'n' is a leaf node.
|
||||
func (n *Node) IsLeaf() bool {
|
||||
return !n.IsContainer()
|
||||
}
|
||||
|
||||
func (n *Node) canContain(t NodeType) bool {
|
||||
if n.Type == List {
|
||||
return t == Item
|
||||
}
|
||||
if n.Type == Document || n.Type == BlockQuote || n.Type == Item {
|
||||
return t != Item
|
||||
}
|
||||
if n.Type == Table {
|
||||
return t == TableHead || t == TableBody
|
||||
}
|
||||
if n.Type == TableHead || n.Type == TableBody {
|
||||
return t == TableRow
|
||||
}
|
||||
if n.Type == TableRow {
|
||||
return t == TableCell
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// WalkStatus allows NodeVisitor to have some control over the tree traversal.
|
||||
// It is returned from NodeVisitor and different values allow Node.Walk to
|
||||
// decide which node to go to next.
|
||||
type WalkStatus int
|
||||
|
||||
const (
|
||||
// GoToNext is the default traversal of every node.
|
||||
GoToNext WalkStatus = iota
|
||||
// SkipChildren tells walker to skip all children of current node.
|
||||
SkipChildren
|
||||
// Terminate tells walker to terminate the traversal.
|
||||
Terminate
|
||||
)
|
||||
|
||||
// NodeVisitor is a callback to be called when traversing the syntax tree.
|
||||
// Called twice for every node: once with entering=true when the branch is
|
||||
// first visited, then with entering=false after all the children are done.
|
||||
type NodeVisitor func(node *Node, entering bool) WalkStatus
|
||||
|
||||
// Walk is a convenience method that instantiates a walker and starts a
|
||||
// traversal of subtree rooted at n.
|
||||
func (n *Node) Walk(visitor NodeVisitor) {
|
||||
w := newNodeWalker(n)
|
||||
for w.current != nil {
|
||||
status := visitor(w.current, w.entering)
|
||||
switch status {
|
||||
case GoToNext:
|
||||
w.next()
|
||||
case SkipChildren:
|
||||
w.entering = false
|
||||
w.next()
|
||||
case Terminate:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type nodeWalker struct {
|
||||
current *Node
|
||||
root *Node
|
||||
entering bool
|
||||
}
|
||||
|
||||
func newNodeWalker(root *Node) *nodeWalker {
|
||||
return &nodeWalker{
|
||||
current: root,
|
||||
root: root,
|
||||
entering: true,
|
||||
}
|
||||
}
|
||||
|
||||
func (nw *nodeWalker) next() {
|
||||
if (!nw.current.IsContainer() || !nw.entering) && nw.current == nw.root {
|
||||
nw.current = nil
|
||||
return
|
||||
}
|
||||
if nw.entering && nw.current.IsContainer() {
|
||||
if nw.current.FirstChild != nil {
|
||||
nw.current = nw.current.FirstChild
|
||||
nw.entering = true
|
||||
} else {
|
||||
nw.entering = false
|
||||
}
|
||||
} else if nw.current.Next == nil {
|
||||
nw.current = nw.current.Parent
|
||||
nw.entering = false
|
||||
} else {
|
||||
nw.current = nw.current.Next
|
||||
nw.entering = true
|
||||
}
|
||||
}
|
||||
|
||||
func dump(ast *Node) {
|
||||
fmt.Println(dumpString(ast))
|
||||
}
|
||||
|
||||
func dumpR(ast *Node, depth int) string {
|
||||
if ast == nil {
|
||||
return ""
|
||||
}
|
||||
indent := bytes.Repeat([]byte("\t"), depth)
|
||||
content := ast.Literal
|
||||
if content == nil {
|
||||
content = ast.content
|
||||
}
|
||||
result := fmt.Sprintf("%s%s(%q)\n", indent, ast.Type, content)
|
||||
for n := ast.FirstChild; n != nil; n = n.Next {
|
||||
result += dumpR(n, depth+1)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func dumpString(ast *Node) string {
|
||||
return dumpR(ast, 0)
|
||||
}
|
457
vendor/github.com/russross/blackfriday/v2/smartypants.go
generated
vendored
Normal file
457
vendor/github.com/russross/blackfriday/v2/smartypants.go
generated
vendored
Normal file
@ -0,0 +1,457 @@
|
||||
//
|
||||
// Blackfriday Markdown Processor
|
||||
// Available at http://github.com/russross/blackfriday
|
||||
//
|
||||
// Copyright © 2011 Russ Ross <russ@russross.com>.
|
||||
// Distributed under the Simplified BSD License.
|
||||
// See README.md for details.
|
||||
//
|
||||
|
||||
//
|
||||
//
|
||||
// SmartyPants rendering
|
||||
//
|
||||
//
|
||||
|
||||
package blackfriday
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
// SPRenderer is a struct containing state of a Smartypants renderer.
|
||||
type SPRenderer struct {
|
||||
inSingleQuote bool
|
||||
inDoubleQuote bool
|
||||
callbacks [256]smartCallback
|
||||
}
|
||||
|
||||
func wordBoundary(c byte) bool {
|
||||
return c == 0 || isspace(c) || ispunct(c)
|
||||
}
|
||||
|
||||
func tolower(c byte) byte {
|
||||
if c >= 'A' && c <= 'Z' {
|
||||
return c - 'A' + 'a'
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
func isdigit(c byte) bool {
|
||||
return c >= '0' && c <= '9'
|
||||
}
|
||||
|
||||
func smartQuoteHelper(out *bytes.Buffer, previousChar byte, nextChar byte, quote byte, isOpen *bool, addNBSP bool) bool {
|
||||
// edge of the buffer is likely to be a tag that we don't get to see,
|
||||
// so we treat it like text sometimes
|
||||
|
||||
// enumerate all sixteen possibilities for (previousChar, nextChar)
|
||||
// each can be one of {0, space, punct, other}
|
||||
switch {
|
||||
case previousChar == 0 && nextChar == 0:
|
||||
// context is not any help here, so toggle
|
||||
*isOpen = !*isOpen
|
||||
case isspace(previousChar) && nextChar == 0:
|
||||
// [ "] might be [ "<code>foo...]
|
||||
*isOpen = true
|
||||
case ispunct(previousChar) && nextChar == 0:
|
||||
// [!"] hmm... could be [Run!"] or [("<code>...]
|
||||
*isOpen = false
|
||||
case /* isnormal(previousChar) && */ nextChar == 0:
|
||||
// [a"] is probably a close
|
||||
*isOpen = false
|
||||
case previousChar == 0 && isspace(nextChar):
|
||||
// [" ] might be [...foo</code>" ]
|
||||
*isOpen = false
|
||||
case isspace(previousChar) && isspace(nextChar):
|
||||
// [ " ] context is not any help here, so toggle
|
||||
*isOpen = !*isOpen
|
||||
case ispunct(previousChar) && isspace(nextChar):
|
||||
// [!" ] is probably a close
|
||||
*isOpen = false
|
||||
case /* isnormal(previousChar) && */ isspace(nextChar):
|
||||
// [a" ] this is one of the easy cases
|
||||
*isOpen = false
|
||||
case previousChar == 0 && ispunct(nextChar):
|
||||
// ["!] hmm... could be ["$1.95] or [</code>"!...]
|
||||
*isOpen = false
|
||||
case isspace(previousChar) && ispunct(nextChar):
|
||||
// [ "!] looks more like [ "$1.95]
|
||||
*isOpen = true
|
||||
case ispunct(previousChar) && ispunct(nextChar):
|
||||
// [!"!] context is not any help here, so toggle
|
||||
*isOpen = !*isOpen
|
||||
case /* isnormal(previousChar) && */ ispunct(nextChar):
|
||||
// [a"!] is probably a close
|
||||
*isOpen = false
|
||||
case previousChar == 0 /* && isnormal(nextChar) */ :
|
||||
// ["a] is probably an open
|
||||
*isOpen = true
|
||||
case isspace(previousChar) /* && isnormal(nextChar) */ :
|
||||
// [ "a] this is one of the easy cases
|
||||
*isOpen = true
|
||||
case ispunct(previousChar) /* && isnormal(nextChar) */ :
|
||||
// [!"a] is probably an open
|
||||
*isOpen = true
|
||||
default:
|
||||
// [a'b] maybe a contraction?
|
||||
*isOpen = false
|
||||
}
|
||||
|
||||
// Note that with the limited lookahead, this non-breaking
|
||||
// space will also be appended to single double quotes.
|
||||
if addNBSP && !*isOpen {
|
||||
out.WriteString(" ")
|
||||
}
|
||||
|
||||
out.WriteByte('&')
|
||||
if *isOpen {
|
||||
out.WriteByte('l')
|
||||
} else {
|
||||
out.WriteByte('r')
|
||||
}
|
||||
out.WriteByte(quote)
|
||||
out.WriteString("quo;")
|
||||
|
||||
if addNBSP && *isOpen {
|
||||
out.WriteString(" ")
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartSingleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 2 {
|
||||
t1 := tolower(text[1])
|
||||
|
||||
if t1 == '\'' {
|
||||
nextChar := byte(0)
|
||||
if len(text) >= 3 {
|
||||
nextChar = text[2]
|
||||
}
|
||||
if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
|
||||
return 1
|
||||
}
|
||||
}
|
||||
|
||||
if (t1 == 's' || t1 == 't' || t1 == 'm' || t1 == 'd') && (len(text) < 3 || wordBoundary(text[2])) {
|
||||
out.WriteString("’")
|
||||
return 0
|
||||
}
|
||||
|
||||
if len(text) >= 3 {
|
||||
t2 := tolower(text[2])
|
||||
|
||||
if ((t1 == 'r' && t2 == 'e') || (t1 == 'l' && t2 == 'l') || (t1 == 'v' && t2 == 'e')) &&
|
||||
(len(text) < 4 || wordBoundary(text[3])) {
|
||||
out.WriteString("’")
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
nextChar := byte(0)
|
||||
if len(text) > 1 {
|
||||
nextChar = text[1]
|
||||
}
|
||||
if smartQuoteHelper(out, previousChar, nextChar, 's', &r.inSingleQuote, false) {
|
||||
return 0
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartParens(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 3 {
|
||||
t1 := tolower(text[1])
|
||||
t2 := tolower(text[2])
|
||||
|
||||
if t1 == 'c' && t2 == ')' {
|
||||
out.WriteString("©")
|
||||
return 2
|
||||
}
|
||||
|
||||
if t1 == 'r' && t2 == ')' {
|
||||
out.WriteString("®")
|
||||
return 2
|
||||
}
|
||||
|
||||
if len(text) >= 4 && t1 == 't' && t2 == 'm' && text[3] == ')' {
|
||||
out.WriteString("™")
|
||||
return 3
|
||||
}
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartDash(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 2 {
|
||||
if text[1] == '-' {
|
||||
out.WriteString("—")
|
||||
return 1
|
||||
}
|
||||
|
||||
if wordBoundary(previousChar) && wordBoundary(text[1]) {
|
||||
out.WriteString("–")
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartDashLatex(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 3 && text[1] == '-' && text[2] == '-' {
|
||||
out.WriteString("—")
|
||||
return 2
|
||||
}
|
||||
if len(text) >= 2 && text[1] == '-' {
|
||||
out.WriteString("–")
|
||||
return 1
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartAmpVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte, addNBSP bool) int {
|
||||
if bytes.HasPrefix(text, []byte(""")) {
|
||||
nextChar := byte(0)
|
||||
if len(text) >= 7 {
|
||||
nextChar = text[6]
|
||||
}
|
||||
if smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, addNBSP) {
|
||||
return 5
|
||||
}
|
||||
}
|
||||
|
||||
if bytes.HasPrefix(text, []byte("�")) {
|
||||
return 3
|
||||
}
|
||||
|
||||
out.WriteByte('&')
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartAmp(angledQuotes, addNBSP bool) func(*bytes.Buffer, byte, []byte) int {
|
||||
var quote byte = 'd'
|
||||
if angledQuotes {
|
||||
quote = 'a'
|
||||
}
|
||||
|
||||
return func(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
return r.smartAmpVariant(out, previousChar, text, quote, addNBSP)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartPeriod(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 3 && text[1] == '.' && text[2] == '.' {
|
||||
out.WriteString("…")
|
||||
return 2
|
||||
}
|
||||
|
||||
if len(text) >= 5 && text[1] == ' ' && text[2] == '.' && text[3] == ' ' && text[4] == '.' {
|
||||
out.WriteString("…")
|
||||
return 4
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartBacktick(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if len(text) >= 2 && text[1] == '`' {
|
||||
nextChar := byte(0)
|
||||
if len(text) >= 3 {
|
||||
nextChar = text[2]
|
||||
}
|
||||
if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
|
||||
return 1
|
||||
}
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartNumberGeneric(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
|
||||
// is it of the form digits/digits(word boundary)?, i.e., \d+/\d+\b
|
||||
// note: check for regular slash (/) or fraction slash (⁄, 0x2044, or 0xe2 81 84 in utf-8)
|
||||
// and avoid changing dates like 1/23/2005 into fractions.
|
||||
numEnd := 0
|
||||
for len(text) > numEnd && isdigit(text[numEnd]) {
|
||||
numEnd++
|
||||
}
|
||||
if numEnd == 0 {
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
denStart := numEnd + 1
|
||||
if len(text) > numEnd+3 && text[numEnd] == 0xe2 && text[numEnd+1] == 0x81 && text[numEnd+2] == 0x84 {
|
||||
denStart = numEnd + 3
|
||||
} else if len(text) < numEnd+2 || text[numEnd] != '/' {
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
denEnd := denStart
|
||||
for len(text) > denEnd && isdigit(text[denEnd]) {
|
||||
denEnd++
|
||||
}
|
||||
if denEnd == denStart {
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
if len(text) == denEnd || wordBoundary(text[denEnd]) && text[denEnd] != '/' {
|
||||
out.WriteString("<sup>")
|
||||
out.Write(text[:numEnd])
|
||||
out.WriteString("</sup>⁄<sub>")
|
||||
out.Write(text[denStart:denEnd])
|
||||
out.WriteString("</sub>")
|
||||
return denEnd - 1
|
||||
}
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartNumber(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
|
||||
if text[0] == '1' && text[1] == '/' && text[2] == '2' {
|
||||
if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' {
|
||||
out.WriteString("½")
|
||||
return 2
|
||||
}
|
||||
}
|
||||
|
||||
if text[0] == '1' && text[1] == '/' && text[2] == '4' {
|
||||
if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 5 && tolower(text[3]) == 't' && tolower(text[4]) == 'h') {
|
||||
out.WriteString("¼")
|
||||
return 2
|
||||
}
|
||||
}
|
||||
|
||||
if text[0] == '3' && text[1] == '/' && text[2] == '4' {
|
||||
if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 6 && tolower(text[3]) == 't' && tolower(text[4]) == 'h' && tolower(text[5]) == 's') {
|
||||
out.WriteString("¾")
|
||||
return 2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out.WriteByte(text[0])
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartDoubleQuoteVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte) int {
|
||||
nextChar := byte(0)
|
||||
if len(text) > 1 {
|
||||
nextChar = text[1]
|
||||
}
|
||||
if !smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, false) {
|
||||
out.WriteString(""")
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
return r.smartDoubleQuoteVariant(out, previousChar, text, 'd')
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartAngledDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
return r.smartDoubleQuoteVariant(out, previousChar, text, 'a')
|
||||
}
|
||||
|
||||
func (r *SPRenderer) smartLeftAngle(out *bytes.Buffer, previousChar byte, text []byte) int {
|
||||
i := 0
|
||||
|
||||
for i < len(text) && text[i] != '>' {
|
||||
i++
|
||||
}
|
||||
|
||||
out.Write(text[:i+1])
|
||||
return i
|
||||
}
|
||||
|
||||
type smartCallback func(out *bytes.Buffer, previousChar byte, text []byte) int
|
||||
|
||||
// NewSmartypantsRenderer constructs a Smartypants renderer object.
|
||||
func NewSmartypantsRenderer(flags HTMLFlags) *SPRenderer {
|
||||
var (
|
||||
r SPRenderer
|
||||
|
||||
smartAmpAngled = r.smartAmp(true, false)
|
||||
smartAmpAngledNBSP = r.smartAmp(true, true)
|
||||
smartAmpRegular = r.smartAmp(false, false)
|
||||
smartAmpRegularNBSP = r.smartAmp(false, true)
|
||||
|
||||
addNBSP = flags&SmartypantsQuotesNBSP != 0
|
||||
)
|
||||
|
||||
if flags&SmartypantsAngledQuotes == 0 {
|
||||
r.callbacks['"'] = r.smartDoubleQuote
|
||||
if !addNBSP {
|
||||
r.callbacks['&'] = smartAmpRegular
|
||||
} else {
|
||||
r.callbacks['&'] = smartAmpRegularNBSP
|
||||
}
|
||||
} else {
|
||||
r.callbacks['"'] = r.smartAngledDoubleQuote
|
||||
if !addNBSP {
|
||||
r.callbacks['&'] = smartAmpAngled
|
||||
} else {
|
||||
r.callbacks['&'] = smartAmpAngledNBSP
|
||||
}
|
||||
}
|
||||
r.callbacks['\''] = r.smartSingleQuote
|
||||
r.callbacks['('] = r.smartParens
|
||||
if flags&SmartypantsDashes != 0 {
|
||||
if flags&SmartypantsLatexDashes == 0 {
|
||||
r.callbacks['-'] = r.smartDash
|
||||
} else {
|
||||
r.callbacks['-'] = r.smartDashLatex
|
||||
}
|
||||
}
|
||||
r.callbacks['.'] = r.smartPeriod
|
||||
if flags&SmartypantsFractions == 0 {
|
||||
r.callbacks['1'] = r.smartNumber
|
||||
r.callbacks['3'] = r.smartNumber
|
||||
} else {
|
||||
for ch := '1'; ch <= '9'; ch++ {
|
||||
r.callbacks[ch] = r.smartNumberGeneric
|
||||
}
|
||||
}
|
||||
r.callbacks['<'] = r.smartLeftAngle
|
||||
r.callbacks['`'] = r.smartBacktick
|
||||
return &r
|
||||
}
|
||||
|
||||
// Process is the entry point of the Smartypants renderer.
|
||||
func (r *SPRenderer) Process(w io.Writer, text []byte) {
|
||||
mark := 0
|
||||
for i := 0; i < len(text); i++ {
|
||||
if action := r.callbacks[text[i]]; action != nil {
|
||||
if i > mark {
|
||||
w.Write(text[mark:i])
|
||||
}
|
||||
previousChar := byte(0)
|
||||
if i > 0 {
|
||||
previousChar = text[i-1]
|
||||
}
|
||||
var tmp bytes.Buffer
|
||||
i += action(&tmp, previousChar, text[i:])
|
||||
w.Write(tmp.Bytes())
|
||||
mark = i + 1
|
||||
}
|
||||
}
|
||||
if mark < len(text) {
|
||||
w.Write(text[mark:])
|
||||
}
|
||||
}
|
246
vendor/github.com/spf13/cobra/doc/man_docs.go
generated
vendored
Normal file
246
vendor/github.com/spf13/cobra/doc/man_docs.go
generated
vendored
Normal file
@ -0,0 +1,246 @@
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/cpuguy83/go-md2man/v2/md2man"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
)
|
||||
|
||||
// GenManTree will generate a man page for this command and all descendants
|
||||
// in the directory given. The header may be nil. This function may not work
|
||||
// correctly if your command names have `-` in them. If you have `cmd` with two
|
||||
// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third`
|
||||
// it is undefined which help output will be in the file `cmd-sub-third.1`.
|
||||
func GenManTree(cmd *cobra.Command, header *GenManHeader, dir string) error {
|
||||
return GenManTreeFromOpts(cmd, GenManTreeOptions{
|
||||
Header: header,
|
||||
Path: dir,
|
||||
CommandSeparator: "-",
|
||||
})
|
||||
}
|
||||
|
||||
// GenManTreeFromOpts generates a man page for the command and all descendants.
|
||||
// The pages are written to the opts.Path directory.
|
||||
func GenManTreeFromOpts(cmd *cobra.Command, opts GenManTreeOptions) error {
|
||||
header := opts.Header
|
||||
if header == nil {
|
||||
header = &GenManHeader{}
|
||||
}
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
if err := GenManTreeFromOpts(c, opts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
section := "1"
|
||||
if header.Section != "" {
|
||||
section = header.Section
|
||||
}
|
||||
|
||||
separator := "_"
|
||||
if opts.CommandSeparator != "" {
|
||||
separator = opts.CommandSeparator
|
||||
}
|
||||
basename := strings.ReplaceAll(cmd.CommandPath(), " ", separator)
|
||||
filename := filepath.Join(opts.Path, basename+"."+section)
|
||||
f, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
headerCopy := *header
|
||||
return GenMan(cmd, &headerCopy, f)
|
||||
}
|
||||
|
||||
// GenManTreeOptions is the options for generating the man pages.
|
||||
// Used only in GenManTreeFromOpts.
|
||||
type GenManTreeOptions struct {
|
||||
Header *GenManHeader
|
||||
Path string
|
||||
CommandSeparator string
|
||||
}
|
||||
|
||||
// GenManHeader is a lot like the .TH header at the start of man pages. These
|
||||
// include the title, section, date, source, and manual. We will use the
|
||||
// current time if Date is unset and will use "Auto generated by spf13/cobra"
|
||||
// if the Source is unset.
|
||||
type GenManHeader struct {
|
||||
Title string
|
||||
Section string
|
||||
Date *time.Time
|
||||
date string
|
||||
Source string
|
||||
Manual string
|
||||
}
|
||||
|
||||
// GenMan will generate a man page for the given command and write it to
|
||||
// w. The header argument may be nil, however obviously w may not.
|
||||
func GenMan(cmd *cobra.Command, header *GenManHeader, w io.Writer) error {
|
||||
if header == nil {
|
||||
header = &GenManHeader{}
|
||||
}
|
||||
if err := fillHeader(header, cmd.CommandPath(), cmd.DisableAutoGenTag); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b := genMan(cmd, header)
|
||||
_, err := w.Write(md2man.Render(b))
|
||||
return err
|
||||
}
|
||||
|
||||
func fillHeader(header *GenManHeader, name string, disableAutoGen bool) error {
|
||||
if header.Title == "" {
|
||||
header.Title = strings.ToUpper(strings.ReplaceAll(name, " ", "\\-"))
|
||||
}
|
||||
if header.Section == "" {
|
||||
header.Section = "1"
|
||||
}
|
||||
if header.Date == nil {
|
||||
now := time.Now()
|
||||
if epoch := os.Getenv("SOURCE_DATE_EPOCH"); epoch != "" {
|
||||
unixEpoch, err := strconv.ParseInt(epoch, 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid SOURCE_DATE_EPOCH: %v", err)
|
||||
}
|
||||
now = time.Unix(unixEpoch, 0)
|
||||
}
|
||||
header.Date = &now
|
||||
}
|
||||
header.date = header.Date.Format("Jan 2006")
|
||||
if header.Source == "" && !disableAutoGen {
|
||||
header.Source = "Auto generated by spf13/cobra"
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func manPreamble(buf io.StringWriter, header *GenManHeader, cmd *cobra.Command, dashedName string) {
|
||||
description := cmd.Long
|
||||
if len(description) == 0 {
|
||||
description = cmd.Short
|
||||
}
|
||||
|
||||
cobra.WriteStringAndCheck(buf, fmt.Sprintf(`%% "%s" "%s" "%s" "%s" "%s"
|
||||
# NAME
|
||||
`, header.Title, header.Section, header.date, header.Source, header.Manual))
|
||||
cobra.WriteStringAndCheck(buf, fmt.Sprintf("%s \\- %s\n\n", dashedName, cmd.Short))
|
||||
cobra.WriteStringAndCheck(buf, "# SYNOPSIS\n")
|
||||
cobra.WriteStringAndCheck(buf, fmt.Sprintf("**%s**\n\n", cmd.UseLine()))
|
||||
cobra.WriteStringAndCheck(buf, "# DESCRIPTION\n")
|
||||
cobra.WriteStringAndCheck(buf, description+"\n\n")
|
||||
}
|
||||
|
||||
func manPrintFlags(buf io.StringWriter, flags *pflag.FlagSet) {
|
||||
flags.VisitAll(func(flag *pflag.Flag) {
|
||||
if len(flag.Deprecated) > 0 || flag.Hidden {
|
||||
return
|
||||
}
|
||||
format := ""
|
||||
if len(flag.Shorthand) > 0 && len(flag.ShorthandDeprecated) == 0 {
|
||||
format = fmt.Sprintf("**-%s**, **--%s**", flag.Shorthand, flag.Name)
|
||||
} else {
|
||||
format = fmt.Sprintf("**--%s**", flag.Name)
|
||||
}
|
||||
if len(flag.NoOptDefVal) > 0 {
|
||||
format += "["
|
||||
}
|
||||
if flag.Value.Type() == "string" {
|
||||
// put quotes on the value
|
||||
format += "=%q"
|
||||
} else {
|
||||
format += "=%s"
|
||||
}
|
||||
if len(flag.NoOptDefVal) > 0 {
|
||||
format += "]"
|
||||
}
|
||||
format += "\n\t%s\n\n"
|
||||
cobra.WriteStringAndCheck(buf, fmt.Sprintf(format, flag.DefValue, flag.Usage))
|
||||
})
|
||||
}
|
||||
|
||||
func manPrintOptions(buf io.StringWriter, command *cobra.Command) {
|
||||
flags := command.NonInheritedFlags()
|
||||
if flags.HasAvailableFlags() {
|
||||
cobra.WriteStringAndCheck(buf, "# OPTIONS\n")
|
||||
manPrintFlags(buf, flags)
|
||||
cobra.WriteStringAndCheck(buf, "\n")
|
||||
}
|
||||
flags = command.InheritedFlags()
|
||||
if flags.HasAvailableFlags() {
|
||||
cobra.WriteStringAndCheck(buf, "# OPTIONS INHERITED FROM PARENT COMMANDS\n")
|
||||
manPrintFlags(buf, flags)
|
||||
cobra.WriteStringAndCheck(buf, "\n")
|
||||
}
|
||||
}
|
||||
|
||||
func genMan(cmd *cobra.Command, header *GenManHeader) []byte {
|
||||
cmd.InitDefaultHelpCmd()
|
||||
cmd.InitDefaultHelpFlag()
|
||||
|
||||
// something like `rootcmd-subcmd1-subcmd2`
|
||||
dashCommandName := strings.ReplaceAll(cmd.CommandPath(), " ", "-")
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
|
||||
manPreamble(buf, header, cmd, dashCommandName)
|
||||
manPrintOptions(buf, cmd)
|
||||
if len(cmd.Example) > 0 {
|
||||
buf.WriteString("# EXAMPLE\n")
|
||||
buf.WriteString(fmt.Sprintf("```\n%s\n```\n", cmd.Example))
|
||||
}
|
||||
if hasSeeAlso(cmd) {
|
||||
buf.WriteString("# SEE ALSO\n")
|
||||
seealsos := make([]string, 0)
|
||||
if cmd.HasParent() {
|
||||
parentPath := cmd.Parent().CommandPath()
|
||||
dashParentPath := strings.ReplaceAll(parentPath, " ", "-")
|
||||
seealso := fmt.Sprintf("**%s(%s)**", dashParentPath, header.Section)
|
||||
seealsos = append(seealsos, seealso)
|
||||
cmd.VisitParents(func(c *cobra.Command) {
|
||||
if c.DisableAutoGenTag {
|
||||
cmd.DisableAutoGenTag = c.DisableAutoGenTag
|
||||
}
|
||||
})
|
||||
}
|
||||
children := cmd.Commands()
|
||||
sort.Sort(byName(children))
|
||||
for _, c := range children {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
seealso := fmt.Sprintf("**%s-%s(%s)**", dashCommandName, c.Name(), header.Section)
|
||||
seealsos = append(seealsos, seealso)
|
||||
}
|
||||
buf.WriteString(strings.Join(seealsos, ", ") + "\n")
|
||||
}
|
||||
if !cmd.DisableAutoGenTag {
|
||||
buf.WriteString(fmt.Sprintf("# HISTORY\n%s Auto generated by spf13/cobra\n", header.Date.Format("2-Jan-2006")))
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
158
vendor/github.com/spf13/cobra/doc/md_docs.go
generated
vendored
Normal file
158
vendor/github.com/spf13/cobra/doc/md_docs.go
generated
vendored
Normal file
@ -0,0 +1,158 @@
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const markdownExtension = ".md"
|
||||
|
||||
func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error {
|
||||
flags := cmd.NonInheritedFlags()
|
||||
flags.SetOutput(buf)
|
||||
if flags.HasAvailableFlags() {
|
||||
buf.WriteString("### Options\n\n```\n")
|
||||
flags.PrintDefaults()
|
||||
buf.WriteString("```\n\n")
|
||||
}
|
||||
|
||||
parentFlags := cmd.InheritedFlags()
|
||||
parentFlags.SetOutput(buf)
|
||||
if parentFlags.HasAvailableFlags() {
|
||||
buf.WriteString("### Options inherited from parent commands\n\n```\n")
|
||||
parentFlags.PrintDefaults()
|
||||
buf.WriteString("```\n\n")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GenMarkdown creates markdown output.
|
||||
func GenMarkdown(cmd *cobra.Command, w io.Writer) error {
|
||||
return GenMarkdownCustom(cmd, w, func(s string) string { return s })
|
||||
}
|
||||
|
||||
// GenMarkdownCustom creates custom markdown output.
|
||||
func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error {
|
||||
cmd.InitDefaultHelpCmd()
|
||||
cmd.InitDefaultHelpFlag()
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
name := cmd.CommandPath()
|
||||
|
||||
buf.WriteString("## " + name + "\n\n")
|
||||
buf.WriteString(cmd.Short + "\n\n")
|
||||
if len(cmd.Long) > 0 {
|
||||
buf.WriteString("### Synopsis\n\n")
|
||||
buf.WriteString(cmd.Long + "\n\n")
|
||||
}
|
||||
|
||||
if cmd.Runnable() {
|
||||
buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.UseLine()))
|
||||
}
|
||||
|
||||
if len(cmd.Example) > 0 {
|
||||
buf.WriteString("### Examples\n\n")
|
||||
buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.Example))
|
||||
}
|
||||
|
||||
if err := printOptions(buf, cmd, name); err != nil {
|
||||
return err
|
||||
}
|
||||
if hasSeeAlso(cmd) {
|
||||
buf.WriteString("### SEE ALSO\n\n")
|
||||
if cmd.HasParent() {
|
||||
parent := cmd.Parent()
|
||||
pname := parent.CommandPath()
|
||||
link := pname + markdownExtension
|
||||
link = strings.ReplaceAll(link, " ", "_")
|
||||
buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", pname, linkHandler(link), parent.Short))
|
||||
cmd.VisitParents(func(c *cobra.Command) {
|
||||
if c.DisableAutoGenTag {
|
||||
cmd.DisableAutoGenTag = c.DisableAutoGenTag
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
children := cmd.Commands()
|
||||
sort.Sort(byName(children))
|
||||
|
||||
for _, child := range children {
|
||||
if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
cname := name + " " + child.Name()
|
||||
link := cname + markdownExtension
|
||||
link = strings.ReplaceAll(link, " ", "_")
|
||||
buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", cname, linkHandler(link), child.Short))
|
||||
}
|
||||
buf.WriteString("\n")
|
||||
}
|
||||
if !cmd.DisableAutoGenTag {
|
||||
buf.WriteString("###### Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "\n")
|
||||
}
|
||||
_, err := buf.WriteTo(w)
|
||||
return err
|
||||
}
|
||||
|
||||
// GenMarkdownTree will generate a markdown page for this command and all
|
||||
// descendants in the directory given. The header may be nil.
|
||||
// This function may not work correctly if your command names have `-` in them.
|
||||
// If you have `cmd` with two subcmds, `sub` and `sub-third`,
|
||||
// and `sub` has a subcommand called `third`, it is undefined which
|
||||
// help output will be in the file `cmd-sub-third.1`.
|
||||
func GenMarkdownTree(cmd *cobra.Command, dir string) error {
|
||||
identity := func(s string) string { return s }
|
||||
emptyStr := func(s string) string { return "" }
|
||||
return GenMarkdownTreeCustom(cmd, dir, emptyStr, identity)
|
||||
}
|
||||
|
||||
// GenMarkdownTreeCustom is the same as GenMarkdownTree, but
|
||||
// with custom filePrepender and linkHandler.
|
||||
func GenMarkdownTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error {
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
if err := GenMarkdownTreeCustom(c, dir, filePrepender, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
basename := strings.ReplaceAll(cmd.CommandPath(), " ", "_") + markdownExtension
|
||||
filename := filepath.Join(dir, basename)
|
||||
f, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if _, err := io.WriteString(f, filePrepender(filename)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := GenMarkdownCustom(cmd, f, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
186
vendor/github.com/spf13/cobra/doc/rest_docs.go
generated
vendored
Normal file
186
vendor/github.com/spf13/cobra/doc/rest_docs.go
generated
vendored
Normal file
@ -0,0 +1,186 @@
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error {
|
||||
flags := cmd.NonInheritedFlags()
|
||||
flags.SetOutput(buf)
|
||||
if flags.HasAvailableFlags() {
|
||||
buf.WriteString("Options\n")
|
||||
buf.WriteString("~~~~~~~\n\n::\n\n")
|
||||
flags.PrintDefaults()
|
||||
buf.WriteString("\n")
|
||||
}
|
||||
|
||||
parentFlags := cmd.InheritedFlags()
|
||||
parentFlags.SetOutput(buf)
|
||||
if parentFlags.HasAvailableFlags() {
|
||||
buf.WriteString("Options inherited from parent commands\n")
|
||||
buf.WriteString("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n::\n\n")
|
||||
parentFlags.PrintDefaults()
|
||||
buf.WriteString("\n")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// defaultLinkHandler for default ReST hyperlink markup
|
||||
func defaultLinkHandler(name, ref string) string {
|
||||
return fmt.Sprintf("`%s <%s.rst>`_", name, ref)
|
||||
}
|
||||
|
||||
// GenReST creates reStructured Text output.
|
||||
func GenReST(cmd *cobra.Command, w io.Writer) error {
|
||||
return GenReSTCustom(cmd, w, defaultLinkHandler)
|
||||
}
|
||||
|
||||
// GenReSTCustom creates custom reStructured Text output.
|
||||
func GenReSTCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string, string) string) error {
|
||||
cmd.InitDefaultHelpCmd()
|
||||
cmd.InitDefaultHelpFlag()
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
name := cmd.CommandPath()
|
||||
|
||||
short := cmd.Short
|
||||
long := cmd.Long
|
||||
if len(long) == 0 {
|
||||
long = short
|
||||
}
|
||||
ref := strings.ReplaceAll(name, " ", "_")
|
||||
|
||||
buf.WriteString(".. _" + ref + ":\n\n")
|
||||
buf.WriteString(name + "\n")
|
||||
buf.WriteString(strings.Repeat("-", len(name)) + "\n\n")
|
||||
buf.WriteString(short + "\n\n")
|
||||
buf.WriteString("Synopsis\n")
|
||||
buf.WriteString("~~~~~~~~\n\n")
|
||||
buf.WriteString("\n" + long + "\n\n")
|
||||
|
||||
if cmd.Runnable() {
|
||||
buf.WriteString(fmt.Sprintf("::\n\n %s\n\n", cmd.UseLine()))
|
||||
}
|
||||
|
||||
if len(cmd.Example) > 0 {
|
||||
buf.WriteString("Examples\n")
|
||||
buf.WriteString("~~~~~~~~\n\n")
|
||||
buf.WriteString(fmt.Sprintf("::\n\n%s\n\n", indentString(cmd.Example, " ")))
|
||||
}
|
||||
|
||||
if err := printOptionsReST(buf, cmd, name); err != nil {
|
||||
return err
|
||||
}
|
||||
if hasSeeAlso(cmd) {
|
||||
buf.WriteString("SEE ALSO\n")
|
||||
buf.WriteString("~~~~~~~~\n\n")
|
||||
if cmd.HasParent() {
|
||||
parent := cmd.Parent()
|
||||
pname := parent.CommandPath()
|
||||
ref = strings.ReplaceAll(pname, " ", "_")
|
||||
buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(pname, ref), parent.Short))
|
||||
cmd.VisitParents(func(c *cobra.Command) {
|
||||
if c.DisableAutoGenTag {
|
||||
cmd.DisableAutoGenTag = c.DisableAutoGenTag
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
children := cmd.Commands()
|
||||
sort.Sort(byName(children))
|
||||
|
||||
for _, child := range children {
|
||||
if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
cname := name + " " + child.Name()
|
||||
ref = strings.ReplaceAll(cname, " ", "_")
|
||||
buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(cname, ref), child.Short))
|
||||
}
|
||||
buf.WriteString("\n")
|
||||
}
|
||||
if !cmd.DisableAutoGenTag {
|
||||
buf.WriteString("*Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "*\n")
|
||||
}
|
||||
_, err := buf.WriteTo(w)
|
||||
return err
|
||||
}
|
||||
|
||||
// GenReSTTree will generate a ReST page for this command and all
|
||||
// descendants in the directory given.
|
||||
// This function may not work correctly if your command names have `-` in them.
|
||||
// If you have `cmd` with two subcmds, `sub` and `sub-third`,
|
||||
// and `sub` has a subcommand called `third`, it is undefined which
|
||||
// help output will be in the file `cmd-sub-third.1`.
|
||||
func GenReSTTree(cmd *cobra.Command, dir string) error {
|
||||
emptyStr := func(s string) string { return "" }
|
||||
return GenReSTTreeCustom(cmd, dir, emptyStr, defaultLinkHandler)
|
||||
}
|
||||
|
||||
// GenReSTTreeCustom is the same as GenReSTTree, but
|
||||
// with custom filePrepender and linkHandler.
|
||||
func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error {
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
if err := GenReSTTreeCustom(c, dir, filePrepender, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
basename := strings.ReplaceAll(cmd.CommandPath(), " ", "_") + ".rst"
|
||||
filename := filepath.Join(dir, basename)
|
||||
f, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if _, err := io.WriteString(f, filePrepender(filename)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := GenReSTCustom(cmd, f, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// indentString adapted from: https://github.com/kr/text/blob/main/indent.go
|
||||
func indentString(s, p string) string {
|
||||
var res []byte
|
||||
b := []byte(s)
|
||||
prefix := []byte(p)
|
||||
bol := true
|
||||
for _, c := range b {
|
||||
if bol && c != '\n' {
|
||||
res = append(res, prefix...)
|
||||
}
|
||||
res = append(res, c)
|
||||
bol = c == '\n'
|
||||
}
|
||||
return string(res)
|
||||
}
|
52
vendor/github.com/spf13/cobra/doc/util.go
generated
vendored
Normal file
52
vendor/github.com/spf13/cobra/doc/util.go
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// Test to see if we have a reason to print See Also information in docs
|
||||
// Basically this is a test for a parent command or a subcommand which is
|
||||
// both not deprecated and not the autogenerated help command.
|
||||
func hasSeeAlso(cmd *cobra.Command) bool {
|
||||
if cmd.HasParent() {
|
||||
return true
|
||||
}
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Temporary workaround for yaml lib generating incorrect yaml with long strings
|
||||
// that do not contain \n.
|
||||
func forceMultiLine(s string) string {
|
||||
if len(s) > 60 && !strings.Contains(s, "\n") {
|
||||
s += "\n"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type byName []*cobra.Command
|
||||
|
||||
func (s byName) Len() int { return len(s) }
|
||||
func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
|
175
vendor/github.com/spf13/cobra/doc/yaml_docs.go
generated
vendored
Normal file
175
vendor/github.com/spf13/cobra/doc/yaml_docs.go
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type cmdOption struct {
|
||||
Name string
|
||||
Shorthand string `yaml:",omitempty"`
|
||||
DefaultValue string `yaml:"default_value,omitempty"`
|
||||
Usage string `yaml:",omitempty"`
|
||||
}
|
||||
|
||||
type cmdDoc struct {
|
||||
Name string
|
||||
Synopsis string `yaml:",omitempty"`
|
||||
Description string `yaml:",omitempty"`
|
||||
Usage string `yaml:",omitempty"`
|
||||
Options []cmdOption `yaml:",omitempty"`
|
||||
InheritedOptions []cmdOption `yaml:"inherited_options,omitempty"`
|
||||
Example string `yaml:",omitempty"`
|
||||
SeeAlso []string `yaml:"see_also,omitempty"`
|
||||
}
|
||||
|
||||
// GenYamlTree creates yaml structured ref files for this command and all descendants
|
||||
// in the directory given. This function may not work
|
||||
// correctly if your command names have `-` in them. If you have `cmd` with two
|
||||
// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third`
|
||||
// it is undefined which help output will be in the file `cmd-sub-third.1`.
|
||||
func GenYamlTree(cmd *cobra.Command, dir string) error {
|
||||
identity := func(s string) string { return s }
|
||||
emptyStr := func(s string) string { return "" }
|
||||
return GenYamlTreeCustom(cmd, dir, emptyStr, identity)
|
||||
}
|
||||
|
||||
// GenYamlTreeCustom creates yaml structured ref files.
|
||||
func GenYamlTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error {
|
||||
for _, c := range cmd.Commands() {
|
||||
if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
if err := GenYamlTreeCustom(c, dir, filePrepender, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
basename := strings.ReplaceAll(cmd.CommandPath(), " ", "_") + ".yaml"
|
||||
filename := filepath.Join(dir, basename)
|
||||
f, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if _, err := io.WriteString(f, filePrepender(filename)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := GenYamlCustom(cmd, f, linkHandler); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GenYaml creates yaml output.
|
||||
func GenYaml(cmd *cobra.Command, w io.Writer) error {
|
||||
return GenYamlCustom(cmd, w, func(s string) string { return s })
|
||||
}
|
||||
|
||||
// GenYamlCustom creates custom yaml output.
|
||||
func GenYamlCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error {
|
||||
cmd.InitDefaultHelpCmd()
|
||||
cmd.InitDefaultHelpFlag()
|
||||
|
||||
yamlDoc := cmdDoc{}
|
||||
yamlDoc.Name = cmd.CommandPath()
|
||||
|
||||
yamlDoc.Synopsis = forceMultiLine(cmd.Short)
|
||||
yamlDoc.Description = forceMultiLine(cmd.Long)
|
||||
|
||||
if cmd.Runnable() {
|
||||
yamlDoc.Usage = cmd.UseLine()
|
||||
}
|
||||
|
||||
if len(cmd.Example) > 0 {
|
||||
yamlDoc.Example = cmd.Example
|
||||
}
|
||||
|
||||
flags := cmd.NonInheritedFlags()
|
||||
if flags.HasFlags() {
|
||||
yamlDoc.Options = genFlagResult(flags)
|
||||
}
|
||||
flags = cmd.InheritedFlags()
|
||||
if flags.HasFlags() {
|
||||
yamlDoc.InheritedOptions = genFlagResult(flags)
|
||||
}
|
||||
|
||||
if hasSeeAlso(cmd) {
|
||||
result := []string{}
|
||||
if cmd.HasParent() {
|
||||
parent := cmd.Parent()
|
||||
result = append(result, parent.CommandPath()+" - "+parent.Short)
|
||||
}
|
||||
children := cmd.Commands()
|
||||
sort.Sort(byName(children))
|
||||
for _, child := range children {
|
||||
if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() {
|
||||
continue
|
||||
}
|
||||
result = append(result, child.CommandPath()+" - "+child.Short)
|
||||
}
|
||||
yamlDoc.SeeAlso = result
|
||||
}
|
||||
|
||||
final, err := yaml.Marshal(&yamlDoc)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if _, err := w.Write(final); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func genFlagResult(flags *pflag.FlagSet) []cmdOption {
|
||||
var result []cmdOption
|
||||
|
||||
flags.VisitAll(func(flag *pflag.Flag) {
|
||||
// Todo, when we mark a shorthand is deprecated, but specify an empty message.
|
||||
// The flag.ShorthandDeprecated is empty as the shorthand is deprecated.
|
||||
// Using len(flag.ShorthandDeprecated) > 0 can't handle this, others are ok.
|
||||
if !(len(flag.ShorthandDeprecated) > 0) && len(flag.Shorthand) > 0 {
|
||||
opt := cmdOption{
|
||||
flag.Name,
|
||||
flag.Shorthand,
|
||||
flag.DefValue,
|
||||
forceMultiLine(flag.Usage),
|
||||
}
|
||||
result = append(result, opt)
|
||||
} else {
|
||||
opt := cmdOption{
|
||||
Name: flag.Name,
|
||||
DefaultValue: forceMultiLine(flag.DefValue),
|
||||
Usage: forceMultiLine(flag.Usage),
|
||||
}
|
||||
result = append(result, opt)
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
9
vendor/modules.txt
vendored
9
vendor/modules.txt
vendored
@ -29,6 +29,10 @@ github.com/containerd/log
|
||||
# github.com/containerd/platforms v1.0.0-rc.1
|
||||
## explicit; go 1.20
|
||||
github.com/containerd/platforms
|
||||
# github.com/cpuguy83/go-md2man/v2 v2.0.6
|
||||
## explicit; go 1.12
|
||||
github.com/cpuguy83/go-md2man/v2
|
||||
github.com/cpuguy83/go-md2man/v2/md2man
|
||||
# github.com/creack/pty v1.1.24
|
||||
## explicit; go 1.18
|
||||
github.com/creack/pty
|
||||
@ -37,6 +41,7 @@ github.com/creack/pty
|
||||
github.com/distribution/reference
|
||||
# github.com/docker/cli-docs-tool v0.9.0
|
||||
## explicit; go 1.18
|
||||
github.com/docker/cli-docs-tool
|
||||
github.com/docker/cli-docs-tool/annotation
|
||||
# github.com/docker/distribution v2.8.3+incompatible
|
||||
## explicit
|
||||
@ -269,12 +274,16 @@ github.com/prometheus/procfs/internal/util
|
||||
# github.com/rivo/uniseg v0.2.0
|
||||
## explicit; go 1.12
|
||||
github.com/rivo/uniseg
|
||||
# github.com/russross/blackfriday/v2 v2.1.0
|
||||
## explicit
|
||||
github.com/russross/blackfriday/v2
|
||||
# github.com/sirupsen/logrus v1.9.3
|
||||
## explicit; go 1.13
|
||||
github.com/sirupsen/logrus
|
||||
# github.com/spf13/cobra v1.8.1
|
||||
## explicit; go 1.15
|
||||
github.com/spf13/cobra
|
||||
github.com/spf13/cobra/doc
|
||||
# github.com/spf13/pflag v1.0.6
|
||||
## explicit; go 1.12
|
||||
github.com/spf13/pflag
|
||||
|
Loading…
x
Reference in New Issue
Block a user