Merge pull request #20715 from TomSweeneyRedHat/dev/tsweeney/buildah1.33.1

Bump Buildah to v1.33.1
openshift-merge-bot[bot]
2023-11-19 05:58:14 +00:00
committed by GitHub
12 changed files with 428 additions and 179 deletions

go.mod

@@ -11,7 +11,7 @@ require (
 	github.com/checkpoint-restore/go-criu/v7 v7.0.0
 	github.com/containernetworking/cni v1.1.2
 	github.com/containernetworking/plugins v1.3.0
-	github.com/containers/buildah v1.33.0
+	github.com/containers/buildah v1.33.1
 	github.com/containers/common v0.57.0
 	github.com/containers/conmon v2.0.20+incompatible
 	github.com/containers/gvisor-tap-vsock v0.7.1
@@ -160,7 +160,7 @@ require (
 	github.com/miekg/pkcs11 v1.1.1 // indirect
 	github.com/mistifyio/go-zfs/v3 v3.0.1 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
-	github.com/moby/buildkit v0.10.6 // indirect
+	github.com/moby/buildkit v0.11.4 // indirect
 	github.com/moby/patternmatcher v0.5.0 // indirect
 	github.com/moby/sys/mountinfo v0.7.1 // indirect
 	github.com/moby/sys/sequential v0.5.0 // indirect

go.sum

@@ -253,8 +253,8 @@ github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHV
 github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8=
 github.com/containernetworking/plugins v1.3.0 h1:QVNXMT6XloyMUoO2wUOqWTC1hWFV62Q6mVDp5H1HnjM=
 github.com/containernetworking/plugins v1.3.0/go.mod h1:Pc2wcedTQQCVuROOOaLBPPxrEXqqXBFt3cZ+/yVg6l0=
-github.com/containers/buildah v1.33.0 h1:5MfF/nl/W60V22Jt9paNunMEZkDT0K0LLbf0DnXknfE=
-github.com/containers/buildah v1.33.0/go.mod h1:O8jJAByO/HSoNOYAg3uupbyISfRC+hJSfWNsNtxzKCw=
+github.com/containers/buildah v1.33.1 h1:s+5LaZx+vkOV/BboM6QZbf0Uma/A9W/B1REoUiM3CQo=
+github.com/containers/buildah v1.33.1/go.mod h1:xEvekGaEeflDV4kxdKcTk0NbTuV4FsbPW4UYReLkHIw=
 github.com/containers/common v0.57.0 h1:5O/+6QUBafKK0/zeok9y1rLPukfWgdE0sT4nuzmyAqk=
 github.com/containers/common v0.57.0/go.mod h1:t/Z+/sFrapvFMEJe3YnecN49/Tae2wYEQShbEN6SRaU=
 github.com/containers/conmon v2.0.20+incompatible h1:YbCVSFSCqFjjVwHTPINGdMX1F6JXHGTUje2ZYobNrkg=
@@ -781,8 +781,8 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
 github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
 github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
 github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
-github.com/moby/buildkit v0.10.6 h1:DJlEuLIgnu34HQKF4n9Eg6q2YqQVC0eOpMb4p2eRS2w=
-github.com/moby/buildkit v0.10.6/go.mod h1:tQuuyTWtOb9D+RE425cwOCUkX0/oZ+5iBZ+uWpWQ9bU=
+github.com/moby/buildkit v0.11.4 h1:mleVHr+n7HUD65QNUkgkT3d8muTzhYUoHE9FM3Ej05s=
+github.com/moby/buildkit v0.11.4/go.mod h1:P5Qi041LvCfhkfYBHry+Rwoo3Wi6H971J2ggE+PcIoo=
 github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
 github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M7DBo=
 github.com/moby/patternmatcher v0.5.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=


@@ -2,6 +2,12 @@
 # Changelog
 
+## v1.33.1 (2023-11-18)
+
+fix(deps): update module github.com/moby/buildkit to v0.11.4 [security]
+test,heredoc: use fedora instead of docker.io/library/python:latest
+Bump to v1.33.1-dev
+
 ## v1.33.0 (2023-11-17)
 
 Never omit layers for emptyLayer instructions when squashing/cwing


@@ -1,3 +1,8 @@
+- Changelog for v1.33.1 (2023-11-18)
+  * fix(deps): update module github.com/moby/buildkit to v0.11.4 [security]
+  * test,heredoc: use fedora instead of docker.io/library/python:latest
+  * Bump to v1.33.1-dev
+
 - Changelog for v1.33.0 (2023-11-17)
   * Never omit layers for emptyLayer instructions when squashing/cwing
   * Add OverrideChanges and OverrideConfig to CommitOptions


@@ -29,7 +29,7 @@ const (
 	// identify working containers.
 	Package = "buildah"
 	// Version for the Package. Also used by .packit.sh for Packit builds.
-	Version = "1.33.0"
+	Version = "1.33.1"
 	// DefaultRuntime if containers.conf fails.
 	DefaultRuntime = "runc"


@@ -0,0 +1,171 @@
package parser
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"regexp"
"strings"
"github.com/pkg/errors"
)
const (
keySyntax = "syntax"
keyEscape = "escape"
)
var validDirectives = map[string]struct{}{
keySyntax: {},
keyEscape: {},
}
type Directive struct {
Name string
Value string
Location []Range
}
// DirectiveParser is a parser for Dockerfile directives that enforces the
// quirks of the directive parser.
type DirectiveParser struct {
line int
regexp *regexp.Regexp
seen map[string]struct{}
done bool
}
func (d *DirectiveParser) setComment(comment string) {
d.regexp = regexp.MustCompile(fmt.Sprintf(`^%s\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`, comment))
}
func (d *DirectiveParser) ParseLine(line []byte) (*Directive, error) {
d.line++
if d.done {
return nil, nil
}
if d.regexp == nil {
d.setComment("#")
}
match := d.regexp.FindSubmatch(line)
if len(match) == 0 {
d.done = true
return nil, nil
}
k := strings.ToLower(string(match[1]))
if _, ok := validDirectives[k]; !ok {
d.done = true
return nil, nil
}
if d.seen == nil {
d.seen = map[string]struct{}{}
}
if _, ok := d.seen[k]; ok {
return nil, errors.Errorf("only one %s parser directive can be used", k)
}
d.seen[k] = struct{}{}
v := string(match[2])
directive := Directive{
Name: k,
Value: v,
Location: []Range{{
Start: Position{Line: d.line},
End: Position{Line: d.line},
}},
}
return &directive, nil
}
func (d *DirectiveParser) ParseAll(data []byte) ([]*Directive, error) {
scanner := bufio.NewScanner(bytes.NewReader(data))
var directives []*Directive
for scanner.Scan() {
if d.done {
break
}
d, err := d.ParseLine(scanner.Bytes())
if err != nil {
return directives, err
}
if d != nil {
directives = append(directives, d)
}
}
return directives, nil
}
// DetectSyntax returns the syntax of provided input.
//
// The traditional dockerfile directives '# syntax = ...' are used by default,
// however, the function will also fallback to c-style directives '// syntax = ...'
// and json-encoded directives '{ "syntax": "..." }'. Finally, starting lines
// with '#!' are treated as shebangs and ignored.
//
// This allows for a flexible range of input formats, and appropriate syntax
// selection.
func DetectSyntax(dt []byte) (string, string, []Range, bool) {
dt, hadShebang, err := discardShebang(dt)
if err != nil {
return "", "", nil, false
}
line := 0
if hadShebang {
line++
}
// use default directive parser, and search for #syntax=
directiveParser := DirectiveParser{line: line}
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok {
return syntax, cmdline, loc, true
}
// use directive with different comment prefix, and search for //syntax=
directiveParser = DirectiveParser{line: line}
directiveParser.setComment("//")
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok {
return syntax, cmdline, loc, true
}
// search for possible json directives
var directive struct {
Syntax string `json:"syntax"`
}
if err := json.Unmarshal(dt, &directive); err == nil {
if directive.Syntax != "" {
loc := []Range{{
Start: Position{Line: line},
End: Position{Line: line},
}}
return directive.Syntax, directive.Syntax, loc, true
}
}
return "", "", nil, false
}
func detectSyntaxFromParser(dt []byte, parser DirectiveParser) (string, string, []Range, bool) {
directives, _ := parser.ParseAll(dt)
for _, d := range directives {
// check for syntax directive before erroring out, since the error
// might have occurred *after* the syntax directive
if d.Name == keySyntax {
p, _, _ := strings.Cut(d.Value, " ")
return p, d.Value, d.Location, true
}
}
return "", "", nil, false
}
func discardShebang(dt []byte) ([]byte, bool, error) {
line, rest, _ := bytes.Cut(dt, []byte("\n"))
if bytes.HasPrefix(line, []byte("#!")) {
return rest, true, nil
}
return dt, false, nil
}
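For context on what the new directives.go brings into the vendored tree: DetectSyntax and DirectiveParser are exported, so a caller can probe a Dockerfile for a '# syntax=' (or '// syntax=' / JSON) directive before parsing it. A minimal sketch of how that API might be exercised, using the import path listed in vendor/modules.txt; the Dockerfile content below is made up for illustration:

package main

import (
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	// A Dockerfile whose first line names a frontend image via a parser directive.
	dockerfile := []byte("# syntax=docker/dockerfile:1.4\nFROM alpine\nRUN echo hello\n")

	// DetectSyntax returns the frontend reference, the full directive value,
	// its location, and whether a syntax directive was found at all.
	ref, cmdline, _, ok := parser.DetectSyntax(dockerfile)
	if !ok {
		fmt.Println("no syntax directive present")
		return
	}
	fmt.Println("frontend image:", ref)     // docker/dockerfile:1.4
	fmt.Println("full directive:", cmdline) // docker/dockerfile:1.4
}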


@@ -8,7 +8,6 @@ package parser
 import (
 	"encoding/json"
-	"fmt"
 	"strings"
 	"unicode"
 	"unicode/utf8"
@@ -34,7 +33,6 @@ func parseIgnore(rest string, d *directives) (*Node, map[string]bool, error) {
 // statement with sub-statements.
 //
 // ONBUILD RUN foo bar -> (onbuild (run foo bar))
-//
 func parseSubCommand(rest string, d *directives) (*Node, map[string]bool, error) {
 	if rest == "" {
 		return nil, nil, nil
@@ -154,7 +152,7 @@ func parseNameVal(rest string, key string, d *directives) (*Node, error) {
 	if !strings.Contains(words[0], "=") {
 		parts := reWhitespace.Split(rest, 2)
 		if len(parts) < 2 {
-			return nil, fmt.Errorf(key + " must have two arguments")
+			return nil, errors.Errorf("%s must have two arguments", key)
 		}
 		return newKeyValueNode(parts[0], parts[1]), nil
 	}
@@ -163,7 +161,7 @@ func parseNameVal(rest string, key string, d *directives) (*Node, error) {
 	var prevNode *Node
 	for _, word := range words {
 		if !strings.Contains(word, "=") {
-			return nil, fmt.Errorf("Syntax error - can't find = in %q. Must be of the form: name=value", word)
+			return nil, errors.Errorf("Syntax error - can't find = in %q. Must be of the form: name=value", word)
 		}
 		parts := strings.SplitN(word, "=", 2)
@@ -274,7 +272,7 @@ func parseString(rest string, d *directives) (*Node, map[string]bool, error) {
 func parseJSON(rest string, d *directives) (*Node, map[string]bool, error) {
 	rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
 	if !strings.HasPrefix(rest, "[") {
-		return nil, nil, fmt.Errorf(`Error parsing "%s" as a JSON array`, rest)
+		return nil, nil, errors.Errorf("Error parsing %q as a JSON array", rest)
 	}
 	var myJSON []interface{}


@@ -1,4 +1,5 @@
-// Package parser implements a parser and parse tree dumper for Dockerfiles.
+// The parser package implements a parser that transforms a raw byte-stream
+// into a low-level Abstract Syntax Tree.
 package parser
 
 import (
@@ -27,7 +28,6 @@ import (
 // This data structure is frankly pretty lousy for handling complex languages,
 // but lucky for us the Dockerfile isn't very complicated. This structure
 // works a little more effectively than a "proper" parse tree for our needs.
-//
 type Node struct {
 	Value string // actual content
 	Next  *Node  // the next item in the current sexp
@@ -115,7 +115,6 @@ type Heredoc struct {
 var (
 	dispatch      map[string]func(string, *directives) (*Node, map[string]bool, error)
 	reWhitespace  = regexp.MustCompile(`[\t\v\f\r ]+`)
-	reDirectives  = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`)
 	reComment     = regexp.MustCompile(`^#.*$`)
 	reHeredoc     = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`)
 	reLeadingTabs = regexp.MustCompile(`(?m)^\t+`)
@@ -124,11 +123,6 @@ var (
 // DefaultEscapeToken is the default escape token
 const DefaultEscapeToken = '\\'
 
-var validDirectives = map[string]struct{}{
-	"escape": {},
-	"syntax": {},
-}
-
 var (
 	// Directives allowed to contain heredocs
 	heredocDirectives = map[string]bool{
@@ -143,13 +137,12 @@ var (
 	}
 )
 
-// directive is the structure used during a build run to hold the state of
+// directives is the structure used during a build run to hold the state of
 // parsing directives.
 type directives struct {
+	parser                DirectiveParser
 	escapeToken           rune                // Current escape token
 	lineContinuationRegex *regexp.Regexp      // Current line continuation regex
-	done                  bool                // Whether we are done looking for directives
-	seen                  map[string]struct{} // Whether the escape directive has been seen
 }
 
 // setEscapeToken sets the default token for escaping characters and as line-
@@ -178,40 +171,19 @@ func (d *directives) setEscapeToken(s string) error {
 // Parser directives must precede any builder instruction or other comments,
 // and cannot be repeated.
 func (d *directives) possibleParserDirective(line string) error {
-	if d.done {
-		return nil
-	}
-
-	match := reDirectives.FindStringSubmatch(line)
-	if len(match) == 0 {
-		d.done = true
-		return nil
-	}
-
-	k := strings.ToLower(match[1])
-	_, ok := validDirectives[k]
-	if !ok {
-		d.done = true
-		return nil
-	}
-
-	if _, ok := d.seen[k]; ok {
-		return errors.Errorf("only one %s parser directive can be used", k)
-	}
-	d.seen[k] = struct{}{}
-
-	if k == "escape" {
-		return d.setEscapeToken(match[2])
+	directive, err := d.parser.ParseLine([]byte(line))
+	if err != nil {
+		return err
+	}
+	if directive != nil && directive.Name == keyEscape {
+		return d.setEscapeToken(directive.Value)
 	}
 	return nil
 }
 
 // newDefaultDirectives returns a new directives structure with the default escapeToken token
 func newDefaultDirectives() *directives {
-	d := &directives{
-		seen: map[string]struct{}{},
-	}
+	d := &directives{}
 	d.setEscapeToken(string(DefaultEscapeToken))
 	return d
 }
@@ -274,13 +246,15 @@ func newNodeFromLine(line string, d *directives, comments []string) (*Node, erro
 	}, nil
 }
 
-// Result is the result of parsing a Dockerfile
+// Result contains the bundled outputs from parsing a Dockerfile.
 type Result struct {
 	AST         *Node
 	EscapeToken rune
 	Warnings    []Warning
 }
 
+// Warning contains information to identify and locate a warning generated
+// during parsing.
 type Warning struct {
 	Short  string
 	Detail [][]byte
@@ -301,8 +275,8 @@ func (r *Result) PrintWarnings(out io.Writer) {
 	}
 }
 
-// Parse reads lines from a Reader, parses the lines into an AST and returns
-// the AST and escape token
+// Parse consumes lines from a provided Reader, parses each line into an AST
+// and returns the results of doing so.
 func Parse(rwc io.Reader) (*Result, error) {
 	d := newDefaultDirectives()
 	currentLine := 0
@@ -421,7 +395,7 @@ func Parse(rwc io.Reader) (*Result, error) {
 	}, withLocation(handleScannerError(scanner.Err()), currentLine, 0)
 }
 
-// Extracts a heredoc from a possible heredoc regex match
+// heredocFromMatch extracts a heredoc from a possible heredoc regex match.
 func heredocFromMatch(match []string) (*Heredoc, error) {
 	if len(match) == 0 {
 		return nil, nil
@@ -457,7 +431,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
 		return nil, err
 	}
 	if len(wordsRaw) != len(words) {
-		return nil, fmt.Errorf("internal lexing of heredoc produced inconsistent results: %s", rest)
+		return nil, errors.Errorf("internal lexing of heredoc produced inconsistent results: %s", rest)
 	}
 
 	word := words[0]
@@ -475,9 +449,14 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
 	}, nil
 }
 
+// ParseHeredoc parses a heredoc word from a target string, returning the
+// components from the doc.
 func ParseHeredoc(src string) (*Heredoc, error) {
 	return heredocFromMatch(reHeredoc.FindStringSubmatch(src))
 }
 
+// MustParseHeredoc is a variant of ParseHeredoc that discards the error, if
+// there was one present.
 func MustParseHeredoc(src string) *Heredoc {
 	heredoc, _ := ParseHeredoc(src)
 	return heredoc
@@ -503,6 +482,7 @@ func heredocsFromLine(line string) ([]Heredoc, error) {
 	return docs, nil
 }
 
+// ChompHeredocContent chomps leading tabs from the heredoc.
 func ChompHeredocContent(src string) string {
 	return reLeadingTabs.ReplaceAllString(src, "")
 }
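The possibleParserDirective rewrite above routes directive handling through DirectiveParser, but the behavior visible to callers of Parse stays the same: an escape directive on the first line still switches the line-continuation character. A small sketch of that behavior, using the same vendored import path as the earlier example; the Dockerfile snippet and printed values are illustrative only:

package main

import (
	"fmt"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	// Windows-style Dockerfile: the escape directive swaps '\' for '`'.
	src := "# escape=`\nFROM mcr.microsoft.com/windows/servercore\nRUN dir C:\\windows\n"

	res, err := parser.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}

	// The parser records which escape token ended up in effect.
	fmt.Printf("escape token: %q\n", res.EscapeToken) // '`'
	for _, child := range res.AST.Children {
		fmt.Println(child.Value) // from, run
	}
}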


@@ -377,7 +377,7 @@ func (sw *shellWord) processDollar() (string, error) {
 	}
 
 	// Grab the current value of the variable in question so we
-	// can use to to determine what to do based on the modifier
+	// can use it to determine what to do based on the modifier
 	newValue, found := sw.getEnv(name)
 
 	switch modifier {


@@ -79,7 +79,7 @@ func Enable(err error) error {
 	return err
 }
 
-func Wrap(err error, s Stack) error {
+func Wrap(err error, s *Stack) error {
 	return &withStack{stack: s, error: err}
 }
 
@@ -151,7 +151,7 @@ func convertStack(s errors.StackTrace) *Stack {
 		if idx == -1 {
 			continue
 		}
-		line, err := strconv.Atoi(p[1][idx+1:])
+		line, err := strconv.ParseInt(p[1][idx+1:], 10, 32)
 		if err != nil {
 			continue
 		}
@@ -169,7 +169,7 @@ func convertStack(s errors.StackTrace) *Stack {
 }
 
 type withStack struct {
-	stack Stack
+	stack *Stack
 	error
 }
 
@@ -178,5 +178,5 @@ func (e *withStack) Unwrap() error {
 }
 
 func (e *withStack) StackTrace() *Stack {
-	return &e.stack
+	return e.stack
 }


@@ -1,172 +1,261 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.11.4
// source: stack.proto // source: stack.proto
package stack package stack
import ( import (
fmt "fmt" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
proto "github.com/golang/protobuf/proto" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
math "math" reflect "reflect"
sync "sync"
) )
// Reference imports to suppress errors if they are not otherwise used. const (
var _ = proto.Marshal // Verify that this generated code is sufficiently up-to-date.
var _ = fmt.Errorf _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
var _ = math.Inf // Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
// This is a compile-time assertion to ensure that this generated file )
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Stack struct { type Stack struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Frames []*Frame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` Frames []*Frame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"`
Cmdline []string `protobuf:"bytes,2,rep,name=cmdline,proto3" json:"cmdline,omitempty"` Cmdline []string `protobuf:"bytes,2,rep,name=cmdline,proto3" json:"cmdline,omitempty"`
Pid int32 `protobuf:"varint,3,opt,name=pid,proto3" json:"pid,omitempty"` Pid int32 `protobuf:"varint,3,opt,name=pid,proto3" json:"pid,omitempty"`
Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"`
Revision string `protobuf:"bytes,5,opt,name=revision,proto3" json:"revision,omitempty"` Revision string `protobuf:"bytes,5,opt,name=revision,proto3" json:"revision,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
} }
func (m *Stack) Reset() { *m = Stack{} } func (x *Stack) Reset() {
func (m *Stack) String() string { return proto.CompactTextString(m) } *x = Stack{}
if protoimpl.UnsafeEnabled {
mi := &file_stack_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Stack) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Stack) ProtoMessage() {} func (*Stack) ProtoMessage() {}
func (x *Stack) ProtoReflect() protoreflect.Message {
mi := &file_stack_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Stack.ProtoReflect.Descriptor instead.
func (*Stack) Descriptor() ([]byte, []int) { func (*Stack) Descriptor() ([]byte, []int) {
return fileDescriptor_b44c07feb2ca0a5a, []int{0} return file_stack_proto_rawDescGZIP(), []int{0}
} }
func (m *Stack) XXX_Unmarshal(b []byte) error { func (x *Stack) GetFrames() []*Frame {
return xxx_messageInfo_Stack.Unmarshal(m, b) if x != nil {
} return x.Frames
func (m *Stack) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Stack.Marshal(b, m, deterministic)
}
func (m *Stack) XXX_Merge(src proto.Message) {
xxx_messageInfo_Stack.Merge(m, src)
}
func (m *Stack) XXX_Size() int {
return xxx_messageInfo_Stack.Size(m)
}
func (m *Stack) XXX_DiscardUnknown() {
xxx_messageInfo_Stack.DiscardUnknown(m)
}
var xxx_messageInfo_Stack proto.InternalMessageInfo
func (m *Stack) GetFrames() []*Frame {
if m != nil {
return m.Frames
} }
return nil return nil
} }
func (m *Stack) GetCmdline() []string { func (x *Stack) GetCmdline() []string {
if m != nil { if x != nil {
return m.Cmdline return x.Cmdline
} }
return nil return nil
} }
func (m *Stack) GetPid() int32 { func (x *Stack) GetPid() int32 {
if m != nil { if x != nil {
return m.Pid return x.Pid
} }
return 0 return 0
} }
func (m *Stack) GetVersion() string { func (x *Stack) GetVersion() string {
if m != nil { if x != nil {
return m.Version return x.Version
} }
return "" return ""
} }
func (m *Stack) GetRevision() string { func (x *Stack) GetRevision() string {
if m != nil { if x != nil {
return m.Revision return x.Revision
} }
return "" return ""
} }
type Frame struct { type Frame struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"Name,omitempty"` Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"Name,omitempty"`
File string `protobuf:"bytes,2,opt,name=File,proto3" json:"File,omitempty"` File string `protobuf:"bytes,2,opt,name=File,proto3" json:"File,omitempty"`
Line int32 `protobuf:"varint,3,opt,name=Line,proto3" json:"Line,omitempty"` Line int32 `protobuf:"varint,3,opt,name=Line,proto3" json:"Line,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
} }
func (m *Frame) Reset() { *m = Frame{} } func (x *Frame) Reset() {
func (m *Frame) String() string { return proto.CompactTextString(m) } *x = Frame{}
if protoimpl.UnsafeEnabled {
mi := &file_stack_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Frame) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Frame) ProtoMessage() {} func (*Frame) ProtoMessage() {}
func (x *Frame) ProtoReflect() protoreflect.Message {
mi := &file_stack_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Frame.ProtoReflect.Descriptor instead.
func (*Frame) Descriptor() ([]byte, []int) { func (*Frame) Descriptor() ([]byte, []int) {
return fileDescriptor_b44c07feb2ca0a5a, []int{1} return file_stack_proto_rawDescGZIP(), []int{1}
} }
func (m *Frame) XXX_Unmarshal(b []byte) error { func (x *Frame) GetName() string {
return xxx_messageInfo_Frame.Unmarshal(m, b) if x != nil {
} return x.Name
func (m *Frame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Frame.Marshal(b, m, deterministic)
}
func (m *Frame) XXX_Merge(src proto.Message) {
xxx_messageInfo_Frame.Merge(m, src)
}
func (m *Frame) XXX_Size() int {
return xxx_messageInfo_Frame.Size(m)
}
func (m *Frame) XXX_DiscardUnknown() {
xxx_messageInfo_Frame.DiscardUnknown(m)
}
var xxx_messageInfo_Frame proto.InternalMessageInfo
func (m *Frame) GetName() string {
if m != nil {
return m.Name
} }
return "" return ""
} }
func (m *Frame) GetFile() string { func (x *Frame) GetFile() string {
if m != nil { if x != nil {
return m.File return x.File
} }
return "" return ""
} }
func (m *Frame) GetLine() int32 { func (x *Frame) GetLine() int32 {
if m != nil { if x != nil {
return m.Line return x.Line
} }
return 0 return 0
} }
func init() { var File_stack_proto protoreflect.FileDescriptor
proto.RegisterType((*Stack)(nil), "stack.Stack")
proto.RegisterType((*Frame)(nil), "stack.Frame") var file_stack_proto_rawDesc = []byte{
0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, 0x73,
0x74, 0x61, 0x63, 0x6b, 0x22, 0x8f, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x12, 0x24,
0x0a, 0x06, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c,
0x2e, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x52, 0x06, 0x66, 0x72,
0x61, 0x6d, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x6c, 0x69, 0x6e, 0x65, 0x18,
0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x10,
0x0a, 0x03, 0x70, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x70, 0x69, 0x64,
0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28,
0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65,
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65,
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x43, 0x0a, 0x05, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x12,
0x12, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x4e,
0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x18,
0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x33,
} }
func init() { var (
proto.RegisterFile("stack.proto", fileDescriptor_b44c07feb2ca0a5a) file_stack_proto_rawDescOnce sync.Once
file_stack_proto_rawDescData = file_stack_proto_rawDesc
)
func file_stack_proto_rawDescGZIP() []byte {
file_stack_proto_rawDescOnce.Do(func() {
file_stack_proto_rawDescData = protoimpl.X.CompressGZIP(file_stack_proto_rawDescData)
})
return file_stack_proto_rawDescData
} }
var fileDescriptor_b44c07feb2ca0a5a = []byte{ var file_stack_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
// 185 bytes of a gzipped FileDescriptorProto var file_stack_proto_goTypes = []interface{}{
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x3c, 0x8f, 0x3d, 0xce, 0x82, 0x40, (*Stack)(nil), // 0: stack.Stack
0x10, 0x86, 0xb3, 0xdf, 0xb2, 0x7c, 0x3a, 0x58, 0x98, 0xa9, 0x36, 0x56, 0x1b, 0x62, 0x41, 0x45, (*Frame)(nil), // 1: stack.Frame
0xa1, 0x47, 0x30, 0xa1, 0x32, 0x16, 0x78, 0x02, 0x84, 0x35, 0xd9, 0xc8, 0x5f, 0x76, 0x09, 0xd7, }
0xf0, 0xca, 0x66, 0x06, 0xb4, 0x7b, 0xde, 0x9f, 0xe4, 0x9d, 0x81, 0x24, 0x4c, 0x55, 0xfd, 0xca, var file_stack_proto_depIdxs = []int32{
0x47, 0x3f, 0x4c, 0x03, 0x2a, 0x16, 0xe9, 0x5b, 0x80, 0xba, 0x13, 0xe1, 0x11, 0xe2, 0xa7, 0xaf, 1, // 0: stack.Stack.frames:type_name -> stack.Frame
0x3a, 0x1b, 0xb4, 0x30, 0x32, 0x4b, 0x4e, 0xbb, 0x7c, 0xa9, 0x17, 0x64, 0x96, 0x6b, 0x86, 0x1a, 1, // [1:1] is the sub-list for method output_type
0xfe, 0xeb, 0xae, 0x69, 0x5d, 0x6f, 0xf5, 0x9f, 0x91, 0xd9, 0xb6, 0xfc, 0x4a, 0xdc, 0x83, 0x1c, 1, // [1:1] is the sub-list for method input_type
0x5d, 0xa3, 0xa5, 0x11, 0x99, 0x2a, 0x09, 0xa9, 0x3b, 0x5b, 0x1f, 0xdc, 0xd0, 0xeb, 0xc8, 0x08, 1, // [1:1] is the sub-list for extension type_name
0xea, 0xae, 0x12, 0x0f, 0xb0, 0xf1, 0x76, 0x76, 0x1c, 0x29, 0x8e, 0x7e, 0x3a, 0xbd, 0x80, 0xe2, 1, // [1:1] is the sub-list for extension extendee
0x49, 0x44, 0x88, 0x6e, 0x55, 0x67, 0xb5, 0xe0, 0x02, 0x33, 0x79, 0x85, 0x6b, 0x69, 0x9b, 0x3d, 0, // [0:1] is the sub-list for field type_name
0x62, 0xf2, 0xae, 0x74, 0xcf, 0xb2, 0xcc, 0xfc, 0x88, 0xf9, 0xc9, 0xf3, 0x27, 0x00, 0x00, 0xff, }
0xff, 0xfd, 0x2c, 0xbb, 0xfb, 0xf3, 0x00, 0x00, 0x00,
func init() { file_stack_proto_init() }
func file_stack_proto_init() {
if File_stack_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_stack_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Stack); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_stack_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Frame); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_stack_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_stack_proto_goTypes,
DependencyIndexes: file_stack_proto_depIdxs,
MessageInfos: file_stack_proto_msgTypes,
}.Build()
File_stack_proto = out.File
file_stack_proto_rawDesc = nil
file_stack_proto_goTypes = nil
file_stack_proto_depIdxs = nil
} }

vendor/modules.txt

@@ -138,7 +138,7 @@ github.com/containernetworking/cni/pkg/version
 # github.com/containernetworking/plugins v1.3.0
 ## explicit; go 1.20
 github.com/containernetworking/plugins/pkg/ns
-# github.com/containers/buildah v1.33.0
+# github.com/containers/buildah v1.33.1
 ## explicit; go 1.20
 github.com/containers/buildah
 github.com/containers/buildah/bind
@@ -775,8 +775,8 @@ github.com/mistifyio/go-zfs/v3
 # github.com/mitchellh/mapstructure v1.5.0
 ## explicit; go 1.14
 github.com/mitchellh/mapstructure
-# github.com/moby/buildkit v0.10.6
-## explicit; go 1.17
+# github.com/moby/buildkit v0.11.4
+## explicit; go 1.18
 github.com/moby/buildkit/frontend/dockerfile/command
 github.com/moby/buildkit/frontend/dockerfile/parser
 github.com/moby/buildkit/frontend/dockerfile/shell