update deps and goreleaser

Umputun
2024-05-23 03:14:10 -05:00
parent 57552c1798
commit 084e167283
79 changed files with 798 additions and 431 deletions


@@ -22,10 +22,6 @@ builds:
 archives:
   - name_template: "{{.ProjectName}}_{{.Tag}}_{{.Os}}_{{.Arch}}"
-    replacements:
-      386: i386
-      amd64: x86_64
-      darwin: macos
     format_overrides:
       - goos: windows
         format: zip
@@ -34,8 +30,7 @@ archives:
       - README.md
 nfpms:
-  -
-    id: reproxy
+  - id: reproxy
     package_name: reproxy
     file_name_template: "{{.ProjectName}}_{{.Tag}}_{{.Os}}_{{.Arch}}"
     vendor: Umputun
@@ -52,11 +47,9 @@ nfpms:
     contents:
       - src: reproxy.service
        dst: /etc/systemd/system/reproxy.service
       - src: reproxy-example.yml
        dst: /etc/reproxy-example.yml
        type: config
     scripts:
       postinstall: "scripts/postinstall.sh"
       preremove: "scripts/preremove.sh"


@@ -1,4 +1,4 @@
-FROM goreleaser/goreleaser:v1.25.1 as build
+FROM goreleaser/goreleaser:v1.26.1 as build
 WORKDIR /build
 ADD . /build
@@ -8,7 +8,7 @@ RUN apk add -u gnupg
 RUN \
     export GNUPGHOME="$PWD/releaser-gpg" && export GNUPGHOME && mkdir -p "$GNUPGHOME" && chmod 0700 "$GNUPGHOME" && \
     cat reproxy.signing-key.gpg | gpg --batch --allow-secret-key-import --import && \
-    goreleaser --snapshot --skip-publish --rm-dist
+    goreleaser --snapshot --skip-publish --clean
 FROM alpine
 COPY --from=build /build/dist/ /dist/

go.mod

@@ -18,17 +18,17 @@ require (
 require (
     github.com/beorn7/perks v1.0.1 // indirect
-    github.com/cespare/xxhash/v2 v2.2.0 // indirect
+    github.com/cespare/xxhash/v2 v2.3.0 // indirect
     github.com/davecgh/go-spew v1.1.1 // indirect
     github.com/felixge/httpsnoop v1.0.4 // indirect
     github.com/go-pkgz/expirable-cache v1.0.0 // indirect
     github.com/kr/text v0.1.0 // indirect
     github.com/pmezard/go-difflib v1.0.0 // indirect
-    github.com/prometheus/client_model v0.6.0 // indirect
+    github.com/prometheus/client_model v0.6.1 // indirect
-    github.com/prometheus/common v0.50.0 // indirect
+    github.com/prometheus/common v0.53.0 // indirect
-    github.com/prometheus/procfs v0.13.0 // indirect
+    github.com/prometheus/procfs v0.15.0 // indirect
-    golang.org/x/net v0.23.0 // indirect
+    golang.org/x/net v0.25.0 // indirect
     golang.org/x/sys v0.20.0 // indirect
     golang.org/x/text v0.15.0 // indirect
-    google.golang.org/protobuf v1.33.0 // indirect
+    google.golang.org/protobuf v1.34.1 // indirect
 )

go.sum

@@ -1,7 +1,7 @@
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
-github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
-github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -31,12 +31,12 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE=
 github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho=
-github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos=
-github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8=
+github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
+github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
-github.com/prometheus/common v0.50.0 h1:YSZE6aa9+luNa2da6/Tik0q0A5AbR+U003TItK57CPQ=
-github.com/prometheus/common v0.50.0/go.mod h1:wHFBCEVWVmHMUpg7pYcOm2QUR/ocQdYSJVQJKnHc3xQ=
+github.com/prometheus/common v0.53.0 h1:U2pL9w9nmJwJDa4qqLQ3ZaePJ6ZTwt7cMD3AG3+aLCE=
+github.com/prometheus/common v0.53.0/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U=
-github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o=
-github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g=
+github.com/prometheus/procfs v0.15.0 h1:A82kmvXJq2jTu5YUhSGNlYoxh85zLnKgPz4bMZgI5Ek=
+github.com/prometheus/procfs v0.15.0/go.mod h1:Y0RJ/Y5g5wJpkTisOtqwDSo4HwhGmLB4VQSw2sQJLHk=
 github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
 github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -48,14 +48,14 @@ github.com/umputun/go-flags v1.5.1 h1:vRauoXV3Ultt1HrxivSxowbintgZLJE+EcBy5ta3/m
 github.com/umputun/go-flags v1.5.1/go.mod h1:nTbvsO/hKqe7Utri/NoyN18GR3+EWf+9RrmsdwdhrEc=
 golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
 golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
-golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
-golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
+golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
+golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
 golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
 golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
-google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
+google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=


@@ -70,3 +70,5 @@ benchstat <(go test -benchtime 500ms -count 15 -bench 'Sum64$')
 - [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics)
 - [FreeCache](https://github.com/coocood/freecache)
 - [FastCache](https://github.com/VictoriaMetrics/fastcache)
+- [Ristretto](https://github.com/dgraph-io/ristretto)
+- [Badger](https://github.com/dgraph-io/badger)


@@ -19,10 +19,13 @@ const (
 // Store the primes in an array as well.
 //
 // The consts are used when possible in Go code to avoid MOVs but we need a
-// contiguous array of the assembly code.
+// contiguous array for the assembly code.
 var primes = [...]uint64{prime1, prime2, prime3, prime4, prime5}

 // Digest implements hash.Hash64.
+//
+// Note that a zero-valued Digest is not ready to receive writes.
+// Call Reset or create a Digest using New before calling other methods.
 type Digest struct {
     v1 uint64
     v2 uint64
@@ -33,19 +36,31 @@ type Digest struct {
     n   int // how much of mem is used
 }

-// New creates a new Digest that computes the 64-bit xxHash algorithm.
+// New creates a new Digest with a zero seed.
 func New() *Digest {
+    return NewWithSeed(0)
+}
+
+// NewWithSeed creates a new Digest with the given seed.
+func NewWithSeed(seed uint64) *Digest {
     var d Digest
-    d.Reset()
+    d.ResetWithSeed(seed)
     return &d
 }

 // Reset clears the Digest's state so that it can be reused.
+// It uses a seed value of zero.
 func (d *Digest) Reset() {
-    d.v1 = primes[0] + prime2
-    d.v2 = prime2
-    d.v3 = 0
-    d.v4 = -primes[0]
+    d.ResetWithSeed(0)
+}
+
+// ResetWithSeed clears the Digest's state so that it can be reused.
+// It uses the given seed to initialize the state.
+func (d *Digest) ResetWithSeed(seed uint64) {
+    d.v1 = seed + prime1 + prime2
+    d.v2 = seed + prime2
+    d.v3 = seed
+    d.v4 = seed - prime1
     d.total = 0
     d.n = 0
 }


@@ -6,7 +6,7 @@
 package xxhash

-// Sum64 computes the 64-bit xxHash digest of b.
+// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
 //
 //go:noescape
 func Sum64(b []byte) uint64


@@ -3,7 +3,7 @@
 package xxhash

-// Sum64 computes the 64-bit xxHash digest of b.
+// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
 func Sum64(b []byte) uint64 {
     // A simpler version would be
     //   d := New()


@@ -5,7 +5,7 @@
 package xxhash

-// Sum64String computes the 64-bit xxHash digest of s.
+// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
 func Sum64String(s string) uint64 {
     return Sum64([]byte(s))
 }


@@ -33,7 +33,7 @@ import (
 //
 // See https://github.com/golang/go/issues/42739 for discussion.

-// Sum64String computes the 64-bit xxHash digest of s.
+// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
 // It may be faster than Sum64([]byte(s)) by avoiding a copy.
 func Sum64String(s string) uint64 {
     b := *(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)}))


@@ -75,14 +75,14 @@ func ResponseFormat(h http.Header) Format {
 func NewDecoder(r io.Reader, format Format) Decoder {
     switch format.FormatType() {
     case TypeProtoDelim:
-        return &protoDecoder{r: r}
+        return &protoDecoder{r: bufio.NewReader(r)}
     }
     return &textDecoder{r: r}
 }

 // protoDecoder implements the Decoder interface for protocol buffers.
 type protoDecoder struct {
-    r io.Reader
+    r protodelim.Reader
 }

 // Decode implements the Decoder interface.
@@ -90,7 +90,7 @@ func (d *protoDecoder) Decode(v *dto.MetricFamily) error {
     opts := protodelim.UnmarshalOptions{
         MaxSize: -1,
     }
-    if err := opts.UnmarshalFrom(bufio.NewReader(d.r), v); err != nil {
+    if err := opts.UnmarshalFrom(d.r, v); err != nil {
         return err
     }
     if !model.IsValidMetricName(model.LabelValue(v.GetName())) {
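For context, a sketch of how this decoder is typically driven via expfmt.NewDecoder with a proto-delimited format; the dto import path and the EOF handling are assumptions, not part of the commit:

package main

import (
    "fmt"
    "os"

    dto "github.com/prometheus/client_model/go"
    "github.com/prometheus/common/expfmt"
)

func main() {
    dec := expfmt.NewDecoder(os.Stdin, expfmt.NewFormat(expfmt.TypeProtoDelim))
    for {
        var mf dto.MetricFamily
        if err := dec.Decode(&mf); err != nil {
            break // io.EOF once the stream is exhausted
        }
        fmt.Println(mf.GetName())
    }
}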


@@ -15,6 +15,7 @@
 package expfmt

 import (
+    "fmt"
     "strings"

     "github.com/prometheus/common/model"
@@ -63,7 +64,7 @@ const (
 type FormatType int

 const (
-    TypeUnknown = iota
+    TypeUnknown FormatType = iota
     TypeProtoCompact
     TypeProtoDelim
     TypeProtoText
@@ -73,7 +74,8 @@ const (
 // NewFormat generates a new Format from the type provided. Mostly used for
 // tests, most Formats should be generated as part of content negotiation in
-// encode.go.
+// encode.go. If a type has more than one version, the latest version will be
+// returned.
 func NewFormat(t FormatType) Format {
     switch t {
     case TypeProtoCompact:
@@ -91,13 +93,21 @@ func NewFormat(t FormatType) Format {
     }
 }

+// NewOpenMetricsFormat generates a new OpenMetrics format matching the
+// specified version number.
+func NewOpenMetricsFormat(version string) (Format, error) {
+    if version == OpenMetricsVersion_0_0_1 {
+        return fmtOpenMetrics_0_0_1, nil
+    }
+    if version == OpenMetricsVersion_1_0_0 {
+        return fmtOpenMetrics_1_0_0, nil
+    }
+    return fmtUnknown, fmt.Errorf("unknown open metrics version string")
+}
+
 // FormatType deduces an overall FormatType for the given format.
 func (f Format) FormatType() FormatType {
     toks := strings.Split(string(f), ";")
-    if len(toks) < 2 {
-        return TypeUnknown
-    }
     params := make(map[string]string)
     for i, t := range toks {
         if i == 0 {
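A short sketch of the new helper in use; OpenMetricsVersion_1_0_0 is the version-string constant referenced above, the rest is assumed boilerplate:

package main

import (
    "fmt"
    "log"

    "github.com/prometheus/common/expfmt"
)

func main() {
    f, err := expfmt.NewOpenMetricsFormat(expfmt.OpenMetricsVersion_1_0_0)
    if err != nil {
        log.Fatal(err) // "unknown open metrics version string"
    }
    fmt.Println(f)              // the negotiated content-type string
    fmt.Println(f.FormatType()) // overall FormatType deduced from it
}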


@@ -248,12 +248,12 @@ func MetricFamilyToOpenMetrics(out io.Writer, in *dto.MetricFamily, options ...E
     var createdTsBytesWritten int

     // Finally the samples, one line for each.
+    if metricType == dto.MetricType_COUNTER && strings.HasSuffix(name, "_total") {
+        compliantName = compliantName + "_total"
+    }
     for _, metric := range in.Metric {
         switch metricType {
         case dto.MetricType_COUNTER:
-            if strings.HasSuffix(name, "_total") {
-                compliantName = compliantName + "_total"
-            }
             if metric.Counter == nil {
                 return written, fmt.Errorf(
                     "expected counter in metric %s %s", compliantName, metric,


@@ -75,7 +75,12 @@ func (a *Alert) ResolvedAt(ts time.Time) bool {
 // Status returns the status of the alert.
 func (a *Alert) Status() AlertStatus {
-    if a.Resolved() {
+    return a.StatusAt(time.Now())
+}
+
+// StatusAt returns the status of the alert at the given timestamp.
+func (a *Alert) StatusAt(ts time.Time) AlertStatus {
+    if a.ResolvedAt(ts) {
         return AlertResolved
     }
     return AlertFiring
@@ -127,6 +132,17 @@ func (as Alerts) HasFiring() bool {
     return false
 }

+// HasFiringAt returns true iff one of the alerts is not resolved
+// at the time ts.
+func (as Alerts) HasFiringAt(ts time.Time) bool {
+    for _, a := range as {
+        if !a.ResolvedAt(ts) {
+            return true
+        }
+    }
+    return false
+}
+
 // Status returns StatusFiring iff at least one of the alerts is firing.
 func (as Alerts) Status() AlertStatus {
     if as.HasFiring() {
@@ -134,3 +150,12 @@ func (as Alerts) Status() AlertStatus {
     }
     return AlertResolved
 }
+
+// StatusAt returns StatusFiring iff at least one of the alerts is firing
+// at the time ts.
+func (as Alerts) StatusAt(ts time.Time) AlertStatus {
+    if as.HasFiringAt(ts) {
+        return AlertFiring
+    }
+    return AlertResolved
+}
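A sketch of the timestamp-aware variants added here, assuming the prometheus/common/model package; the alert values are made up:

package main

import (
    "fmt"
    "time"

    "github.com/prometheus/common/model"
)

func main() {
    a := &model.Alert{
        Labels:   model.LabelSet{"alertname": "HighLoad"},
        StartsAt: time.Now().Add(-2 * time.Hour),
        EndsAt:   time.Now().Add(-10 * time.Minute), // resolved ten minutes ago
    }
    fmt.Println(a.Status())                             // evaluated at time.Now(): resolved
    fmt.Println(a.StatusAt(time.Now().Add(-time.Hour))) // still firing at that instant

    as := model.Alerts{a}
    fmt.Println(as.StatusAt(time.Now().Add(-time.Hour))) // firing iff any alert fires at ts
}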


@@ -14,12 +14,9 @@
 package model

 import (
-    "bytes"
     "encoding/json"
     "fmt"
-    "slices"
     "sort"
-    "strconv"
 )

 // A LabelSet is a collection of LabelName and LabelValue pairs.  The LabelSet
@@ -131,29 +128,6 @@ func (l LabelSet) Merge(other LabelSet) LabelSet {
     return result
 }

-// String will look like `{foo="bar", more="less"}`. Names are sorted alphabetically.
-func (l LabelSet) String() string {
-    var lna [32]LabelName // On stack to avoid memory allocation for sorting names.
-    labelNames := lna[:0]
-    for name := range l {
-        labelNames = append(labelNames, name)
-    }
-    slices.Sort(labelNames)
-    var bytea [1024]byte // On stack to avoid memory allocation while building the output.
-    b := bytes.NewBuffer(bytea[:0])
-    b.WriteByte('{')
-    for i, name := range labelNames {
-        if i > 0 {
-            b.WriteString(", ")
-        }
-        b.WriteString(string(name))
-        b.WriteByte('=')
-        b.Write(strconv.AppendQuote(b.AvailableBuffer(), string(l[name])))
-    }
-    b.WriteByte('}')
-    return b.String()
-}
-
 // Fingerprint returns the LabelSet's fingerprint.
 func (ls LabelSet) Fingerprint() Fingerprint {
     return labelSetToFingerprint(ls)


@@ -0,0 +1,45 @@
+// Copyright 2024 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build go1.21
+
+package model
+
+import (
+    "bytes"
+    "slices"
+    "strconv"
+)
+
+// String will look like `{foo="bar", more="less"}`. Names are sorted alphabetically.
+func (l LabelSet) String() string {
+    var lna [32]string // On stack to avoid memory allocation for sorting names.
+    labelNames := lna[:0]
+    for name := range l {
+        labelNames = append(labelNames, string(name))
+    }
+    slices.Sort(labelNames)
+    var bytea [1024]byte // On stack to avoid memory allocation while building the output.
+    b := bytes.NewBuffer(bytea[:0])
+    b.WriteByte('{')
+    for i, name := range labelNames {
+        if i > 0 {
+            b.WriteString(", ")
+        }
+        b.WriteString(name)
+        b.WriteByte('=')
+        b.Write(strconv.AppendQuote(b.AvailableBuffer(), string(l[LabelName(name)])))
+    }
+    b.WriteByte('}')
+    return b.String()
+}


@@ -0,0 +1,39 @@
+// Copyright 2024 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !go1.21
+
+package model
+
+import (
+    "fmt"
+    "sort"
+    "strings"
+)
+
+// String was optimized using functions not available for go 1.20
+// or lower. We keep the old implementation for compatibility with client_golang.
+// Once client golang drops support for go 1.20 (scheduled for August 2024), this
+// file can be removed.
+func (l LabelSet) String() string {
+    labelNames := make([]string, 0, len(l))
+    for name := range l {
+        labelNames = append(labelNames, string(name))
+    }
+    sort.Strings(labelNames)
+    lstrs := make([]string, 0, len(l))
+    for _, name := range labelNames {
+        lstrs = append(lstrs, fmt.Sprintf("%s=%q", name, l[LabelName(name)]))
+    }
+    return fmt.Sprintf("{%s}", strings.Join(lstrs, ", "))
+}
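Both build-tagged files produce the same output; a small sketch of the stringer in use (import path assumed from go.mod):

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

func main() {
    ls := model.LabelSet{"more": "less", "foo": "bar"}
    fmt.Println(ls) // {foo="bar", more="less"}; names are sorted alphabetically
}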


@@ -1,2 +1,3 @@
 * Johannes 'fish' Ziemke <github@freigeist.org> @discordianfish
-* Paul Gier <pgier@redhat.com> @pgier
+* Paul Gier <paulgier@gmail.com> @pgier
+* Ben Kochie <superq@gmail.com> @SuperQ


@@ -49,19 +49,19 @@ endif
 GOTEST := $(GO) test
 GOTEST_DIR :=
 ifneq ($(CIRCLE_JOB),)
-ifneq ($(shell command -v gotestsum > /dev/null),)
+ifneq ($(shell command -v gotestsum 2> /dev/null),)
     GOTEST_DIR := test-results
     GOTEST := gotestsum --junitfile $(GOTEST_DIR)/unit-tests.xml --
 endif
 endif

-PROMU_VERSION ?= 0.15.0
+PROMU_VERSION ?= 0.17.0
 PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz

 SKIP_GOLANGCI_LINT :=
 GOLANGCI_LINT :=
 GOLANGCI_LINT_OPTS ?=
-GOLANGCI_LINT_VERSION ?= v1.55.2
+GOLANGCI_LINT_VERSION ?= v1.56.2
 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64.
 # windows isn't included here because of the path separator being different.
 ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin))
@@ -182,7 +182,7 @@ endif
 .PHONY: common-yamllint
 common-yamllint:
     @echo ">> running yamllint on all YAML files in the repository"
-ifeq (, $(shell command -v yamllint > /dev/null))
+ifeq (, $(shell command -v yamllint 2> /dev/null))
     @echo "yamllint not installed so skipping"
 else
     yamllint .
@@ -208,6 +208,10 @@ common-tarball: promu
     @echo ">> building release tarball"
     $(PROMU) tarball --prefix $(PREFIX) $(BIN_DIR)

+.PHONY: common-docker-repo-name
+common-docker-repo-name:
+    @echo "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)"
+
 .PHONY: common-docker $(BUILD_DOCKER_ARCHS)
 common-docker: $(BUILD_DOCKER_ARCHS)
 $(BUILD_DOCKER_ARCHS): common-docker-%:


@@ -55,7 +55,7 @@ type ARPEntry struct {
 func (fs FS) GatherARPEntries() ([]ARPEntry, error) {
     data, err := os.ReadFile(fs.proc.Path("net/arp"))
     if err != nil {
-        return nil, fmt.Errorf("%s: error reading arp %s: %w", ErrFileRead, fs.proc.Path("net/arp"), err)
+        return nil, fmt.Errorf("%w: error reading arp %s: %w", ErrFileRead, fs.proc.Path("net/arp"), err)
     }

     return parseARPEntries(data)
@@ -78,11 +78,11 @@ func parseARPEntries(data []byte) ([]ARPEntry, error) {
         } else if width == expectedDataWidth {
             entry, err := parseARPEntry(columns)
             if err != nil {
-                return []ARPEntry{}, fmt.Errorf("%s: Failed to parse ARP entry: %v: %w", ErrFileParse, entry, err)
+                return []ARPEntry{}, fmt.Errorf("%w: Failed to parse ARP entry: %v: %w", ErrFileParse, entry, err)
             }
             entries = append(entries, entry)
         } else {
-            return []ARPEntry{}, fmt.Errorf("%s: %d columns found, but expected %d: %w", ErrFileParse, width, expectedDataWidth, err)
+            return []ARPEntry{}, fmt.Errorf("%w: %d columns found, but expected %d: %w", ErrFileParse, width, expectedDataWidth, err)
         }
     }
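The %s to %w switch repeated through the procfs files below is what lets callers match the exported sentinels; a sketch, assuming ErrFileParse and ErrFileRead are exported from the procfs package:

package main

import (
    "errors"
    "fmt"

    "github.com/prometheus/procfs"
)

func main() {
    fs, err := procfs.NewFS("/proc")
    if err != nil {
        fmt.Println(err)
        return
    }
    _, err = fs.GatherARPEntries()
    // With %w the sentinel stays in the error chain, so errors.Is can see it.
    if errors.Is(err, procfs.ErrFileRead) || errors.Is(err, procfs.ErrFileParse) {
        fmt.Println("arp table unreadable or malformed:", err)
    }
}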


@@ -58,8 +58,8 @@ func parseBuddyInfo(r io.Reader) ([]BuddyInfo, error) {
             return nil, fmt.Errorf("%w: Invalid number of fields, found: %v", ErrFileParse, parts)
         }

-        node := strings.TrimRight(parts[1], ",")
-        zone := strings.TrimRight(parts[3], ",")
+        node := strings.TrimSuffix(parts[1], ",")
+        zone := strings.TrimSuffix(parts[3], ",")
         arraySize := len(parts[4:])

         if bucketCount == -1 {
@@ -74,7 +74,7 @@ func parseBuddyInfo(r io.Reader) ([]BuddyInfo, error) {
         for i := 0; i < arraySize; i++ {
             sizes[i], err = strconv.ParseFloat(parts[i+4], 64)
             if err != nil {
-                return nil, fmt.Errorf("%s: Invalid valid in buddyinfo: %f: %w", ErrFileParse, sizes[i], err)
+                return nil, fmt.Errorf("%w: Invalid valid in buddyinfo: %f: %w", ErrFileParse, sizes[i], err)
             }
         }


@@ -194,7 +194,7 @@ func parseCPUInfoARM(info []byte) ([]CPUInfo, error) {
     firstLine := firstNonEmptyLine(scanner)
     match, err := regexp.MatchString("^[Pp]rocessor", firstLine)
     if !match || !strings.Contains(firstLine, ":") {
-        return nil, fmt.Errorf("%s: Cannot parse line: %q: %w", ErrFileParse, firstLine, err)
+        return nil, fmt.Errorf("%w: Cannot parse line: %q: %w", ErrFileParse, firstLine, err)
     }
     field := strings.SplitN(firstLine, ": ", 2)
@@ -386,7 +386,7 @@ func parseCPUInfoLoong(info []byte) ([]CPUInfo, error) {
     // find the first "processor" line
     firstLine := firstNonEmptyLine(scanner)
     if !strings.HasPrefix(firstLine, "system type") || !strings.Contains(firstLine, ":") {
-        return nil, errors.New("invalid cpuinfo file: " + firstLine)
+        return nil, fmt.Errorf("%w: %q", ErrFileParse, firstLine)
     }
     field := strings.SplitN(firstLine, ": ", 2)
     cpuinfo := []CPUInfo{}


@@ -55,13 +55,13 @@ func (fs FS) Crypto() ([]Crypto, error) {
     path := fs.proc.Path("crypto")
     b, err := util.ReadFileNoStat(path)
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot read file %v: %w", ErrFileRead, b, err)
+        return nil, fmt.Errorf("%w: Cannot read file %v: %w", ErrFileRead, b, err)
     }

     crypto, err := parseCrypto(bytes.NewReader(b))
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, crypto, err)
+        return nil, fmt.Errorf("%w: Cannot parse %v: %w", ErrFileParse, crypto, err)
     }

     return crypto, nil


@@ -236,7 +236,7 @@ func (fs FS) Fscacheinfo() (Fscacheinfo, error) {
     m, err := parseFscacheinfo(bytes.NewReader(b))
     if err != nil {
-        return Fscacheinfo{}, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, m, err)
+        return Fscacheinfo{}, fmt.Errorf("%w: Cannot parse %v: %w", ErrFileParse, m, err)
     }

     return *m, nil
@@ -245,7 +245,7 @@ func (fs FS) Fscacheinfo() (Fscacheinfo, error) {
 func setFSCacheFields(fields []string, setFields ...*uint64) error {
     var err error
     if len(fields) < len(setFields) {
-        return fmt.Errorf("%s: Expected %d, but got %d: %w", ErrFileParse, len(setFields), len(fields), err)
+        return fmt.Errorf("%w: Expected %d, but got %d: %w", ErrFileParse, len(setFields), len(fields), err)
     }

     for i := range setFields {


@@ -221,16 +221,16 @@ func parseIPPort(s string) (net.IP, uint16, error) {
     case 46:
         ip = net.ParseIP(s[1:40])
         if ip == nil {
-            return nil, 0, fmt.Errorf("%s: Invalid IPv6 addr %s: %w", ErrFileParse, s[1:40], err)
+            return nil, 0, fmt.Errorf("%w: Invalid IPv6 addr %s: %w", ErrFileParse, s[1:40], err)
         }
     default:
-        return nil, 0, fmt.Errorf("%s: Unexpected IP:Port %s: %w", ErrFileParse, s, err)
+        return nil, 0, fmt.Errorf("%w: Unexpected IP:Port %s: %w", ErrFileParse, s, err)
     }

     portString := s[len(s)-4:]
     if len(portString) != 4 {
         return nil, 0,
-            fmt.Errorf("%s: Unexpected port string format %s: %w", ErrFileParse, portString, err)
+            fmt.Errorf("%w: Unexpected port string format %s: %w", ErrFileParse, portString, err)
     }
     port, err := strconv.ParseUint(portString, 16, 16)
     if err != nil {


@@ -51,7 +51,7 @@ func parseLoad(loadavgBytes []byte) (*LoadAvg, error) {
     for i, load := range parts[0:3] {
         loads[i], err = strconv.ParseFloat(load, 64)
         if err != nil {
-            return nil, fmt.Errorf("%s: Cannot parse load: %f: %w", ErrFileParse, loads[i], err)
+            return nil, fmt.Errorf("%w: Cannot parse load: %f: %w", ErrFileParse, loads[i], err)
         }
     }
     return &LoadAvg{


@@ -23,7 +23,7 @@ import (
 var (
     statusLineRE         = regexp.MustCompile(`(\d+) blocks .*\[(\d+)/(\d+)\] \[([U_]+)\]`)
-    recoveryLineBlocksRE = regexp.MustCompile(`\((\d+)/\d+\)`)
+    recoveryLineBlocksRE = regexp.MustCompile(`\((\d+/\d+)\)`)
     recoveryLinePctRE    = regexp.MustCompile(`= (.+)%`)
     recoveryLineFinishRE = regexp.MustCompile(`finish=(.+)min`)
     recoveryLineSpeedRE  = regexp.MustCompile(`speed=(.+)[A-Z]`)
@@ -50,6 +50,8 @@ type MDStat struct {
     BlocksTotal int64
     // Number of blocks on the device that are in sync.
     BlocksSynced int64
+    // Number of blocks on the device that need to be synced.
+    BlocksToBeSynced int64
     // progress percentage of current sync
     BlocksSyncedPct float64
     // estimated finishing time for current sync (in minutes)
@@ -70,7 +72,7 @@ func (fs FS) MDStat() ([]MDStat, error) {
     }
     mdstat, err := parseMDStat(data)
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, fs.proc.Path("mdstat"), err)
+        return nil, fmt.Errorf("%w: Cannot parse %v: %w", ErrFileParse, fs.proc.Path("mdstat"), err)
     }
     return mdstat, nil
 }
@@ -90,7 +92,7 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
         deviceFields := strings.Fields(line)
         if len(deviceFields) < 3 {
-            return nil, fmt.Errorf("%s: Expected 3+ lines, got %q", ErrFileParse, line)
+            return nil, fmt.Errorf("%w: Expected 3+ lines, got %q", ErrFileParse, line)
         }
         mdName := deviceFields[0] // mdx
         state := deviceFields[2]  // active or inactive
@@ -105,7 +107,7 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
         active, total, down, size, err := evalStatusLine(lines[i], lines[i+1])
         if err != nil {
-            return nil, fmt.Errorf("%s: Cannot parse md device lines: %v: %w", ErrFileParse, active, err)
+            return nil, fmt.Errorf("%w: Cannot parse md device lines: %v: %w", ErrFileParse, active, err)
         }

         syncLineIdx := i + 2
@@ -115,7 +117,8 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
         // If device is syncing at the moment, get the number of currently
         // synced bytes, otherwise that number equals the size of the device.
-        syncedBlocks := size
+        blocksSynced := size
+        blocksToBeSynced := size
         speed := float64(0)
         finish := float64(0)
         pct := float64(0)
@@ -136,11 +139,11 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
             // Handle case when resync=PENDING or resync=DELAYED.
             if strings.Contains(lines[syncLineIdx], "PENDING") ||
                 strings.Contains(lines[syncLineIdx], "DELAYED") {
-                syncedBlocks = 0
+                blocksSynced = 0
             } else {
-                syncedBlocks, pct, finish, speed, err = evalRecoveryLine(lines[syncLineIdx])
+                blocksSynced, blocksToBeSynced, pct, finish, speed, err = evalRecoveryLine(lines[syncLineIdx])
                 if err != nil {
-                    return nil, fmt.Errorf("%s: Cannot parse sync line in md device: %q: %w", ErrFileParse, mdName, err)
+                    return nil, fmt.Errorf("%w: Cannot parse sync line in md device: %q: %w", ErrFileParse, mdName, err)
                 }
             }
         }
@@ -154,7 +157,8 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
             DisksSpare:             spare,
             DisksTotal:             total,
             BlocksTotal:            size,
-            BlocksSynced:           syncedBlocks,
+            BlocksSynced:           blocksSynced,
+            BlocksToBeSynced:       blocksToBeSynced,
             BlocksSyncedPct:        pct,
             BlocksSyncedFinishTime: finish,
             BlocksSyncedSpeed:      speed,
@@ -168,13 +172,13 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
 func evalStatusLine(deviceLine, statusLine string) (active, total, down, size int64, err error) {
     statusFields := strings.Fields(statusLine)
     if len(statusFields) < 1 {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
+        return 0, 0, 0, 0, fmt.Errorf("%w: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
     }

     sizeStr := statusFields[0]
     size, err = strconv.ParseInt(sizeStr, 10, 64)
     if err != nil {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
+        return 0, 0, 0, 0, fmt.Errorf("%w: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
     }

     if strings.Contains(deviceLine, "raid0") || strings.Contains(deviceLine, "linear") {
@@ -189,65 +193,71 @@ func evalStatusLine(deviceLine, statusLine string) (active, total, down, size in
     matches := statusLineRE.FindStringSubmatch(statusLine)
     if len(matches) != 5 {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Could not fild all substring matches %s: %w", ErrFileParse, statusLine, err)
+        return 0, 0, 0, 0, fmt.Errorf("%w: Could not fild all substring matches %s: %w", ErrFileParse, statusLine, err)
     }

     total, err = strconv.ParseInt(matches[2], 10, 64)
     if err != nil {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
+        return 0, 0, 0, 0, fmt.Errorf("%w: Unexpected statusline %q: %w", ErrFileParse, statusLine, err)
     }

     active, err = strconv.ParseInt(matches[3], 10, 64)
     if err != nil {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected active %d: %w", ErrFileParse, active, err)
+        return 0, 0, 0, 0, fmt.Errorf("%w: Unexpected active %d: %w", ErrFileParse, active, err)
     }
     down = int64(strings.Count(matches[4], "_"))

     return active, total, down, size, nil
 }

-func evalRecoveryLine(recoveryLine string) (syncedBlocks int64, pct float64, finish float64, speed float64, err error) {
+func evalRecoveryLine(recoveryLine string) (blocksSynced int64, blocksToBeSynced int64, pct float64, finish float64, speed float64, err error) {
     matches := recoveryLineBlocksRE.FindStringSubmatch(recoveryLine)
     if len(matches) != 2 {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected recoveryLine %s: %w", ErrFileParse, recoveryLine, err)
+        return 0, 0, 0, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine blocks %s: %w", ErrFileParse, recoveryLine, err)
     }

-    syncedBlocks, err = strconv.ParseInt(matches[1], 10, 64)
+    blocks := strings.Split(matches[1], "/")
+    blocksSynced, err = strconv.ParseInt(blocks[0], 10, 64)
     if err != nil {
-        return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected parsing of recoveryLine %q: %w", ErrFileParse, recoveryLine, err)
+        return 0, 0, 0, 0, 0, fmt.Errorf("%w: Unable to parse recovery blocks synced %q: %w", ErrFileParse, matches[1], err)
+    }
+
+    blocksToBeSynced, err = strconv.ParseInt(blocks[1], 10, 64)
+    if err != nil {
+        return blocksSynced, 0, 0, 0, 0, fmt.Errorf("%w: Unable to parse recovery to be synced blocks %q: %w", ErrFileParse, matches[2], err)
     }

     // Get percentage complete
     matches = recoveryLinePctRE.FindStringSubmatch(recoveryLine)
     if len(matches) != 2 {
-        return syncedBlocks, 0, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching percentage %s", ErrFileParse, recoveryLine)
+        return blocksSynced, blocksToBeSynced, 0, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching percentage %s", ErrFileParse, recoveryLine)
     }
     pct, err = strconv.ParseFloat(strings.TrimSpace(matches[1]), 64)
     if err != nil {
-        return syncedBlocks, 0, 0, 0, fmt.Errorf("%w: Error parsing float from recoveryLine %q", ErrFileParse, recoveryLine)
+        return blocksSynced, blocksToBeSynced, 0, 0, 0, fmt.Errorf("%w: Error parsing float from recoveryLine %q", ErrFileParse, recoveryLine)
     }

     // Get time expected left to complete
     matches = recoveryLineFinishRE.FindStringSubmatch(recoveryLine)
     if len(matches) != 2 {
-        return syncedBlocks, pct, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching est. finish time: %s", ErrFileParse, recoveryLine)
+        return blocksSynced, blocksToBeSynced, pct, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching est. finish time: %s", ErrFileParse, recoveryLine)
     }
     finish, err = strconv.ParseFloat(matches[1], 64)
     if err != nil {
-        return syncedBlocks, pct, 0, 0, fmt.Errorf("%w: Unable to parse float from recoveryLine: %q", ErrFileParse, recoveryLine)
+        return blocksSynced, blocksToBeSynced, pct, 0, 0, fmt.Errorf("%w: Unable to parse float from recoveryLine: %q", ErrFileParse, recoveryLine)
     }

     // Get recovery speed
     matches = recoveryLineSpeedRE.FindStringSubmatch(recoveryLine)
     if len(matches) != 2 {
-        return syncedBlocks, pct, finish, 0, fmt.Errorf("%w: Unexpected recoveryLine value: %s", ErrFileParse, recoveryLine)
+        return blocksSynced, blocksToBeSynced, pct, finish, 0, fmt.Errorf("%w: Unexpected recoveryLine value: %s", ErrFileParse, recoveryLine)
     }
     speed, err = strconv.ParseFloat(matches[1], 64)
     if err != nil {
-        return syncedBlocks, pct, finish, 0, fmt.Errorf("%s: Error parsing float from recoveryLine: %q: %w", ErrFileParse, recoveryLine, err)
+        return blocksSynced, blocksToBeSynced, pct, finish, 0, fmt.Errorf("%w: Error parsing float from recoveryLine: %q: %w", ErrFileParse, recoveryLine, err)
     }

-    return syncedBlocks, pct, finish, speed, nil
+    return blocksSynced, blocksToBeSynced, pct, finish, speed, nil
 }

 func evalComponentDevices(deviceFields []string) []string {
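A sketch of reading the new BlocksToBeSynced field, assuming the usual procfs.NewFS entry point and an MDStat.Name field:

package main

import (
    "fmt"

    "github.com/prometheus/procfs"
)

func main() {
    fs, err := procfs.NewFS("/proc")
    if err != nil {
        fmt.Println(err)
        return
    }
    stats, err := fs.MDStat()
    if err != nil {
        fmt.Println(err)
        return
    }
    for _, md := range stats {
        // BlocksToBeSynced defaults to the device size and only differs while a resync runs.
        fmt.Printf("%s: %d/%d blocks synced (%.1f%%)\n",
            md.Name, md.BlocksSynced, md.BlocksToBeSynced, md.BlocksSyncedPct)
    }
}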


@@ -202,7 +202,7 @@ func (fs FS) Meminfo() (Meminfo, error) {
     m, err := parseMemInfo(bytes.NewReader(b))
     if err != nil {
-        return Meminfo{}, fmt.Errorf("%s: %w", ErrFileParse, err)
+        return Meminfo{}, fmt.Errorf("%w: %w", ErrFileParse, err)
     }

     return *m, nil


@@ -109,7 +109,7 @@ func parseMountInfoString(mountString string) (*MountInfo, error) {
     if mountInfo[6] != "" {
         mount.OptionalFields, err = mountOptionsParseOptionalFields(mountInfo[6 : mountInfoLength-4])
         if err != nil {
-            return nil, fmt.Errorf("%s: %w", ErrFileParse, err)
+            return nil, fmt.Errorf("%w: %w", ErrFileParse, err)
         }
     }
     return mount, nil


@@ -88,7 +88,7 @@ type MountStatsNFS struct {
     // Statistics broken down by filesystem operation.
     Operations []NFSOperationStats
     // Statistics about the NFS RPC transport.
-    Transport NFSTransportStats
+    Transport []NFSTransportStats
 }

 // mountStats implements MountStats.
@@ -194,8 +194,6 @@ type NFSOperationStats struct {
     CumulativeTotalResponseMilliseconds uint64
     // Duration from when a request was enqueued to when it was completely handled.
     CumulativeTotalRequestMilliseconds uint64
-    // The average time from the point the client sends RPC requests until it receives the response.
-    AverageRTTMilliseconds float64
     // The count of operations that complete with tk_status < 0. These statuses usually indicate error conditions.
     Errors uint64
 }
@@ -434,7 +432,7 @@ func parseMountStatsNFS(s *bufio.Scanner, statVersion string) (*MountStatsNFS, e
             return nil, err
         }

-        stats.Transport = *tstats
+        stats.Transport = append(stats.Transport, *tstats)
     }

     // When encountering "per-operation statistics", we must break this
@@ -582,9 +580,6 @@ func parseNFSOperationStats(s *bufio.Scanner) ([]NFSOperationStats, error) {
             CumulativeTotalResponseMilliseconds: ns[6],
             CumulativeTotalRequestMilliseconds:  ns[7],
         }
-        if ns[0] != 0 {
-            opStats.AverageRTTMilliseconds = float64(ns[6]) / float64(ns[0])
-        }

         if len(ns) > 8 {
             opStats.Errors = ns[8]
@@ -632,7 +627,7 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats
             return nil, fmt.Errorf("%w: invalid NFS transport stats 1.1 statement: %v, protocol: %v", ErrFileParse, ss, protocol)
         }
     default:
-        return nil, fmt.Errorf("%s: Unrecognized NFS transport stats version: %q, protocol: %v", ErrFileParse, statVersion, protocol)
+        return nil, fmt.Errorf("%w: Unrecognized NFS transport stats version: %q, protocol: %v", ErrFileParse, statVersion, protocol)
     }

     // Allocate enough for v1.1 stats since zero value for v1.1 stats will be okay
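Since AverageRTTMilliseconds is gone, callers can derive it themselves; a sketch that assumes NFSOperationStats still carries a Requests count (ns[0] in the parser above):

package main

import (
    "fmt"

    "github.com/prometheus/procfs"
)

// avgRTT reproduces the removed field on the caller side:
// cumulative response time divided by the number of requests.
func avgRTT(op procfs.NFSOperationStats) float64 {
    if op.Requests == 0 {
        return 0
    }
    return float64(op.CumulativeTotalResponseMilliseconds) / float64(op.Requests)
}

func main() {
    fmt.Println(avgRTT(procfs.NFSOperationStats{Requests: 4, CumulativeTotalResponseMilliseconds: 20}))
}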


@@ -58,7 +58,7 @@ func readConntrackStat(path string) ([]ConntrackStatEntry, error) {
     stat, err := parseConntrackStat(bytes.NewReader(b))
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, path, err)
+        return nil, fmt.Errorf("%w: Cannot read file: %v: %w", ErrFileRead, path, err)
     }

     return stat, nil
@@ -86,7 +86,7 @@ func parseConntrackStat(r io.Reader) ([]ConntrackStatEntry, error) {
 func parseConntrackStatEntry(fields []string) (*ConntrackStatEntry, error) {
     entries, err := util.ParseHexUint64s(fields)
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse entry: %d: %w", ErrFileParse, entries, err)
+        return nil, fmt.Errorf("%w: Cannot parse entry: %d: %w", ErrFileParse, entries, err)
     }
     numEntries := len(entries)
     if numEntries < 16 || numEntries > 17 {


@@ -141,7 +141,7 @@ func parseIP(hexIP string) (net.IP, error) {
     var byteIP []byte
     byteIP, err := hex.DecodeString(hexIP)
     if err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse socket field in %q: %w", ErrFileParse, hexIP, err)
+        return nil, fmt.Errorf("%w: Cannot parse socket field in %q: %w", ErrFileParse, hexIP, err)
     }
     switch len(byteIP) {
     case 4:
@@ -155,7 +155,7 @@ func parseIP(hexIP string) (net.IP, error) {
         }
         return i, nil
     default:
-        return nil, fmt.Errorf("%s: Unable to parse IP %s: %w", ErrFileParse, hexIP, nil)
+        return nil, fmt.Errorf("%w: Unable to parse IP %s: %v", ErrFileParse, hexIP, nil)
     }
 }
@@ -178,7 +178,7 @@ func parseNetIPSocketLine(fields []string, isUDP bool) (*netIPSocketLine, error)
     }
     if line.Sl, err = strconv.ParseUint(s[0], 0, 64); err != nil {
-        return nil, fmt.Errorf("%s: Unable to parse sl field in %q: %w", ErrFileParse, line.Sl, err)
+        return nil, fmt.Errorf("%w: Unable to parse sl field in %q: %w", ErrFileParse, line.Sl, err)
     }
     // local_address
     l := strings.Split(fields[1], ":")
@@ -189,7 +189,7 @@ func parseNetIPSocketLine(fields []string, isUDP bool) (*netIPSocketLine, error)
         return nil, err
     }
     if line.LocalPort, err = strconv.ParseUint(l[1], 16, 64); err != nil {
-        return nil, fmt.Errorf("%s: Unable to parse local_address port value line %q: %w", ErrFileParse, line.LocalPort, err)
+        return nil, fmt.Errorf("%w: Unable to parse local_address port value line %q: %w", ErrFileParse, line.LocalPort, err)
     }

     // remote_address
@@ -201,12 +201,12 @@ func parseNetIPSocketLine(fields []string, isUDP bool) (*netIPSocketLine, error)
         return nil, err
     }
     if line.RemPort, err = strconv.ParseUint(r[1], 16, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse rem_address port value in %q: %w", ErrFileParse, line.RemPort, err)
+        return nil, fmt.Errorf("%w: Cannot parse rem_address port value in %q: %w", ErrFileParse, line.RemPort, err)
     }

     // st
     if line.St, err = strconv.ParseUint(fields[3], 16, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse st value in %q: %w", ErrFileParse, line.St, err)
+        return nil, fmt.Errorf("%w: Cannot parse st value in %q: %w", ErrFileParse, line.St, err)
     }

     // tx_queue and rx_queue
@@ -219,27 +219,27 @@ func parseNetIPSocketLine(fields []string, isUDP bool) (*netIPSocketLine, error)
         )
     }
     if line.TxQueue, err = strconv.ParseUint(q[0], 16, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse tx_queue value in %q: %w", ErrFileParse, line.TxQueue, err)
+        return nil, fmt.Errorf("%w: Cannot parse tx_queue value in %q: %w", ErrFileParse, line.TxQueue, err)
     }
     if line.RxQueue, err = strconv.ParseUint(q[1], 16, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse trx_queue value in %q: %w", ErrFileParse, line.RxQueue, err)
+        return nil, fmt.Errorf("%w: Cannot parse trx_queue value in %q: %w", ErrFileParse, line.RxQueue, err)
     }
     // uid
     if line.UID, err = strconv.ParseUint(fields[7], 0, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse UID value in %q: %w", ErrFileParse, line.UID, err)
+        return nil, fmt.Errorf("%w: Cannot parse UID value in %q: %w", ErrFileParse, line.UID, err)
     }
     // inode
     if line.Inode, err = strconv.ParseUint(fields[9], 0, 64); err != nil {
-        return nil, fmt.Errorf("%s: Cannot parse inode value in %q: %w", ErrFileParse, line.Inode, err)
+        return nil, fmt.Errorf("%w: Cannot parse inode value in %q: %w", ErrFileParse, line.Inode, err)
     }
     // drops
     if isUDP {
         drops, err := strconv.ParseUint(fields[12], 0, 64)
         if err != nil {
-            return nil, fmt.Errorf("%s: Cannot parse drops value in %q: %w", ErrFileParse, drops, err)
+            return nil, fmt.Errorf("%w: Cannot parse drops value in %q: %w", ErrFileParse, drops, err)
         }
         line.Drops = &drops
     }


@@ -69,7 +69,7 @@ func readSockstat(name string) (*NetSockstat, error) {
     stat, err := parseSockstat(bytes.NewReader(b))
     if err != nil {
-        return nil, fmt.Errorf("%s: sockstats from %q: %w", ErrFileRead, name, err)
+        return nil, fmt.Errorf("%w: sockstats from %q: %w", ErrFileRead, name, err)
     }

     return stat, nil
@@ -89,7 +89,7 @@ func parseSockstat(r io.Reader) (*NetSockstat, error) {
         // The remaining fields are key/value pairs.
         kvs, err := parseSockstatKVs(fields[1:])
         if err != nil {
-            return nil, fmt.Errorf("%s: sockstat key/value pairs from %q: %w", ErrFileParse, s.Text(), err)
+            return nil, fmt.Errorf("%w: sockstat key/value pairs from %q: %w", ErrFileParse, s.Text(), err)
         }

         // The first field is the protocol. We must trim its colon suffix.


@@ -64,7 +64,7 @@ func (fs FS) NetSoftnetStat() ([]SoftnetStat, error) {
     entries, err := parseSoftnet(bytes.NewReader(b))
     if err != nil {
-        return nil, fmt.Errorf("%s: /proc/net/softnet_stat: %w", ErrFileParse, err)
+        return nil, fmt.Errorf("%w: /proc/net/softnet_stat: %w", ErrFileParse, err)
     }

     return entries, nil


@@ -108,14 +108,14 @@ func parseNetUNIX(r io.Reader) (*NetUNIX, error) {
         line := s.Text()
         item, err := nu.parseLine(line, hasInode, minFields)
         if err != nil {
-            return nil, fmt.Errorf("%s: /proc/net/unix encountered data %q: %w", ErrFileParse, line, err)
+            return nil, fmt.Errorf("%w: /proc/net/unix encountered data %q: %w", ErrFileParse, line, err)
         }

         nu.Rows = append(nu.Rows, item)
     }

     if err := s.Err(); err != nil {
-        return nil, fmt.Errorf("%s: /proc/net/unix encountered data: %w", ErrFileParse, err)
+        return nil, fmt.Errorf("%w: /proc/net/unix encountered data: %w", ErrFileParse, err)
     }

     return &nu, nil
@@ -136,29 +136,29 @@ func (u *NetUNIX) parseLine(line string, hasInode bool, min int) (*NetUNIXLine,
     users, err := u.parseUsers(fields[1])
     if err != nil {
-        return nil, fmt.Errorf("%s: ref count %q: %w", ErrFileParse, fields[1], err)
+        return nil, fmt.Errorf("%w: ref count %q: %w", ErrFileParse, fields[1], err)
     }

     flags, err := u.parseFlags(fields[3])
     if err != nil {
-        return nil, fmt.Errorf("%s: Unable to parse flags %q: %w", ErrFileParse, fields[3], err)
+        return nil, fmt.Errorf("%w: Unable to parse flags %q: %w", ErrFileParse, fields[3], err)
     }

     typ, err := u.parseType(fields[4])
     if err != nil {
-        return nil, fmt.Errorf("%s: Failed to parse type %q: %w", ErrFileParse, fields[4], err)
+        return nil, fmt.Errorf("%w: Failed to parse type %q: %w", ErrFileParse, fields[4], err)
     }

     state, err := u.parseState(fields[5])
     if err != nil {
-        return nil, fmt.Errorf("%s: Failed to parse state %q: %w", ErrFileParse, fields[5], err)
+        return nil, fmt.Errorf("%w: Failed to parse state %q: %w", ErrFileParse, fields[5], err)
     }

     var inode uint64
     if hasInode {
         inode, err = u.parseInode(fields[6])
         if err != nil {
-            return nil, fmt.Errorf("%s failed to parse inode %q: %w", ErrFileParse, fields[6], err)
+            return nil, fmt.Errorf("%w failed to parse inode %q: %w", ErrFileParse, fields[6], err)
         }
     }


@@ -68,7 +68,7 @@ func (fs FS) Wireless() ([]*Wireless, error) {
     m, err := parseWireless(bytes.NewReader(b))
     if err != nil {
-        return nil, fmt.Errorf("%s: wireless: %w", ErrFileParse, err)
+        return nil, fmt.Errorf("%w: wireless: %w", ErrFileParse, err)
     }

     return m, nil
@@ -114,47 +114,47 @@ func parseWireless(r io.Reader) ([]*Wireless, error) {
         qlink, err := strconv.Atoi(strings.TrimSuffix(stats[1], "."))
         if err != nil {
-            return nil, fmt.Errorf("%s: parse Quality:link as integer %q: %w", ErrFileParse, qlink, err)
+            return nil, fmt.Errorf("%w: parse Quality:link as integer %q: %w", ErrFileParse, qlink, err)
         }

         qlevel, err := strconv.Atoi(strings.TrimSuffix(stats[2], "."))
         if err != nil {
-            return nil, fmt.Errorf("%s: Quality:level as integer %q: %w", ErrFileParse, qlevel, err)
+            return nil, fmt.Errorf("%w: Quality:level as integer %q: %w", ErrFileParse, qlevel, err)
         }

         qnoise, err := strconv.Atoi(strings.TrimSuffix(stats[3], "."))
         if err != nil {
-            return nil, fmt.Errorf("%s: Quality:noise as integer %q: %w", ErrFileParse, qnoise, err)
+            return nil, fmt.Errorf("%w: Quality:noise as integer %q: %w", ErrFileParse, qnoise, err)
         }

         dnwid, err := strconv.Atoi(stats[4])
         if err != nil {
-            return nil, fmt.Errorf("%s: Discarded:nwid as integer %q: %w", ErrFileParse, dnwid, err)
+            return nil, fmt.Errorf("%w: Discarded:nwid as integer %q: %w", ErrFileParse, dnwid, err)
         }

         dcrypt, err := strconv.Atoi(stats[5])
         if err != nil {
-            return nil, fmt.Errorf("%s: Discarded:crypt as integer %q: %w", ErrFileParse, dcrypt, err)
+            return nil, fmt.Errorf("%w: Discarded:crypt as integer %q: %w", ErrFileParse, dcrypt, err)
         }

         dfrag, err := strconv.Atoi(stats[6])
         if err != nil {
-            return nil, fmt.Errorf("%s: Discarded:frag as integer %q: %w", ErrFileParse, dfrag, err)
+            return nil, fmt.Errorf("%w: Discarded:frag as integer %q: %w", ErrFileParse, dfrag, err)
         }

         dretry, err := strconv.Atoi(stats[7])
         if err != nil {
-            return nil, fmt.Errorf("%s: Discarded:retry as integer %q: %w", ErrFileParse, dretry, err)
+            return nil, fmt.Errorf("%w: Discarded:retry as integer %q: %w", ErrFileParse, dretry, err)
         }

         dmisc, err := strconv.Atoi(stats[8])
         if err != nil {
-            return nil, fmt.Errorf("%s: Discarded:misc as integer %q: %w", ErrFileParse, dmisc, err)
+            return nil, fmt.Errorf("%w: Discarded:misc as integer %q: %w", ErrFileParse, dmisc, err)
         }

         mbeacon, err := strconv.Atoi(stats[9])
         if err != nil {
-            return nil, fmt.Errorf("%s: Missed:beacon as integer %q: %w", ErrFileParse, mbeacon, err)
+            return nil, fmt.Errorf("%w: Missed:beacon as integer %q: %w", ErrFileParse, mbeacon, err)
         }

         w := &Wireless{
@@ -175,7 +175,7 @@ func parseWireless(r io.Reader) ([]*Wireless, error) {
     }

     if err := scanner.Err(); err != nil {
-        return nil, fmt.Errorf("%s: Failed to scan /proc/net/wireless: %w", ErrFileRead, err)
+        return nil, fmt.Errorf("%w: Failed to scan /proc/net/wireless: %w", ErrFileRead, err)
     }

     return interfaces, nil

View File

@@ -111,7 +111,7 @@ func (fs FS) AllProcs() (Procs, error) {
names, err := d.Readdirnames(-1) names, err := d.Readdirnames(-1)
if err != nil { if err != nil {
return Procs{}, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, names, err) return Procs{}, fmt.Errorf("%w: Cannot read file: %v: %w", ErrFileRead, names, err)
} }
p := Procs{} p := Procs{}
@@ -137,7 +137,7 @@ func (p Proc) CmdLine() ([]string, error) {
return []string{}, nil return []string{}, nil
} }
return strings.Split(string(bytes.TrimRight(data, string("\x00"))), string(byte(0))), nil return strings.Split(string(bytes.TrimRight(data, "\x00")), "\x00"), nil
} }
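The CmdLine change above only drops redundant string conversions; the split behaviour is identical. For illustration, a self-contained sketch of splitting a NUL-delimited /proc/<pid>/cmdline buffer (the byte slice is invented):

package main

import (
	"bytes"
	"fmt"
	"strings"
)

func main() {
	// /proc/<pid>/cmdline separates arguments with NUL bytes and usually ends with one.
	data := []byte("reproxy\x00--listen=:8080\x00--assets=/web\x00")
	args := strings.Split(string(bytes.TrimRight(data, "\x00")), "\x00")
	fmt.Println(args) // [reproxy --listen=:8080 --assets=/web]
}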
// Wchan returns the wchan (wait channel) of a process. // Wchan returns the wchan (wait channel) of a process.
@@ -212,7 +212,7 @@ func (p Proc) FileDescriptors() ([]uintptr, error) {
for i, n := range names { for i, n := range names {
fd, err := strconv.ParseInt(n, 10, 32) fd, err := strconv.ParseInt(n, 10, 32)
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: Cannot parse line: %v: %w", ErrFileParse, i, err) return nil, fmt.Errorf("%w: Cannot parse line: %v: %w", ErrFileParse, i, err)
} }
fds[i] = uintptr(fd) fds[i] = uintptr(fd)
} }
@@ -297,7 +297,7 @@ func (p Proc) fileDescriptors() ([]string, error) {
names, err := d.Readdirnames(-1) names, err := d.Readdirnames(-1)
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, names, err) return nil, fmt.Errorf("%w: Cannot read file: %v: %w", ErrFileRead, names, err)
} }
return names, nil return names, nil

View File

@@ -154,7 +154,7 @@ func parseUint(s string) (uint64, error) {
} }
i, err := strconv.ParseUint(s, 10, 64) i, err := strconv.ParseUint(s, 10, 64)
if err != nil { if err != nil {
return 0, fmt.Errorf("%s: couldn't parse value %q: %w", ErrFileParse, s, err) return 0, fmt.Errorf("%w: couldn't parse value %q: %w", ErrFileParse, s, err)
} }
return i, nil return i, nil
} }

View File

@@ -40,7 +40,7 @@ func (p Proc) Namespaces() (Namespaces, error) {
names, err := d.Readdirnames(-1) names, err := d.Readdirnames(-1)
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: failed to read contents of ns dir: %w", ErrFileRead, err) return nil, fmt.Errorf("%w: failed to read contents of ns dir: %w", ErrFileRead, err)
} }
ns := make(Namespaces, len(names)) ns := make(Namespaces, len(names))
@@ -58,7 +58,7 @@ func (p Proc) Namespaces() (Namespaces, error) {
typ := fields[0] typ := fields[0]
inode, err := strconv.ParseUint(strings.Trim(fields[1], "[]"), 10, 32) inode, err := strconv.ParseUint(strings.Trim(fields[1], "[]"), 10, 32)
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: inode from %q: %w", ErrFileParse, fields[1], err) return nil, fmt.Errorf("%w: inode from %q: %w", ErrFileParse, fields[1], err)
} }
ns[name] = Namespace{typ, uint32(inode)} ns[name] = Namespace{typ, uint32(inode)}

View File

@@ -61,7 +61,7 @@ type PSIStats struct {
func (fs FS) PSIStatsForResource(resource string) (PSIStats, error) { func (fs FS) PSIStatsForResource(resource string) (PSIStats, error) {
data, err := util.ReadFileNoStat(fs.proc.Path(fmt.Sprintf("%s/%s", "pressure", resource))) data, err := util.ReadFileNoStat(fs.proc.Path(fmt.Sprintf("%s/%s", "pressure", resource)))
if err != nil { if err != nil {
return PSIStats{}, fmt.Errorf("%s: psi_stats: unavailable for %q: %w", ErrFileRead, resource, err) return PSIStats{}, fmt.Errorf("%w: psi_stats: unavailable for %q: %w", ErrFileRead, resource, err)
} }
return parsePSIStats(bytes.NewReader(data)) return parsePSIStats(bytes.NewReader(data))

View File

@@ -127,7 +127,7 @@ func (s *ProcSMapsRollup) parseLine(line string) error {
} }
v := strings.TrimSpace(kv[1]) v := strings.TrimSpace(kv[1])
v = strings.TrimRight(v, " kB") v = strings.TrimSuffix(v, " kB")
vKBytes, err := strconv.ParseUint(v, 10, 64) vKBytes, err := strconv.ParseUint(v, 10, 64)
if err != nil { if err != nil {

View File

@@ -15,6 +15,7 @@ package procfs
import ( import (
"bytes" "bytes"
"math/bits"
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
@@ -76,9 +77,9 @@ type ProcStatus struct {
NonVoluntaryCtxtSwitches uint64 NonVoluntaryCtxtSwitches uint64
// UIDs of the process (Real, effective, saved set, and filesystem UIDs) // UIDs of the process (Real, effective, saved set, and filesystem UIDs)
UIDs [4]string UIDs [4]uint64
// GIDs of the process (Real, effective, saved set, and filesystem GIDs) // GIDs of the process (Real, effective, saved set, and filesystem GIDs)
GIDs [4]string GIDs [4]uint64
// CpusAllowedList: List of cpu cores processes are allowed to run on. // CpusAllowedList: List of cpu cores processes are allowed to run on.
CpusAllowedList []uint64 CpusAllowedList []uint64
@@ -113,22 +114,37 @@ func (p Proc) NewStatus() (ProcStatus, error) {
// convert kB to B // convert kB to B
vBytes := vKBytes * 1024 vBytes := vKBytes * 1024
s.fillStatus(k, v, vKBytes, vBytes) err = s.fillStatus(k, v, vKBytes, vBytes)
if err != nil {
return ProcStatus{}, err
}
} }
return s, nil return s, nil
} }
func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintBytes uint64) { func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintBytes uint64) error {
switch k { switch k {
case "Tgid": case "Tgid":
s.TGID = int(vUint) s.TGID = int(vUint)
case "Name": case "Name":
s.Name = vString s.Name = vString
case "Uid": case "Uid":
copy(s.UIDs[:], strings.Split(vString, "\t")) var err error
for i, v := range strings.Split(vString, "\t") {
s.UIDs[i], err = strconv.ParseUint(v, 10, bits.UintSize)
if err != nil {
return err
}
}
case "Gid": case "Gid":
copy(s.GIDs[:], strings.Split(vString, "\t")) var err error
for i, v := range strings.Split(vString, "\t") {
s.GIDs[i], err = strconv.ParseUint(v, 10, bits.UintSize)
if err != nil {
return err
}
}
case "NSpid": case "NSpid":
s.NSpids = calcNSPidsList(vString) s.NSpids = calcNSPidsList(vString)
case "VmPeak": case "VmPeak":
@@ -173,6 +189,7 @@ func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintByt
s.CpusAllowedList = calcCpusAllowedList(vString) s.CpusAllowedList = calcCpusAllowedList(vString)
} }
return nil
} }
// TotalCtxtSwitches returns the total context switch. // TotalCtxtSwitches returns the total context switch.
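UIDs and GIDs change from [4]string to [4]uint64, so the Uid:/Gid: lines of /proc/<pid>/status are now parsed with strconv.ParseUint and fillStatus propagates the parse error. A hedged sketch of just that parsing step, outside the ProcStatus plumbing:

package main

import (
	"fmt"
	"math/bits"
	"strconv"
	"strings"
)

func main() {
	// Typical "Uid:" payload: real, effective, saved set, and filesystem UID.
	vString := "1000\t1000\t1000\t1000"

	var uids [4]uint64
	for i, v := range strings.Split(vString, "\t") {
		u, err := strconv.ParseUint(v, 10, bits.UintSize)
		if err != nil {
			panic(err) // the library returns the error to NewStatus instead
		}
		uids[i] = u
	}
	fmt.Println(uids) // [1000 1000 1000 1000]
}

Callers that compared the old string values now receive numeric UIDs/GIDs, which is a breaking change for code consuming ProcStatus directly.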

View File

@@ -44,7 +44,7 @@ func (fs FS) SysctlInts(sysctl string) ([]int, error) {
vp := util.NewValueParser(f) vp := util.NewValueParser(f)
values[i] = vp.Int() values[i] = vp.Int()
if err := vp.Err(); err != nil { if err := vp.Err(); err != nil {
return nil, fmt.Errorf("%s: field %d in sysctl %s is not a valid int: %w", ErrFileParse, i, sysctl, err) return nil, fmt.Errorf("%w: field %d in sysctl %s is not a valid int: %w", ErrFileParse, i, sysctl, err)
} }
} }
return values, nil return values, nil

View File

@@ -74,7 +74,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.Hi = make([]uint64, len(perCPU)) softirqs.Hi = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.Hi[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.Hi[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (HI%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HI%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "TIMER:": case parts[0] == "TIMER:":
@@ -82,7 +82,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.Timer = make([]uint64, len(perCPU)) softirqs.Timer = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.Timer[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.Timer[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (TIMER%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TIMER%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "NET_TX:": case parts[0] == "NET_TX:":
@@ -90,7 +90,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.NetTx = make([]uint64, len(perCPU)) softirqs.NetTx = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.NetTx[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.NetTx[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (NET_TX%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_TX%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "NET_RX:": case parts[0] == "NET_RX:":
@@ -98,7 +98,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.NetRx = make([]uint64, len(perCPU)) softirqs.NetRx = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.NetRx[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.NetRx[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (NET_RX%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_RX%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "BLOCK:": case parts[0] == "BLOCK:":
@@ -106,7 +106,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.Block = make([]uint64, len(perCPU)) softirqs.Block = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.Block[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.Block[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (BLOCK%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (BLOCK%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "IRQ_POLL:": case parts[0] == "IRQ_POLL:":
@@ -114,7 +114,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.IRQPoll = make([]uint64, len(perCPU)) softirqs.IRQPoll = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.IRQPoll[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.IRQPoll[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (IRQ_POLL%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (IRQ_POLL%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "TASKLET:": case parts[0] == "TASKLET:":
@@ -122,7 +122,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.Tasklet = make([]uint64, len(perCPU)) softirqs.Tasklet = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.Tasklet[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.Tasklet[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (TASKLET%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TASKLET%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "SCHED:": case parts[0] == "SCHED:":
@@ -130,7 +130,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.Sched = make([]uint64, len(perCPU)) softirqs.Sched = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.Sched[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.Sched[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (SCHED%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (SCHED%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "HRTIMER:": case parts[0] == "HRTIMER:":
@@ -138,7 +138,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.HRTimer = make([]uint64, len(perCPU)) softirqs.HRTimer = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.HRTimer[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.HRTimer[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (HRTIMER%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HRTIMER%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "RCU:": case parts[0] == "RCU:":
@@ -146,14 +146,14 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) {
softirqs.RCU = make([]uint64, len(perCPU)) softirqs.RCU = make([]uint64, len(perCPU))
for i, count := range perCPU { for i, count := range perCPU {
if softirqs.RCU[i], err = strconv.ParseUint(count, 10, 64); err != nil { if softirqs.RCU[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (RCU%d): %w", ErrFileParse, count, i, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (RCU%d): %w", ErrFileParse, count, i, err)
} }
} }
} }
} }
if err := scanner.Err(); err != nil { if err := scanner.Err(); err != nil {
return Softirqs{}, fmt.Errorf("%s: couldn't parse softirqs: %w", ErrFileParse, err) return Softirqs{}, fmt.Errorf("%w: couldn't parse softirqs: %w", ErrFileParse, err)
} }
return softirqs, scanner.Err() return softirqs, scanner.Err()

View File

@@ -93,7 +93,7 @@ func parseCPUStat(line string) (CPUStat, int64, error) {
&cpuStat.Guest, &cpuStat.GuestNice) &cpuStat.Guest, &cpuStat.GuestNice)
if err != nil && err != io.EOF { if err != nil && err != io.EOF {
return CPUStat{}, -1, fmt.Errorf("%s: couldn't parse %q (cpu): %w", ErrFileParse, line, err) return CPUStat{}, -1, fmt.Errorf("%w: couldn't parse %q (cpu): %w", ErrFileParse, line, err)
} }
if count == 0 { if count == 0 {
return CPUStat{}, -1, fmt.Errorf("%w: couldn't parse %q (cpu): 0 elements parsed", ErrFileParse, line) return CPUStat{}, -1, fmt.Errorf("%w: couldn't parse %q (cpu): 0 elements parsed", ErrFileParse, line)
@@ -116,7 +116,7 @@ func parseCPUStat(line string) (CPUStat, int64, error) {
cpuID, err := strconv.ParseInt(cpu[3:], 10, 64) cpuID, err := strconv.ParseInt(cpu[3:], 10, 64)
if err != nil { if err != nil {
return CPUStat{}, -1, fmt.Errorf("%s: couldn't parse %q (cpu/cpuid): %w", ErrFileParse, line, err) return CPUStat{}, -1, fmt.Errorf("%w: couldn't parse %q (cpu/cpuid): %w", ErrFileParse, line, err)
} }
return cpuStat, cpuID, nil return cpuStat, cpuID, nil
@@ -136,7 +136,7 @@ func parseSoftIRQStat(line string) (SoftIRQStat, uint64, error) {
&softIRQStat.Hrtimer, &softIRQStat.Rcu) &softIRQStat.Hrtimer, &softIRQStat.Rcu)
if err != nil { if err != nil {
return SoftIRQStat{}, 0, fmt.Errorf("%s: couldn't parse %q (softirq): %w", ErrFileParse, line, err) return SoftIRQStat{}, 0, fmt.Errorf("%w: couldn't parse %q (softirq): %w", ErrFileParse, line, err)
} }
return softIRQStat, total, nil return softIRQStat, total, nil
@@ -201,34 +201,34 @@ func parseStat(r io.Reader, fileName string) (Stat, error) {
switch { switch {
case parts[0] == "btime": case parts[0] == "btime":
if stat.BootTime, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.BootTime, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (btime): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (btime): %w", ErrFileParse, parts[1], err)
} }
case parts[0] == "intr": case parts[0] == "intr":
if stat.IRQTotal, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.IRQTotal, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (intr): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (intr): %w", ErrFileParse, parts[1], err)
} }
numberedIRQs := parts[2:] numberedIRQs := parts[2:]
stat.IRQ = make([]uint64, len(numberedIRQs)) stat.IRQ = make([]uint64, len(numberedIRQs))
for i, count := range numberedIRQs { for i, count := range numberedIRQs {
if stat.IRQ[i], err = strconv.ParseUint(count, 10, 64); err != nil { if stat.IRQ[i], err = strconv.ParseUint(count, 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (intr%d): %w", ErrFileParse, count, i, err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (intr%d): %w", ErrFileParse, count, i, err)
} }
} }
case parts[0] == "ctxt": case parts[0] == "ctxt":
if stat.ContextSwitches, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.ContextSwitches, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (ctxt): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (ctxt): %w", ErrFileParse, parts[1], err)
} }
case parts[0] == "processes": case parts[0] == "processes":
if stat.ProcessCreated, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.ProcessCreated, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (processes): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (processes): %w", ErrFileParse, parts[1], err)
} }
case parts[0] == "procs_running": case parts[0] == "procs_running":
if stat.ProcessesRunning, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.ProcessesRunning, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (procs_running): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (procs_running): %w", ErrFileParse, parts[1], err)
} }
case parts[0] == "procs_blocked": case parts[0] == "procs_blocked":
if stat.ProcessesBlocked, err = strconv.ParseUint(parts[1], 10, 64); err != nil { if stat.ProcessesBlocked, err = strconv.ParseUint(parts[1], 10, 64); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q (procs_blocked): %w", ErrFileParse, parts[1], err) return Stat{}, fmt.Errorf("%w: couldn't parse %q (procs_blocked): %w", ErrFileParse, parts[1], err)
} }
case parts[0] == "softirq": case parts[0] == "softirq":
softIRQStats, total, err := parseSoftIRQStat(line) softIRQStats, total, err := parseSoftIRQStat(line)
@@ -251,7 +251,7 @@ func parseStat(r io.Reader, fileName string) (Stat, error) {
} }
if err := scanner.Err(); err != nil { if err := scanner.Err(); err != nil {
return Stat{}, fmt.Errorf("%s: couldn't parse %q: %w", ErrFileParse, fileName, err) return Stat{}, fmt.Errorf("%w: couldn't parse %q: %w", ErrFileParse, fileName, err)
} }
return stat, nil return stat, nil

View File

@@ -74,15 +74,15 @@ func parseSwapString(swapString string) (*Swap, error) {
swap.Size, err = strconv.Atoi(swapFields[2]) swap.Size, err = strconv.Atoi(swapFields[2])
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: invalid swap size: %s: %w", ErrFileParse, swapFields[2], err) return nil, fmt.Errorf("%w: invalid swap size: %s: %w", ErrFileParse, swapFields[2], err)
} }
swap.Used, err = strconv.Atoi(swapFields[3]) swap.Used, err = strconv.Atoi(swapFields[3])
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: invalid swap used: %s: %w", ErrFileParse, swapFields[3], err) return nil, fmt.Errorf("%w: invalid swap used: %s: %w", ErrFileParse, swapFields[3], err)
} }
swap.Priority, err = strconv.Atoi(swapFields[4]) swap.Priority, err = strconv.Atoi(swapFields[4])
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: invalid swap priority: %s: %w", ErrFileParse, swapFields[4], err) return nil, fmt.Errorf("%w: invalid swap priority: %s: %w", ErrFileParse, swapFields[4], err)
} }
return swap, nil return swap, nil

View File

@@ -45,7 +45,7 @@ func (fs FS) AllThreads(pid int) (Procs, error) {
names, err := d.Readdirnames(-1) names, err := d.Readdirnames(-1)
if err != nil { if err != nil {
return Procs{}, fmt.Errorf("%s: could not read %q: %w", ErrFileRead, d.Name(), err) return Procs{}, fmt.Errorf("%w: could not read %q: %w", ErrFileRead, d.Name(), err)
} }
t := Procs{} t := Procs{}

View File

@@ -75,11 +75,11 @@ var nodeZoneRE = regexp.MustCompile(`(\d+), zone\s+(\w+)`)
func (fs FS) Zoneinfo() ([]Zoneinfo, error) { func (fs FS) Zoneinfo() ([]Zoneinfo, error) {
data, err := os.ReadFile(fs.proc.Path("zoneinfo")) data, err := os.ReadFile(fs.proc.Path("zoneinfo"))
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: error reading zoneinfo %q: %w", ErrFileRead, fs.proc.Path("zoneinfo"), err) return nil, fmt.Errorf("%w: error reading zoneinfo %q: %w", ErrFileRead, fs.proc.Path("zoneinfo"), err)
} }
zoneinfo, err := parseZoneinfo(data) zoneinfo, err := parseZoneinfo(data)
if err != nil { if err != nil {
return nil, fmt.Errorf("%s: error parsing zoneinfo %q: %w", ErrFileParse, fs.proc.Path("zoneinfo"), err) return nil, fmt.Errorf("%w: error parsing zoneinfo %q: %w", ErrFileParse, fs.proc.Path("zoneinfo"), err)
} }
return zoneinfo, nil return zoneinfo, nil
} }

View File

@@ -27,15 +27,17 @@ const defaultIndent = " "
// Format formats the message as a multiline string. // Format formats the message as a multiline string.
// This function is only intended for human consumption and ignores errors. // This function is only intended for human consumption and ignores errors.
// Do not depend on the output being stable. It may change over time across // Do not depend on the output being stable. Its output will change across
// different versions of the program. // different builds of your program, even when using the same version of the
// protobuf module.
func Format(m proto.Message) string { func Format(m proto.Message) string {
return MarshalOptions{Multiline: true}.Format(m) return MarshalOptions{Multiline: true}.Format(m)
} }
// Marshal writes the given [proto.Message] in textproto format using default // Marshal writes the given [proto.Message] in textproto format using default
// options. Do not depend on the output being stable. It may change over time // options. Do not depend on the output being stable. Its output will change
// across different versions of the program. // across different builds of your program, even when using the same version of
// the protobuf module.
func Marshal(m proto.Message) ([]byte, error) { func Marshal(m proto.Message) ([]byte, error) {
return MarshalOptions{}.Marshal(m) return MarshalOptions{}.Marshal(m)
} }
@@ -84,8 +86,9 @@ type MarshalOptions struct {
// Format formats the message as a string. // Format formats the message as a string.
// This method is only intended for human consumption and ignores errors. // This method is only intended for human consumption and ignores errors.
// Do not depend on the output being stable. It may change over time across // Do not depend on the output being stable. Its output will change across
// different versions of the program. // different builds of your program, even when using the same version of the
// protobuf module.
func (o MarshalOptions) Format(m proto.Message) string { func (o MarshalOptions) Format(m proto.Message) string {
if m == nil || !m.ProtoReflect().IsValid() { if m == nil || !m.ProtoReflect().IsValid() {
return "<nil>" // invalid syntax, but okay since this is for debugging return "<nil>" // invalid syntax, but okay since this is for debugging
@@ -98,8 +101,9 @@ func (o MarshalOptions) Format(m proto.Message) string {
} }
// Marshal writes the given [proto.Message] in textproto format using options in // Marshal writes the given [proto.Message] in textproto format using options in
// MarshalOptions object. Do not depend on the output being stable. It may // MarshalOptions object. Do not depend on the output being stable. Its output
// change over time across different versions of the program. // will change across different builds of your program, even when using the
// same version of the protobuf module.
func (o MarshalOptions) Marshal(m proto.Message) ([]byte, error) { func (o MarshalOptions) Marshal(m proto.Message) ([]byte, error) {
return o.marshal(nil, m) return o.marshal(nil, m)
} }
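The reworded doc comments stress that textproto output is deliberately unstable across builds. A small usage sketch, with durationpb picked only as a convenient concrete message:

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	d := durationpb.New(90 * time.Second)
	// Fine for logs and debugging; never compare or parse this output byte-for-byte,
	// since the package injects non-deterministic whitespace between builds.
	fmt.Println(prototext.Format(d))
	out, _ := prototext.MarshalOptions{Multiline: true}.Marshal(d)
	fmt.Println(string(out))
}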

View File

@@ -252,6 +252,7 @@ func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool, record fu
{rv.MethodByName("Values"), "Values"}, {rv.MethodByName("Values"), "Values"},
{rv.MethodByName("ReservedNames"), "ReservedNames"}, {rv.MethodByName("ReservedNames"), "ReservedNames"},
{rv.MethodByName("ReservedRanges"), "ReservedRanges"}, {rv.MethodByName("ReservedRanges"), "ReservedRanges"},
{rv.MethodByName("IsClosed"), "IsClosed"},
}...) }...)
case protoreflect.EnumValueDescriptor: case protoreflect.EnumValueDescriptor:

View File

@@ -32,6 +32,7 @@ var byteType = reflect.TypeOf(byte(0))
func Unmarshal(tag string, goType reflect.Type, evs protoreflect.EnumValueDescriptors) protoreflect.FieldDescriptor { func Unmarshal(tag string, goType reflect.Type, evs protoreflect.EnumValueDescriptors) protoreflect.FieldDescriptor {
f := new(filedesc.Field) f := new(filedesc.Field)
f.L0.ParentFile = filedesc.SurrogateProto2 f.L0.ParentFile = filedesc.SurrogateProto2
f.L1.EditionFeatures = f.L0.ParentFile.L1.EditionFeatures
for len(tag) > 0 { for len(tag) > 0 {
i := strings.IndexByte(tag, ',') i := strings.IndexByte(tag, ',')
if i < 0 { if i < 0 {
@@ -107,8 +108,7 @@ func Unmarshal(tag string, goType reflect.Type, evs protoreflect.EnumValueDescri
f.L1.StringName.InitJSON(jsonName) f.L1.StringName.InitJSON(jsonName)
} }
case s == "packed": case s == "packed":
f.L1.HasPacked = true f.L1.EditionFeatures.IsPacked = true
f.L1.IsPacked = true
case strings.HasPrefix(s, "weak="): case strings.HasPrefix(s, "weak="):
f.L1.IsWeak = true f.L1.IsWeak = true
f.L1.Message = filedesc.PlaceholderMessage(protoreflect.FullName(s[len("weak="):])) f.L1.Message = filedesc.PlaceholderMessage(protoreflect.FullName(s[len("weak="):]))

View File

@@ -87,3 +87,18 @@ func InvalidUTF8(name string) error {
func RequiredNotSet(name string) error { func RequiredNotSet(name string) error {
return New("required field %v not set", name) return New("required field %v not set", name)
} }
type SizeMismatchError struct {
Calculated, Measured int
}
func (e *SizeMismatchError) Error() string {
return fmt.Sprintf("size mismatch (see https://github.com/golang/protobuf/issues/1609): calculated=%d, measured=%d", e.Calculated, e.Measured)
}
func MismatchedSizeCalculation(calculated, measured int) error {
return &SizeMismatchError{
Calculated: calculated,
Measured: measured,
}
}
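MismatchedSizeCalculation gives the wire codecs a typed error for the size/marshal disagreement tracked in golang/protobuf#1609. The type is internal, so it cannot be imported directly; the sketch below re-creates the pattern with a stand-in type to show how the encoder uses it further down in this commit:

package main

import "fmt"

// Stand-in for the internal SizeMismatchError; the real one lives in
// google.golang.org/protobuf/internal/errors and is not importable.
type SizeMismatchError struct {
	Calculated, Measured int
}

func (e *SizeMismatchError) Error() string {
	return fmt.Sprintf("size mismatch: calculated=%d, measured=%d", e.Calculated, e.Measured)
}

// appendWithCheck mimics the encoder pattern: size the payload up front,
// append it, then verify the bytes actually written match the calculation.
func appendWithCheck(b, payload []byte, calculatedSize int) ([]byte, error) {
	before := len(b)
	b = append(b, payload...)
	if measured := len(b) - before; measured != calculatedSize {
		return nil, &SizeMismatchError{Calculated: calculatedSize, Measured: measured}
	}
	return b, nil
}

func main() {
	_, err := appendWithCheck(nil, []byte("abc"), 4) // deliberately wrong size
	fmt.Println(err)                                 // size mismatch: calculated=4, measured=3
}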

View File

@@ -7,6 +7,7 @@ package filedesc
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
@@ -108,9 +109,12 @@ func (fd *File) ParentFile() protoreflect.FileDescriptor { return fd }
func (fd *File) Parent() protoreflect.Descriptor { return nil } func (fd *File) Parent() protoreflect.Descriptor { return nil }
func (fd *File) Index() int { return 0 } func (fd *File) Index() int { return 0 }
func (fd *File) Syntax() protoreflect.Syntax { return fd.L1.Syntax } func (fd *File) Syntax() protoreflect.Syntax { return fd.L1.Syntax }
// Not exported and just used to reconstruct the original FileDescriptor proto
func (fd *File) Edition() int32 { return int32(fd.L1.Edition) }
func (fd *File) Name() protoreflect.Name { return fd.L1.Package.Name() } func (fd *File) Name() protoreflect.Name { return fd.L1.Package.Name() }
func (fd *File) FullName() protoreflect.FullName { return fd.L1.Package } func (fd *File) FullName() protoreflect.FullName { return fd.L1.Package }
func (fd *File) IsPlaceholder() bool { return false } func (fd *File) IsPlaceholder() bool { return false }
func (fd *File) Options() protoreflect.ProtoMessage { func (fd *File) Options() protoreflect.ProtoMessage {
if f := fd.lazyInit().Options; f != nil { if f := fd.lazyInit().Options; f != nil {
return f() return f()
@@ -202,6 +206,9 @@ func (ed *Enum) lazyInit() *EnumL2 {
ed.L0.ParentFile.lazyInit() // implicitly initializes L2 ed.L0.ParentFile.lazyInit() // implicitly initializes L2
return ed.L2 return ed.L2
} }
func (ed *Enum) IsClosed() bool {
return !ed.L1.EditionFeatures.IsOpenEnum
}
func (ed *EnumValue) Options() protoreflect.ProtoMessage { func (ed *EnumValue) Options() protoreflect.ProtoMessage {
if f := ed.L1.Options; f != nil { if f := ed.L1.Options; f != nil {
@@ -251,10 +258,6 @@ type (
StringName stringName StringName stringName
IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto
IsWeak bool // promoted from google.protobuf.FieldOptions IsWeak bool // promoted from google.protobuf.FieldOptions
HasPacked bool // promoted from google.protobuf.FieldOptions
IsPacked bool // promoted from google.protobuf.FieldOptions
HasEnforceUTF8 bool // promoted from google.protobuf.FieldOptions
EnforceUTF8 bool // promoted from google.protobuf.FieldOptions
Default defaultValue Default defaultValue
ContainingOneof protoreflect.OneofDescriptor // must be consistent with Message.Oneofs.Fields ContainingOneof protoreflect.OneofDescriptor // must be consistent with Message.Oneofs.Fields
Enum protoreflect.EnumDescriptor Enum protoreflect.EnumDescriptor
@@ -331,8 +334,7 @@ func (fd *Field) HasPresence() bool {
if fd.L1.Cardinality == protoreflect.Repeated { if fd.L1.Cardinality == protoreflect.Repeated {
return false return false
} }
explicitFieldPresence := fd.Syntax() == protoreflect.Editions && fd.L1.EditionFeatures.IsFieldPresence return fd.IsExtension() || fd.L1.EditionFeatures.IsFieldPresence || fd.L1.Message != nil || fd.L1.ContainingOneof != nil
return fd.Syntax() == protoreflect.Proto2 || explicitFieldPresence || fd.L1.Message != nil || fd.L1.ContainingOneof != nil
} }
func (fd *Field) HasOptionalKeyword() bool { func (fd *Field) HasOptionalKeyword() bool {
return (fd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 && fd.L1.Cardinality == protoreflect.Optional && fd.L1.ContainingOneof == nil) || fd.L1.IsProto3Optional return (fd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 && fd.L1.Cardinality == protoreflect.Optional && fd.L1.ContainingOneof == nil) || fd.L1.IsProto3Optional
@@ -345,14 +347,7 @@ func (fd *Field) IsPacked() bool {
case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind: case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind:
return false return false
} }
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions { return fd.L1.EditionFeatures.IsPacked
return fd.L1.EditionFeatures.IsPacked
}
if fd.L0.ParentFile.L1.Syntax == protoreflect.Proto3 {
// proto3 repeated fields are packed by default.
return !fd.L1.HasPacked || fd.L1.IsPacked
}
return fd.L1.IsPacked
} }
func (fd *Field) IsExtension() bool { return false } func (fd *Field) IsExtension() bool { return false }
func (fd *Field) IsWeak() bool { return fd.L1.IsWeak } func (fd *Field) IsWeak() bool { return fd.L1.IsWeak }
@@ -399,13 +394,7 @@ func (fd *Field) ProtoType(protoreflect.FieldDescriptor) {}
// WARNING: This method is exempt from the compatibility promise and may be // WARNING: This method is exempt from the compatibility promise and may be
// removed in the future without warning. // removed in the future without warning.
func (fd *Field) EnforceUTF8() bool { func (fd *Field) EnforceUTF8() bool {
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions { return fd.L1.EditionFeatures.IsUTF8Validated
return fd.L1.EditionFeatures.IsUTF8Validated
}
if fd.L1.HasEnforceUTF8 {
return fd.L1.EnforceUTF8
}
return fd.L0.ParentFile.L1.Syntax == protoreflect.Proto3
} }
func (od *Oneof) IsSynthetic() bool { func (od *Oneof) IsSynthetic() bool {
@@ -438,7 +427,6 @@ type (
Options func() protoreflect.ProtoMessage Options func() protoreflect.ProtoMessage
StringName stringName StringName stringName
IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto
IsPacked bool // promoted from google.protobuf.FieldOptions
Default defaultValue Default defaultValue
Enum protoreflect.EnumDescriptor Enum protoreflect.EnumDescriptor
Message protoreflect.MessageDescriptor Message protoreflect.MessageDescriptor
@@ -461,7 +449,16 @@ func (xd *Extension) HasPresence() bool { return xd.L1.Cardi
func (xd *Extension) HasOptionalKeyword() bool { func (xd *Extension) HasOptionalKeyword() bool {
return (xd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 && xd.L1.Cardinality == protoreflect.Optional) || xd.lazyInit().IsProto3Optional return (xd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 && xd.L1.Cardinality == protoreflect.Optional) || xd.lazyInit().IsProto3Optional
} }
func (xd *Extension) IsPacked() bool { return xd.lazyInit().IsPacked } func (xd *Extension) IsPacked() bool {
if xd.L1.Cardinality != protoreflect.Repeated {
return false
}
switch xd.L1.Kind {
case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind:
return false
}
return xd.L1.EditionFeatures.IsPacked
}
func (xd *Extension) IsExtension() bool { return true } func (xd *Extension) IsExtension() bool { return true }
func (xd *Extension) IsWeak() bool { return false } func (xd *Extension) IsWeak() bool { return false }
func (xd *Extension) IsList() bool { return xd.Cardinality() == protoreflect.Repeated } func (xd *Extension) IsList() bool { return xd.Cardinality() == protoreflect.Repeated }
@@ -542,8 +539,9 @@ func (md *Method) ProtoInternal(pragma.DoNotImplement) {}
// Surrogate files are can be used to create standalone descriptors // Surrogate files are can be used to create standalone descriptors
// where the syntax is only information derived from the parent file. // where the syntax is only information derived from the parent file.
var ( var (
SurrogateProto2 = &File{L1: FileL1{Syntax: protoreflect.Proto2}, L2: &FileL2{}} SurrogateProto2 = &File{L1: FileL1{Syntax: protoreflect.Proto2}, L2: &FileL2{}}
SurrogateProto3 = &File{L1: FileL1{Syntax: protoreflect.Proto3}, L2: &FileL2{}} SurrogateProto3 = &File{L1: FileL1{Syntax: protoreflect.Proto3}, L2: &FileL2{}}
SurrogateEdition2023 = &File{L1: FileL1{Syntax: protoreflect.Editions, Edition: Edition2023}, L2: &FileL2{}}
) )
type ( type (
@@ -585,6 +583,34 @@ func (s *stringName) InitJSON(name string) {
s.nameJSON = name s.nameJSON = name
} }
// Returns true if this field is structured like the synthetic field of a proto2
// group. This allows us to expand our treatment of delimited fields without
// breaking proto2 files that have been upgraded to editions.
func isGroupLike(fd protoreflect.FieldDescriptor) bool {
// Groups are always group types.
if fd.Kind() != protoreflect.GroupKind {
return false
}
// Group fields are always the lowercase type name.
if strings.ToLower(string(fd.Message().Name())) != string(fd.Name()) {
return false
}
// Groups could only be defined in the same file they're used.
if fd.Message().ParentFile() != fd.ParentFile() {
return false
}
// Group messages are always defined in the same scope as the field. File
// level extensions will compare NULL == NULL here, which is why the file
// comparison above is necessary to ensure both come from the same file.
if fd.IsExtension() {
return fd.Parent() == fd.Message().Parent()
}
return fd.ContainingMessage() == fd.Message().Parent()
}
func (s *stringName) lazyInit(fd protoreflect.FieldDescriptor) *stringName { func (s *stringName) lazyInit(fd protoreflect.FieldDescriptor) *stringName {
s.once.Do(func() { s.once.Do(func() {
if fd.IsExtension() { if fd.IsExtension() {
@@ -605,7 +631,7 @@ func (s *stringName) lazyInit(fd protoreflect.FieldDescriptor) *stringName {
// Format the text name. // Format the text name.
s.nameText = string(fd.Name()) s.nameText = string(fd.Name())
if fd.Kind() == protoreflect.GroupKind { if isGroupLike(fd) {
s.nameText = string(fd.Message().Name()) s.nameText = string(fd.Message().Name())
} }
} }

View File

@@ -113,8 +113,10 @@ func (fd *File) unmarshalSeed(b []byte) {
switch string(v) { switch string(v) {
case "proto2": case "proto2":
fd.L1.Syntax = protoreflect.Proto2 fd.L1.Syntax = protoreflect.Proto2
fd.L1.Edition = EditionProto2
case "proto3": case "proto3":
fd.L1.Syntax = protoreflect.Proto3 fd.L1.Syntax = protoreflect.Proto3
fd.L1.Edition = EditionProto3
case "editions": case "editions":
fd.L1.Syntax = protoreflect.Editions fd.L1.Syntax = protoreflect.Editions
default: default:
@@ -177,11 +179,10 @@ func (fd *File) unmarshalSeed(b []byte) {
// If syntax is missing, it is assumed to be proto2. // If syntax is missing, it is assumed to be proto2.
if fd.L1.Syntax == 0 { if fd.L1.Syntax == 0 {
fd.L1.Syntax = protoreflect.Proto2 fd.L1.Syntax = protoreflect.Proto2
fd.L1.Edition = EditionProto2
} }
if fd.L1.Syntax == protoreflect.Editions { fd.L1.EditionFeatures = getFeaturesFor(fd.L1.Edition)
fd.L1.EditionFeatures = getFeaturesFor(fd.L1.Edition)
}
// Parse editions features from options if any // Parse editions features from options if any
if options != nil { if options != nil {
@@ -267,6 +268,7 @@ func (ed *Enum) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd protorefl
ed.L0.ParentFile = pf ed.L0.ParentFile = pf
ed.L0.Parent = pd ed.L0.Parent = pd
ed.L0.Index = i ed.L0.Index = i
ed.L1.EditionFeatures = featuresFromParentDesc(ed.Parent())
var numValues int var numValues int
for b := b; len(b) > 0; { for b := b; len(b) > 0; {
@@ -443,6 +445,7 @@ func (xd *Extension) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd prot
xd.L0.ParentFile = pf xd.L0.ParentFile = pf
xd.L0.Parent = pd xd.L0.Parent = pd
xd.L0.Index = i xd.L0.Index = i
xd.L1.EditionFeatures = featuresFromParentDesc(pd)
for len(b) > 0 { for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b) num, typ, n := protowire.ConsumeTag(b)
@@ -467,6 +470,38 @@ func (xd *Extension) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd prot
xd.L0.FullName = appendFullName(sb, pd.FullName(), v) xd.L0.FullName = appendFullName(sb, pd.FullName(), v)
case genid.FieldDescriptorProto_Extendee_field_number: case genid.FieldDescriptorProto_Extendee_field_number:
xd.L1.Extendee = PlaceholderMessage(makeFullName(sb, v)) xd.L1.Extendee = PlaceholderMessage(makeFullName(sb, v))
case genid.FieldDescriptorProto_Options_field_number:
xd.unmarshalOptions(v)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]
}
}
if xd.L1.Kind == protoreflect.MessageKind && xd.L1.EditionFeatures.IsDelimitedEncoded {
xd.L1.Kind = protoreflect.GroupKind
}
}
func (xd *Extension) unmarshalOptions(b []byte) {
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
b = b[n:]
switch typ {
case protowire.VarintType:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
switch num {
case genid.FieldOptions_Packed_field_number:
xd.L1.EditionFeatures.IsPacked = protowire.DecodeBool(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FieldOptions_Features_field_number:
xd.L1.EditionFeatures = unmarshalFeatureSet(v, xd.L1.EditionFeatures)
} }
default: default:
m := protowire.ConsumeFieldValue(num, typ, b) m := protowire.ConsumeFieldValue(num, typ, b)

View File

@@ -466,10 +466,10 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd protoref
b = b[m:] b = b[m:]
} }
} }
if fd.Syntax() == protoreflect.Editions && fd.L1.Kind == protoreflect.MessageKind && fd.L1.EditionFeatures.IsDelimitedEncoded { if fd.L1.Kind == protoreflect.MessageKind && fd.L1.EditionFeatures.IsDelimitedEncoded {
fd.L1.Kind = protoreflect.GroupKind fd.L1.Kind = protoreflect.GroupKind
} }
if fd.Syntax() == protoreflect.Editions && fd.L1.EditionFeatures.IsLegacyRequired { if fd.L1.EditionFeatures.IsLegacyRequired {
fd.L1.Cardinality = protoreflect.Required fd.L1.Cardinality = protoreflect.Required
} }
if rawTypeName != nil { if rawTypeName != nil {
@@ -496,13 +496,11 @@ func (fd *Field) unmarshalOptions(b []byte) {
b = b[m:] b = b[m:]
switch num { switch num {
case genid.FieldOptions_Packed_field_number: case genid.FieldOptions_Packed_field_number:
fd.L1.HasPacked = true fd.L1.EditionFeatures.IsPacked = protowire.DecodeBool(v)
fd.L1.IsPacked = protowire.DecodeBool(v)
case genid.FieldOptions_Weak_field_number: case genid.FieldOptions_Weak_field_number:
fd.L1.IsWeak = protowire.DecodeBool(v) fd.L1.IsWeak = protowire.DecodeBool(v)
case FieldOptions_EnforceUTF8: case FieldOptions_EnforceUTF8:
fd.L1.HasEnforceUTF8 = true fd.L1.EditionFeatures.IsUTF8Validated = protowire.DecodeBool(v)
fd.L1.EnforceUTF8 = protowire.DecodeBool(v)
} }
case protowire.BytesType: case protowire.BytesType:
v, m := protowire.ConsumeBytes(b) v, m := protowire.ConsumeBytes(b)
@@ -548,7 +546,6 @@ func (od *Oneof) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd protoref
func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) { func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
var rawTypeName []byte var rawTypeName []byte
var rawOptions []byte var rawOptions []byte
xd.L1.EditionFeatures = featuresFromParentDesc(xd.L1.Extendee)
xd.L2 = new(ExtensionL2) xd.L2 = new(ExtensionL2)
for len(b) > 0 { for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b) num, typ, n := protowire.ConsumeTag(b)
@@ -572,7 +569,6 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
case genid.FieldDescriptorProto_TypeName_field_number: case genid.FieldDescriptorProto_TypeName_field_number:
rawTypeName = v rawTypeName = v
case genid.FieldDescriptorProto_Options_field_number: case genid.FieldDescriptorProto_Options_field_number:
xd.unmarshalOptions(v)
rawOptions = appendOptions(rawOptions, v) rawOptions = appendOptions(rawOptions, v)
} }
default: default:
@@ -580,12 +576,6 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
b = b[m:] b = b[m:]
} }
} }
if xd.Syntax() == protoreflect.Editions && xd.L1.Kind == protoreflect.MessageKind && xd.L1.EditionFeatures.IsDelimitedEncoded {
xd.L1.Kind = protoreflect.GroupKind
}
if xd.Syntax() == protoreflect.Editions && xd.L1.EditionFeatures.IsLegacyRequired {
xd.L1.Cardinality = protoreflect.Required
}
if rawTypeName != nil { if rawTypeName != nil {
name := makeFullName(sb, rawTypeName) name := makeFullName(sb, rawTypeName)
switch xd.L1.Kind { switch xd.L1.Kind {
@@ -598,32 +588,6 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
xd.L2.Options = xd.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Field, rawOptions) xd.L2.Options = xd.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Field, rawOptions)
} }
func (xd *Extension) unmarshalOptions(b []byte) {
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
b = b[n:]
switch typ {
case protowire.VarintType:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
switch num {
case genid.FieldOptions_Packed_field_number:
xd.L2.IsPacked = protowire.DecodeBool(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FieldOptions_Features_field_number:
xd.L1.EditionFeatures = unmarshalFeatureSet(v, xd.L1.EditionFeatures)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]
}
}
}
func (sd *Service) unmarshalFull(b []byte, sb *strs.Builder) { func (sd *Service) unmarshalFull(b []byte, sb *strs.Builder) {
var rawMethods [][]byte var rawMethods [][]byte
var rawOptions []byte var rawOptions []byte

View File

@@ -8,6 +8,7 @@ package filedesc
import ( import (
"fmt" "fmt"
"strings"
"sync" "sync"
"google.golang.org/protobuf/internal/descfmt" "google.golang.org/protobuf/internal/descfmt"
@@ -198,6 +199,16 @@ func (p *Fields) lazyInit() *Fields {
if _, ok := p.byText[d.TextName()]; !ok { if _, ok := p.byText[d.TextName()]; !ok {
p.byText[d.TextName()] = d p.byText[d.TextName()] = d
} }
if isGroupLike(d) {
lowerJSONName := strings.ToLower(d.JSONName())
if _, ok := p.byJSON[lowerJSONName]; !ok {
p.byJSON[lowerJSONName] = d
}
lowerTextName := strings.ToLower(d.TextName())
if _, ok := p.byText[lowerTextName]; !ok {
p.byText[lowerTextName] = d
}
}
if _, ok := p.byNum[d.Number()]; !ok { if _, ok := p.byNum[d.Number()]; !ok {
p.byNum[d.Number()] = d p.byNum[d.Number()] = d
} }

View File

@@ -14,9 +14,13 @@ import (
) )
var defaultsCache = make(map[Edition]EditionFeatures) var defaultsCache = make(map[Edition]EditionFeatures)
var defaultsKeys = []Edition{}
func init() { func init() {
unmarshalEditionDefaults(editiondefaults.Defaults) unmarshalEditionDefaults(editiondefaults.Defaults)
SurrogateProto2.L1.EditionFeatures = getFeaturesFor(EditionProto2)
SurrogateProto3.L1.EditionFeatures = getFeaturesFor(EditionProto3)
SurrogateEdition2023.L1.EditionFeatures = getFeaturesFor(Edition2023)
} }
func unmarshalGoFeature(b []byte, parent EditionFeatures) EditionFeatures { func unmarshalGoFeature(b []byte, parent EditionFeatures) EditionFeatures {
@@ -104,12 +108,15 @@ func unmarshalEditionDefault(b []byte) {
v, m := protowire.ConsumeBytes(b) v, m := protowire.ConsumeBytes(b)
b = b[m:] b = b[m:]
switch num { switch num {
case genid.FeatureSetDefaults_FeatureSetEditionDefault_Features_field_number: case genid.FeatureSetDefaults_FeatureSetEditionDefault_FixedFeatures_field_number:
fs = unmarshalFeatureSet(v, fs)
case genid.FeatureSetDefaults_FeatureSetEditionDefault_OverridableFeatures_field_number:
fs = unmarshalFeatureSet(v, fs) fs = unmarshalFeatureSet(v, fs)
} }
} }
} }
defaultsCache[ed] = fs defaultsCache[ed] = fs
defaultsKeys = append(defaultsKeys, ed)
} }
func unmarshalEditionDefaults(b []byte) { func unmarshalEditionDefaults(b []byte) {
@@ -135,8 +142,15 @@ func unmarshalEditionDefaults(b []byte) {
} }
func getFeaturesFor(ed Edition) EditionFeatures { func getFeaturesFor(ed Edition) EditionFeatures {
if def, ok := defaultsCache[ed]; ok { match := EditionUnknown
return def for _, key := range defaultsKeys {
if key > ed {
break
}
match = key
} }
panic(fmt.Sprintf("unsupported edition: %v", ed)) if match == EditionUnknown {
panic(fmt.Sprintf("unsupported edition: %v", ed))
}
return defaultsCache[match]
} }
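getFeaturesFor no longer requires an exact hit in defaultsCache: it now resolves to the newest known edition that does not exceed the requested one and only panics when nothing qualifies. A standalone sketch of that lookup (edition numbers mirror the generated constants, the helper name is invented):

package main

import "fmt"

type Edition int32

const (
	EditionUnknown Edition = 0
	EditionProto2  Edition = 998
	EditionProto3  Edition = 999
	Edition2023    Edition = 1000
)

// defaultsKeys is kept in ascending edition order, as in the embedded defaults.
var defaultsKeys = []Edition{EditionProto2, EditionProto3, Edition2023}

// closestEdition returns the newest known edition <= ed, or EditionUnknown.
func closestEdition(ed Edition) Edition {
	match := EditionUnknown
	for _, key := range defaultsKeys {
		if key > ed {
			break
		}
		match = key
	}
	return match
}

func main() {
	fmt.Println(closestEdition(Edition2023))   // 1000
	fmt.Println(closestEdition(Edition(1001))) // 1000: a future edition reuses the newest known defaults
	fmt.Println(closestEdition(Edition(5)))    // 0: unsupported, the real code panics here
}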

View File

@@ -63,6 +63,7 @@ func (e PlaceholderEnum) Options() protoreflect.ProtoMessage { return des
func (e PlaceholderEnum) Values() protoreflect.EnumValueDescriptors { return emptyEnumValues } func (e PlaceholderEnum) Values() protoreflect.EnumValueDescriptors { return emptyEnumValues }
func (e PlaceholderEnum) ReservedNames() protoreflect.Names { return emptyNames } func (e PlaceholderEnum) ReservedNames() protoreflect.Names { return emptyNames }
func (e PlaceholderEnum) ReservedRanges() protoreflect.EnumRanges { return emptyEnumRanges } func (e PlaceholderEnum) ReservedRanges() protoreflect.EnumRanges { return emptyEnumRanges }
func (e PlaceholderEnum) IsClosed() bool { return false }
func (e PlaceholderEnum) ProtoType(protoreflect.EnumDescriptor) { return } func (e PlaceholderEnum) ProtoType(protoreflect.EnumDescriptor) { return }
func (e PlaceholderEnum) ProtoInternal(pragma.DoNotImplement) { return } func (e PlaceholderEnum) ProtoInternal(pragma.DoNotImplement) { return }

View File

@@ -21,6 +21,7 @@ const (
// Enum values for google.protobuf.Edition. // Enum values for google.protobuf.Edition.
const ( const (
Edition_EDITION_UNKNOWN_enum_value = 0 Edition_EDITION_UNKNOWN_enum_value = 0
Edition_EDITION_LEGACY_enum_value = 900
Edition_EDITION_PROTO2_enum_value = 998 Edition_EDITION_PROTO2_enum_value = 998
Edition_EDITION_PROTO3_enum_value = 999 Edition_EDITION_PROTO3_enum_value = 999
Edition_EDITION_2023_enum_value = 1000 Edition_EDITION_2023_enum_value = 1000
@@ -653,6 +654,7 @@ const (
FieldOptions_Targets_field_name protoreflect.Name = "targets" FieldOptions_Targets_field_name protoreflect.Name = "targets"
FieldOptions_EditionDefaults_field_name protoreflect.Name = "edition_defaults" FieldOptions_EditionDefaults_field_name protoreflect.Name = "edition_defaults"
FieldOptions_Features_field_name protoreflect.Name = "features" FieldOptions_Features_field_name protoreflect.Name = "features"
FieldOptions_FeatureSupport_field_name protoreflect.Name = "feature_support"
FieldOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" FieldOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
FieldOptions_Ctype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.ctype" FieldOptions_Ctype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.ctype"
@@ -667,6 +669,7 @@ const (
FieldOptions_Targets_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.targets" FieldOptions_Targets_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.targets"
FieldOptions_EditionDefaults_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.edition_defaults" FieldOptions_EditionDefaults_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.edition_defaults"
FieldOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.features" FieldOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.features"
FieldOptions_FeatureSupport_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.feature_support"
FieldOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.uninterpreted_option" FieldOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.uninterpreted_option"
) )
@@ -684,6 +687,7 @@ const (
FieldOptions_Targets_field_number protoreflect.FieldNumber = 19 FieldOptions_Targets_field_number protoreflect.FieldNumber = 19
FieldOptions_EditionDefaults_field_number protoreflect.FieldNumber = 20 FieldOptions_EditionDefaults_field_number protoreflect.FieldNumber = 20
FieldOptions_Features_field_number protoreflect.FieldNumber = 21 FieldOptions_Features_field_number protoreflect.FieldNumber = 21
FieldOptions_FeatureSupport_field_number protoreflect.FieldNumber = 22
FieldOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 FieldOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
) )
@@ -767,6 +771,33 @@ const (
FieldOptions_EditionDefault_Value_field_number protoreflect.FieldNumber = 2 FieldOptions_EditionDefault_Value_field_number protoreflect.FieldNumber = 2
) )
// Names for google.protobuf.FieldOptions.FeatureSupport.
const (
FieldOptions_FeatureSupport_message_name protoreflect.Name = "FeatureSupport"
FieldOptions_FeatureSupport_message_fullname protoreflect.FullName = "google.protobuf.FieldOptions.FeatureSupport"
)
// Field names for google.protobuf.FieldOptions.FeatureSupport.
const (
FieldOptions_FeatureSupport_EditionIntroduced_field_name protoreflect.Name = "edition_introduced"
FieldOptions_FeatureSupport_EditionDeprecated_field_name protoreflect.Name = "edition_deprecated"
FieldOptions_FeatureSupport_DeprecationWarning_field_name protoreflect.Name = "deprecation_warning"
FieldOptions_FeatureSupport_EditionRemoved_field_name protoreflect.Name = "edition_removed"
FieldOptions_FeatureSupport_EditionIntroduced_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.FeatureSupport.edition_introduced"
FieldOptions_FeatureSupport_EditionDeprecated_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.FeatureSupport.edition_deprecated"
FieldOptions_FeatureSupport_DeprecationWarning_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.FeatureSupport.deprecation_warning"
FieldOptions_FeatureSupport_EditionRemoved_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.FeatureSupport.edition_removed"
)
// Field numbers for google.protobuf.FieldOptions.FeatureSupport.
const (
FieldOptions_FeatureSupport_EditionIntroduced_field_number protoreflect.FieldNumber = 1
FieldOptions_FeatureSupport_EditionDeprecated_field_number protoreflect.FieldNumber = 2
FieldOptions_FeatureSupport_DeprecationWarning_field_number protoreflect.FieldNumber = 3
FieldOptions_FeatureSupport_EditionRemoved_field_number protoreflect.FieldNumber = 4
)
// Names for google.protobuf.OneofOptions. // Names for google.protobuf.OneofOptions.
const ( const (
OneofOptions_message_name protoreflect.Name = "OneofOptions" OneofOptions_message_name protoreflect.Name = "OneofOptions"
@@ -1110,17 +1141,20 @@ const (
// Field names for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault. // Field names for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.
const ( const (
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_name protoreflect.Name = "edition" FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_name protoreflect.Name = "edition"
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_name protoreflect.Name = "features" FeatureSetDefaults_FeatureSetEditionDefault_OverridableFeatures_field_name protoreflect.Name = "overridable_features"
FeatureSetDefaults_FeatureSetEditionDefault_FixedFeatures_field_name protoreflect.Name = "fixed_features"
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition" FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition"
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features" FeatureSetDefaults_FeatureSetEditionDefault_OverridableFeatures_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridable_features"
FeatureSetDefaults_FeatureSetEditionDefault_FixedFeatures_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixed_features"
) )
// Field numbers for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault. // Field numbers for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.
const ( const (
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_number protoreflect.FieldNumber = 3 FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_number protoreflect.FieldNumber = 3
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_number protoreflect.FieldNumber = 2 FeatureSetDefaults_FeatureSetEditionDefault_OverridableFeatures_field_number protoreflect.FieldNumber = 4
FeatureSetDefaults_FeatureSetEditionDefault_FixedFeatures_field_number protoreflect.FieldNumber = 5
) )
// Names for google.protobuf.SourceCodeInfo. // Names for google.protobuf.SourceCodeInfo.

View File

@@ -10,7 +10,7 @@ import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
) )
const File_reflect_protodesc_proto_go_features_proto = "reflect/protodesc/proto/go_features.proto" const File_google_protobuf_go_features_proto = "google/protobuf/go_features.proto"
// Names for google.protobuf.GoFeatures. // Names for google.protobuf.GoFeatures.
const ( const (

View File

@@ -233,9 +233,15 @@ func sizeMessageInfo(p pointer, f *coderFieldInfo, opts marshalOptions) int {
} }
func appendMessageInfo(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { func appendMessageInfo(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) {
calculatedSize := f.mi.sizePointer(p.Elem(), opts)
b = protowire.AppendVarint(b, f.wiretag) b = protowire.AppendVarint(b, f.wiretag)
b = protowire.AppendVarint(b, uint64(f.mi.sizePointer(p.Elem(), opts))) b = protowire.AppendVarint(b, uint64(calculatedSize))
return f.mi.marshalAppendPointer(b, p.Elem(), opts) before := len(b)
b, err := f.mi.marshalAppendPointer(b, p.Elem(), opts)
if measuredSize := len(b) - before; calculatedSize != measuredSize && err == nil {
return nil, errors.MismatchedSizeCalculation(calculatedSize, measuredSize)
}
return b, err
} }
func consumeMessageInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { func consumeMessageInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
@@ -262,14 +268,21 @@ func isInitMessageInfo(p pointer, f *coderFieldInfo) error {
return f.mi.checkInitializedPointer(p.Elem()) return f.mi.checkInitializedPointer(p.Elem())
} }
func sizeMessage(m proto.Message, tagsize int, _ marshalOptions) int { func sizeMessage(m proto.Message, tagsize int, opts marshalOptions) int {
return protowire.SizeBytes(proto.Size(m)) + tagsize return protowire.SizeBytes(opts.Options().Size(m)) + tagsize
} }
func appendMessage(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) { func appendMessage(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) {
mopts := opts.Options()
calculatedSize := mopts.Size(m)
b = protowire.AppendVarint(b, wiretag) b = protowire.AppendVarint(b, wiretag)
b = protowire.AppendVarint(b, uint64(proto.Size(m))) b = protowire.AppendVarint(b, uint64(calculatedSize))
return opts.Options().MarshalAppend(b, m) before := len(b)
b, err := mopts.MarshalAppend(b, m)
if measuredSize := len(b) - before; calculatedSize != measuredSize && err == nil {
return nil, errors.MismatchedSizeCalculation(calculatedSize, measuredSize)
}
return b, err
} }
func consumeMessage(b []byte, m proto.Message, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, err error) { func consumeMessage(b []byte, m proto.Message, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, err error) {
@@ -405,8 +418,8 @@ func consumeGroupType(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf
return f.mi.unmarshalPointer(b, p.Elem(), f.num, opts) return f.mi.unmarshalPointer(b, p.Elem(), f.num, opts)
} }
func sizeGroup(m proto.Message, tagsize int, _ marshalOptions) int { func sizeGroup(m proto.Message, tagsize int, opts marshalOptions) int {
return 2*tagsize + proto.Size(m) return 2*tagsize + opts.Options().Size(m)
} }
func appendGroup(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) { func appendGroup(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) {
@@ -482,10 +495,14 @@ func appendMessageSliceInfo(b []byte, p pointer, f *coderFieldInfo, opts marshal
b = protowire.AppendVarint(b, f.wiretag) b = protowire.AppendVarint(b, f.wiretag)
siz := f.mi.sizePointer(v, opts) siz := f.mi.sizePointer(v, opts)
b = protowire.AppendVarint(b, uint64(siz)) b = protowire.AppendVarint(b, uint64(siz))
before := len(b)
b, err = f.mi.marshalAppendPointer(b, v, opts) b, err = f.mi.marshalAppendPointer(b, v, opts)
if err != nil { if err != nil {
return b, err return b, err
} }
if measuredSize := len(b) - before; siz != measuredSize {
return nil, errors.MismatchedSizeCalculation(siz, measuredSize)
}
} }
return b, nil return b, nil
} }
@@ -520,28 +537,34 @@ func isInitMessageSliceInfo(p pointer, f *coderFieldInfo) error {
return nil return nil
} }
func sizeMessageSlice(p pointer, goType reflect.Type, tagsize int, _ marshalOptions) int { func sizeMessageSlice(p pointer, goType reflect.Type, tagsize int, opts marshalOptions) int {
mopts := opts.Options()
s := p.PointerSlice() s := p.PointerSlice()
n := 0 n := 0
for _, v := range s { for _, v := range s {
m := asMessage(v.AsValueOf(goType.Elem())) m := asMessage(v.AsValueOf(goType.Elem()))
n += protowire.SizeBytes(proto.Size(m)) + tagsize n += protowire.SizeBytes(mopts.Size(m)) + tagsize
} }
return n return n
} }
func appendMessageSlice(b []byte, p pointer, wiretag uint64, goType reflect.Type, opts marshalOptions) ([]byte, error) { func appendMessageSlice(b []byte, p pointer, wiretag uint64, goType reflect.Type, opts marshalOptions) ([]byte, error) {
mopts := opts.Options()
s := p.PointerSlice() s := p.PointerSlice()
var err error var err error
for _, v := range s { for _, v := range s {
m := asMessage(v.AsValueOf(goType.Elem())) m := asMessage(v.AsValueOf(goType.Elem()))
b = protowire.AppendVarint(b, wiretag) b = protowire.AppendVarint(b, wiretag)
siz := proto.Size(m) siz := mopts.Size(m)
b = protowire.AppendVarint(b, uint64(siz)) b = protowire.AppendVarint(b, uint64(siz))
b, err = opts.Options().MarshalAppend(b, m) before := len(b)
b, err = mopts.MarshalAppend(b, m)
if err != nil { if err != nil {
return b, err return b, err
} }
if measuredSize := len(b) - before; siz != measuredSize {
return nil, errors.MismatchedSizeCalculation(siz, measuredSize)
}
} }
return b, nil return b, nil
} }
@@ -582,11 +605,12 @@ func isInitMessageSlice(p pointer, goType reflect.Type) error {
// Slices of messages // Slices of messages
func sizeMessageSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) int { func sizeMessageSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) int {
mopts := opts.Options()
list := listv.List() list := listv.List()
n := 0 n := 0
for i, llen := 0, list.Len(); i < llen; i++ { for i, llen := 0, list.Len(); i < llen; i++ {
m := list.Get(i).Message().Interface() m := list.Get(i).Message().Interface()
n += protowire.SizeBytes(proto.Size(m)) + tagsize n += protowire.SizeBytes(mopts.Size(m)) + tagsize
} }
return n return n
} }
@@ -597,13 +621,17 @@ func appendMessageSliceValue(b []byte, listv protoreflect.Value, wiretag uint64,
for i, llen := 0, list.Len(); i < llen; i++ { for i, llen := 0, list.Len(); i < llen; i++ {
m := list.Get(i).Message().Interface() m := list.Get(i).Message().Interface()
b = protowire.AppendVarint(b, wiretag) b = protowire.AppendVarint(b, wiretag)
siz := proto.Size(m) siz := mopts.Size(m)
b = protowire.AppendVarint(b, uint64(siz)) b = protowire.AppendVarint(b, uint64(siz))
before := len(b)
var err error var err error
b, err = mopts.MarshalAppend(b, m) b, err = mopts.MarshalAppend(b, m)
if err != nil { if err != nil {
return b, err return b, err
} }
if measuredSize := len(b) - before; siz != measuredSize {
return nil, errors.MismatchedSizeCalculation(siz, measuredSize)
}
} }
return b, nil return b, nil
} }
@@ -651,11 +679,12 @@ var coderMessageSliceValue = valueCoderFuncs{
} }
func sizeGroupSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) int { func sizeGroupSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) int {
mopts := opts.Options()
list := listv.List() list := listv.List()
n := 0 n := 0
for i, llen := 0, list.Len(); i < llen; i++ { for i, llen := 0, list.Len(); i < llen; i++ {
m := list.Get(i).Message().Interface() m := list.Get(i).Message().Interface()
n += 2*tagsize + proto.Size(m) n += 2*tagsize + mopts.Size(m)
} }
return n return n
} }
@@ -738,12 +767,13 @@ func makeGroupSliceFieldCoder(fd protoreflect.FieldDescriptor, ft reflect.Type)
} }
} }
func sizeGroupSlice(p pointer, messageType reflect.Type, tagsize int, _ marshalOptions) int { func sizeGroupSlice(p pointer, messageType reflect.Type, tagsize int, opts marshalOptions) int {
mopts := opts.Options()
s := p.PointerSlice() s := p.PointerSlice()
n := 0 n := 0
for _, v := range s { for _, v := range s {
m := asMessage(v.AsValueOf(messageType.Elem())) m := asMessage(v.AsValueOf(messageType.Elem()))
n += 2*tagsize + proto.Size(m) n += 2*tagsize + mopts.Size(m)
} }
return n return n
} }
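
The new code in this file follows one pattern throughout: compute the length prefix from the size function, marshal, then compare the number of bytes actually written against the precomputed size and fail with errors.MismatchedSizeCalculation when they differ (typically a sign that the message was mutated concurrently while it was being marshaled). A minimal, self-contained sketch of the same guard, independent of the protobuf internals (appendSized and its callbacks are illustrative names, not part of the library):

package main

import (
	"encoding/binary"
	"fmt"
)

// appendSized writes a varint length prefix computed by size(), appends the
// body produced by marshal(), and verifies that the measured body length
// matches the precomputed size, mirroring the checks added in the hunks above.
func appendSized(b []byte, size func() int, marshal func([]byte) ([]byte, error)) ([]byte, error) {
	calculated := size()
	b = binary.AppendUvarint(b, uint64(calculated))
	before := len(b)
	b, err := marshal(b)
	if err != nil {
		return nil, err
	}
	if measured := len(b) - before; measured != calculated {
		return nil, fmt.Errorf("size mismatch: calculated %d, measured %d", calculated, measured)
	}
	return b, nil
}

func main() {
	payload := []byte("hello")
	out, err := appendSized(nil,
		func() int { return len(payload) },
		func(b []byte) ([]byte, error) { return append(b, payload...), nil })
	fmt.Println(out, err)
}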

View File

@@ -9,6 +9,7 @@ import (
"sort" "sort"
"google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/internal/errors"
"google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/genid"
"google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoreflect"
) )
@@ -240,11 +241,16 @@ func appendMapItem(b []byte, keyrv, valrv reflect.Value, mapi *mapInfo, f *coder
size += mapi.keyFuncs.size(key.Value(), mapKeyTagSize, opts) size += mapi.keyFuncs.size(key.Value(), mapKeyTagSize, opts)
size += mapi.valFuncs.size(val, mapValTagSize, opts) size += mapi.valFuncs.size(val, mapValTagSize, opts)
b = protowire.AppendVarint(b, uint64(size)) b = protowire.AppendVarint(b, uint64(size))
before := len(b)
b, err := mapi.keyFuncs.marshal(b, key.Value(), mapi.keyWiretag, opts) b, err := mapi.keyFuncs.marshal(b, key.Value(), mapi.keyWiretag, opts)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return mapi.valFuncs.marshal(b, val, mapi.valWiretag, opts) b, err = mapi.valFuncs.marshal(b, val, mapi.valWiretag, opts)
if measuredSize := len(b) - before; size != measuredSize && err == nil {
return nil, errors.MismatchedSizeCalculation(size, measuredSize)
}
return b, err
} else { } else {
key := mapi.conv.keyConv.PBValueOf(keyrv).MapKey() key := mapi.conv.keyConv.PBValueOf(keyrv).MapKey()
val := pointerOfValue(valrv) val := pointerOfValue(valrv)
@@ -259,7 +265,12 @@ func appendMapItem(b []byte, keyrv, valrv reflect.Value, mapi *mapInfo, f *coder
} }
b = protowire.AppendVarint(b, mapi.valWiretag) b = protowire.AppendVarint(b, mapi.valWiretag)
b = protowire.AppendVarint(b, uint64(valSize)) b = protowire.AppendVarint(b, uint64(valSize))
return f.mi.marshalAppendPointer(b, val, opts) before := len(b)
b, err = f.mi.marshalAppendPointer(b, val, opts)
if measuredSize := len(b) - before; valSize != measuredSize && err == nil {
return nil, errors.MismatchedSizeCalculation(valSize, measuredSize)
}
return b, err
} }
} }

View File

@@ -167,6 +167,7 @@ func aberrantLoadEnumDesc(t reflect.Type) protoreflect.EnumDescriptor {
ed := &filedesc.Enum{L2: new(filedesc.EnumL2)} ed := &filedesc.Enum{L2: new(filedesc.EnumL2)}
ed.L0.FullName = AberrantDeriveFullName(t) // e.g., github_com.user.repo.MyEnum ed.L0.FullName = AberrantDeriveFullName(t) // e.g., github_com.user.repo.MyEnum
ed.L0.ParentFile = filedesc.SurrogateProto3 ed.L0.ParentFile = filedesc.SurrogateProto3
ed.L1.EditionFeatures = ed.L0.ParentFile.L1.EditionFeatures
ed.L2.Values.List = append(ed.L2.Values.List, filedesc.EnumValue{}) ed.L2.Values.List = append(ed.L2.Values.List, filedesc.EnumValue{})
// TODO: Use the presence of a UnmarshalJSON method to determine proto2? // TODO: Use the presence of a UnmarshalJSON method to determine proto2?

View File

@@ -118,7 +118,7 @@ func (xi *ExtensionInfo) initFromLegacy() {
xd.L1.Number = protoreflect.FieldNumber(xi.Field) xd.L1.Number = protoreflect.FieldNumber(xi.Field)
xd.L1.Cardinality = fd.L1.Cardinality xd.L1.Cardinality = fd.L1.Cardinality
xd.L1.Kind = fd.L1.Kind xd.L1.Kind = fd.L1.Kind
xd.L2.IsPacked = fd.L1.IsPacked xd.L1.EditionFeatures = fd.L1.EditionFeatures
xd.L2.Default = fd.L1.Default xd.L2.Default = fd.L1.Default
xd.L1.Extendee = Export{}.MessageDescriptorOf(xi.ExtendedType) xd.L1.Extendee = Export{}.MessageDescriptorOf(xi.ExtendedType)
xd.L2.Enum = ed xd.L2.Enum = ed

View File

@@ -7,7 +7,7 @@ package impl
import ( import (
"bytes" "bytes"
"compress/gzip" "compress/gzip"
"io/ioutil" "io"
"sync" "sync"
"google.golang.org/protobuf/internal/filedesc" "google.golang.org/protobuf/internal/filedesc"
@@ -51,7 +51,7 @@ func legacyLoadFileDesc(b []byte) protoreflect.FileDescriptor {
if err != nil { if err != nil {
panic(err) panic(err)
} }
b2, err := ioutil.ReadAll(zr) b2, err := io.ReadAll(zr)
if err != nil { if err != nil {
panic(err) panic(err)
} }
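
The only change here is the Go 1.16+ migration from the deprecated ioutil.ReadAll to io.ReadAll; the surrounding logic (gunzip a compressed file descriptor, then read it fully) is unchanged. A small self-contained sketch of that read path, using placeholder bytes instead of a real descriptor:

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
)

func main() {
	// Prepare some gzipped bytes standing in for a compressed file descriptor.
	var zbuf bytes.Buffer
	zw := gzip.NewWriter(&zbuf)
	if _, err := zw.Write([]byte("raw descriptor bytes")); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil {
		panic(err)
	}

	// Decompress and read everything, as the hunk above now does with io.ReadAll.
	zr, err := gzip.NewReader(&zbuf)
	if err != nil {
		panic(err)
	}
	raw, err := io.ReadAll(zr)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", raw)
}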

View File

@@ -204,6 +204,7 @@ func aberrantLoadMessageDescReentrant(t reflect.Type, name protoreflect.FullName
} }
} }
md.L1.EditionFeatures = md.L0.ParentFile.L1.EditionFeatures
// Obtain a list of oneof wrapper types. // Obtain a list of oneof wrapper types.
var oneofWrappers []reflect.Type var oneofWrappers []reflect.Type
methods := make([]reflect.Method, 0, 2) methods := make([]reflect.Method, 0, 2)
@@ -250,6 +251,7 @@ func aberrantLoadMessageDescReentrant(t reflect.Type, name protoreflect.FullName
od := &md.L2.Oneofs.List[n] od := &md.L2.Oneofs.List[n]
od.L0.FullName = md.FullName().Append(protoreflect.Name(tag)) od.L0.FullName = md.FullName().Append(protoreflect.Name(tag))
od.L0.ParentFile = md.L0.ParentFile od.L0.ParentFile = md.L0.ParentFile
od.L1.EditionFeatures = md.L1.EditionFeatures
od.L0.Parent = md od.L0.Parent = md
od.L0.Index = n od.L0.Index = n
@@ -260,6 +262,7 @@ func aberrantLoadMessageDescReentrant(t reflect.Type, name protoreflect.FullName
aberrantAppendField(md, f.Type, tag, "", "") aberrantAppendField(md, f.Type, tag, "", "")
fd := &md.L2.Fields.List[len(md.L2.Fields.List)-1] fd := &md.L2.Fields.List[len(md.L2.Fields.List)-1]
fd.L1.ContainingOneof = od fd.L1.ContainingOneof = od
fd.L1.EditionFeatures = od.L1.EditionFeatures
od.L1.Fields.List = append(od.L1.Fields.List, fd) od.L1.Fields.List = append(od.L1.Fields.List, fd)
} }
} }
@@ -307,14 +310,14 @@ func aberrantAppendField(md *filedesc.Message, goType reflect.Type, tag, tagKey,
fd.L0.Parent = md fd.L0.Parent = md
fd.L0.Index = n fd.L0.Index = n
if fd.L1.IsWeak || fd.L1.HasPacked { if fd.L1.IsWeak || fd.L1.EditionFeatures.IsPacked {
fd.L1.Options = func() protoreflect.ProtoMessage { fd.L1.Options = func() protoreflect.ProtoMessage {
opts := descopts.Field.ProtoReflect().New() opts := descopts.Field.ProtoReflect().New()
if fd.L1.IsWeak { if fd.L1.IsWeak {
opts.Set(opts.Descriptor().Fields().ByName("weak"), protoreflect.ValueOfBool(true)) opts.Set(opts.Descriptor().Fields().ByName("weak"), protoreflect.ValueOfBool(true))
} }
if fd.L1.HasPacked { if fd.L1.EditionFeatures.IsPacked {
opts.Set(opts.Descriptor().Fields().ByName("packed"), protoreflect.ValueOfBool(fd.L1.IsPacked)) opts.Set(opts.Descriptor().Fields().ByName("packed"), protoreflect.ValueOfBool(fd.L1.EditionFeatures.IsPacked))
} }
return opts.Interface() return opts.Interface()
} }
@@ -344,6 +347,7 @@ func aberrantAppendField(md *filedesc.Message, goType reflect.Type, tag, tagKey,
md2.L0.ParentFile = md.L0.ParentFile md2.L0.ParentFile = md.L0.ParentFile
md2.L0.Parent = md md2.L0.Parent = md
md2.L0.Index = n md2.L0.Index = n
md2.L1.EditionFeatures = md.L1.EditionFeatures
md2.L1.IsMapEntry = true md2.L1.IsMapEntry = true
md2.L2.Options = func() protoreflect.ProtoMessage { md2.L2.Options = func() protoreflect.ProtoMessage {

View File

@@ -247,11 +247,10 @@ func (m *extensionMap) Range(f func(protoreflect.FieldDescriptor, protoreflect.V
} }
} }
} }
func (m *extensionMap) Has(xt protoreflect.ExtensionType) (ok bool) { func (m *extensionMap) Has(xd protoreflect.ExtensionTypeDescriptor) (ok bool) {
if m == nil { if m == nil {
return false return false
} }
xd := xt.TypeDescriptor()
x, ok := (*m)[int32(xd.Number())] x, ok := (*m)[int32(xd.Number())]
if !ok { if !ok {
return false return false
@@ -261,25 +260,22 @@ func (m *extensionMap) Has(xt protoreflect.ExtensionType) (ok bool) {
return x.Value().List().Len() > 0 return x.Value().List().Len() > 0
case xd.IsMap(): case xd.IsMap():
return x.Value().Map().Len() > 0 return x.Value().Map().Len() > 0
case xd.Message() != nil:
return x.Value().Message().IsValid()
} }
return true return true
} }
func (m *extensionMap) Clear(xt protoreflect.ExtensionType) { func (m *extensionMap) Clear(xd protoreflect.ExtensionTypeDescriptor) {
delete(*m, int32(xt.TypeDescriptor().Number())) delete(*m, int32(xd.Number()))
} }
func (m *extensionMap) Get(xt protoreflect.ExtensionType) protoreflect.Value { func (m *extensionMap) Get(xd protoreflect.ExtensionTypeDescriptor) protoreflect.Value {
xd := xt.TypeDescriptor()
if m != nil { if m != nil {
if x, ok := (*m)[int32(xd.Number())]; ok { if x, ok := (*m)[int32(xd.Number())]; ok {
return x.Value() return x.Value()
} }
} }
return xt.Zero() return xd.Type().Zero()
} }
func (m *extensionMap) Set(xt protoreflect.ExtensionType, v protoreflect.Value) { func (m *extensionMap) Set(xd protoreflect.ExtensionTypeDescriptor, v protoreflect.Value) {
xd := xt.TypeDescriptor() xt := xd.Type()
isValid := true isValid := true
switch { switch {
case !xt.IsValidValue(v): case !xt.IsValidValue(v):
@@ -292,7 +288,7 @@ func (m *extensionMap) Set(xt protoreflect.ExtensionType, v protoreflect.Value)
isValid = v.Message().IsValid() isValid = v.Message().IsValid()
} }
if !isValid { if !isValid {
panic(fmt.Sprintf("%v: assigning invalid value", xt.TypeDescriptor().FullName())) panic(fmt.Sprintf("%v: assigning invalid value", xd.FullName()))
} }
if *m == nil { if *m == nil {
@@ -302,16 +298,15 @@ func (m *extensionMap) Set(xt protoreflect.ExtensionType, v protoreflect.Value)
x.Set(xt, v) x.Set(xt, v)
(*m)[int32(xd.Number())] = x (*m)[int32(xd.Number())] = x
} }
func (m *extensionMap) Mutable(xt protoreflect.ExtensionType) protoreflect.Value { func (m *extensionMap) Mutable(xd protoreflect.ExtensionTypeDescriptor) protoreflect.Value {
xd := xt.TypeDescriptor()
if xd.Kind() != protoreflect.MessageKind && xd.Kind() != protoreflect.GroupKind && !xd.IsList() && !xd.IsMap() { if xd.Kind() != protoreflect.MessageKind && xd.Kind() != protoreflect.GroupKind && !xd.IsList() && !xd.IsMap() {
panic("invalid Mutable on field with non-composite type") panic("invalid Mutable on field with non-composite type")
} }
if x, ok := (*m)[int32(xd.Number())]; ok { if x, ok := (*m)[int32(xd.Number())]; ok {
return x.Value() return x.Value()
} }
v := xt.New() v := xd.Type().New()
m.Set(xt, v) m.Set(xd, v)
return v return v
} }
@@ -428,7 +423,7 @@ func (m *messageIfaceWrapper) protoUnwrap() interface{} {
// checkField verifies that the provided field descriptor is valid. // checkField verifies that the provided field descriptor is valid.
// Exactly one of the returned values is populated. // Exactly one of the returned values is populated.
func (mi *MessageInfo) checkField(fd protoreflect.FieldDescriptor) (*fieldInfo, protoreflect.ExtensionType) { func (mi *MessageInfo) checkField(fd protoreflect.FieldDescriptor) (*fieldInfo, protoreflect.ExtensionTypeDescriptor) {
var fi *fieldInfo var fi *fieldInfo
if n := fd.Number(); 0 < n && int(n) < len(mi.denseFields) { if n := fd.Number(); 0 < n && int(n) < len(mi.denseFields) {
fi = mi.denseFields[n] fi = mi.denseFields[n]
@@ -457,7 +452,7 @@ func (mi *MessageInfo) checkField(fd protoreflect.FieldDescriptor) (*fieldInfo,
if !ok { if !ok {
panic(fmt.Sprintf("extension %v does not implement protoreflect.ExtensionTypeDescriptor", fd.FullName())) panic(fmt.Sprintf("extension %v does not implement protoreflect.ExtensionTypeDescriptor", fd.FullName()))
} }
return nil, xtd.Type() return nil, xtd
} }
panic(fmt.Sprintf("field %v is invalid", fd.FullName())) panic(fmt.Sprintf("field %v is invalid", fd.FullName()))
} }

View File

@@ -27,8 +27,9 @@ func (m *messageState) protoUnwrap() interface{} {
return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem()) return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem())
} }
func (m *messageState) ProtoMethods() *protoiface.Methods { func (m *messageState) ProtoMethods() *protoiface.Methods {
m.messageInfo().init() mi := m.messageInfo()
return &m.messageInfo().methods mi.init()
return &mi.methods
} }
// ProtoMessageInfo is a pseudo-internal API for allowing the v1 code // ProtoMessageInfo is a pseudo-internal API for allowing the v1 code
@@ -41,8 +42,9 @@ func (m *messageState) ProtoMessageInfo() *MessageInfo {
} }
func (m *messageState) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) { func (m *messageState) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {
m.messageInfo().init() mi := m.messageInfo()
for _, ri := range m.messageInfo().rangeInfos { mi.init()
for _, ri := range mi.rangeInfos {
switch ri := ri.(type) { switch ri := ri.(type) {
case *fieldInfo: case *fieldInfo:
if ri.has(m.pointer()) { if ri.has(m.pointer()) {
@@ -52,77 +54,86 @@ func (m *messageState) Range(f func(protoreflect.FieldDescriptor, protoreflect.V
} }
case *oneofInfo: case *oneofInfo:
if n := ri.which(m.pointer()); n > 0 { if n := ri.which(m.pointer()); n > 0 {
fi := m.messageInfo().fields[n] fi := mi.fields[n]
if !f(fi.fieldDesc, fi.get(m.pointer())) { if !f(fi.fieldDesc, fi.get(m.pointer())) {
return return
} }
} }
} }
} }
m.messageInfo().extensionMap(m.pointer()).Range(f) mi.extensionMap(m.pointer()).Range(f)
} }
func (m *messageState) Has(fd protoreflect.FieldDescriptor) bool { func (m *messageState) Has(fd protoreflect.FieldDescriptor) bool {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.has(m.pointer()) return fi.has(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Has(xt) return mi.extensionMap(m.pointer()).Has(xd)
} }
} }
func (m *messageState) Clear(fd protoreflect.FieldDescriptor) { func (m *messageState) Clear(fd protoreflect.FieldDescriptor) {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
fi.clear(m.pointer()) fi.clear(m.pointer())
} else { } else {
m.messageInfo().extensionMap(m.pointer()).Clear(xt) mi.extensionMap(m.pointer()).Clear(xd)
} }
} }
func (m *messageState) Get(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageState) Get(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.get(m.pointer()) return fi.get(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Get(xt) return mi.extensionMap(m.pointer()).Get(xd)
} }
} }
func (m *messageState) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) { func (m *messageState) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
fi.set(m.pointer(), v) fi.set(m.pointer(), v)
} else { } else {
m.messageInfo().extensionMap(m.pointer()).Set(xt, v) mi.extensionMap(m.pointer()).Set(xd, v)
} }
} }
func (m *messageState) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageState) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.mutable(m.pointer()) return fi.mutable(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Mutable(xt) return mi.extensionMap(m.pointer()).Mutable(xd)
} }
} }
func (m *messageState) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageState) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.newField() return fi.newField()
} else { } else {
return xt.New() return xd.Type().New()
} }
} }
func (m *messageState) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor { func (m *messageState) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor {
m.messageInfo().init() mi := m.messageInfo()
if oi := m.messageInfo().oneofs[od.Name()]; oi != nil && oi.oneofDesc == od { mi.init()
if oi := mi.oneofs[od.Name()]; oi != nil && oi.oneofDesc == od {
return od.Fields().ByNumber(oi.which(m.pointer())) return od.Fields().ByNumber(oi.which(m.pointer()))
} }
panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName())) panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName()))
} }
func (m *messageState) GetUnknown() protoreflect.RawFields { func (m *messageState) GetUnknown() protoreflect.RawFields {
m.messageInfo().init() mi := m.messageInfo()
return m.messageInfo().getUnknown(m.pointer()) mi.init()
return mi.getUnknown(m.pointer())
} }
func (m *messageState) SetUnknown(b protoreflect.RawFields) { func (m *messageState) SetUnknown(b protoreflect.RawFields) {
m.messageInfo().init() mi := m.messageInfo()
m.messageInfo().setUnknown(m.pointer(), b) mi.init()
mi.setUnknown(m.pointer(), b)
} }
func (m *messageState) IsValid() bool { func (m *messageState) IsValid() bool {
return !m.pointer().IsNil() return !m.pointer().IsNil()
@@ -147,8 +158,9 @@ func (m *messageReflectWrapper) protoUnwrap() interface{} {
return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem()) return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem())
} }
func (m *messageReflectWrapper) ProtoMethods() *protoiface.Methods { func (m *messageReflectWrapper) ProtoMethods() *protoiface.Methods {
m.messageInfo().init() mi := m.messageInfo()
return &m.messageInfo().methods mi.init()
return &mi.methods
} }
// ProtoMessageInfo is a pseudo-internal API for allowing the v1 code // ProtoMessageInfo is a pseudo-internal API for allowing the v1 code
@@ -161,8 +173,9 @@ func (m *messageReflectWrapper) ProtoMessageInfo() *MessageInfo {
} }
func (m *messageReflectWrapper) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) { func (m *messageReflectWrapper) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {
m.messageInfo().init() mi := m.messageInfo()
for _, ri := range m.messageInfo().rangeInfos { mi.init()
for _, ri := range mi.rangeInfos {
switch ri := ri.(type) { switch ri := ri.(type) {
case *fieldInfo: case *fieldInfo:
if ri.has(m.pointer()) { if ri.has(m.pointer()) {
@@ -172,77 +185,86 @@ func (m *messageReflectWrapper) Range(f func(protoreflect.FieldDescriptor, proto
} }
case *oneofInfo: case *oneofInfo:
if n := ri.which(m.pointer()); n > 0 { if n := ri.which(m.pointer()); n > 0 {
fi := m.messageInfo().fields[n] fi := mi.fields[n]
if !f(fi.fieldDesc, fi.get(m.pointer())) { if !f(fi.fieldDesc, fi.get(m.pointer())) {
return return
} }
} }
} }
} }
m.messageInfo().extensionMap(m.pointer()).Range(f) mi.extensionMap(m.pointer()).Range(f)
} }
func (m *messageReflectWrapper) Has(fd protoreflect.FieldDescriptor) bool { func (m *messageReflectWrapper) Has(fd protoreflect.FieldDescriptor) bool {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.has(m.pointer()) return fi.has(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Has(xt) return mi.extensionMap(m.pointer()).Has(xd)
} }
} }
func (m *messageReflectWrapper) Clear(fd protoreflect.FieldDescriptor) { func (m *messageReflectWrapper) Clear(fd protoreflect.FieldDescriptor) {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
fi.clear(m.pointer()) fi.clear(m.pointer())
} else { } else {
m.messageInfo().extensionMap(m.pointer()).Clear(xt) mi.extensionMap(m.pointer()).Clear(xd)
} }
} }
func (m *messageReflectWrapper) Get(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageReflectWrapper) Get(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.get(m.pointer()) return fi.get(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Get(xt) return mi.extensionMap(m.pointer()).Get(xd)
} }
} }
func (m *messageReflectWrapper) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) { func (m *messageReflectWrapper) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
fi.set(m.pointer(), v) fi.set(m.pointer(), v)
} else { } else {
m.messageInfo().extensionMap(m.pointer()).Set(xt, v) mi.extensionMap(m.pointer()).Set(xd, v)
} }
} }
func (m *messageReflectWrapper) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageReflectWrapper) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.mutable(m.pointer()) return fi.mutable(m.pointer())
} else { } else {
return m.messageInfo().extensionMap(m.pointer()).Mutable(xt) return mi.extensionMap(m.pointer()).Mutable(xd)
} }
} }
func (m *messageReflectWrapper) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value { func (m *messageReflectWrapper) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value {
m.messageInfo().init() mi := m.messageInfo()
if fi, xt := m.messageInfo().checkField(fd); fi != nil { mi.init()
if fi, xd := mi.checkField(fd); fi != nil {
return fi.newField() return fi.newField()
} else { } else {
return xt.New() return xd.Type().New()
} }
} }
func (m *messageReflectWrapper) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor { func (m *messageReflectWrapper) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor {
m.messageInfo().init() mi := m.messageInfo()
if oi := m.messageInfo().oneofs[od.Name()]; oi != nil && oi.oneofDesc == od { mi.init()
if oi := mi.oneofs[od.Name()]; oi != nil && oi.oneofDesc == od {
return od.Fields().ByNumber(oi.which(m.pointer())) return od.Fields().ByNumber(oi.which(m.pointer()))
} }
panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName())) panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName()))
} }
func (m *messageReflectWrapper) GetUnknown() protoreflect.RawFields { func (m *messageReflectWrapper) GetUnknown() protoreflect.RawFields {
m.messageInfo().init() mi := m.messageInfo()
return m.messageInfo().getUnknown(m.pointer()) mi.init()
return mi.getUnknown(m.pointer())
} }
func (m *messageReflectWrapper) SetUnknown(b protoreflect.RawFields) { func (m *messageReflectWrapper) SetUnknown(b protoreflect.RawFields) {
m.messageInfo().init() mi := m.messageInfo()
m.messageInfo().setUnknown(m.pointer(), b) mi.init()
mi.setUnknown(m.pointer(), b)
} }
func (m *messageReflectWrapper) IsValid() bool { func (m *messageReflectWrapper) IsValid() bool {
return !m.pointer().IsNil() return !m.pointer().IsNil()

View File

@@ -51,8 +51,8 @@ import (
// 10. Send out the CL for review and submit it. // 10. Send out the CL for review and submit it.
const ( const (
Major = 1 Major = 1
Minor = 33 Minor = 34
Patch = 0 Patch = 1
PreRelease = "" PreRelease = ""
) )

View File

@@ -51,6 +51,8 @@ type UnmarshalOptions struct {
// Unmarshal parses the wire-format message in b and places the result in m. // Unmarshal parses the wire-format message in b and places the result in m.
// The provided message must be mutable (e.g., a non-nil pointer to a message). // The provided message must be mutable (e.g., a non-nil pointer to a message).
//
// See the [UnmarshalOptions] type if you need more control.
func Unmarshal(b []byte, m Message) error { func Unmarshal(b []byte, m Message) error {
_, err := UnmarshalOptions{RecursionLimit: protowire.DefaultRecursionLimit}.unmarshal(b, m.ProtoReflect()) _, err := UnmarshalOptions{RecursionLimit: protowire.DefaultRecursionLimit}.unmarshal(b, m.ProtoReflect())
return err return err
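
The added sentence points callers at [UnmarshalOptions] when the plain helper is not enough. A short usage sketch of both entry points; durationpb is used here purely as a stand-in for any generated message type:

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	b, err := proto.Marshal(durationpb.New(90 * time.Second))
	if err != nil {
		panic(err)
	}

	// Plain Unmarshal with default settings.
	var d durationpb.Duration
	if err := proto.Unmarshal(b, &d); err != nil {
		panic(err)
	}

	// UnmarshalOptions for finer control, e.g. tolerating unset required
	// fields or discarding unknown fields.
	uo := proto.UnmarshalOptions{AllowPartial: true, DiscardUnknown: true}
	if err := uo.Unmarshal(b, &d); err != nil {
		panic(err)
	}
	fmt.Println(d.AsDuration())
}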

View File

@@ -5,12 +5,17 @@
package proto package proto
import ( import (
"errors"
"fmt"
"google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/encoding/messageset"
"google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/internal/order"
"google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/pragma"
"google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/runtime/protoiface" "google.golang.org/protobuf/runtime/protoiface"
protoerrors "google.golang.org/protobuf/internal/errors"
) )
// MarshalOptions configures the marshaler. // MarshalOptions configures the marshaler.
@@ -70,7 +75,32 @@ type MarshalOptions struct {
UseCachedSize bool UseCachedSize bool
} }
// flags turns the specified MarshalOptions (user-facing) into
// protoiface.MarshalInputFlags (used internally by the marshaler).
//
// See impl.marshalOptions.Options for the inverse operation.
func (o MarshalOptions) flags() protoiface.MarshalInputFlags {
var flags protoiface.MarshalInputFlags
// Note: o.AllowPartial is always forced to true by MarshalOptions.marshal,
// which is why it is not a part of MarshalInputFlags.
if o.Deterministic {
flags |= protoiface.MarshalDeterministic
}
if o.UseCachedSize {
flags |= protoiface.MarshalUseCachedSize
}
return flags
}
// Marshal returns the wire-format encoding of m. // Marshal returns the wire-format encoding of m.
//
// This is the most common entry point for encoding a Protobuf message.
//
// See the [MarshalOptions] type if you need more control.
func Marshal(m Message) ([]byte, error) { func Marshal(m Message) ([]byte, error) {
// Treat nil message interface as an empty message; nothing to output. // Treat nil message interface as an empty message; nothing to output.
if m == nil { if m == nil {
@@ -116,6 +146,9 @@ func emptyBytesForMessage(m Message) []byte {
// MarshalAppend appends the wire-format encoding of m to b, // MarshalAppend appends the wire-format encoding of m to b,
// returning the result. // returning the result.
//
// This is a less common entry point than [Marshal], which is only needed if you
// need to supply your own buffers for performance reasons.
func (o MarshalOptions) MarshalAppend(b []byte, m Message) ([]byte, error) { func (o MarshalOptions) MarshalAppend(b []byte, m Message) ([]byte, error) {
// Treat nil message interface as an empty message; nothing to append. // Treat nil message interface as an empty message; nothing to append.
if m == nil { if m == nil {
@@ -145,12 +178,7 @@ func (o MarshalOptions) marshal(b []byte, m protoreflect.Message) (out protoifac
in := protoiface.MarshalInput{ in := protoiface.MarshalInput{
Message: m, Message: m,
Buf: b, Buf: b,
} Flags: o.flags(),
if o.Deterministic {
in.Flags |= protoiface.MarshalDeterministic
}
if o.UseCachedSize {
in.Flags |= protoiface.MarshalUseCachedSize
} }
if methods.Size != nil { if methods.Size != nil {
sout := methods.Size(protoiface.SizeInput{ sout := methods.Size(protoiface.SizeInput{
@@ -168,6 +196,10 @@ func (o MarshalOptions) marshal(b []byte, m protoreflect.Message) (out protoifac
out.Buf, err = o.marshalMessageSlow(b, m) out.Buf, err = o.marshalMessageSlow(b, m)
} }
if err != nil { if err != nil {
var mismatch *protoerrors.SizeMismatchError
if errors.As(err, &mismatch) {
return out, fmt.Errorf("marshaling %s: %v", string(m.Descriptor().FullName()), err)
}
return out, err return out, err
} }
if allowPartial { if allowPartial {
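
Besides the new doc comments, this hunk factors the Deterministic/UseCachedSize handling into flags() and wraps any SizeMismatchError with the full name of the message being marshaled. From the caller's side the options look like this; durationpb again stands in for a generated message type:

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	msg := durationpb.New(90 * time.Second)

	// Common case: one-shot encode with default options.
	b, err := proto.Marshal(msg)
	if err != nil {
		panic(err)
	}

	// Finer control: deterministic output into a caller-owned buffer.
	// Deterministic and UseCachedSize are exactly the options the new
	// flags() helper maps onto protoiface.MarshalInputFlags.
	opts := proto.MarshalOptions{Deterministic: true}
	buf := make([]byte, 0, opts.Size(msg))
	buf, err = opts.MarshalAppend(buf, msg)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(b), len(buf)) // same length for the same message
}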

View File

@@ -11,18 +11,21 @@ import (
// HasExtension reports whether an extension field is populated. // HasExtension reports whether an extension field is populated.
// It returns false if m is invalid or if xt does not extend m. // It returns false if m is invalid or if xt does not extend m.
func HasExtension(m Message, xt protoreflect.ExtensionType) bool { func HasExtension(m Message, xt protoreflect.ExtensionType) bool {
// Treat nil message interface as an empty message; no populated fields. // Treat nil message interface or descriptor as an empty message; no populated
if m == nil { // fields.
if m == nil || xt == nil {
return false return false
} }
// As a special-case, we reports invalid or mismatching descriptors // As a special-case, we reports invalid or mismatching descriptors
// as always not being populated (since they aren't). // as always not being populated (since they aren't).
if xt == nil || m.ProtoReflect().Descriptor() != xt.TypeDescriptor().ContainingMessage() { mr := m.ProtoReflect()
xd := xt.TypeDescriptor()
if mr.Descriptor() != xd.ContainingMessage() {
return false return false
} }
return m.ProtoReflect().Has(xt.TypeDescriptor()) return mr.Has(xd)
} }
// ClearExtension clears an extension field such that subsequent // ClearExtension clears an extension field such that subsequent
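
HasExtension is now explicitly nil-safe: a nil message or a nil extension type reports false instead of reaching the descriptor comparison. A tiny sketch of the nil cases; with a real generated extension you would pass its exported ExtensionType (for example yourpb.E_YourExtension, a hypothetical name) as the second argument:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	var m *descriptorpb.FileOptions // typed nil message

	// Both calls report false rather than panicking.
	fmt.Println(proto.HasExtension(nil, nil)) // nil message interface
	fmt.Println(proto.HasExtension(m, nil))   // nil extension type
}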

View File

@@ -47,11 +47,16 @@ func (o MarshalOptions) marshalMessageSet(b []byte, m protoreflect.Message) ([]b
func (o MarshalOptions) marshalMessageSetField(b []byte, fd protoreflect.FieldDescriptor, value protoreflect.Value) ([]byte, error) { func (o MarshalOptions) marshalMessageSetField(b []byte, fd protoreflect.FieldDescriptor, value protoreflect.Value) ([]byte, error) {
b = messageset.AppendFieldStart(b, fd.Number()) b = messageset.AppendFieldStart(b, fd.Number())
b = protowire.AppendTag(b, messageset.FieldMessage, protowire.BytesType) b = protowire.AppendTag(b, messageset.FieldMessage, protowire.BytesType)
b = protowire.AppendVarint(b, uint64(o.Size(value.Message().Interface()))) calculatedSize := o.Size(value.Message().Interface())
b = protowire.AppendVarint(b, uint64(calculatedSize))
before := len(b)
b, err := o.marshalMessage(b, value.Message()) b, err := o.marshalMessage(b, value.Message())
if err != nil { if err != nil {
return b, err return b, err
} }
if measuredSize := len(b) - before; calculatedSize != measuredSize {
return nil, errors.MismatchedSizeCalculation(calculatedSize, measuredSize)
}
b = messageset.AppendFieldEnd(b) b = messageset.AppendFieldEnd(b)
return b, nil return b, nil
} }

View File

@@ -34,6 +34,7 @@ func (o MarshalOptions) size(m protoreflect.Message) (size int) {
if methods != nil && methods.Size != nil { if methods != nil && methods.Size != nil {
out := methods.Size(protoiface.SizeInput{ out := methods.Size(protoiface.SizeInput{
Message: m, Message: m,
Flags: o.flags(),
}) })
return out.Size return out.Size
} }
@@ -42,6 +43,7 @@ func (o MarshalOptions) size(m protoreflect.Message) (size int) {
// This case is mainly used for legacy types with a Marshal method. // This case is mainly used for legacy types with a Marshal method.
out, _ := methods.Marshal(protoiface.MarshalInput{ out, _ := methods.Marshal(protoiface.MarshalInput{
Message: m, Message: m,
Flags: o.flags(),
}) })
return len(out.Buf) return len(out.Buf)
} }

View File

@@ -161,7 +161,7 @@ const (
// IsValid reports whether the syntax is valid. // IsValid reports whether the syntax is valid.
func (s Syntax) IsValid() bool { func (s Syntax) IsValid() bool {
switch s { switch s {
case Proto2, Proto3: case Proto2, Proto3, Editions:
return true return true
default: default:
return false return false
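
Syntax.IsValid now accepts Editions alongside Proto2 and Proto3, matching the editions support threaded through the rest of this release. A one-line check:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	for _, s := range []protoreflect.Syntax{protoreflect.Proto2, protoreflect.Proto3, protoreflect.Editions} {
		fmt.Println(s, s.IsValid()) // all three report true with this change
	}
}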

View File

@@ -373,6 +373,8 @@ func (p *SourcePath) appendFieldOptions(b []byte) []byte {
b = p.appendRepeatedField(b, "edition_defaults", (*SourcePath).appendFieldOptions_EditionDefault) b = p.appendRepeatedField(b, "edition_defaults", (*SourcePath).appendFieldOptions_EditionDefault)
case 21: case 21:
b = p.appendSingularField(b, "features", (*SourcePath).appendFeatureSet) b = p.appendSingularField(b, "features", (*SourcePath).appendFeatureSet)
case 22:
b = p.appendSingularField(b, "feature_support", (*SourcePath).appendFieldOptions_FeatureSupport)
case 999: case 999:
b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption)
} }
@@ -519,6 +521,23 @@ func (p *SourcePath) appendFieldOptions_EditionDefault(b []byte) []byte {
return b return b
} }
func (p *SourcePath) appendFieldOptions_FeatureSupport(b []byte) []byte {
if len(*p) == 0 {
return b
}
switch (*p)[0] {
case 1:
b = p.appendSingularField(b, "edition_introduced", nil)
case 2:
b = p.appendSingularField(b, "edition_deprecated", nil)
case 3:
b = p.appendSingularField(b, "deprecation_warning", nil)
case 4:
b = p.appendSingularField(b, "edition_removed", nil)
}
return b
}
func (p *SourcePath) appendUninterpretedOption_NamePart(b []byte) []byte { func (p *SourcePath) appendUninterpretedOption_NamePart(b []byte) []byte {
if len(*p) == 0 { if len(*p) == 0 {
return b return b
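
The new appendFieldOptions_FeatureSupport case lets SourcePath render the feature_support sub-fields (field 22 of FieldOptions, with sub-fields 1-4 as listed above) when formatting source locations. For example, a location path pointing at the deprecation_warning of the second field of the first message should render roughly as below; the leading path values are assumptions based on descriptor.proto field numbers:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	// 4 = message_type, 2 = field, 8 = options,
	// 22 = feature_support, 3 = deprecation_warning.
	p := protoreflect.SourcePath{4, 0, 2, 1, 8, 22, 3}
	fmt.Println(p.String())
	// expected: .message_type[0].field[1].options.feature_support.deprecation_warning
}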

View File

@@ -544,6 +544,12 @@ type EnumDescriptor interface {
// ReservedRanges is a list of reserved ranges of enum numbers. // ReservedRanges is a list of reserved ranges of enum numbers.
ReservedRanges() EnumRanges ReservedRanges() EnumRanges
// IsClosed reports whether this enum uses closed semantics.
// See https://protobuf.dev/programming-guides/enum/#definitions.
// Note: the Go protobuf implementation is not spec compliant and treats
// all enums as open enums.
IsClosed() bool
isEnumDescriptor isEnumDescriptor
} }
type isEnumDescriptor interface{ ProtoType(EnumDescriptor) } type isEnumDescriptor interface{ ProtoType(EnumDescriptor) }
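
IsClosed is a new required method on EnumDescriptor, distinguishing closed (proto2-style) from open (proto3/editions-style) enums, even though the Go runtime continues to treat every enum as open. A quick probe against two well-known descriptors:

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/descriptorpb"
	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	// descriptor.proto is proto2, so its enums use closed semantics.
	fmt.Println(descriptorpb.FieldDescriptorProto_TYPE_INT32.Descriptor().IsClosed()) // expected: true
	// struct.proto is proto3, so NullValue is an open enum.
	fmt.Println(structpb.NullValue_NULL_VALUE.Descriptor().IsClosed()) // expected: false
}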

16
vendor/modules.txt vendored
View File

@@ -1,7 +1,7 @@
# github.com/beorn7/perks v1.0.1 # github.com/beorn7/perks v1.0.1
## explicit; go 1.11 ## explicit; go 1.11
github.com/beorn7/perks/quantile github.com/beorn7/perks/quantile
# github.com/cespare/xxhash/v2 v2.2.0 # github.com/cespare/xxhash/v2 v2.3.0
## explicit; go 1.11 ## explicit; go 1.11
github.com/cespare/xxhash/v2 github.com/cespare/xxhash/v2
# github.com/davecgh/go-spew v1.1.1 # github.com/davecgh/go-spew v1.1.1
@@ -45,16 +45,16 @@ github.com/pmezard/go-difflib/difflib
github.com/prometheus/client_golang/prometheus github.com/prometheus/client_golang/prometheus
github.com/prometheus/client_golang/prometheus/internal github.com/prometheus/client_golang/prometheus/internal
github.com/prometheus/client_golang/prometheus/promhttp github.com/prometheus/client_golang/prometheus/promhttp
# github.com/prometheus/client_model v0.6.0 # github.com/prometheus/client_model v0.6.1
## explicit; go 1.19 ## explicit; go 1.19
github.com/prometheus/client_model/go github.com/prometheus/client_model/go
# github.com/prometheus/common v0.50.0 # github.com/prometheus/common v0.53.0
## explicit; go 1.21 ## explicit; go 1.20
github.com/prometheus/common/expfmt github.com/prometheus/common/expfmt
github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg
github.com/prometheus/common/model github.com/prometheus/common/model
# github.com/prometheus/procfs v0.13.0 # github.com/prometheus/procfs v0.15.0
## explicit; go 1.19 ## explicit; go 1.21
github.com/prometheus/procfs github.com/prometheus/procfs
github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/fs
github.com/prometheus/procfs/internal/util github.com/prometheus/procfs/internal/util
@@ -71,7 +71,7 @@ golang.org/x/crypto/acme
golang.org/x/crypto/acme/autocert golang.org/x/crypto/acme/autocert
golang.org/x/crypto/bcrypt golang.org/x/crypto/bcrypt
golang.org/x/crypto/blowfish golang.org/x/crypto/blowfish
# golang.org/x/net v0.23.0 # golang.org/x/net v0.25.0
## explicit; go 1.18 ## explicit; go 1.18
golang.org/x/net/idna golang.org/x/net/idna
# golang.org/x/sys v0.20.0 # golang.org/x/sys v0.20.0
@@ -84,7 +84,7 @@ golang.org/x/text/secure/bidirule
golang.org/x/text/transform golang.org/x/text/transform
golang.org/x/text/unicode/bidi golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm golang.org/x/text/unicode/norm
# google.golang.org/protobuf v1.33.0 # google.golang.org/protobuf v1.34.1
## explicit; go 1.17 ## explicit; go 1.17
google.golang.org/protobuf/encoding/protodelim google.golang.org/protobuf/encoding/protodelim
google.golang.org/protobuf/encoding/prototext google.golang.org/protobuf/encoding/prototext