chore: update dependencies

Cyrille Nofficial 2022-01-03 19:21:35 +01:00
parent 2cc7ee5070
commit d1788152f7
70 changed files with 2459 additions and 9330 deletions

go.mod
View File

@ -3,12 +3,13 @@ module github.com/cyrilix/robocar-camera
go 1.17
require (
github.com/cyrilix/robocar-base v0.1.5
github.com/cyrilix/robocar-protobuf/go v1.0.3
github.com/cyrilix/robocar-base v0.1.6
github.com/cyrilix/robocar-protobuf/go v1.0.4
github.com/eclipse/paho.mqtt.golang v1.3.5
github.com/golang/protobuf v1.5.2
go.uber.org/zap v1.19.1
gocv.io/x/gocv v0.26.0
gocv.io/x/gocv v0.29.0
google.golang.org/protobuf v1.27.1
)
require (
@ -16,5 +17,4 @@ require (
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 // indirect
google.golang.org/protobuf v1.26.0 // indirect
)

go.sum
View File

@ -8,10 +8,10 @@ github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/cyrilix/robocar-base v0.1.5 h1:EfbYHB69hgyQCVuzZ9/ifdSrQfXS7+04M8O9BDu1/5w=
github.com/cyrilix/robocar-base v0.1.5/go.mod h1:tb7R5OFoBn9EWNLX3Kzx6R/3cQ9/7r8XsHvlLSESOAM=
github.com/cyrilix/robocar-protobuf/go v1.0.3 h1:iPHw2+7FVXG2C4+Th1m11hQ+2RpAQzlxKhc5M7XOa6Q=
github.com/cyrilix/robocar-protobuf/go v1.0.3/go.mod h1:xb95cK07lYXnKcHZKnGafmAgYRrqZWZgV9LMiJAp+gE=
github.com/cyrilix/robocar-base v0.1.6 h1:VVcSZD8DPsha3XDLxRBMvtcd6uC8CcIjqbxG482dxvo=
github.com/cyrilix/robocar-base v0.1.6/go.mod h1:m5ov/7hpRHi0yMp2prKafL6UEsM2O71Uea85WR0/jjI=
github.com/cyrilix/robocar-protobuf/go v1.0.4 h1:XTolFYbiKw4gQ2l+z/LMZkLrmAUMzlHcQBzp/czlANo=
github.com/cyrilix/robocar-protobuf/go v1.0.4/go.mod h1:1fyGMVm4ZodfYRrbWCEQgtvKyvrhyTBe5zA7/Qeh/H0=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -48,6 +48,7 @@ github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/hybridgroup/mjpeg v0.0.0-20140228234708-4680f319790e/go.mod h1:eagM805MRKrioHYuU7iKLUyFPVKqVV6um5DAvCkUtXs=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@ -65,6 +66,7 @@ github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1Cpa
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@ -94,8 +96,8 @@ go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.19.1 h1:ue41HOKd1vGURxrmeKIgELGb3jPW9DMUDGtsinblHwI=
go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI=
gocv.io/x/gocv v0.26.0 h1:1azNvYEM245YN1bdw/WdX5YJzLg3Sr4STX0MqdWBIXM=
gocv.io/x/gocv v0.26.0/go.mod h1:7Ju5KbPo+R85evmlhhKPVMwXtgDRNX/PtfVfbToSrLU=
gocv.io/x/gocv v0.29.0 h1:Zg5ZoIFSY4oBehoIRoSaSeY+KF+nvqv1O1qNmALiMec=
gocv.io/x/gocv v0.29.0/go.mod h1:oc6FvfYqfBp99p+yOEzs9tbYF9gOrAQSeL/dyIPefJU=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@ -145,8 +147,9 @@ google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9Ywl
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=

View File

@ -4,10 +4,10 @@ import (
"fmt"
"github.com/cyrilix/robocar-protobuf/go/events"
mqtt "github.com/eclipse/paho.mqtt.golang"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes/timestamp"
"go.uber.org/zap"
"gocv.io/x/gocv"
"google.golang.org/protobuf/proto"
"image"
"io"
"sync"
@ -20,14 +20,14 @@ type VideoSource interface {
}
type OpencvCameraPart struct {
client mqtt.Client
vc VideoSource
topic string
topicRoi string
client mqtt.Client
vc VideoSource
topic string
topicRoi string
publishFrequency int
muImgBuffered sync.Mutex
imgBuffered *gocv.Mat
horizon int
horizon int
cancel chan interface{}
}
@ -48,7 +48,7 @@ func New(client mqtt.Client, topic string, topicRoi string, publishFrequency int
client: client,
vc: vc,
topic: topic,
topicRoi: topicRoi,
topicRoi: topicRoi,
publishFrequency: publishFrequency,
imgBuffered: &img,
}
@ -109,6 +109,7 @@ func (o *OpencvCameraPart) publishFrame(tickerTime time.Time, topic string, fram
zap.S().With("topic", topic).Errorf("unable to convert image to jpeg: %v", err)
return
}
defer img.Close()
msg := &events.FrameMessage{
Id: &events.FrameRef{
@ -119,7 +120,7 @@ func (o *OpencvCameraPart) publishFrame(tickerTime time.Time, topic string, fram
Nanos: int32(tickerTime.Nanosecond()),
},
},
Frame: img,
Frame: img.GetBytes(),
}
payload, err := proto.Marshal(msg)
@ -130,7 +131,6 @@ func (o *OpencvCameraPart) publishFrame(tickerTime time.Time, topic string, fram
publish(o.client, topic, &payload)
}
var publish = func(client mqtt.Client, topic string, payload *[]byte) {
token := client.Publish(topic, 0, false, *payload)
token.WaitTimeout(10 * time.Millisecond)
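
Note: the hunks above track two API changes pulled in by this commit: gocv v0.29.0's IMEncode returns a *gocv.NativeByteBuffer instead of a byte slice (hence the added defer img.Close() and Frame: img.GetBytes()), and marshaling now goes through the google.golang.org/protobuf v2 package. Below is a minimal sketch of the resulting encode pattern; the encodeFrame helper, its package name, and the omitted Id field are illustrative only, not part of the commit.

// Sketch only: hypothetical helper showing the gocv v0.29 + protobuf v2 encode pattern.
package camera

import (
	"fmt"

	"github.com/cyrilix/robocar-protobuf/go/events"
	"gocv.io/x/gocv"
	"google.golang.org/protobuf/proto"
)

func encodeFrame(frame gocv.Mat) ([]byte, error) {
	// Since gocv v0.28+, IMEncode returns a *NativeByteBuffer that must be closed.
	img, err := gocv.IMEncode(gocv.JPEGFileExt, frame)
	if err != nil {
		return nil, fmt.Errorf("unable to convert image to jpeg: %w", err)
	}
	defer img.Close()

	msg := &events.FrameMessage{
		// Id (an events.FrameRef with name and timestamp) is omitted in this sketch.
		Frame: img.GetBytes(),
	}
	// proto.Marshal comes from google.golang.org/protobuf/proto (v2 API).
	return proto.Marshal(msg)
}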

View File

@ -4,9 +4,9 @@ import (
"bytes"
"github.com/cyrilix/robocar-protobuf/go/events"
mqtt "github.com/eclipse/paho.mqtt.golang"
"github.com/golang/protobuf/proto"
"go.uber.org/zap"
"gocv.io/x/gocv"
"google.golang.org/protobuf/proto"
"image/jpeg"
"sync"
"testing"
@ -90,12 +90,12 @@ func TestOpencvCameraPart(t *testing.T) {
// Uncomment to debug image cropping
/*
dir, f := path.Split(fmt.Sprintf("/tmp/%s.jpg", tpc))
os.MkdirAll(dir, os.FileMode(0755))
err = ioutil.WriteFile(path.Join(dir, f), frameMsg.GetFrame(), os.FileMode(0644) )
if err != nil {
t.Errorf("unable to write image for topic %s: %v", tpc, err)
}
*/
dir, f := path.Split(fmt.Sprintf("/tmp/%s.jpg", tpc))
os.MkdirAll(dir, os.FileMode(0755))
err = ioutil.WriteFile(path.Join(dir, f), frameMsg.GetFrame(), os.FileMode(0644) )
if err != nil {
t.Errorf("unable to write image for topic %s: %v", tpc, err)
}
*/
}
}
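
For reference, a minimal sketch of checking a published payload with the v2 proto API and the image/jpeg import used above; the checkFramePayload helper and its expected-size arguments are hypothetical, not part of this test.

// Sketch only: decode a published FrameMessage payload and check the JPEG dimensions.
package camera

import (
	"bytes"
	"image/jpeg"
	"testing"

	"github.com/cyrilix/robocar-protobuf/go/events"
	"google.golang.org/protobuf/proto"
)

func checkFramePayload(t *testing.T, payload []byte, wantWidth, wantHeight int) {
	t.Helper()

	frameMsg := &events.FrameMessage{}
	if err := proto.Unmarshal(payload, frameMsg); err != nil {
		t.Fatalf("unable to unmarshal FrameMessage: %v", err)
	}
	img, err := jpeg.Decode(bytes.NewReader(frameMsg.GetFrame()))
	if err != nil {
		t.Fatalf("frame is not a valid jpeg: %v", err)
	}
	if b := img.Bounds(); b.Dx() != wantWidth || b.Dy() != wantHeight {
		t.Errorf("bad frame size: got %dx%d, want %dx%d", b.Dx(), b.Dy(), wantWidth, wantHeight)
	}
}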

View File

@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc-gen-go v1.27.1
// protoc v3.12.4
// source: events/events.proto

View File

@ -1,324 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"errors"
"fmt"
"google.golang.org/protobuf/encoding/prototext"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/runtime/protoimpl"
)
const (
WireVarint = 0
WireFixed32 = 5
WireFixed64 = 1
WireBytes = 2
WireStartGroup = 3
WireEndGroup = 4
)
// EncodeVarint returns the varint encoded bytes of v.
func EncodeVarint(v uint64) []byte {
return protowire.AppendVarint(nil, v)
}
// SizeVarint returns the length of the varint encoded bytes of v.
// This is equal to len(EncodeVarint(v)).
func SizeVarint(v uint64) int {
return protowire.SizeVarint(v)
}
// DecodeVarint parses a varint encoded integer from b,
// returning the integer value and the length of the varint.
// It returns (0, 0) if there is a parse error.
func DecodeVarint(b []byte) (uint64, int) {
v, n := protowire.ConsumeVarint(b)
if n < 0 {
return 0, 0
}
return v, n
}
// Buffer is a buffer for encoding and decoding the protobuf wire format.
// It may be reused between invocations to reduce memory usage.
type Buffer struct {
buf []byte
idx int
deterministic bool
}
// NewBuffer allocates a new Buffer initialized with buf,
// where the contents of buf are considered the unread portion of the buffer.
func NewBuffer(buf []byte) *Buffer {
return &Buffer{buf: buf}
}
// SetDeterministic specifies whether to use deterministic serialization.
//
// Deterministic serialization guarantees that for a given binary, equal
// messages will always be serialized to the same bytes. This implies:
//
// - Repeated serialization of a message will return the same bytes.
// - Different processes of the same binary (which may be executing on
// different machines) will serialize equal messages to the same bytes.
//
// Note that the deterministic serialization is NOT canonical across
// languages. It is not guaranteed to remain stable over time. It is unstable
// across different builds with schema changes due to unknown fields.
// Users who need canonical serialization (e.g., persistent storage in a
// canonical form, fingerprinting, etc.) should define their own
// canonicalization specification and implement their own serializer rather
// than relying on this API.
//
// If deterministic serialization is requested, map entries will be sorted
// by keys in lexicographical order. This is an implementation detail and
// subject to change.
func (b *Buffer) SetDeterministic(deterministic bool) {
b.deterministic = deterministic
}
// SetBuf sets buf as the internal buffer,
// where the contents of buf are considered the unread portion of the buffer.
func (b *Buffer) SetBuf(buf []byte) {
b.buf = buf
b.idx = 0
}
// Reset clears the internal buffer of all written and unread data.
func (b *Buffer) Reset() {
b.buf = b.buf[:0]
b.idx = 0
}
// Bytes returns the internal buffer.
func (b *Buffer) Bytes() []byte {
return b.buf
}
// Unread returns the unread portion of the buffer.
func (b *Buffer) Unread() []byte {
return b.buf[b.idx:]
}
// Marshal appends the wire-format encoding of m to the buffer.
func (b *Buffer) Marshal(m Message) error {
var err error
b.buf, err = marshalAppend(b.buf, m, b.deterministic)
return err
}
// Unmarshal parses the wire-format message in the buffer and
// places the decoded results in m.
// It does not reset m before unmarshaling.
func (b *Buffer) Unmarshal(m Message) error {
err := UnmarshalMerge(b.Unread(), m)
b.idx = len(b.buf)
return err
}
type unknownFields struct{ XXX_unrecognized protoimpl.UnknownFields }
func (m *unknownFields) String() string { panic("not implemented") }
func (m *unknownFields) Reset() { panic("not implemented") }
func (m *unknownFields) ProtoMessage() { panic("not implemented") }
// DebugPrint dumps the encoded bytes of b with a header and footer including s
// to stdout. This is only intended for debugging.
func (*Buffer) DebugPrint(s string, b []byte) {
m := MessageReflect(new(unknownFields))
m.SetUnknown(b)
b, _ = prototext.MarshalOptions{AllowPartial: true, Indent: "\t"}.Marshal(m.Interface())
fmt.Printf("==== %s ====\n%s==== %s ====\n", s, b, s)
}
// EncodeVarint appends an unsigned varint encoding to the buffer.
func (b *Buffer) EncodeVarint(v uint64) error {
b.buf = protowire.AppendVarint(b.buf, v)
return nil
}
// EncodeZigzag32 appends a 32-bit zig-zag varint encoding to the buffer.
func (b *Buffer) EncodeZigzag32(v uint64) error {
return b.EncodeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31))))
}
// EncodeZigzag64 appends a 64-bit zig-zag varint encoding to the buffer.
func (b *Buffer) EncodeZigzag64(v uint64) error {
return b.EncodeVarint(uint64((uint64(v) << 1) ^ uint64((int64(v) >> 63))))
}
// EncodeFixed32 appends a 32-bit little-endian integer to the buffer.
func (b *Buffer) EncodeFixed32(v uint64) error {
b.buf = protowire.AppendFixed32(b.buf, uint32(v))
return nil
}
// EncodeFixed64 appends a 64-bit little-endian integer to the buffer.
func (b *Buffer) EncodeFixed64(v uint64) error {
b.buf = protowire.AppendFixed64(b.buf, uint64(v))
return nil
}
// EncodeRawBytes appends a length-prefixed raw bytes to the buffer.
func (b *Buffer) EncodeRawBytes(v []byte) error {
b.buf = protowire.AppendBytes(b.buf, v)
return nil
}
// EncodeStringBytes appends a length-prefixed raw bytes to the buffer.
// It does not validate whether v contains valid UTF-8.
func (b *Buffer) EncodeStringBytes(v string) error {
b.buf = protowire.AppendString(b.buf, v)
return nil
}
// EncodeMessage appends a length-prefixed encoded message to the buffer.
func (b *Buffer) EncodeMessage(m Message) error {
var err error
b.buf = protowire.AppendVarint(b.buf, uint64(Size(m)))
b.buf, err = marshalAppend(b.buf, m, b.deterministic)
return err
}
// DecodeVarint consumes an encoded unsigned varint from the buffer.
func (b *Buffer) DecodeVarint() (uint64, error) {
v, n := protowire.ConsumeVarint(b.buf[b.idx:])
if n < 0 {
return 0, protowire.ParseError(n)
}
b.idx += n
return uint64(v), nil
}
// DecodeZigzag32 consumes an encoded 32-bit zig-zag varint from the buffer.
func (b *Buffer) DecodeZigzag32() (uint64, error) {
v, err := b.DecodeVarint()
if err != nil {
return 0, err
}
return uint64((uint32(v) >> 1) ^ uint32((int32(v&1)<<31)>>31)), nil
}
// DecodeZigzag64 consumes an encoded 64-bit zig-zag varint from the buffer.
func (b *Buffer) DecodeZigzag64() (uint64, error) {
v, err := b.DecodeVarint()
if err != nil {
return 0, err
}
return uint64((uint64(v) >> 1) ^ uint64((int64(v&1)<<63)>>63)), nil
}
// DecodeFixed32 consumes a 32-bit little-endian integer from the buffer.
func (b *Buffer) DecodeFixed32() (uint64, error) {
v, n := protowire.ConsumeFixed32(b.buf[b.idx:])
if n < 0 {
return 0, protowire.ParseError(n)
}
b.idx += n
return uint64(v), nil
}
// DecodeFixed64 consumes a 64-bit little-endian integer from the buffer.
func (b *Buffer) DecodeFixed64() (uint64, error) {
v, n := protowire.ConsumeFixed64(b.buf[b.idx:])
if n < 0 {
return 0, protowire.ParseError(n)
}
b.idx += n
return uint64(v), nil
}
// DecodeRawBytes consumes a length-prefixed raw bytes from the buffer.
// If alloc is specified, it returns a copy of the raw bytes
// rather than a sub-slice of the buffer.
func (b *Buffer) DecodeRawBytes(alloc bool) ([]byte, error) {
v, n := protowire.ConsumeBytes(b.buf[b.idx:])
if n < 0 {
return nil, protowire.ParseError(n)
}
b.idx += n
if alloc {
v = append([]byte(nil), v...)
}
return v, nil
}
// DecodeStringBytes consumes a length-prefixed raw bytes from the buffer.
// It does not validate whether the raw bytes contain valid UTF-8.
func (b *Buffer) DecodeStringBytes() (string, error) {
v, n := protowire.ConsumeString(b.buf[b.idx:])
if n < 0 {
return "", protowire.ParseError(n)
}
b.idx += n
return v, nil
}
// DecodeMessage consumes a length-prefixed message from the buffer.
// It does not reset m before unmarshaling.
func (b *Buffer) DecodeMessage(m Message) error {
v, err := b.DecodeRawBytes(false)
if err != nil {
return err
}
return UnmarshalMerge(v, m)
}
// DecodeGroup consumes a message group from the buffer.
// It assumes that the start group marker has already been consumed and
// consumes all bytes until (and including) the end group marker.
// It does not reset m before unmarshaling.
func (b *Buffer) DecodeGroup(m Message) error {
v, n, err := consumeGroup(b.buf[b.idx:])
if err != nil {
return err
}
b.idx += n
return UnmarshalMerge(v, m)
}
// consumeGroup parses b until it finds an end group marker, returning
// the raw bytes of the message (excluding the end group marker) and
// the total length of the message (including the end group marker).
func consumeGroup(b []byte) ([]byte, int, error) {
b0 := b
depth := 1 // assume this follows a start group marker
for {
_, wtyp, tagLen := protowire.ConsumeTag(b)
if tagLen < 0 {
return nil, 0, protowire.ParseError(tagLen)
}
b = b[tagLen:]
var valLen int
switch wtyp {
case protowire.VarintType:
_, valLen = protowire.ConsumeVarint(b)
case protowire.Fixed32Type:
_, valLen = protowire.ConsumeFixed32(b)
case protowire.Fixed64Type:
_, valLen = protowire.ConsumeFixed64(b)
case protowire.BytesType:
_, valLen = protowire.ConsumeBytes(b)
case protowire.StartGroupType:
depth++
case protowire.EndGroupType:
depth--
default:
return nil, 0, errors.New("proto: cannot parse reserved wire type")
}
if valLen < 0 {
return nil, 0, protowire.ParseError(valLen)
}
b = b[valLen:]
if depth == 0 {
return b0[:len(b0)-len(b)-tagLen], len(b0) - len(b), nil
}
}
}

View File

@ -1,63 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"google.golang.org/protobuf/reflect/protoreflect"
)
// SetDefaults sets unpopulated scalar fields to their default values.
// Fields within a oneof are not set even if they have a default value.
// SetDefaults is recursively called upon any populated message fields.
func SetDefaults(m Message) {
if m != nil {
setDefaults(MessageReflect(m))
}
}
func setDefaults(m protoreflect.Message) {
fds := m.Descriptor().Fields()
for i := 0; i < fds.Len(); i++ {
fd := fds.Get(i)
if !m.Has(fd) {
if fd.HasDefault() && fd.ContainingOneof() == nil {
v := fd.Default()
if fd.Kind() == protoreflect.BytesKind {
v = protoreflect.ValueOf(append([]byte(nil), v.Bytes()...)) // copy the default bytes
}
m.Set(fd, v)
}
continue
}
}
m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool {
switch {
// Handle singular message.
case fd.Cardinality() != protoreflect.Repeated:
if fd.Message() != nil {
setDefaults(m.Get(fd).Message())
}
// Handle list of messages.
case fd.IsList():
if fd.Message() != nil {
ls := m.Get(fd).List()
for i := 0; i < ls.Len(); i++ {
setDefaults(ls.Get(i).Message())
}
}
// Handle map of messages.
case fd.IsMap():
if fd.MapValue().Message() != nil {
ms := m.Get(fd).Map()
ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool {
setDefaults(v.Message())
return true
})
}
}
return true
})
}

View File

@ -1,113 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"encoding/json"
"errors"
"fmt"
"strconv"
protoV2 "google.golang.org/protobuf/proto"
)
var (
// Deprecated: No longer returned.
ErrNil = errors.New("proto: Marshal called with nil")
// Deprecated: No longer returned.
ErrTooLarge = errors.New("proto: message encodes to over 2 GB")
// Deprecated: No longer returned.
ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof")
)
// Deprecated: Do not use.
type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 }
// Deprecated: Do not use.
func GetStats() Stats { return Stats{} }
// Deprecated: Do not use.
func MarshalMessageSet(interface{}) ([]byte, error) {
return nil, errors.New("proto: not implemented")
}
// Deprecated: Do not use.
func UnmarshalMessageSet([]byte, interface{}) error {
return errors.New("proto: not implemented")
}
// Deprecated: Do not use.
func MarshalMessageSetJSON(interface{}) ([]byte, error) {
return nil, errors.New("proto: not implemented")
}
// Deprecated: Do not use.
func UnmarshalMessageSetJSON([]byte, interface{}) error {
return errors.New("proto: not implemented")
}
// Deprecated: Do not use.
func RegisterMessageSetType(Message, int32, string) {}
// Deprecated: Do not use.
func EnumName(m map[int32]string, v int32) string {
s, ok := m[v]
if ok {
return s
}
return strconv.Itoa(int(v))
}
// Deprecated: Do not use.
func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) {
if data[0] == '"' {
// New style: enums are strings.
var repr string
if err := json.Unmarshal(data, &repr); err != nil {
return -1, err
}
val, ok := m[repr]
if !ok {
return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr)
}
return val, nil
}
// Old style: enums are ints.
var val int32
if err := json.Unmarshal(data, &val); err != nil {
return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName)
}
return val, nil
}
// Deprecated: Do not use; this type existed for internal use only.
type InternalMessageInfo struct{}
// Deprecated: Do not use; this method existed for internal use only.
func (*InternalMessageInfo) DiscardUnknown(m Message) {
DiscardUnknown(m)
}
// Deprecated: Do not use; this method existed for internal use only.
func (*InternalMessageInfo) Marshal(b []byte, m Message, deterministic bool) ([]byte, error) {
return protoV2.MarshalOptions{Deterministic: deterministic}.MarshalAppend(b, MessageV2(m))
}
// Deprecated: Do not use; this method existed for internal use only.
func (*InternalMessageInfo) Merge(dst, src Message) {
protoV2.Merge(MessageV2(dst), MessageV2(src))
}
// Deprecated: Do not use; this method existed for internal use only.
func (*InternalMessageInfo) Size(m Message) int {
return protoV2.Size(MessageV2(m))
}
// Deprecated: Do not use; this method existed for internal use only.
func (*InternalMessageInfo) Unmarshal(m Message, b []byte) error {
return protoV2.UnmarshalOptions{Merge: true}.Unmarshal(b, MessageV2(m))
}

View File

@ -1,58 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"google.golang.org/protobuf/reflect/protoreflect"
)
// DiscardUnknown recursively discards all unknown fields from this message
// and all embedded messages.
//
// When unmarshaling a message with unrecognized fields, the tags and values
// of such fields are preserved in the Message. This allows a later call to
// marshal to be able to produce a message that continues to have those
// unrecognized fields. To avoid this, DiscardUnknown is used to
// explicitly clear the unknown fields after unmarshaling.
func DiscardUnknown(m Message) {
if m != nil {
discardUnknown(MessageReflect(m))
}
}
func discardUnknown(m protoreflect.Message) {
m.Range(func(fd protoreflect.FieldDescriptor, val protoreflect.Value) bool {
switch {
// Handle singular message.
case fd.Cardinality() != protoreflect.Repeated:
if fd.Message() != nil {
discardUnknown(m.Get(fd).Message())
}
// Handle list of messages.
case fd.IsList():
if fd.Message() != nil {
ls := m.Get(fd).List()
for i := 0; i < ls.Len(); i++ {
discardUnknown(ls.Get(i).Message())
}
}
// Handle map of messages.
case fd.IsMap():
if fd.MapValue().Message() != nil {
ms := m.Get(fd).Map()
ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool {
discardUnknown(v.Message())
return true
})
}
}
return true
})
// Discard unknown fields.
if len(m.GetUnknown()) > 0 {
m.SetUnknown(nil)
}
}

View File

@ -1,356 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"errors"
"fmt"
"reflect"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
"google.golang.org/protobuf/runtime/protoiface"
"google.golang.org/protobuf/runtime/protoimpl"
)
type (
// ExtensionDesc represents an extension descriptor and
// is used to interact with an extension field in a message.
//
// Variables of this type are generated in code by protoc-gen-go.
ExtensionDesc = protoimpl.ExtensionInfo
// ExtensionRange represents a range of message extensions.
// Used in code generated by protoc-gen-go.
ExtensionRange = protoiface.ExtensionRangeV1
// Deprecated: Do not use; this is an internal type.
Extension = protoimpl.ExtensionFieldV1
// Deprecated: Do not use; this is an internal type.
XXX_InternalExtensions = protoimpl.ExtensionFields
)
// ErrMissingExtension reports whether the extension was not present.
var ErrMissingExtension = errors.New("proto: missing extension")
var errNotExtendable = errors.New("proto: not an extendable proto.Message")
// HasExtension reports whether the extension field is present in m
// either as an explicitly populated field or as an unknown field.
func HasExtension(m Message, xt *ExtensionDesc) (has bool) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return false
}
// Check whether any populated known field matches the field number.
xtd := xt.TypeDescriptor()
if isValidExtension(mr.Descriptor(), xtd) {
has = mr.Has(xtd)
} else {
mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool {
has = int32(fd.Number()) == xt.Field
return !has
})
}
// Check whether any unknown field matches the field number.
for b := mr.GetUnknown(); !has && len(b) > 0; {
num, _, n := protowire.ConsumeField(b)
has = int32(num) == xt.Field
b = b[n:]
}
return has
}
// ClearExtension removes the extension field from m
// either as an explicitly populated field or as an unknown field.
func ClearExtension(m Message, xt *ExtensionDesc) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return
}
xtd := xt.TypeDescriptor()
if isValidExtension(mr.Descriptor(), xtd) {
mr.Clear(xtd)
} else {
mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool {
if int32(fd.Number()) == xt.Field {
mr.Clear(fd)
return false
}
return true
})
}
clearUnknown(mr, fieldNum(xt.Field))
}
// ClearAllExtensions clears all extensions from m.
// This includes populated fields and unknown fields in the extension range.
func ClearAllExtensions(m Message) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return
}
mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool {
if fd.IsExtension() {
mr.Clear(fd)
}
return true
})
clearUnknown(mr, mr.Descriptor().ExtensionRanges())
}
// GetExtension retrieves a proto2 extended field from m.
//
// If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil),
// then GetExtension parses the encoded field and returns a Go value of the specified type.
// If the field is not present, then the default value is returned (if one is specified),
// otherwise ErrMissingExtension is reported.
//
// If the descriptor is type incomplete (i.e., ExtensionDesc.ExtensionType is nil),
// then GetExtension returns the raw encoded bytes for the extension field.
func GetExtension(m Message, xt *ExtensionDesc) (interface{}, error) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 {
return nil, errNotExtendable
}
// Retrieve the unknown fields for this extension field.
var bo protoreflect.RawFields
for bi := mr.GetUnknown(); len(bi) > 0; {
num, _, n := protowire.ConsumeField(bi)
if int32(num) == xt.Field {
bo = append(bo, bi[:n]...)
}
bi = bi[n:]
}
// For type incomplete descriptors, only retrieve the unknown fields.
if xt.ExtensionType == nil {
return []byte(bo), nil
}
// If the extension field only exists as unknown fields, unmarshal it.
// This is rarely done since proto.Unmarshal eagerly unmarshals extensions.
xtd := xt.TypeDescriptor()
if !isValidExtension(mr.Descriptor(), xtd) {
return nil, fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m)
}
if !mr.Has(xtd) && len(bo) > 0 {
m2 := mr.New()
if err := (proto.UnmarshalOptions{
Resolver: extensionResolver{xt},
}.Unmarshal(bo, m2.Interface())); err != nil {
return nil, err
}
if m2.Has(xtd) {
mr.Set(xtd, m2.Get(xtd))
clearUnknown(mr, fieldNum(xt.Field))
}
}
// Check whether the message has the extension field set or a default.
var pv protoreflect.Value
switch {
case mr.Has(xtd):
pv = mr.Get(xtd)
case xtd.HasDefault():
pv = xtd.Default()
default:
return nil, ErrMissingExtension
}
v := xt.InterfaceOf(pv)
rv := reflect.ValueOf(v)
if isScalarKind(rv.Kind()) {
rv2 := reflect.New(rv.Type())
rv2.Elem().Set(rv)
v = rv2.Interface()
}
return v, nil
}
// extensionResolver is a custom extension resolver that stores a single
// extension type that takes precedence over the global registry.
type extensionResolver struct{ xt protoreflect.ExtensionType }
func (r extensionResolver) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) {
if xtd := r.xt.TypeDescriptor(); xtd.FullName() == field {
return r.xt, nil
}
return protoregistry.GlobalTypes.FindExtensionByName(field)
}
func (r extensionResolver) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) {
if xtd := r.xt.TypeDescriptor(); xtd.ContainingMessage().FullName() == message && xtd.Number() == field {
return r.xt, nil
}
return protoregistry.GlobalTypes.FindExtensionByNumber(message, field)
}
// GetExtensions returns a list of the extensions values present in m,
// corresponding with the provided list of extension descriptors, xts.
// If an extension is missing in m, the corresponding value is nil.
func GetExtensions(m Message, xts []*ExtensionDesc) ([]interface{}, error) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return nil, errNotExtendable
}
vs := make([]interface{}, len(xts))
for i, xt := range xts {
v, err := GetExtension(m, xt)
if err != nil {
if err == ErrMissingExtension {
continue
}
return vs, err
}
vs[i] = v
}
return vs, nil
}
// SetExtension sets an extension field in m to the provided value.
func SetExtension(m Message, xt *ExtensionDesc, v interface{}) error {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 {
return errNotExtendable
}
rv := reflect.ValueOf(v)
if reflect.TypeOf(v) != reflect.TypeOf(xt.ExtensionType) {
return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", v, xt.ExtensionType)
}
if rv.Kind() == reflect.Ptr {
if rv.IsNil() {
return fmt.Errorf("proto: SetExtension called with nil value of type %T", v)
}
if isScalarKind(rv.Elem().Kind()) {
v = rv.Elem().Interface()
}
}
xtd := xt.TypeDescriptor()
if !isValidExtension(mr.Descriptor(), xtd) {
return fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m)
}
mr.Set(xtd, xt.ValueOf(v))
clearUnknown(mr, fieldNum(xt.Field))
return nil
}
// SetRawExtension inserts b into the unknown fields of m.
//
// Deprecated: Use Message.ProtoReflect.SetUnknown instead.
func SetRawExtension(m Message, fnum int32, b []byte) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return
}
// Verify that the raw field is valid.
for b0 := b; len(b0) > 0; {
num, _, n := protowire.ConsumeField(b0)
if int32(num) != fnum {
panic(fmt.Sprintf("mismatching field number: got %d, want %d", num, fnum))
}
b0 = b0[n:]
}
ClearExtension(m, &ExtensionDesc{Field: fnum})
mr.SetUnknown(append(mr.GetUnknown(), b...))
}
// ExtensionDescs returns a list of extension descriptors found in m,
// containing descriptors for both populated extension fields in m and
// also unknown fields of m that are in the extension range.
// For the latter case, a type-incomplete descriptor is provided where only
// the ExtensionDesc.Field field is populated.
// The order of the extension descriptors is undefined.
func ExtensionDescs(m Message) ([]*ExtensionDesc, error) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 {
return nil, errNotExtendable
}
// Collect a set of known extension descriptors.
extDescs := make(map[protoreflect.FieldNumber]*ExtensionDesc)
mr.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool {
if fd.IsExtension() {
xt := fd.(protoreflect.ExtensionTypeDescriptor)
if xd, ok := xt.Type().(*ExtensionDesc); ok {
extDescs[fd.Number()] = xd
}
}
return true
})
// Collect a set of unknown extension descriptors.
extRanges := mr.Descriptor().ExtensionRanges()
for b := mr.GetUnknown(); len(b) > 0; {
num, _, n := protowire.ConsumeField(b)
if extRanges.Has(num) && extDescs[num] == nil {
extDescs[num] = nil
}
b = b[n:]
}
// Transpose the set of descriptors into a list.
var xts []*ExtensionDesc
for num, xt := range extDescs {
if xt == nil {
xt = &ExtensionDesc{Field: int32(num)}
}
xts = append(xts, xt)
}
return xts, nil
}
// isValidExtension reports whether xtd is a valid extension descriptor for md.
func isValidExtension(md protoreflect.MessageDescriptor, xtd protoreflect.ExtensionTypeDescriptor) bool {
return xtd.ContainingMessage() == md && md.ExtensionRanges().Has(xtd.Number())
}
// isScalarKind reports whether k is a protobuf scalar kind (except bytes).
// This function exists for historical reasons since the representation of
// scalars differs between v1 and v2, where v1 uses *T and v2 uses T.
func isScalarKind(k reflect.Kind) bool {
switch k {
case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String:
return true
default:
return false
}
}
// clearUnknown removes unknown fields from m where remover.Has reports true.
func clearUnknown(m protoreflect.Message, remover interface {
Has(protoreflect.FieldNumber) bool
}) {
var bo protoreflect.RawFields
for bi := m.GetUnknown(); len(bi) > 0; {
num, _, n := protowire.ConsumeField(bi)
if !remover.Has(num) {
bo = append(bo, bi[:n]...)
}
bi = bi[n:]
}
if bi := m.GetUnknown(); len(bi) != len(bo) {
m.SetUnknown(bo)
}
}
type fieldNum protoreflect.FieldNumber
func (n1 fieldNum) Has(n2 protoreflect.FieldNumber) bool {
return protoreflect.FieldNumber(n1) == n2
}

View File

@ -1,306 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"fmt"
"reflect"
"strconv"
"strings"
"sync"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/runtime/protoimpl"
)
// StructProperties represents protocol buffer type information for a
// generated protobuf message in the open-struct API.
//
// Deprecated: Do not use.
type StructProperties struct {
// Prop are the properties for each field.
//
// Fields belonging to a oneof are stored in OneofTypes instead, with a
// single Properties representing the parent oneof held here.
//
// The order of Prop matches the order of fields in the Go struct.
// Struct fields that are not related to protobufs have a "XXX_" prefix
// in the Properties.Name and must be ignored by the user.
Prop []*Properties
// OneofTypes contains information about the oneof fields in this message.
// It is keyed by the protobuf field name.
OneofTypes map[string]*OneofProperties
}
// Properties represents the type information for a protobuf message field.
//
// Deprecated: Do not use.
type Properties struct {
// Name is a placeholder name with little meaningful semantic value.
// If the name has an "XXX_" prefix, the entire Properties must be ignored.
Name string
// OrigName is the protobuf field name or oneof name.
OrigName string
// JSONName is the JSON name for the protobuf field.
JSONName string
// Enum is a placeholder name for enums.
// For historical reasons, this is neither the Go name for the enum,
// nor the protobuf name for the enum.
Enum string // Deprecated: Do not use.
// Weak contains the full name of the weakly referenced message.
Weak string
// Wire is a string representation of the wire type.
Wire string
// WireType is the protobuf wire type for the field.
WireType int
// Tag is the protobuf field number.
Tag int
// Required reports whether this is a required field.
Required bool
// Optional reports whether this is an optional field.
Optional bool
// Repeated reports whether this is a repeated field.
Repeated bool
// Packed reports whether this is a packed repeated field of scalars.
Packed bool
// Proto3 reports whether this field operates under the proto3 syntax.
Proto3 bool
// Oneof reports whether this field belongs within a oneof.
Oneof bool
// Default is the default value in string form.
Default string
// HasDefault reports whether the field has a default value.
HasDefault bool
// MapKeyProp is the properties for the key field for a map field.
MapKeyProp *Properties
// MapValProp is the properties for the value field for a map field.
MapValProp *Properties
}
// OneofProperties represents the type information for a protobuf oneof.
//
// Deprecated: Do not use.
type OneofProperties struct {
// Type is a pointer to the generated wrapper type for the field value.
// This is nil for messages that are not in the open-struct API.
Type reflect.Type
// Field is the index into StructProperties.Prop for the containing oneof.
Field int
// Prop is the properties for the field.
Prop *Properties
}
// String formats the properties in the protobuf struct field tag style.
func (p *Properties) String() string {
s := p.Wire
s += "," + strconv.Itoa(p.Tag)
if p.Required {
s += ",req"
}
if p.Optional {
s += ",opt"
}
if p.Repeated {
s += ",rep"
}
if p.Packed {
s += ",packed"
}
s += ",name=" + p.OrigName
if p.JSONName != "" {
s += ",json=" + p.JSONName
}
if len(p.Enum) > 0 {
s += ",enum=" + p.Enum
}
if len(p.Weak) > 0 {
s += ",weak=" + p.Weak
}
if p.Proto3 {
s += ",proto3"
}
if p.Oneof {
s += ",oneof"
}
if p.HasDefault {
s += ",def=" + p.Default
}
return s
}
// Parse populates p by parsing a string in the protobuf struct field tag style.
func (p *Properties) Parse(tag string) {
// For example: "bytes,49,opt,name=foo,def=hello!"
for len(tag) > 0 {
i := strings.IndexByte(tag, ',')
if i < 0 {
i = len(tag)
}
switch s := tag[:i]; {
case strings.HasPrefix(s, "name="):
p.OrigName = s[len("name="):]
case strings.HasPrefix(s, "json="):
p.JSONName = s[len("json="):]
case strings.HasPrefix(s, "enum="):
p.Enum = s[len("enum="):]
case strings.HasPrefix(s, "weak="):
p.Weak = s[len("weak="):]
case strings.Trim(s, "0123456789") == "":
n, _ := strconv.ParseUint(s, 10, 32)
p.Tag = int(n)
case s == "opt":
p.Optional = true
case s == "req":
p.Required = true
case s == "rep":
p.Repeated = true
case s == "varint" || s == "zigzag32" || s == "zigzag64":
p.Wire = s
p.WireType = WireVarint
case s == "fixed32":
p.Wire = s
p.WireType = WireFixed32
case s == "fixed64":
p.Wire = s
p.WireType = WireFixed64
case s == "bytes":
p.Wire = s
p.WireType = WireBytes
case s == "group":
p.Wire = s
p.WireType = WireStartGroup
case s == "packed":
p.Packed = true
case s == "proto3":
p.Proto3 = true
case s == "oneof":
p.Oneof = true
case strings.HasPrefix(s, "def="):
// The default tag is special in that everything afterwards is the
// default regardless of the presence of commas.
p.HasDefault = true
p.Default, i = tag[len("def="):], len(tag)
}
tag = strings.TrimPrefix(tag[i:], ",")
}
}
// Init populates the properties from a protocol buffer struct tag.
//
// Deprecated: Do not use.
func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) {
p.Name = name
p.OrigName = name
if tag == "" {
return
}
p.Parse(tag)
if typ != nil && typ.Kind() == reflect.Map {
p.MapKeyProp = new(Properties)
p.MapKeyProp.Init(nil, "Key", f.Tag.Get("protobuf_key"), nil)
p.MapValProp = new(Properties)
p.MapValProp.Init(nil, "Value", f.Tag.Get("protobuf_val"), nil)
}
}
var propertiesCache sync.Map // map[reflect.Type]*StructProperties
// GetProperties returns the list of properties for the type represented by t,
// which must be a generated protocol buffer message in the open-struct API,
// where protobuf message fields are represented by exported Go struct fields.
//
// Deprecated: Use protobuf reflection instead.
func GetProperties(t reflect.Type) *StructProperties {
if p, ok := propertiesCache.Load(t); ok {
return p.(*StructProperties)
}
p, _ := propertiesCache.LoadOrStore(t, newProperties(t))
return p.(*StructProperties)
}
func newProperties(t reflect.Type) *StructProperties {
if t.Kind() != reflect.Struct {
panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t))
}
var hasOneof bool
prop := new(StructProperties)
// Construct a list of properties for each field in the struct.
for i := 0; i < t.NumField(); i++ {
p := new(Properties)
f := t.Field(i)
tagField := f.Tag.Get("protobuf")
p.Init(f.Type, f.Name, tagField, &f)
tagOneof := f.Tag.Get("protobuf_oneof")
if tagOneof != "" {
hasOneof = true
p.OrigName = tagOneof
}
// Rename unrelated struct fields with the "XXX_" prefix since so much
// user code simply checks for this to exclude special fields.
if tagField == "" && tagOneof == "" && !strings.HasPrefix(p.Name, "XXX_") {
p.Name = "XXX_" + p.Name
p.OrigName = "XXX_" + p.OrigName
} else if p.Weak != "" {
p.Name = p.OrigName // avoid possible "XXX_" prefix on weak field
}
prop.Prop = append(prop.Prop, p)
}
// Construct a mapping of oneof field names to properties.
if hasOneof {
var oneofWrappers []interface{}
if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofFuncs"); ok {
oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[3].Interface().([]interface{})
}
if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofWrappers"); ok {
oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[0].Interface().([]interface{})
}
if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(protoreflect.ProtoMessage); ok {
if m, ok := m.ProtoReflect().(interface{ ProtoMessageInfo() *protoimpl.MessageInfo }); ok {
oneofWrappers = m.ProtoMessageInfo().OneofWrappers
}
}
prop.OneofTypes = make(map[string]*OneofProperties)
for _, wrapper := range oneofWrappers {
p := &OneofProperties{
Type: reflect.ValueOf(wrapper).Type(), // *T
Prop: new(Properties),
}
f := p.Type.Elem().Field(0)
p.Prop.Name = f.Name
p.Prop.Parse(f.Tag.Get("protobuf"))
// Determine the struct field that contains this oneof.
// Each wrapper is assignable to exactly one parent field.
var foundOneof bool
for i := 0; i < t.NumField() && !foundOneof; i++ {
if p.Type.AssignableTo(t.Field(i).Type) {
p.Field = i
foundOneof = true
}
}
if !foundOneof {
panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t))
}
prop.OneofTypes[p.Prop.OrigName] = p
}
}
return prop
}
func (sp *StructProperties) Len() int { return len(sp.Prop) }
func (sp *StructProperties) Less(i, j int) bool { return false }
func (sp *StructProperties) Swap(i, j int) { return }

View File

@ -1,167 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package proto provides functionality for handling protocol buffer messages.
// In particular, it provides marshaling and unmarshaling between a protobuf
// message and the binary wire format.
//
// See https://developers.google.com/protocol-buffers/docs/gotutorial for
// more information.
//
// Deprecated: Use the "google.golang.org/protobuf/proto" package instead.
package proto
import (
protoV2 "google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/runtime/protoiface"
"google.golang.org/protobuf/runtime/protoimpl"
)
const (
ProtoPackageIsVersion1 = true
ProtoPackageIsVersion2 = true
ProtoPackageIsVersion3 = true
ProtoPackageIsVersion4 = true
)
// GeneratedEnum is any enum type generated by protoc-gen-go
// which is a named int32 kind.
// This type exists for documentation purposes.
type GeneratedEnum interface{}
// GeneratedMessage is any message type generated by protoc-gen-go
// which is a pointer to a named struct kind.
// This type exists for documentation purposes.
type GeneratedMessage interface{}
// Message is a protocol buffer message.
//
// This is the v1 version of the message interface and is marginally better
// than an empty interface as it lacks any method to programmatically interact
// with the contents of the message.
//
// A v2 message is declared in "google.golang.org/protobuf/proto".Message and
// exposes protobuf reflection as a first-class feature of the interface.
//
// To convert a v1 message to a v2 message, use the MessageV2 function.
// To convert a v2 message to a v1 message, use the MessageV1 function.
type Message = protoiface.MessageV1
// MessageV1 converts either a v1 or v2 message to a v1 message.
// It returns nil if m is nil.
func MessageV1(m GeneratedMessage) protoiface.MessageV1 {
return protoimpl.X.ProtoMessageV1Of(m)
}
// MessageV2 converts either a v1 or v2 message to a v2 message.
// It returns nil if m is nil.
func MessageV2(m GeneratedMessage) protoV2.Message {
return protoimpl.X.ProtoMessageV2Of(m)
}
// MessageReflect returns a reflective view for a message.
// It returns nil if m is nil.
func MessageReflect(m Message) protoreflect.Message {
return protoimpl.X.MessageOf(m)
}
// Marshaler is implemented by messages that can marshal themselves.
// This interface is used by the following functions: Size, Marshal,
// Buffer.Marshal, and Buffer.EncodeMessage.
//
// Deprecated: Do not implement.
type Marshaler interface {
// Marshal formats the encoded bytes of the message.
// It should be deterministic and emit valid protobuf wire data.
// The caller takes ownership of the returned buffer.
Marshal() ([]byte, error)
}
// Unmarshaler is implemented by messages that can unmarshal themselves.
// This interface is used by the following functions: Unmarshal, UnmarshalMerge,
// Buffer.Unmarshal, Buffer.DecodeMessage, and Buffer.DecodeGroup.
//
// Deprecated: Do not implement.
type Unmarshaler interface {
// Unmarshal parses the encoded bytes of the protobuf wire input.
// The provided buffer is only valid for the duration of the method call.
// It should not reset the receiver message.
Unmarshal([]byte) error
}
// Merger is implemented by messages that can merge themselves.
// This interface is used by the following functions: Clone and Merge.
//
// Deprecated: Do not implement.
type Merger interface {
// Merge merges the contents of src into the receiver message.
// It clones all data structures in src such that it aliases no mutable
// memory referenced by src.
Merge(src Message)
}
// RequiredNotSetError is an error type returned when
// marshaling or unmarshaling a message with missing required fields.
type RequiredNotSetError struct {
err error
}
func (e *RequiredNotSetError) Error() string {
if e.err != nil {
return e.err.Error()
}
return "proto: required field not set"
}
func (e *RequiredNotSetError) RequiredNotSet() bool {
return true
}
func checkRequiredNotSet(m protoV2.Message) error {
if err := protoV2.CheckInitialized(m); err != nil {
return &RequiredNotSetError{err: err}
}
return nil
}
// Clone returns a deep copy of src.
func Clone(src Message) Message {
return MessageV1(protoV2.Clone(MessageV2(src)))
}
// Merge merges src into dst, which must be messages of the same type.
//
// Populated scalar fields in src are copied to dst, while populated
// singular messages in src are merged into dst by recursively calling Merge.
// The elements of every list field in src are appended to the corresponding
// list fields in dst. The entries of every map field in src are copied into
// the corresponding map field in dst, possibly replacing existing entries.
// The unknown fields of src are appended to the unknown fields of dst.
func Merge(dst, src Message) {
protoV2.Merge(MessageV2(dst), MessageV2(src))
}
// Equal reports whether two messages are equal.
// If two messages marshal to the same bytes under deterministic serialization,
// then Equal is guaranteed to report true.
//
// Two messages are equal if they are the same protobuf message type,
// have the same set of populated known and extension field values,
// and the same set of unknown fields values.
//
// Scalar values are compared with the equivalent of the == operator in Go,
// except bytes values which are compared using bytes.Equal and
// floating point values which specially treat NaNs as equal.
// Message values are compared by recursively calling Equal.
// Lists are equal if each element value is also equal.
// Maps are equal if they have the same set of keys, where the pair of values
// for each key is also equal.
func Equal(x, y Message) bool {
return protoV2.Equal(MessageV2(x), MessageV2(y))
}
func isMessageSet(md protoreflect.MessageDescriptor) bool {
ms, ok := md.(interface{ IsMessageSet() bool })
return ok && ms.IsMessageSet()
}

View File

@ -1,317 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"bytes"
"compress/gzip"
"fmt"
"io/ioutil"
"reflect"
"strings"
"sync"
"google.golang.org/protobuf/reflect/protodesc"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
"google.golang.org/protobuf/runtime/protoimpl"
)
// filePath is the path to the proto source file.
type filePath = string // e.g., "google/protobuf/descriptor.proto"
// fileDescGZIP is the compressed contents of the encoded FileDescriptorProto.
type fileDescGZIP = []byte
var fileCache sync.Map // map[filePath]fileDescGZIP
// RegisterFile is called from generated code to register the compressed
// FileDescriptorProto with the file path for a proto source file.
//
// Deprecated: Use protoregistry.GlobalFiles.RegisterFile instead.
func RegisterFile(s filePath, d fileDescGZIP) {
// Decompress the descriptor.
zr, err := gzip.NewReader(bytes.NewReader(d))
if err != nil {
panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err))
}
b, err := ioutil.ReadAll(zr)
if err != nil {
panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err))
}
// Construct a protoreflect.FileDescriptor from the raw descriptor.
// Note that DescBuilder.Build automatically registers the constructed
// file descriptor with the v2 registry.
protoimpl.DescBuilder{RawDescriptor: b}.Build()
// Locally cache the raw descriptor form for the file.
fileCache.Store(s, d)
}
// FileDescriptor returns the compressed FileDescriptorProto given the file path
// for a proto source file. It returns nil if not found.
//
// Deprecated: Use protoregistry.GlobalFiles.FindFileByPath instead.
func FileDescriptor(s filePath) fileDescGZIP {
if v, ok := fileCache.Load(s); ok {
return v.(fileDescGZIP)
}
// Find the descriptor in the v2 registry.
var b []byte
if fd, _ := protoregistry.GlobalFiles.FindFileByPath(s); fd != nil {
b, _ = Marshal(protodesc.ToFileDescriptorProto(fd))
}
// Locally cache the raw descriptor form for the file.
if len(b) > 0 {
v, _ := fileCache.LoadOrStore(s, protoimpl.X.CompressGZIP(b))
return v.(fileDescGZIP)
}
return nil
}
// enumName is the name of an enum. For historical reasons, the enum name is
// neither the full Go name nor the full protobuf name of the enum.
// The name is the dot-separated combination of just the proto package that the
// enum is declared within followed by the Go type name of the generated enum.
type enumName = string // e.g., "my.proto.package.GoMessage_GoEnum"
// enumsByName maps enum values by name to their numeric counterpart.
type enumsByName = map[string]int32
// enumsByNumber maps enum values by number to their name counterpart.
type enumsByNumber = map[int32]string
var enumCache sync.Map // map[enumName]enumsByName
var numFilesCache sync.Map // map[protoreflect.FullName]int
// RegisterEnum is called from the generated code to register the mapping of
// enum value names to enum numbers for the enum identified by s.
//
// Deprecated: Use protoregistry.GlobalTypes.RegisterEnum instead.
func RegisterEnum(s enumName, _ enumsByNumber, m enumsByName) {
if _, ok := enumCache.Load(s); ok {
panic("proto: duplicate enum registered: " + s)
}
enumCache.Store(s, m)
// This does not forward registration to the v2 registry since this API
// lacks sufficient information to construct a complete v2 enum descriptor.
}
// EnumValueMap returns the mapping from enum value names to enum numbers for
// the enum of the given name. It returns nil if not found.
//
// Deprecated: Use protoregistry.GlobalTypes.FindEnumByName instead.
func EnumValueMap(s enumName) enumsByName {
if v, ok := enumCache.Load(s); ok {
return v.(enumsByName)
}
// Check whether the cache is stale. If the number of files in the current
// package differs, then it means that some enums may have been recently
// registered upstream that we do not know about.
var protoPkg protoreflect.FullName
if i := strings.LastIndexByte(s, '.'); i >= 0 {
protoPkg = protoreflect.FullName(s[:i])
}
v, _ := numFilesCache.Load(protoPkg)
numFiles, _ := v.(int)
if protoregistry.GlobalFiles.NumFilesByPackage(protoPkg) == numFiles {
return nil // cache is up-to-date; was not found earlier
}
// Update the enum cache for all enums declared in the given proto package.
numFiles = 0
protoregistry.GlobalFiles.RangeFilesByPackage(protoPkg, func(fd protoreflect.FileDescriptor) bool {
walkEnums(fd, func(ed protoreflect.EnumDescriptor) {
name := protoimpl.X.LegacyEnumName(ed)
if _, ok := enumCache.Load(name); !ok {
m := make(enumsByName)
evs := ed.Values()
for i := evs.Len() - 1; i >= 0; i-- {
ev := evs.Get(i)
m[string(ev.Name())] = int32(ev.Number())
}
enumCache.LoadOrStore(name, m)
}
})
numFiles++
return true
})
numFilesCache.Store(protoPkg, numFiles)
// Check cache again for enum map.
if v, ok := enumCache.Load(s); ok {
return v.(enumsByName)
}
return nil
}
// walkEnums recursively walks all enums declared in d.
func walkEnums(d interface {
Enums() protoreflect.EnumDescriptors
Messages() protoreflect.MessageDescriptors
}, f func(protoreflect.EnumDescriptor)) {
eds := d.Enums()
for i := eds.Len() - 1; i >= 0; i-- {
f(eds.Get(i))
}
mds := d.Messages()
for i := mds.Len() - 1; i >= 0; i-- {
walkEnums(mds.Get(i), f)
}
}
// messageName is the full name of a protobuf message.
type messageName = string
var messageTypeCache sync.Map // map[messageName]reflect.Type
// RegisterType is called from generated code to register the message Go type
// for a message of the given name.
//
// Deprecated: Use protoregistry.GlobalTypes.RegisterMessage instead.
func RegisterType(m Message, s messageName) {
mt := protoimpl.X.LegacyMessageTypeOf(m, protoreflect.FullName(s))
if err := protoregistry.GlobalTypes.RegisterMessage(mt); err != nil {
panic(err)
}
messageTypeCache.Store(s, reflect.TypeOf(m))
}
// RegisterMapType is called from generated code to register the Go map type
// for a protobuf message representing a map entry.
//
// Deprecated: Do not use.
func RegisterMapType(m interface{}, s messageName) {
t := reflect.TypeOf(m)
if t.Kind() != reflect.Map {
panic(fmt.Sprintf("invalid map kind: %v", t))
}
if _, ok := messageTypeCache.Load(s); ok {
panic(fmt.Errorf("proto: duplicate proto message registered: %s", s))
}
messageTypeCache.Store(s, t)
}
// MessageType returns the message type for a named message.
// It returns nil if not found.
//
// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead.
func MessageType(s messageName) reflect.Type {
if v, ok := messageTypeCache.Load(s); ok {
return v.(reflect.Type)
}
// Derive the message type from the v2 registry.
var t reflect.Type
if mt, _ := protoregistry.GlobalTypes.FindMessageByName(protoreflect.FullName(s)); mt != nil {
t = messageGoType(mt)
}
// If we could not get a concrete type, it is possible that it is a
// pseudo-message for a map entry.
if t == nil {
d, _ := protoregistry.GlobalFiles.FindDescriptorByName(protoreflect.FullName(s))
if md, _ := d.(protoreflect.MessageDescriptor); md != nil && md.IsMapEntry() {
kt := goTypeForField(md.Fields().ByNumber(1))
vt := goTypeForField(md.Fields().ByNumber(2))
t = reflect.MapOf(kt, vt)
}
}
// Locally cache the message type for the given name.
if t != nil {
v, _ := messageTypeCache.LoadOrStore(s, t)
return v.(reflect.Type)
}
return nil
}
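// Illustrative sketch (not part of the upstream file): instantiating a
// registered message from its full name via reflection. The name
// "example.HelloRequest" is hypothetical.
//
//	if t := MessageType("example.HelloRequest"); t != nil {
//		msg := reflect.New(t.Elem()).Interface().(Message)
//		_ = msg // freshly allocated, zero-valued message
//	}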
func goTypeForField(fd protoreflect.FieldDescriptor) reflect.Type {
switch k := fd.Kind(); k {
case protoreflect.EnumKind:
if et, _ := protoregistry.GlobalTypes.FindEnumByName(fd.Enum().FullName()); et != nil {
return enumGoType(et)
}
return reflect.TypeOf(protoreflect.EnumNumber(0))
case protoreflect.MessageKind, protoreflect.GroupKind:
if mt, _ := protoregistry.GlobalTypes.FindMessageByName(fd.Message().FullName()); mt != nil {
return messageGoType(mt)
}
return reflect.TypeOf((*protoreflect.Message)(nil)).Elem()
default:
return reflect.TypeOf(fd.Default().Interface())
}
}
func enumGoType(et protoreflect.EnumType) reflect.Type {
return reflect.TypeOf(et.New(0))
}
func messageGoType(mt protoreflect.MessageType) reflect.Type {
return reflect.TypeOf(MessageV1(mt.Zero().Interface()))
}
// MessageName returns the full protobuf name for the given message type.
//
// Deprecated: Use protoreflect.MessageDescriptor.FullName instead.
func MessageName(m Message) messageName {
if m == nil {
return ""
}
if m, ok := m.(interface{ XXX_MessageName() messageName }); ok {
return m.XXX_MessageName()
}
return messageName(protoimpl.X.MessageDescriptorOf(m).FullName())
}
// RegisterExtension is called from the generated code to register
// the extension descriptor.
//
// Deprecated: Use protoregistry.GlobalTypes.RegisterExtension instead.
func RegisterExtension(d *ExtensionDesc) {
if err := protoregistry.GlobalTypes.RegisterExtension(d); err != nil {
panic(err)
}
}
type extensionsByNumber = map[int32]*ExtensionDesc
var extensionCache sync.Map // map[messageName]extensionsByNumber
// RegisteredExtensions returns a map of the registered extensions for the
// provided protobuf message, indexed by the extension field number.
//
// Deprecated: Use protoregistry.GlobalTypes.RangeExtensionsByMessage instead.
func RegisteredExtensions(m Message) extensionsByNumber {
// Check whether the cache is stale. If the number of extensions for
// the given message differs, then it means that some extensions were
// recently registered upstream that we do not know about.
s := MessageName(m)
v, _ := extensionCache.Load(s)
xs, _ := v.(extensionsByNumber)
if protoregistry.GlobalTypes.NumExtensionsByMessage(protoreflect.FullName(s)) == len(xs) {
return xs // cache is up-to-date
}
// Cache is stale, re-compute the extensions map.
xs = make(extensionsByNumber)
protoregistry.GlobalTypes.RangeExtensionsByMessage(protoreflect.FullName(s), func(xt protoreflect.ExtensionType) bool {
if xd, ok := xt.(*ExtensionDesc); ok {
xs[int32(xt.TypeDescriptor().Number())] = xd
} else {
// TODO: This implies that the protoreflect.ExtensionType is a
// custom type not generated by protoc-gen-go. We could try and
// convert the type to an ExtensionDesc.
}
return true
})
extensionCache.Store(s, xs)
return xs
}
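// Illustrative sketch (not part of the upstream file): listing the extensions
// registered for a message value msg.
//
//	for num, desc := range RegisteredExtensions(msg) {
//		fmt.Printf("extension %d: %s\n", num, desc.Name)
//	}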


@ -1,801 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"encoding"
"errors"
"fmt"
"reflect"
"strconv"
"strings"
"unicode/utf8"
"google.golang.org/protobuf/encoding/prototext"
protoV2 "google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
)
const wrapTextUnmarshalV2 = false
// ParseError is returned by UnmarshalText.
type ParseError struct {
Message string
// Deprecated: Do not use.
Line, Offset int
}
func (e *ParseError) Error() string {
if wrapTextUnmarshalV2 {
return e.Message
}
if e.Line == 1 {
return fmt.Sprintf("line 1.%d: %v", e.Offset, e.Message)
}
return fmt.Sprintf("line %d: %v", e.Line, e.Message)
}
// UnmarshalText parses a proto text formatted string into m.
func UnmarshalText(s string, m Message) error {
if u, ok := m.(encoding.TextUnmarshaler); ok {
return u.UnmarshalText([]byte(s))
}
m.Reset()
mi := MessageV2(m)
if wrapTextUnmarshalV2 {
err := prototext.UnmarshalOptions{
AllowPartial: true,
}.Unmarshal([]byte(s), mi)
if err != nil {
return &ParseError{Message: err.Error()}
}
return checkRequiredNotSet(mi)
} else {
if err := newTextParser(s).unmarshalMessage(mi.ProtoReflect(), ""); err != nil {
return err
}
return checkRequiredNotSet(mi)
}
}
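// Illustrative sketch (not part of the upstream file): parsing a text-format
// literal into a message. The message type pb.Greeting and its fields are
// hypothetical.
//
//	var g pb.Greeting
//	if err := UnmarshalText(`name: "world" id: 42`, &g); err != nil {
//		log.Fatal(err)
//	}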
type textParser struct {
s string // remaining input
done bool // whether the parsing is finished (success or error)
backed bool // whether back() was called
offset, line int
cur token
}
type token struct {
value string
err *ParseError
line int // line number
offset int // byte number from start of input, not start of line
unquoted string // the unquoted version of value, if it was a quoted string
}
func newTextParser(s string) *textParser {
p := new(textParser)
p.s = s
p.line = 1
p.cur.line = 1
return p
}
func (p *textParser) unmarshalMessage(m protoreflect.Message, terminator string) (err error) {
md := m.Descriptor()
fds := md.Fields()
// A struct is a sequence of "name: value", terminated by one of
// '>' or '}', or the end of the input. A name may also be
// "[extension]" or "[type/url]".
//
// The whole struct can also be an expanded Any message, like:
// [type/url] < ... struct contents ... >
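//
// For example (illustrative), the input
//	name: "alice" pet: < kind: "cat" >
// contains two fields, the second of which is a nested message.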
seen := make(map[protoreflect.FieldNumber]bool)
for {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value == terminator {
break
}
if tok.value == "[" {
if err := p.unmarshalExtensionOrAny(m, seen); err != nil {
return err
}
continue
}
// This is a normal, non-extension field.
name := protoreflect.Name(tok.value)
fd := fds.ByName(name)
switch {
case fd == nil:
gd := fds.ByName(protoreflect.Name(strings.ToLower(string(name))))
if gd != nil && gd.Kind() == protoreflect.GroupKind && gd.Message().Name() == name {
fd = gd
}
case fd.Kind() == protoreflect.GroupKind && fd.Message().Name() != name:
fd = nil
case fd.IsWeak() && fd.Message().IsPlaceholder():
fd = nil
}
if fd == nil {
typeName := string(md.FullName())
if m, ok := m.Interface().(Message); ok {
t := reflect.TypeOf(m)
if t.Kind() == reflect.Ptr {
typeName = t.Elem().String()
}
}
return p.errorf("unknown field name %q in %v", name, typeName)
}
if od := fd.ContainingOneof(); od != nil && m.WhichOneof(od) != nil {
return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, od.Name())
}
if fd.Cardinality() != protoreflect.Repeated && seen[fd.Number()] {
return p.errorf("non-repeated field %q was repeated", fd.Name())
}
seen[fd.Number()] = true
// Consume any colon.
if err := p.checkForColon(fd); err != nil {
return err
}
// Parse into the field.
v := m.Get(fd)
if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) {
v = m.Mutable(fd)
}
if v, err = p.unmarshalValue(v, fd); err != nil {
return err
}
m.Set(fd, v)
if err := p.consumeOptionalSeparator(); err != nil {
return err
}
}
return nil
}
func (p *textParser) unmarshalExtensionOrAny(m protoreflect.Message, seen map[protoreflect.FieldNumber]bool) error {
name, err := p.consumeExtensionOrAnyName()
if err != nil {
return err
}
// If it contains a slash, it's an Any type URL.
if slashIdx := strings.LastIndex(name, "/"); slashIdx >= 0 {
tok := p.next()
if tok.err != nil {
return tok.err
}
// consume an optional colon
if tok.value == ":" {
tok = p.next()
if tok.err != nil {
return tok.err
}
}
var terminator string
switch tok.value {
case "<":
terminator = ">"
case "{":
terminator = "}"
default:
return p.errorf("expected '{' or '<', found %q", tok.value)
}
mt, err := protoregistry.GlobalTypes.FindMessageByURL(name)
if err != nil {
return p.errorf("unrecognized message %q in google.protobuf.Any", name[slashIdx+len("/"):])
}
m2 := mt.New()
if err := p.unmarshalMessage(m2, terminator); err != nil {
return err
}
b, err := protoV2.Marshal(m2.Interface())
if err != nil {
return p.errorf("failed to marshal message of type %q: %v", name[slashIdx+len("/"):], err)
}
urlFD := m.Descriptor().Fields().ByName("type_url")
valFD := m.Descriptor().Fields().ByName("value")
if seen[urlFD.Number()] {
return p.errorf("Any message unpacked multiple times, or %q already set", urlFD.Name())
}
if seen[valFD.Number()] {
return p.errorf("Any message unpacked multiple times, or %q already set", valFD.Name())
}
m.Set(urlFD, protoreflect.ValueOfString(name))
m.Set(valFD, protoreflect.ValueOfBytes(b))
seen[urlFD.Number()] = true
seen[valFD.Number()] = true
return nil
}
xname := protoreflect.FullName(name)
xt, _ := protoregistry.GlobalTypes.FindExtensionByName(xname)
if xt == nil && isMessageSet(m.Descriptor()) {
xt, _ = protoregistry.GlobalTypes.FindExtensionByName(xname.Append("message_set_extension"))
}
if xt == nil {
return p.errorf("unrecognized extension %q", name)
}
fd := xt.TypeDescriptor()
if fd.ContainingMessage().FullName() != m.Descriptor().FullName() {
return p.errorf("extension field %q does not extend message %q", name, m.Descriptor().FullName())
}
if err := p.checkForColon(fd); err != nil {
return err
}
v := m.Get(fd)
if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) {
v = m.Mutable(fd)
}
v, err = p.unmarshalValue(v, fd)
if err != nil {
return err
}
m.Set(fd, v)
return p.consumeOptionalSeparator()
}
func (p *textParser) unmarshalValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) {
tok := p.next()
if tok.err != nil {
return v, tok.err
}
if tok.value == "" {
return v, p.errorf("unexpected EOF")
}
switch {
case fd.IsList():
lv := v.List()
var err error
if tok.value == "[" {
// Repeated field with list notation, like [1,2,3].
for {
vv := lv.NewElement()
vv, err = p.unmarshalSingularValue(vv, fd)
if err != nil {
return v, err
}
lv.Append(vv)
tok := p.next()
if tok.err != nil {
return v, tok.err
}
if tok.value == "]" {
break
}
if tok.value != "," {
return v, p.errorf("Expected ']' or ',' found %q", tok.value)
}
}
return v, nil
}
// One value of the repeated field.
p.back()
vv := lv.NewElement()
vv, err = p.unmarshalSingularValue(vv, fd)
if err != nil {
return v, err
}
lv.Append(vv)
return v, nil
case fd.IsMap():
// The map entry should be this sequence of tokens:
// < key : KEY value : VALUE >
// However, implementations may omit key or value, and technically
// we should support them in any order.
var terminator string
switch tok.value {
case "<":
terminator = ">"
case "{":
terminator = "}"
default:
return v, p.errorf("expected '{' or '<', found %q", tok.value)
}
keyFD := fd.MapKey()
valFD := fd.MapValue()
mv := v.Map()
kv := keyFD.Default()
vv := mv.NewValue()
for {
tok := p.next()
if tok.err != nil {
return v, tok.err
}
if tok.value == terminator {
break
}
var err error
switch tok.value {
case "key":
if err := p.consumeToken(":"); err != nil {
return v, err
}
if kv, err = p.unmarshalSingularValue(kv, keyFD); err != nil {
return v, err
}
if err := p.consumeOptionalSeparator(); err != nil {
return v, err
}
case "value":
if err := p.checkForColon(valFD); err != nil {
return v, err
}
if vv, err = p.unmarshalSingularValue(vv, valFD); err != nil {
return v, err
}
if err := p.consumeOptionalSeparator(); err != nil {
return v, err
}
default:
p.back()
return v, p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value)
}
}
mv.Set(kv.MapKey(), vv)
return v, nil
default:
p.back()
return p.unmarshalSingularValue(v, fd)
}
}
func (p *textParser) unmarshalSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) {
tok := p.next()
if tok.err != nil {
return v, tok.err
}
if tok.value == "" {
return v, p.errorf("unexpected EOF")
}
switch fd.Kind() {
case protoreflect.BoolKind:
switch tok.value {
case "true", "1", "t", "True":
return protoreflect.ValueOfBool(true), nil
case "false", "0", "f", "False":
return protoreflect.ValueOfBool(false), nil
}
case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind:
if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil {
return protoreflect.ValueOfInt32(int32(x)), nil
}
// The C++ parser accepts large positive hex numbers that use
// two's complement arithmetic to represent negative numbers.
// This feature is here for backwards compatibility with C++.
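// For example (illustrative), the hex literal "0xffffffff" is accepted here
// and decodes to int32(-1).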
if strings.HasPrefix(tok.value, "0x") {
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
return protoreflect.ValueOfInt32(int32(-(int64(^x) + 1))), nil
}
}
case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind:
if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil {
return protoreflect.ValueOfInt64(int64(x)), nil
}
// The C++ parser accepts large positive hex numbers that use
// two's complement arithmetic to represent negative numbers.
// This feature is here for backwards compatibility with C++.
if strings.HasPrefix(tok.value, "0x") {
if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil {
return protoreflect.ValueOfInt64(int64(-(int64(^x) + 1))), nil
}
}
case protoreflect.Uint32Kind, protoreflect.Fixed32Kind:
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
return protoreflect.ValueOfUint32(uint32(x)), nil
}
case protoreflect.Uint64Kind, protoreflect.Fixed64Kind:
if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil {
return protoreflect.ValueOfUint64(uint64(x)), nil
}
case protoreflect.FloatKind:
// Ignore 'f' for compatibility with output generated by C++,
// but don't remove 'f' when the value is "-inf" or "inf".
v := tok.value
if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" {
v = v[:len(v)-len("f")]
}
if x, err := strconv.ParseFloat(v, 32); err == nil {
return protoreflect.ValueOfFloat32(float32(x)), nil
}
case protoreflect.DoubleKind:
// Ignore 'f' for compatibility with output generated by C++,
// but don't remove 'f' when the value is "-inf" or "inf".
v := tok.value
if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" {
v = v[:len(v)-len("f")]
}
if x, err := strconv.ParseFloat(v, 64); err == nil {
return protoreflect.ValueOfFloat64(float64(x)), nil
}
case protoreflect.StringKind:
if isQuote(tok.value[0]) {
return protoreflect.ValueOfString(tok.unquoted), nil
}
case protoreflect.BytesKind:
if isQuote(tok.value[0]) {
return protoreflect.ValueOfBytes([]byte(tok.unquoted)), nil
}
case protoreflect.EnumKind:
if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil {
return protoreflect.ValueOfEnum(protoreflect.EnumNumber(x)), nil
}
vd := fd.Enum().Values().ByName(protoreflect.Name(tok.value))
if vd != nil {
return protoreflect.ValueOfEnum(vd.Number()), nil
}
case protoreflect.MessageKind, protoreflect.GroupKind:
var terminator string
switch tok.value {
case "{":
terminator = "}"
case "<":
terminator = ">"
default:
return v, p.errorf("expected '{' or '<', found %q", tok.value)
}
err := p.unmarshalMessage(v.Message(), terminator)
return v, err
default:
panic(fmt.Sprintf("invalid kind %v", fd.Kind()))
}
return v, p.errorf("invalid %v: %v", fd.Kind(), tok.value)
}
// Consume a ':' from the input stream (if the next token is a colon),
// returning an error if a colon is needed but not present.
func (p *textParser) checkForColon(fd protoreflect.FieldDescriptor) *ParseError {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != ":" {
if fd.Message() == nil {
return p.errorf("expected ':', found %q", tok.value)
}
p.back()
}
return nil
}
// consumeExtensionOrAnyName consumes an extension name or an Any type URL and
// the following ']'. It returns the name or URL consumed.
func (p *textParser) consumeExtensionOrAnyName() (string, error) {
tok := p.next()
if tok.err != nil {
return "", tok.err
}
// If the extension name or type URL is quoted, it's a single token.
if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] {
name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0]))
if err != nil {
return "", err
}
return name, p.consumeToken("]")
}
// Consume everything up to "]"
var parts []string
for tok.value != "]" {
parts = append(parts, tok.value)
tok = p.next()
if tok.err != nil {
return "", p.errorf("unrecognized type_url or extension name: %s", tok.err)
}
if p.done && tok.value != "]" {
return "", p.errorf("unclosed type_url or extension name")
}
}
return strings.Join(parts, ""), nil
}
// consumeOptionalSeparator consumes an optional semicolon or comma.
// It is used in unmarshalMessage to provide backward compatibility.
func (p *textParser) consumeOptionalSeparator() error {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != ";" && tok.value != "," {
p.back()
}
return nil
}
func (p *textParser) errorf(format string, a ...interface{}) *ParseError {
pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset}
p.cur.err = pe
p.done = true
return pe
}
func (p *textParser) skipWhitespace() {
i := 0
for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') {
if p.s[i] == '#' {
// comment; skip to end of line or input
for i < len(p.s) && p.s[i] != '\n' {
i++
}
if i == len(p.s) {
break
}
}
if p.s[i] == '\n' {
p.line++
}
i++
}
p.offset += i
p.s = p.s[i:len(p.s)]
if len(p.s) == 0 {
p.done = true
}
}
func (p *textParser) advance() {
// Skip whitespace
p.skipWhitespace()
if p.done {
return
}
// Start of non-whitespace
p.cur.err = nil
p.cur.offset, p.cur.line = p.offset, p.line
p.cur.unquoted = ""
switch p.s[0] {
case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/':
// Single symbol
p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)]
case '"', '\'':
// Quoted string
i := 1
for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' {
if p.s[i] == '\\' && i+1 < len(p.s) {
// skip escaped char
i++
}
i++
}
if i >= len(p.s) || p.s[i] != p.s[0] {
p.errorf("unmatched quote")
return
}
unq, err := unquoteC(p.s[1:i], rune(p.s[0]))
if err != nil {
p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err)
return
}
p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)]
p.cur.unquoted = unq
default:
i := 0
for i < len(p.s) && isIdentOrNumberChar(p.s[i]) {
i++
}
if i == 0 {
p.errorf("unexpected byte %#x", p.s[0])
return
}
p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)]
}
p.offset += len(p.cur.value)
}
// Back off the parser by one token. Can only be done between calls to next().
// It makes the next advance() a no-op.
func (p *textParser) back() { p.backed = true }
// Advances the parser and returns the new current token.
func (p *textParser) next() *token {
if p.backed || p.done {
p.backed = false
return &p.cur
}
p.advance()
if p.done {
p.cur.value = ""
} else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) {
// Look for multiple quoted strings separated by whitespace,
// and concatenate them.
cat := p.cur
for {
p.skipWhitespace()
if p.done || !isQuote(p.s[0]) {
break
}
p.advance()
if p.cur.err != nil {
return &p.cur
}
cat.value += " " + p.cur.value
cat.unquoted += p.cur.unquoted
}
p.done = false // parser may have seen EOF, but we want to return cat
p.cur = cat
}
return &p.cur
}
func (p *textParser) consumeToken(s string) error {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != s {
p.back()
return p.errorf("expected %q, found %q", s, tok.value)
}
return nil
}
var errBadUTF8 = errors.New("proto: bad UTF-8")
func unquoteC(s string, quote rune) (string, error) {
// This is based on C++'s tokenizer.cc.
// Despite its name, this is *not* parsing C syntax.
// For instance, "\0" is an invalid quoted string.
// Avoid allocation in trivial cases.
simple := true
for _, r := range s {
if r == '\\' || r == quote {
simple = false
break
}
}
if simple {
return s, nil
}
buf := make([]byte, 0, 3*len(s)/2)
for len(s) > 0 {
r, n := utf8.DecodeRuneInString(s)
if r == utf8.RuneError && n == 1 {
return "", errBadUTF8
}
s = s[n:]
if r != '\\' {
if r < utf8.RuneSelf {
buf = append(buf, byte(r))
} else {
buf = append(buf, string(r)...)
}
continue
}
ch, tail, err := unescape(s)
if err != nil {
return "", err
}
buf = append(buf, ch...)
s = tail
}
return string(buf), nil
}
func unescape(s string) (ch string, tail string, err error) {
r, n := utf8.DecodeRuneInString(s)
if r == utf8.RuneError && n == 1 {
return "", "", errBadUTF8
}
s = s[n:]
switch r {
case 'a':
return "\a", s, nil
case 'b':
return "\b", s, nil
case 'f':
return "\f", s, nil
case 'n':
return "\n", s, nil
case 'r':
return "\r", s, nil
case 't':
return "\t", s, nil
case 'v':
return "\v", s, nil
case '?':
return "?", s, nil // trigraph workaround
case '\'', '"', '\\':
return string(r), s, nil
case '0', '1', '2', '3', '4', '5', '6', '7':
if len(s) < 2 {
return "", "", fmt.Errorf(`\%c requires 2 following digits`, r)
}
ss := string(r) + s[:2]
s = s[2:]
i, err := strconv.ParseUint(ss, 8, 8)
if err != nil {
return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss)
}
return string([]byte{byte(i)}), s, nil
case 'x', 'X', 'u', 'U':
var n int
switch r {
case 'x', 'X':
n = 2
case 'u':
n = 4
case 'U':
n = 8
}
if len(s) < n {
return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n)
}
ss := s[:n]
s = s[n:]
i, err := strconv.ParseUint(ss, 16, 64)
if err != nil {
return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss)
}
if r == 'x' || r == 'X' {
return string([]byte{byte(i)}), s, nil
}
if i > utf8.MaxRune {
return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss)
}
return string(rune(i)), s, nil
}
return "", "", fmt.Errorf(`unknown escape \%c`, r)
}
func isIdentOrNumberChar(c byte) bool {
switch {
case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z':
return true
case '0' <= c && c <= '9':
return true
}
switch c {
case '-', '+', '.', '_':
return true
}
return false
}
func isWhitespace(c byte) bool {
switch c {
case ' ', '\t', '\n', '\r':
return true
}
return false
}
func isQuote(c byte) bool {
switch c {
case '"', '\'':
return true
}
return false
}


@ -1,560 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
"bytes"
"encoding"
"fmt"
"io"
"math"
"sort"
"strings"
"google.golang.org/protobuf/encoding/prototext"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
)
const wrapTextMarshalV2 = false
// TextMarshaler is a configurable text format marshaler.
type TextMarshaler struct {
Compact bool // use compact text format (one line)
ExpandAny bool // expand google.protobuf.Any messages of known types
}
// Marshal writes the proto text format of m to w.
func (tm *TextMarshaler) Marshal(w io.Writer, m Message) error {
b, err := tm.marshal(m)
if len(b) > 0 {
if _, err := w.Write(b); err != nil {
return err
}
}
return err
}
// Text returns a proto text formatted string of m.
func (tm *TextMarshaler) Text(m Message) string {
b, _ := tm.marshal(m)
return string(b)
}
func (tm *TextMarshaler) marshal(m Message) ([]byte, error) {
mr := MessageReflect(m)
if mr == nil || !mr.IsValid() {
return []byte("<nil>"), nil
}
if wrapTextMarshalV2 {
if m, ok := m.(encoding.TextMarshaler); ok {
return m.MarshalText()
}
opts := prototext.MarshalOptions{
AllowPartial: true,
EmitUnknown: true,
}
if !tm.Compact {
opts.Indent = " "
}
if !tm.ExpandAny {
opts.Resolver = (*protoregistry.Types)(nil)
}
return opts.Marshal(mr.Interface())
} else {
w := &textWriter{
compact: tm.Compact,
expandAny: tm.ExpandAny,
complete: true,
}
if m, ok := m.(encoding.TextMarshaler); ok {
b, err := m.MarshalText()
if err != nil {
return nil, err
}
w.Write(b)
return w.buf, nil
}
err := w.writeMessage(mr)
return w.buf, err
}
}
var (
defaultTextMarshaler = TextMarshaler{}
compactTextMarshaler = TextMarshaler{Compact: true}
)
// MarshalText writes the proto text format of m to w.
func MarshalText(w io.Writer, m Message) error { return defaultTextMarshaler.Marshal(w, m) }
// MarshalTextString returns a proto text formatted string of m.
func MarshalTextString(m Message) string { return defaultTextMarshaler.Text(m) }
// CompactText writes the compact proto text format of m to w.
func CompactText(w io.Writer, m Message) error { return compactTextMarshaler.Marshal(w, m) }
// CompactTextString returns a compact proto text formatted string of m.
func CompactTextString(m Message) string { return compactTextMarshaler.Text(m) }
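// Illustrative sketch (not part of the upstream file): rendering a message
// value msg in both text forms.
//
//	fmt.Println(MarshalTextString(msg)) // indented, multi-line form
//	fmt.Println(CompactTextString(msg)) // single-line form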
var (
newline = []byte("\n")
endBraceNewline = []byte("}\n")
posInf = []byte("inf")
negInf = []byte("-inf")
nan = []byte("nan")
)
// textWriter is an io.Writer that tracks its indentation level.
type textWriter struct {
compact bool // same as TextMarshaler.Compact
expandAny bool // same as TextMarshaler.ExpandAny
complete bool // whether the current position is a complete line
indent int // indentation level; never negative
buf []byte
}
func (w *textWriter) Write(p []byte) (n int, _ error) {
newlines := bytes.Count(p, newline)
if newlines == 0 {
if !w.compact && w.complete {
w.writeIndent()
}
w.buf = append(w.buf, p...)
w.complete = false
return len(p), nil
}
frags := bytes.SplitN(p, newline, newlines+1)
if w.compact {
for i, frag := range frags {
if i > 0 {
w.buf = append(w.buf, ' ')
n++
}
w.buf = append(w.buf, frag...)
n += len(frag)
}
return n, nil
}
for i, frag := range frags {
if w.complete {
w.writeIndent()
}
w.buf = append(w.buf, frag...)
n += len(frag)
if i+1 < len(frags) {
w.buf = append(w.buf, '\n')
n++
}
}
w.complete = len(frags[len(frags)-1]) == 0
return n, nil
}
func (w *textWriter) WriteByte(c byte) error {
if w.compact && c == '\n' {
c = ' '
}
if !w.compact && w.complete {
w.writeIndent()
}
w.buf = append(w.buf, c)
w.complete = c == '\n'
return nil
}
func (w *textWriter) writeName(fd protoreflect.FieldDescriptor) {
if !w.compact && w.complete {
w.writeIndent()
}
w.complete = false
if fd.Kind() != protoreflect.GroupKind {
w.buf = append(w.buf, fd.Name()...)
w.WriteByte(':')
} else {
// Use message type name for group field name.
w.buf = append(w.buf, fd.Message().Name()...)
}
if !w.compact {
w.WriteByte(' ')
}
}
func requiresQuotes(u string) bool {
// When the type URL contains any character other than [0-9A-Za-z._/], it must be quoted.
for _, ch := range u {
switch {
case ch == '.' || ch == '/' || ch == '_':
continue
case '0' <= ch && ch <= '9':
continue
case 'A' <= ch && ch <= 'Z':
continue
case 'a' <= ch && ch <= 'z':
continue
default:
return true
}
}
return false
}
// writeProto3Any writes an expanded google.protobuf.Any message.
//
// It returns (false, nil) if the value in m can't be unmarshaled (e.g. because
// required messages are not linked in).
//
// It returns (true, error) when m was written in expanded format or an error
// was encountered.
func (w *textWriter) writeProto3Any(m protoreflect.Message) (bool, error) {
md := m.Descriptor()
fdURL := md.Fields().ByName("type_url")
fdVal := md.Fields().ByName("value")
url := m.Get(fdURL).String()
mt, err := protoregistry.GlobalTypes.FindMessageByURL(url)
if err != nil {
return false, nil
}
b := m.Get(fdVal).Bytes()
m2 := mt.New()
if err := proto.Unmarshal(b, m2.Interface()); err != nil {
return false, nil
}
w.Write([]byte("["))
if requiresQuotes(url) {
w.writeQuotedString(url)
} else {
w.Write([]byte(url))
}
if w.compact {
w.Write([]byte("]:<"))
} else {
w.Write([]byte("]: <\n"))
w.indent++
}
if err := w.writeMessage(m2); err != nil {
return true, err
}
if w.compact {
w.Write([]byte("> "))
} else {
w.indent--
w.Write([]byte(">\n"))
}
return true, nil
}
func (w *textWriter) writeMessage(m protoreflect.Message) error {
md := m.Descriptor()
if w.expandAny && md.FullName() == "google.protobuf.Any" {
if canExpand, err := w.writeProto3Any(m); canExpand {
return err
}
}
fds := md.Fields()
for i := 0; i < fds.Len(); {
fd := fds.Get(i)
if od := fd.ContainingOneof(); od != nil {
fd = m.WhichOneof(od)
i += od.Fields().Len()
} else {
i++
}
if fd == nil || !m.Has(fd) {
continue
}
switch {
case fd.IsList():
lv := m.Get(fd).List()
for j := 0; j < lv.Len(); j++ {
w.writeName(fd)
v := lv.Get(j)
if err := w.writeSingularValue(v, fd); err != nil {
return err
}
w.WriteByte('\n')
}
case fd.IsMap():
kfd := fd.MapKey()
vfd := fd.MapValue()
mv := m.Get(fd).Map()
type entry struct{ key, val protoreflect.Value }
var entries []entry
mv.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool {
entries = append(entries, entry{k.Value(), v})
return true
})
sort.Slice(entries, func(i, j int) bool {
switch kfd.Kind() {
case protoreflect.BoolKind:
return !entries[i].key.Bool() && entries[j].key.Bool()
case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind, protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind:
return entries[i].key.Int() < entries[j].key.Int()
case protoreflect.Uint32Kind, protoreflect.Fixed32Kind, protoreflect.Uint64Kind, protoreflect.Fixed64Kind:
return entries[i].key.Uint() < entries[j].key.Uint()
case protoreflect.StringKind:
return entries[i].key.String() < entries[j].key.String()
default:
panic("invalid kind")
}
})
for _, entry := range entries {
w.writeName(fd)
w.WriteByte('<')
if !w.compact {
w.WriteByte('\n')
}
w.indent++
w.writeName(kfd)
if err := w.writeSingularValue(entry.key, kfd); err != nil {
return err
}
w.WriteByte('\n')
w.writeName(vfd)
if err := w.writeSingularValue(entry.val, vfd); err != nil {
return err
}
w.WriteByte('\n')
w.indent--
w.WriteByte('>')
w.WriteByte('\n')
}
default:
w.writeName(fd)
if err := w.writeSingularValue(m.Get(fd), fd); err != nil {
return err
}
w.WriteByte('\n')
}
}
if b := m.GetUnknown(); len(b) > 0 {
w.writeUnknownFields(b)
}
return w.writeExtensions(m)
}
func (w *textWriter) writeSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) error {
switch fd.Kind() {
case protoreflect.FloatKind, protoreflect.DoubleKind:
switch vf := v.Float(); {
case math.IsInf(vf, +1):
w.Write(posInf)
case math.IsInf(vf, -1):
w.Write(negInf)
case math.IsNaN(vf):
w.Write(nan)
default:
fmt.Fprint(w, v.Interface())
}
case protoreflect.StringKind:
// NOTE: This does not validate UTF-8 for historical reasons.
w.writeQuotedString(string(v.String()))
case protoreflect.BytesKind:
w.writeQuotedString(string(v.Bytes()))
case protoreflect.MessageKind, protoreflect.GroupKind:
var bra, ket byte = '<', '>'
if fd.Kind() == protoreflect.GroupKind {
bra, ket = '{', '}'
}
w.WriteByte(bra)
if !w.compact {
w.WriteByte('\n')
}
w.indent++
m := v.Message()
if m2, ok := m.Interface().(encoding.TextMarshaler); ok {
b, err := m2.MarshalText()
if err != nil {
return err
}
w.Write(b)
} else {
w.writeMessage(m)
}
w.indent--
w.WriteByte(ket)
case protoreflect.EnumKind:
if ev := fd.Enum().Values().ByNumber(v.Enum()); ev != nil {
fmt.Fprint(w, ev.Name())
} else {
fmt.Fprint(w, v.Enum())
}
default:
fmt.Fprint(w, v.Interface())
}
return nil
}
// writeQuotedString writes a quoted string in the protocol buffer text format.
func (w *textWriter) writeQuotedString(s string) {
w.WriteByte('"')
for i := 0; i < len(s); i++ {
switch c := s[i]; c {
case '\n':
w.buf = append(w.buf, `\n`...)
case '\r':
w.buf = append(w.buf, `\r`...)
case '\t':
w.buf = append(w.buf, `\t`...)
case '"':
w.buf = append(w.buf, `\"`...)
case '\\':
w.buf = append(w.buf, `\\`...)
default:
if isPrint := c >= 0x20 && c < 0x7f; isPrint {
w.buf = append(w.buf, c)
} else {
w.buf = append(w.buf, fmt.Sprintf(`\%03o`, c)...)
}
}
}
w.WriteByte('"')
}
func (w *textWriter) writeUnknownFields(b []byte) {
if !w.compact {
fmt.Fprintf(w, "/* %d unknown bytes */\n", len(b))
}
for len(b) > 0 {
num, wtyp, n := protowire.ConsumeTag(b)
if n < 0 {
return
}
b = b[n:]
if wtyp == protowire.EndGroupType {
w.indent--
w.Write(endBraceNewline)
continue
}
fmt.Fprint(w, num)
if wtyp != protowire.StartGroupType {
w.WriteByte(':')
}
if !w.compact || wtyp == protowire.StartGroupType {
w.WriteByte(' ')
}
switch wtyp {
case protowire.VarintType:
v, n := protowire.ConsumeVarint(b)
if n < 0 {
return
}
b = b[n:]
fmt.Fprint(w, v)
case protowire.Fixed32Type:
v, n := protowire.ConsumeFixed32(b)
if n < 0 {
return
}
b = b[n:]
fmt.Fprint(w, v)
case protowire.Fixed64Type:
v, n := protowire.ConsumeFixed64(b)
if n < 0 {
return
}
b = b[n:]
fmt.Fprint(w, v)
case protowire.BytesType:
v, n := protowire.ConsumeBytes(b)
if n < 0 {
return
}
b = b[n:]
fmt.Fprintf(w, "%q", v)
case protowire.StartGroupType:
w.WriteByte('{')
w.indent++
default:
fmt.Fprintf(w, "/* unknown wire type %d */", wtyp)
}
w.WriteByte('\n')
}
}
// writeExtensions writes all the extensions in m.
func (w *textWriter) writeExtensions(m protoreflect.Message) error {
md := m.Descriptor()
if md.ExtensionRanges().Len() == 0 {
return nil
}
type ext struct {
desc protoreflect.FieldDescriptor
val protoreflect.Value
}
var exts []ext
m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool {
if fd.IsExtension() {
exts = append(exts, ext{fd, v})
}
return true
})
sort.Slice(exts, func(i, j int) bool {
return exts[i].desc.Number() < exts[j].desc.Number()
})
for _, ext := range exts {
// For message set, use the name of the message as the extension name.
name := string(ext.desc.FullName())
if isMessageSet(ext.desc.ContainingMessage()) {
name = strings.TrimSuffix(name, ".message_set_extension")
}
if !ext.desc.IsList() {
if err := w.writeSingularExtension(name, ext.val, ext.desc); err != nil {
return err
}
} else {
lv := ext.val.List()
for i := 0; i < lv.Len(); i++ {
if err := w.writeSingularExtension(name, lv.Get(i), ext.desc); err != nil {
return err
}
}
}
}
return nil
}
func (w *textWriter) writeSingularExtension(name string, v protoreflect.Value, fd protoreflect.FieldDescriptor) error {
fmt.Fprintf(w, "[%s]:", name)
if !w.compact {
w.WriteByte(' ')
}
if err := w.writeSingularValue(v, fd); err != nil {
return err
}
w.WriteByte('\n')
return nil
}
func (w *textWriter) writeIndent() {
if !w.complete {
return
}
for i := 0; i < w.indent*2; i++ {
w.buf = append(w.buf, ' ')
}
w.complete = false
}


@ -1,78 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
import (
protoV2 "google.golang.org/protobuf/proto"
"google.golang.org/protobuf/runtime/protoiface"
)
// Size returns the size in bytes of the wire-format encoding of m.
func Size(m Message) int {
if m == nil {
return 0
}
mi := MessageV2(m)
return protoV2.Size(mi)
}
// Marshal returns the wire-format encoding of m.
func Marshal(m Message) ([]byte, error) {
b, err := marshalAppend(nil, m, false)
if b == nil {
b = zeroBytes
}
return b, err
}
var zeroBytes = make([]byte, 0, 0)
func marshalAppend(buf []byte, m Message, deterministic bool) ([]byte, error) {
if m == nil {
return nil, ErrNil
}
mi := MessageV2(m)
nbuf, err := protoV2.MarshalOptions{
Deterministic: deterministic,
AllowPartial: true,
}.MarshalAppend(buf, mi)
if err != nil {
return buf, err
}
if len(buf) == len(nbuf) {
if !mi.ProtoReflect().IsValid() {
return buf, ErrNil
}
}
return nbuf, checkRequiredNotSet(mi)
}
// Unmarshal parses a wire-format message in b and places the decoded results in m.
//
// Unmarshal resets m before starting to unmarshal, so any existing data in m is always
// removed. Use UnmarshalMerge to preserve and append to existing data.
func Unmarshal(b []byte, m Message) error {
m.Reset()
return UnmarshalMerge(b, m)
}
// UnmarshalMerge parses a wire-format message in b and places the decoded results in m.
func UnmarshalMerge(b []byte, m Message) error {
mi := MessageV2(m)
out, err := protoV2.UnmarshalOptions{
AllowPartial: true,
Merge: true,
}.UnmarshalState(protoiface.UnmarshalInput{
Buf: b,
Message: mi.ProtoReflect(),
})
if err != nil {
return err
}
if out.Flags&protoiface.UnmarshalInitialized > 0 {
return nil
}
return checkRequiredNotSet(mi)
}
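// Illustrative sketch (not part of the upstream file): a wire-format round
// trip. The message type pb.Greeting is a hypothetical proto2-style message
// with a pointer-typed Name field.
//
//	in := &pb.Greeting{Name: String("world")}
//	b, err := Marshal(in)
//	if err != nil {
//		log.Fatal(err)
//	}
//	out := &pb.Greeting{}
//	if err := Unmarshal(b, out); err != nil {
//		log.Fatal(err)
//	}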


@ -1,34 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
// Bool stores v in a new bool value and returns a pointer to it.
func Bool(v bool) *bool { return &v }
// Int stores v in a new int32 value and returns a pointer to it.
//
// Deprecated: Use Int32 instead.
func Int(v int) *int32 { return Int32(int32(v)) }
// Int32 stores v in a new int32 value and returns a pointer to it.
func Int32(v int32) *int32 { return &v }
// Int64 stores v in a new int64 value and returns a pointer to it.
func Int64(v int64) *int64 { return &v }
// Uint32 stores v in a new uint32 value and returns a pointer to it.
func Uint32(v uint32) *uint32 { return &v }
// Uint64 stores v in a new uint64 value and returns a pointer to it.
func Uint64(v uint64) *uint64 { return &v }
// Float32 stores v in a new float32 value and returns a pointer to it.
func Float32(v float32) *float32 { return &v }
// Float64 stores v in a new float64 value and returns a pointer to it.
func Float64(v float64) *float64 { return &v }
// String stores v in a new string value and returns a pointer to it.
func String(v string) *string { return &v }
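// Illustrative sketch (not part of the upstream file): the helpers above are
// convenient when filling pointer-typed optional fields of generated messages.
// The message type pb.Config and its fields are hypothetical.
//
//	cfg := &pb.Config{
//		Enabled: Bool(true),
//		Retries: Int32(3),
//		Name:    String("camera"),
//	}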

104
vendor/gocv.io/x/gocv/CHANGELOG.md generated vendored

@ -1,3 +1,107 @@
0.29.0
---
* **all**
* update to OpenCV 4.5.4
* **build**
* add static build ability on windows
* use tbb for all builds for CPU accelerated operations
* **cuda**
* implement a bunch of per-element operations
* add get/set/reset device functions
* add NewGpuMatWithSize() to preallocate device memory
* Reshape() returns a new GpuMat with the changed data
* correct use of Stream by adding WaitForCompletion() and passing pre-allocated GpuMats
* **docs**
* update ROADMAP from recent contributions
* **videoio**
* Fix open video capture with api test (#895)
* **calib3d**
* added EstimateAffine2D
* findChessboardCornersSB
* **aruco**
* added many functions as part of initial implementation
0.28.0
---
* **all**
* update to OpenCV 4.5.3
* make task and build tag for static build of OpenCV/GoCV on Linux
* add Makefile tasks for OpenCV install on Nvidia Jetson
* add gotest for more colorful test output running tests from containers
* **build**
* correcting output format for code coverage report
* enforce rule that all Go code is correctly formatted
* remove codecov
* **core**
* add NewPointVectorFromMat() and NewPoint2fVectorFromMat() functions
* Fix possible MatProfile race by ordering remove before free.
* **cuda**
* add core functions for GpuMat like Cols(), Rows(), and Type()
* initial implementation for the Flip function
* **docs**
* update ROADMAP from recent contributions
* **examples**
* correct list of examples and fix comment
* **features2d**
* Add NewORBWithParams
* **tracking**
* change MOSSE to KCF
* **highgui**
* Add function CreateTrackbarWithValue to Window type.
* **imgcodec**
* optimize IMEncode avoiding multiple data copies.
* **imgproc**
* Add CircleWithParams function
* Add DilateWithParams() function (#827)
* Add EllipseWithParams function
* Add FillPolyWithParams function
* Add PointPolygonTest function
* Add RectangleWithParams function
* **photo**
* add MergeMertens, AlignMTB and Denoising function (#848)
* **xphoto**
* Add Xphoto contrib (#844)
0.27.0
---
* **all**
* update to OpenCV 4.5.2
* **core**
* add Append() to PointsVector/PointVector
* add cv::RNG
* add implementation for Point2fVector
* add rand functions
* add test coverage for PointsVector
* create new PointsVector/PointVector wrappers to avoid repetitive memory copying for seemingly innocent operations involving slices of image.Point
* test coverage for Point2f
* use PointVector for everything that we can to speed up pipeline when passing around Point vectors
* use enum instead of int for Invert Method
* **cuda**
* adding HoughLinesDetector and HoughSegmentDetector
* adding tests for the CannyEdgeDetector
* some refactoring of the API
* adding dockerfiles for OpenCV 4.5.2 with CUDA 11.2
* add GaussianFilter
* correct signature and test for Threshold
* implement SobelFilter
* move arithm module functions into correct location
* rename files to get rid of so many cudas
* add abs function implementation
* **dnn**
* increase test coverage
* **docker**
* make all Dockerfiles names/tags more consistent
* **docs**
* add CUDA functions that need implementation to ROADMAP
* remove invalid sections and add some missing functions from ROADMAP
* **imgproc**
* Add FindContoursWithParams function
* Add ToImageYUV and ToImageYUVWithParams
* **make**
* add make task to show changelog for next release
* **wechat_qrcode**
* disable module in Windows due to linker error
0.26.0
---
* **all**

2
vendor/gocv.io/x/gocv/Dockerfile generated vendored

@ -1,6 +1,6 @@
# to build this docker image:
# docker build .
FROM gocv/opencv:4.5.1
FROM gocv/opencv:4.5.4
ENV GOPATH /go

19
vendor/gocv.io/x/gocv/Dockerfile-test generated vendored Normal file

@ -0,0 +1,19 @@
# To build:
# docker build -f Dockerfile-test -t gocv-test .
#
# To run tests:
# xhost +
# docker run -it --rm -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix gocv-test
# xhost -
#
FROM gocv/opencv:4.5.4 AS gocv-test
ENV GOPATH /go
COPY . /go/src/gocv.io/x/gocv/
WORKDIR /go/src/gocv.io/x/gocv
RUN go get -u github.com/rakyll/gotest
ENTRYPOINT ["gotest", "-v", ".", "./contrib/..."]

18
vendor/gocv.io/x/gocv/Dockerfile-test.gpu-cuda-10 generated vendored Normal file

@ -0,0 +1,18 @@
# To build:
# docker build -f Dockerfile-test.gpu-cuda-10 -t gocv-test-gpu-cuda-10 .
#
# To run tests:
# docker run -it --rm --gpus all gocv-test-gpu-cuda-10
#
FROM gocv/opencv:4.5.4-gpu-cuda-10 AS gocv-gpu-test-cuda-10
ENV GOPATH /go
ENV PATH="${PATH}:/go/bin"
COPY . /go/src/gocv.io/x/gocv/
WORKDIR /go/src/gocv.io/x/gocv
RUN go get -u github.com/rakyll/gotest
ENTRYPOINT ["gotest", "-v", "./cuda/..."]

18
vendor/gocv.io/x/gocv/Dockerfile-test.gpu-cuda-11 generated vendored Normal file

@ -0,0 +1,18 @@
# To build:
# docker build -f Dockerfile-test.gpu-cuda-11 -t gocv-test-gpu-cuda-11 .
#
# To run tests:
# docker run -it --rm --gpus all gocv-test-gpu-cuda-11
#
FROM gocv/opencv:4.5.4-gpu-cuda-11 AS gocv-gpu-test-cuda-11
ENV GOPATH /go
ENV PATH="${PATH}:/go/bin"
COPY . /go/src/gocv.io/x/gocv/
WORKDIR /go/src/gocv.io/x/gocv
RUN go get -u github.com/rakyll/gotest
ENTRYPOINT ["gotest", "-v", "./cuda/..."]


@ -1,12 +1,12 @@
# to build this docker image:
# docker build -f Dockerfile.gpu .
FROM gocv/opencv:4.5.1-gpu AS gocv-gpu-test
FROM gocv/opencv:4.5.4-gpu-cuda-11 AS gocv-gpu
ENV GOPATH /go
COPY . /go/src/gocv.io/x/gocv/
WORKDIR /go/src/gocv.io/x/gocv
RUN go build -tags example -o /build/gocv_cuda_version ./cmd/cuda/
RUN go build -tags cuda -o /build/gocv_cuda_version ./cmd/cuda/
CMD ["/build/gocv_cuda_version"]


@ -1,6 +1,6 @@
# to build this docker image:
# docker build -f Dockerfile.opencv -t gocv/opencv:4.5.1 .
FROM golang:1.15-buster AS opencv
# docker build -f Dockerfile.opencv -t gocv/opencv:4.5.4 .
FROM golang:1.17-buster AS opencv
LABEL maintainer="hybridgroup"
RUN apt-get update && apt-get install -y --no-install-recommends \
@ -10,7 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libjpeg-dev libpng-dev libtiff-dev libdc1394-22-dev && \
rm -rf /var/lib/apt/lists/*
ARG OPENCV_VERSION="4.5.1"
ARG OPENCV_VERSION="4.5.4"
ENV OPENCV_VERSION $OPENCV_VERSION
RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSION}.zip && \
@ -28,6 +28,7 @@ RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSIO
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-${OPENCV_VERSION}/modules \
-D OPENCV_ENABLE_NONFREE=ON \
-D WITH_JASPER=OFF \
-D WITH_TBB=ON \
-D BUILD_DOCS=OFF \
-D BUILD_EXAMPLES=OFF \
-D BUILD_TESTS=OFF \


@ -1,5 +1,5 @@
# to build this docker image:
# docker build -f Dockerfile.opencv-gpu -t gocv/opencv:4.5.1-gpu .
# docker build -f Dockerfile.opencv-gpu-cuda-10 -t gocv/opencv:4.5.4-gpu-cuda-10 .
FROM nvidia/cuda:10.2-cudnn7-devel AS opencv-gpu-base
LABEL maintainer="hybridgroup"
@ -10,7 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libjpeg-dev libpng-dev libtiff-dev libdc1394-22-dev && \
rm -rf /var/lib/apt/lists/*
ARG OPENCV_VERSION="4.5.1"
ARG OPENCV_VERSION="4.5.4"
ENV OPENCV_VERSION $OPENCV_VERSION
RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSION}.zip && \
@ -36,6 +36,7 @@ RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSIO
-D BUILD_opencv_python=NO \
-D BUILD_opencv_python2=NO \
-D BUILD_opencv_python3=NO \
-D WITH_TBB=ON \
-D WITH_CUDA=ON \
-D ENABLE_FAST_MATH=1 \
-D CUDA_FAST_MATH=1 \
@ -53,7 +54,7 @@ RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSIO
# install golang here
FROM opencv-gpu-base AS opencv-gpu-golang
ENV GO_RELEASE=1.15.5
ENV GO_RELEASE=1.17.2
RUN wget https://dl.google.com/go/go${GO_RELEASE}.linux-amd64.tar.gz && \
tar xfv go${GO_RELEASE}.linux-amd64.tar.gz -C /usr/local && \
rm go${GO_RELEASE}.linux-amd64.tar.gz

64
vendor/gocv.io/x/gocv/Dockerfile.opencv-gpu-cuda-11 generated vendored Normal file

@ -0,0 +1,64 @@
# to build this docker image:
# docker build -f Dockerfile.opencv-gpu-cuda-11 -t gocv/opencv:4.5.4-gpu-cuda-11 .
FROM nvidia/cuda:11.4.2-cudnn8-devel-ubuntu20.04 AS opencv-gpu-cuda-11-base
LABEL maintainer="hybridgroup"
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
git build-essential cmake pkg-config unzip libgtk2.0-dev \
wget curl ca-certificates libcurl4-openssl-dev libssl-dev \
libavcodec-dev libavformat-dev libswscale-dev libtbb2 libtbb-dev \
libjpeg-dev libpng-dev libtiff-dev libdc1394-22-dev && \
rm -rf /var/lib/apt/lists/*
ARG OPENCV_VERSION="4.5.4"
ENV OPENCV_VERSION $OPENCV_VERSION
RUN curl -Lo opencv.zip https://github.com/opencv/opencv/archive/${OPENCV_VERSION}.zip && \
unzip -q opencv.zip && \
curl -Lo opencv_contrib.zip https://github.com/opencv/opencv_contrib/archive/${OPENCV_VERSION}.zip && \
unzip -q opencv_contrib.zip && \
rm opencv.zip opencv_contrib.zip && \
cd opencv-${OPENCV_VERSION} && \
mkdir build && cd build && \
cmake -D CMAKE_BUILD_TYPE=RELEASE \
-D WITH_IPP=OFF \
-D WITH_OPENGL=OFF \
-D WITH_QT=OFF \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-${OPENCV_VERSION}/modules \
-D OPENCV_ENABLE_NONFREE=ON \
-D WITH_JASPER=OFF \
-D BUILD_DOCS=OFF \
-D BUILD_EXAMPLES=OFF \
-D BUILD_TESTS=OFF \
-D BUILD_PERF_TESTS=OFF \
-D BUILD_opencv_java=NO \
-D BUILD_opencv_python=NO \
-D BUILD_opencv_python2=NO \
-D BUILD_opencv_python3=NO \
-D WITH_TBB=ON \
-D WITH_CUDA=ON \
-D ENABLE_FAST_MATH=1 \
-D CUDA_FAST_MATH=1 \
-D WITH_CUBLAS=1 \
-D CUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda/ \
-D BUILD_opencv_cudacodec=OFF \
-D WITH_CUDNN=ON \
-D OPENCV_DNN_CUDA=ON \
-D CUDA_GENERATION=Auto \
-D OPENCV_GENERATE_PKGCONFIG=ON .. && \
make -j $(nproc --all) && \
make preinstall && make install && ldconfig && \
cd / && rm -rf opencv*
# install golang here
FROM opencv-gpu-cuda-11-base AS opencv-gpu-cuda-11-golang
ENV GO_RELEASE=1.17.2
RUN wget https://dl.google.com/go/go${GO_RELEASE}.linux-amd64.tar.gz && \
tar xfv go${GO_RELEASE}.linux-amd64.tar.gz -C /usr/local && \
rm go${GO_RELEASE}.linux-amd64.tar.gz
ENV PATH="${PATH}:/usr/local/go/bin"
CMD ["go version"]

2
vendor/gocv.io/x/gocv/LICENSE.txt generated vendored

@ -1,4 +1,4 @@
Copyright (c) 2017-2020 The Hybrid Group
Copyright (c) 2017-2021 The Hybrid Group
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

85
vendor/gocv.io/x/gocv/Makefile generated vendored

@ -1,11 +1,14 @@
.ONESHELL:
.PHONY: test deps download build clean astyle cmds docker
# GoCV version to use.
GOCV_VERSION?="v0.26.0"
# OpenCV version to use.
OPENCV_VERSION?=4.5.1
OPENCV_VERSION?=4.5.3
# Go version to use when building Docker image
GOVERSION?=1.15.3
GOVERSION?=1.16.2
# Temporary directory to put files into.
TMP_DIR?=/tmp/
@ -16,6 +19,7 @@ BUILD_SHARED_LIBS?=ON
# Package list for each well-known Linux distribution
RPMS=cmake curl wget git gtk2-devel libpng-devel libjpeg-devel libtiff-devel tbb tbb-devel libdc1394-devel unzip gcc-c++
DEBS=unzip wget build-essential cmake curl git libgtk2.0-dev pkg-config libavcodec-dev libavformat-dev libswscale-dev libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libdc1394-22-dev
JETSON=build-essential cmake git unzip pkg-config libjpeg-dev libpng-dev libtiff-dev libavcodec-dev libavformat-dev libswscale-dev libgtk2.0-dev libcanberra-gtk* libxvidcore-dev libx264-dev libgtk-3-dev libtbb2 libtbb-dev libdc1394-22-dev libv4l-dev v4l-utils libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libavresample-dev libvorbis-dev libxine2-dev libfaac-dev libmp3lame-dev libtheora-dev libopencore-amrnb-dev libopencore-amrwb-dev libopenblas-dev libatlas-base-dev libblas-dev liblapack-dev libeigen3-dev gfortran libhdf5-dev protobuf-compiler libprotobuf-dev libgoogle-glog-dev libgflags-dev
explain:
@echo "For quick install with typical defaults of both OpenCV and GoCV, run 'make install'"
@ -47,6 +51,11 @@ deps_debian:
sudo apt-get -y update
sudo apt-get -y install $(DEBS)
deps_jetson:
sudo sh -c "echo '/usr/local/cuda/lib64' >> /etc/ld.so.conf.d/nvidia-tegra.conf"
sudo ldconfig
sudo apt-get -y update
sudo apt-get -y install $(JETSON)
# Download OpenCV source tarballs.
download:
@ -89,7 +98,7 @@ build:
mkdir build
cd build
rm -rf *
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -DOPENCV_GENERATE_PKGCONFIG=ON ..
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -D WITH_TBB=ON -DOPENCV_GENERATE_PKGCONFIG=ON ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
@ -116,13 +125,52 @@ build_raspi_zero:
$(MAKE) preinstall
cd -
# Build OpenCV for NVidia Jetson with CUDA.
build_jetson:
cd $(TMP_DIR)opencv/opencv-$(OPENCV_VERSION)
mkdir build
cd build
rm -rf *
cmake -D CMAKE_BUILD_TYPE=RELEASE \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D EIGEN_INCLUDE_PATH=/usr/include/eigen3 \
-D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} \
-D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules \
-D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=OFF -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO \
-D WITH_OPENCL=OFF \
-D WITH_CUDA=ON \
-D CUDA_ARCH_BIN=5.3 \
-D CUDA_ARCH_PTX="" \
-D WITH_CUDNN=ON \
-D WITH_CUBLAS=ON \
-D ENABLE_FAST_MATH=ON \
-D CUDA_FAST_MATH=ON \
-D OPENCV_DNN_CUDA=ON \
-D ENABLE_NEON=ON \
-D WITH_QT=OFF \
-D WITH_OPENMP=ON \
-D WITH_OPENGL=ON \
-D BUILD_TIFF=ON \
-D WITH_FFMPEG=ON \
-D WITH_GSTREAMER=ON \
-D WITH_TBB=ON \
-D BUILD_TBB=ON \
-D BUILD_TESTS=OFF \
-D WITH_EIGEN=ON \
-D WITH_V4L=ON \
-D WITH_LIBV4L=ON \
-D OPENCV_GENERATE_PKGCONFIG=ON ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
# Build OpenCV with non-free contrib modules.
build_nonfree:
cd $(TMP_DIR)opencv/opencv-$(OPENCV_VERSION)
mkdir build
cd build
rm -rf *
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON ..
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -D WITH_TBB=ON -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
@ -133,7 +181,7 @@ build_openvino:
mkdir build
cd build
rm -rf *
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D ENABLE_CXX11=ON -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D WITH_INF_ENGINE=ON -D InferenceEngine_DIR=/usr/local/dldt/inference-engine/build -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON ..
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D ENABLE_CXX11=ON -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D WITH_INF_ENGINE=ON -D InferenceEngine_DIR=/usr/local/dldt/inference-engine/build -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -D WITH_TBB=ON -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
@ -144,7 +192,18 @@ build_cuda:
mkdir build
cd build
rm -rf *
cmake -j $(shell nproc --all) -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_CUDA=ON -DENABLE_FAST_MATH=1 -DCUDA_FAST_MATH=1 -DWITH_CUBLAS=1 -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda/ -DBUILD_opencv_cudacodec=OFF -D WITH_CUDNN=ON -D OPENCV_DNN_CUDA=ON -D CUDA_GENERATION=Auto ..
cmake -j $(shell nproc --all) -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -D WITH_TBB=ON -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_CUDA=ON -DENABLE_FAST_MATH=1 -DCUDA_FAST_MATH=1 -DWITH_CUBLAS=1 -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda/ -DBUILD_opencv_cudacodec=OFF -D WITH_CUDNN=ON -D OPENCV_DNN_CUDA=ON -D CUDA_GENERATION=Auto ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
# Build OpenCV statically linked
build_static:
cd $(TMP_DIR)opencv/opencv-$(OPENCV_VERSION)
mkdir build
cd build
rm -rf *
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=OFF -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -DWITH_JASPER=OFF -DWITH_QT=OFF -DWITH_GTK=OFF -DWITH_FFMPEG=OFF -DWITH_TIFF=OFF -DWITH_WEBP=OFF -DWITH_PNG=OFF -DWITH_1394=OFF -DWITH_OPENJPEG=OFF -DOPENCV_GENERATE_PKGCONFIG=ON ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
@ -155,7 +214,7 @@ build_all:
mkdir build
cd build
rm -rf *
cmake -j $(shell nproc --all) -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D ENABLE_CXX11=ON -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D WITH_INF_ENGINE=ON -D InferenceEngine_DIR=/usr/local/dldt/inference-engine/build -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_CUDA=ON -DENABLE_FAST_MATH=1 -DCUDA_FAST_MATH=1 -DWITH_CUBLAS=1 -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda/ -DBUILD_opencv_cudacodec=OFF -D WITH_CUDNN=ON -D OPENCV_DNN_CUDA=ON -D CUDA_GENERATION=Auto ..
cmake -j $(shell nproc --all) -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS} -D ENABLE_CXX11=ON -D OPENCV_EXTRA_MODULES_PATH=$(TMP_DIR)opencv/opencv_contrib-$(OPENCV_VERSION)/modules -D WITH_INF_ENGINE=ON -D InferenceEngine_DIR=/usr/local/dldt/inference-engine/build -D BUILD_DOCS=OFF -D BUILD_EXAMPLES=OFF -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D BUILD_opencv_java=NO -D BUILD_opencv_python=NO -D BUILD_opencv_python2=NO -D BUILD_opencv_python3=NO -D WITH_JASPER=OFF -D WITH_TBB=ON -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_CUDA=ON -DENABLE_FAST_MATH=1 -DCUDA_FAST_MATH=1 -DWITH_CUBLAS=1 -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda/ -DBUILD_opencv_cudacodec=OFF -D WITH_CUDNN=ON -D OPENCV_DNN_CUDA=ON -D CUDA_GENERATION=Auto ..
$(MAKE) -j $(shell nproc --all)
$(MAKE) preinstall
cd -
@ -181,12 +240,18 @@ install_raspi: deps download build_raspi sudo_install clean verify
# Do everything on the raspberry pi zero.
install_raspi_zero: deps download build_raspi_zero sudo_install clean verify
# Do everything on Jetson.
install_jetson: deps download build_jetson sudo_install clean verify
# Do everything with cuda.
install_cuda: deps download sudo_pre_install_clean build_cuda sudo_install clean verify verify_cuda
# Do everything with openvino.
install_openvino: deps download download_openvino sudo_pre_install_clean build_openvino_package sudo_install_openvino build_openvino sudo_install clean verify_openvino
# Do everything statically.
install_static: deps download sudo_pre_install_clean build_static sudo_install clean verify
# Do everything with openvino and cuda.
install_all: deps download download_openvino sudo_pre_install_clean build_openvino_package sudo_install_openvino build_all sudo_install clean verify_openvino verify_cuda
@ -228,7 +293,11 @@ docker:
astyle:
astyle --project=.astylerc --recursive *.cpp,*.h
CMDS=basic-drawing caffe-classifier captest capwindow counter faceblur facedetect find-circles hand-gestures hello-sift img-similarity mjpeg-streamer motion-detect pose saveimage savevideo showimage ssd-facedetect tf-classifier tracking version
releaselog:
git log --pretty=format:"%s" $(GOCV_VERSION)..HEAD
CMDS=basic-drawing caffe-classifier captest capwindow counter dnn-detection dnn-pose-detection dnn-style-transfer faceblur facedetect facedetect-from-url feature-matching find-chessboard find-circles find-lines hand-gestures hello img-similarity mjpeg-streamer motion-detect saveimage savevideo showimage ssd-facedetect tf-classifier tracking version xphoto
cmds:
for cmd in $(CMDS) ; do \
go build -o build/$$cmd cmd/$$cmd/main.go ;

122 vendor/gocv.io/x/gocv/README.md generated vendored
@ -5,13 +5,12 @@
[![Go Reference](https://pkg.go.dev/badge/gocv.io/x/gocv.svg)](https://pkg.go.dev/gocv.io/x/gocv)
[![CircleCI Build status](https://circleci.com/gh/hybridgroup/gocv/tree/dev.svg?style=svg)](https://circleci.com/gh/hybridgroup/gocv/tree/dev)
[![AppVeyor Build status](https://ci.appveyor.com/api/projects/status/9asd5foet54ru69q/branch/dev?svg=true)](https://ci.appveyor.com/project/deadprogram/gocv/branch/dev)
[![codecov](https://codecov.io/gh/hybridgroup/gocv/branch/dev/graph/badge.svg)](https://codecov.io/gh/hybridgroup/gocv)
[![Go Report Card](https://goreportcard.com/badge/github.com/hybridgroup/gocv)](https://goreportcard.com/report/github.com/hybridgroup/gocv)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/hybridgroup/gocv/blob/release/LICENSE.txt)
The GoCV package provides Go language bindings for the [OpenCV 4](http://opencv.org/) computer vision library.
The GoCV package supports the latest releases of Go and OpenCV (v4.5.1) on Linux, macOS, and Windows. We intend to make the Go language a "first-class" client compatible with the latest developments in the OpenCV ecosystem.
The GoCV package supports the latest releases of Go and OpenCV (v4.5.4) on Linux, macOS, and Windows. We intend to make the Go language a "first-class" client compatible with the latest developments in the OpenCV ecosystem.
GoCV supports [CUDA](https://en.wikipedia.org/wiki/CUDA) for hardware acceleration using Nvidia GPUs. Check out the [CUDA README](./cuda/README.md) for more info on how to use GoCV with OpenCV/CUDA.
@ -123,25 +122,28 @@ There are examples in the [cmd directory](./cmd) of this repo in the form of var
## How to install
To install GoCV, run the following command:
To install GoCV, you must first have the matching version of OpenCV installed on your system. The current release of GoCV requires OpenCV 4.5.4.
```
go get -u -d gocv.io/x/gocv
```
To run code that uses the GoCV package, you must also install OpenCV 4.5.1 on your system. Here are instructions for Ubuntu, Raspian, macOS, and Windows.
Here are instructions for Ubuntu, Raspbian, macOS, and Windows.
## Ubuntu/Linux
### Installation
You can use `make` to install OpenCV 4.5.1 with the handy `Makefile` included with this repo. If you already have installed OpenCV, you do not need to do so again. The installation performed by the `Makefile` is minimal, so it may remove OpenCV options such as Python or Java wrappers if you have already installed OpenCV some other way.
You can use `make` to install OpenCV 4.5.4 with the handy `Makefile` included with this repo. If you already have installed OpenCV, you do not need to do so again. The installation performed by the `Makefile` is minimal, so it may remove OpenCV options such as Python or Java wrappers if you have already installed OpenCV some other way.
#### Quick Install
The following commands should do everything to download and install OpenCV 4.5.1 on Linux:
First, change directories to where you want to install GoCV, and then use git to clone the repository to your local machine like this:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/folder/with/your/src/
git clone https://github.com/hybridgroup/gocv.git
Make sure to change `$HOME/folder/with/your/src/` to where you actually want to save the code.
Once you have cloned the repo, the following commands should do everything to download and install OpenCV 4.5.4 on Linux:
cd gocv
make install
If you need static opencv libraries
@ -150,8 +152,8 @@ If you need static opencv libraries
If it works correctly, at the end of the entire process, the following message should be displayed:
gocv version: 0.26.0
opencv lib version: 4.5.1
gocv version: 0.29.0
opencv lib version: 4.5.4
That's it, now you are ready to use GoCV.
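For a quick end-to-end check from your own code, a minimal sketch (essentially what the bundled `version` example under `cmd/` does) looks like this:

```
package main

import (
	"fmt"

	"gocv.io/x/gocv"
)

func main() {
	// Should print the same banner that `make verify` shows.
	fmt.Println("gocv version:", gocv.Version())
	fmt.Println("opencv lib version:", gocv.OpenCVVersion())
}
```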
@ -165,9 +167,9 @@ See the [openvino directory](./openvino) for information.
#### Make Install for OpenVINO and Cuda
The following commands should do everything to download and install OpenCV 4.5.1 with CUDA and OpenVINO on Linux:
The following commands should do everything to download and install OpenCV 4.5.4 with CUDA and OpenVINO on Linux. Make sure to change `$HOME/folder/with/your/src/` to the directory you used to clone GoCV:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/folder/with/gocv/
make install_all
If you need static opencv libraries
@ -176,8 +178,8 @@ If you need static opencv libraries
If it works correctly, at the end of the entire process, the following message should be displayed:
gocv version: 0.26.0
opencv lib version: 4.5.1-openvino
gocv version: 0.29.0
opencv lib version: 4.5.4-openvino
cuda information:
Device 0: "GeForce MX150" 2003Mb, sm_61, Driver/Runtime ver.10.0/10.0
@ -185,11 +187,18 @@ If it works correctly, at the end of the entire process, the following message s
If you have already done the "Quick Install" as described above, you do not need to run any further commands. For the curious, or for custom installations, here are the details for each of the steps that are performed when you run `make install`.
First, change directories to where you want to install GoCV, and then use git to clone the repository to your local machine like this:
cd $HOME/folder/with/your/src/
git clone https://github.com/hybridgroup/gocv.git
Make sure to change `$HOME/folder/with/your/src/` to where you actually want to save the code.
##### Install required packages
First, you need to change the current directory to the location of the GoCV repo, so you can access the `Makefile`:
First, you need to change the current directory to the location where you cloned the GoCV repo, so you can access the `Makefile`:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/folder/with/your/src/gocv
Next, you need to update the system, and install any required packages:
@ -197,7 +206,7 @@ Next, you need to update the system, and install any required packages:
#### Download source
Now, download the OpenCV 4.5.1 and OpenCV Contrib source code:
Now, download the OpenCV 4.5.4 and OpenCV Contrib source code:
make download
@ -223,7 +232,7 @@ To verify your installation you can run one of the included examples.
First, change the current directory to the location of the GoCV repo:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/src/gocv.io/x/gocv
Now you should be able to build or run any of the examples:
@ -231,8 +240,8 @@ Now you should be able to build or run any of the examples:
The version program should output the following:
gocv version: 0.26.0
opencv lib version: 4.5.1
gocv version: 0.29.0
opencv lib version: 4.5.4
#### Cleanup extra files
@ -240,12 +249,6 @@ After the installation is complete, you can remove the extra files and folders:
make clean
### Cache builds
If you are running a version of Go older than v1.10 and not modifying GoCV source, precompile the GoCV package to significantly decrease your build times:
go install gocv.io/x/gocv
### Custom Environment
By default, pkg-config is used to determine the correct flags for compiling and linking OpenCV. This behavior can be disabled by supplying `-tags customenv` when building/running your application. When building with this tag you will need to supply the CGO environment variables yourself.
@ -267,10 +270,10 @@ The project now provides `Dockerfile` which lets you build [GoCV](https://gocv.i
make docker
```
By default Docker image built by running the command above ships [Go](https://golang.org/) version `1.13.5`, but if you would like to build an image which uses different version of `Go` you can override the default value when running the target command:
By default, the Docker image built by running the command above ships [Go](https://golang.org/) version `1.16.5`, but if you would like to build an image which uses a different version of `Go`, you can override the default value when running the target command:
```
make docker GOVERSION='1.13.5'
make docker GOVERSION='1.15'
```
#### Running GUI programs in Docker on macOS
@ -317,19 +320,26 @@ There is a Docker image with Alpine 3.7 that has been created by project contrib
### Installation
We have a special installation for the Raspberry Pi that includes some hardware optimizations. You use `make` to install OpenCV 4.5.1 with the handy `Makefile` included with this repo. If you already have installed OpenCV, you do not need to do so again. The installation performed by the `Makefile` is minimal, so it may remove OpenCV options such as Python or Java wrappers if you have already installed OpenCV some other way.
We have a special installation for the Raspberry Pi that includes some hardware optimizations. You use `make` to install OpenCV 4.5.4 with the handy `Makefile` included with this repo. If you already have installed OpenCV, you do not need to do so again. The installation performed by the `Makefile` is minimal, so it may remove OpenCV options such as Python or Java wrappers if you have already installed OpenCV some other way.
#### Quick Install
The following commands should do everything to download and install OpenCV 4.5.1 on Raspbian:
First, change directories to where you want to install GoCV, and then use git to clone the repository to your local machine like this:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/folder/with/your/src/
git clone https://github.com/hybridgroup/gocv.git
Make sure to change `$HOME/folder/with/your/src/` to where you actually want to save the code.
The following make command should do everything to download and install OpenCV 4.5.4 on Raspbian:
cd $HOME/folder/with/your/src/gocv
make install_raspi
If it works correctly, at the end of the entire process, the following message should be displayed:
gocv version: 0.26.0
opencv lib version: 4.5.1
gocv version: 0.29.0
opencv lib version: 4.5.4
That's it, now you are ready to use GoCV.
@ -337,13 +347,13 @@ That's it, now you are ready to use GoCV.
### Installation
You can install OpenCV 4.5.1 using Homebrew.
You can install OpenCV 4.5.4 using Homebrew.
If you already have an earlier version of OpenCV (3.4.x) installed, you should probably remove it before installing the new version:
brew uninstall opencv
You can then install OpenCV 4.5.1:
You can then install OpenCV 4.5.4:
brew install opencv
@ -359,7 +369,7 @@ To verify your installation you can run one of the included examples.
First, change the current directory to the location of the GoCV repo:
cd $GOPATH/src/gocv.io/x/gocv
cd $HOME/folder/with/your/src/gocv
Now you should be able to build or run any of the examples:
@ -367,14 +377,8 @@ Now you should be able to build or run any of the examples:
The version program should output the following:
gocv version: 0.26.0
opencv lib version: 4.5.1
### Cache builds
If you are running a version of Go older than v1.10 and not modifying GoCV source, precompile the GoCV package to significantly decrease your build times:
go install gocv.io/x/gocv
gocv version: 0.29.0
opencv lib version: 4.5.4
### Custom Environment
@ -383,8 +387,8 @@ By default, pkg-config is used to determine the correct flags for compiling and
For example:
export CGO_CXXFLAGS="--std=c++11"
export CGO_CPPFLAGS="-I/usr/local/Cellar/opencv/4.5.1/include"
export CGO_LDFLAGS="-L/usr/local/Cellar/opencv/4.5.1/lib -lopencv_stitching -lopencv_superres -lopencv_videostab -lopencv_aruco -lopencv_bgsegm -lopencv_bioinspired -lopencv_ccalib -lopencv_dnn_objdetect -lopencv_dpm -lopencv_face -lopencv_photo -lopencv_fuzzy -lopencv_hfs -lopencv_img_hash -lopencv_line_descriptor -lopencv_optflow -lopencv_reg -lopencv_rgbd -lopencv_saliency -lopencv_stereo -lopencv_structured_light -lopencv_phase_unwrapping -lopencv_surface_matching -lopencv_tracking -lopencv_datasets -lopencv_dnn -lopencv_plot -lopencv_xfeatures2d -lopencv_shape -lopencv_video -lopencv_ml -lopencv_ximgproc -lopencv_calib3d -lopencv_features2d -lopencv_highgui -lopencv_videoio -lopencv_flann -lopencv_xobjdetect -lopencv_imgcodecs -lopencv_objdetect -lopencv_xphoto -lopencv_imgproc -lopencv_core"
export CGO_CPPFLAGS="-I/usr/local/Cellar/opencv/4.5.4/include"
export CGO_LDFLAGS="-L/usr/local/Cellar/opencv/4.5.4/lib -lopencv_stitching -lopencv_superres -lopencv_videostab -lopencv_aruco -lopencv_bgsegm -lopencv_bioinspired -lopencv_ccalib -lopencv_dnn_objdetect -lopencv_dpm -lopencv_face -lopencv_photo -lopencv_fuzzy -lopencv_hfs -lopencv_img_hash -lopencv_line_descriptor -lopencv_optflow -lopencv_reg -lopencv_rgbd -lopencv_saliency -lopencv_stereo -lopencv_structured_light -lopencv_phase_unwrapping -lopencv_surface_matching -lopencv_tracking -lopencv_datasets -lopencv_dnn -lopencv_plot -lopencv_xfeatures2d -lopencv_shape -lopencv_video -lopencv_ml -lopencv_ximgproc -lopencv_calib3d -lopencv_features2d -lopencv_highgui -lopencv_videoio -lopencv_flann -lopencv_xobjdetect -lopencv_imgcodecs -lopencv_objdetect -lopencv_xphoto -lopencv_imgproc -lopencv_core"
Please note that you will need to run these 3 lines once in your current session to set up the needed ENV variables before building or running the code. Once you have done so, you can execute code that uses GoCV with your custom environment like this:
@ -396,7 +400,7 @@ Please note that you will need to run these 3 lines of code one time in your cur
The following assumes that you are running a 64-bit version of Windows 10.
In order to build and install OpenCV 4.5.1 on Windows, you must first download and install MinGW-W64 and CMake, as follows.
In order to build and install OpenCV 4.5.4 on Windows, you must first download and install MinGW-W64 and CMake, as follows.
#### MinGW-W64
@ -412,9 +416,9 @@ Add the `C:\Program Files\mingw-w64\x86_64-7.3.0-posix-seh-rt_v5-rev2\mingw64\bi
Download and install CMake [https://cmake.org/download/](https://cmake.org/download/) to the default location. CMake installer will add CMake to your system path.
#### OpenCV 4.5.1 and OpenCV Contrib Modules
#### OpenCV 4.5.4 and OpenCV Contrib Modules
The following commands should do everything to download and install OpenCV 4.5.1 on Windows:
The following commands should do everything to download and install OpenCV 4.5.4 on Windows:
chdir %GOPATH%\src\gocv.io\x\gocv
win_build_opencv.cmd
@ -435,17 +439,11 @@ Now you should be able to build or run any of the command examples:
The version program should output the following:
gocv version: 0.26.0
opencv lib version: 4.5.1
gocv version: 0.29.0
opencv lib version: 4.5.4
That's it, now you are ready to use GoCV.
### Cache builds
If you are running a version of Go older than v1.10 and not modifying GoCV source, precompile the GoCV package to significantly decrease your build times:
go install gocv.io/x/gocv
### Custom Environment
By default, OpenCV is expected to be in `C:\opencv\build\install\include`. This behavior can be disabled by supplying `-tags customenv` when building/running your application. When building with this tag you will need to supply the CGO environment variables yourself.
@ -456,7 +454,7 @@ For example:
set CGO_CXXFLAGS="--std=c++11"
set CGO_CPPFLAGS=-IC:\opencv\build\install\include
set CGO_LDFLAGS=-LC:\opencv\build\install\x64\mingw\lib -lopencv_core412 -lopencv_face412 -lopencv_videoio412 -lopencv_imgproc412 -lopencv_highgui412 -lopencv_imgcodecs412 -lopencv_objdetect412 -lopencv_features2d412 -lopencv_video412 -lopencv_dnn412 -lopencv_xfeatures2d412 -lopencv_plot412 -lopencv_tracking412 -lopencv_img_hash412
set CGO_LDFLAGS=-LC:\opencv\build\install\x64\mingw\lib -lopencv_core454 -lopencv_face454 -lopencv_videoio454 -lopencv_imgproc454 -lopencv_highgui454 -lopencv_imgcodecs454 -lopencv_objdetect454 -lopencv_features2d454 -lopencv_video454 -lopencv_dnn454 -lopencv_xfeatures2d454 -lopencv_plot454 -lopencv_tracking454 -lopencv_img_hash454
Please note that you will need to run these 3 lines once in your current session to set up the needed ENV variables before building or running the code. Once you have done so, you can execute code that uses GoCV with your custom environment like this:
@ -590,6 +588,6 @@ This package was inspired by the original https://github.com/go-opencv/go-opencv
## License
Licensed under the Apache 2.0 license. Copyright (c) 2017-2020 The Hybrid Group.
Licensed under the Apache 2.0 license. Copyright (c) 2017-2021 The Hybrid Group.
Logo generated by GopherizeMe - https://gopherize.me

170 vendor/gocv.io/x/gocv/ROADMAP.md generated vendored
@ -13,7 +13,7 @@ Your pull requests will be greatly appreciated!
## Modules list
- [ ] **core. Core functionality - WORK STARTED**
- [ ] **Basic structures - WORK STARTED**
- [X] **Basic structures**
- [ ] **Operations on arrays - WORK STARTED**. The following functions still need implementation:
- [ ] [Mahalanobis](https://docs.opencv.org/master/d2/de8/group__core__array.html#ga4493aee129179459cbfc6064f051aa7d)
- [ ] [mulTransposed](https://docs.opencv.org/master/d2/de8/group__core__array.html#gadc4e49f8f7a155044e3be1b9e3b270ab)
@ -28,15 +28,17 @@ Your pull requests will be greatly appreciated!
- [ ] [SVBackSubst](https://docs.opencv.org/master/d2/de8/group__core__array.html#gab4e620e6fc6c8a27bb2be3d50a840c0b)
- [ ] [SVDecomp](https://docs.opencv.org/master/d2/de8/group__core__array.html#gab477b5b7b39b370bb03e75b19d2d5109)
- [ ] [theRNG](https://docs.opencv.org/master/d2/de8/group__core__array.html#ga75843061d150ad6564b5447e38e57722)
- [ ] XML/YAML Persistence
- [ ] [FileStorage](https://docs.opencv.org/master/da/d56/classcv_1_1FileStorage.html)
- [ ] **Clustering - WORK STARTED**. The following functions still need implementation:
- [ ] [partition](https://docs.opencv.org/master/d5/d38/group__core__cluster.html#ga2037c989e69b499c1aa271419f3a9b34)
- [ ] Utility and system functions and macros
- [ ] OpenGL interoperability
- [ ] Intel IPP Asynchronous C/C++ Converters
- [ ] Optimization Algorithms
- [ ] OpenCL support
- [ ] [ConjGradSolver](https://docs.opencv.org/master/d0/d21/classcv_1_1ConjGradSolver.html)
- [ ] [DownhillSolver](https://docs.opencv.org/master/d4/d43/classcv_1_1DownhillSolver.html)
- [ ] [solveLP](https://docs.opencv.org/master/da/d01/group__core__optim.html#ga9a06d237a9d38ace891efa1ca1b5d00a)
- [ ] **imgproc. Image processing - WORK STARTED**
- [ ] **Image Filtering - WORK STARTED** The following functions still need implementation:
@ -78,7 +80,6 @@ Your pull requests will be greatly appreciated!
- [ ] [isContourConvex](https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga8abf8010377b58cbc16db6734d92941b)
- [ ] [matchShapes](https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#gaadc90cb16e2362c9bd6e7363e6e4c317)
- [ ] [minEnclosingTriangle](https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga1513e72f6bbdfc370563664f71e0542f)
- [ ] [pointPolygonTest](https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga1a539e8db2135af2566103705d7a5722)
- [ ] [rotatedRectangleIntersection](https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga8740e7645628c59d238b0b22c2abe2d4)
- [ ] **Motion Analysis and Object Tracking - WORK STARTED** The following functions still need implementation:
@ -113,7 +114,7 @@ Your pull requests will be greatly appreciated!
- [ ] **calib3d. Camera Calibration and 3D Reconstruction - WORK STARTED**. The following functions still need implementation:
- [ ] **Camera Calibration - WORK STARTED** The following functions still need implementation:
- [ ] [calibrateCamera](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [X] [calibrateCamera](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [calibrateCameraRO](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [calibrateHandEye](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [calibrationMatrixValues](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
@ -129,13 +130,13 @@ Your pull requests will be greatly appreciated!
- [ ] [decomposeProjectionMatrix](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [drawChessboardCorners](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [drawFrameAxes](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [estimateAffine2D](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [X] [estimateAffine2D](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [estimateAffine3D](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [filterHomographyDecompByVisibleRefpoints](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [filterSpeckles](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [find4QuadCornerSubpix](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [findChessboardCorners](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [findChessboardCornersSB](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [X] [findChessboardCorners](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [X] [findChessboardCornersSB](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [findCirclesGrid](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [findEssentialMat](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
- [ ] [findFundamentalMat](https://docs.opencv.org/master/d9/d0c/group__calib3d.html)
@ -189,13 +190,10 @@ Your pull requests will be greatly appreciated!
- [ ] [denoise_TVL1](https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#ga7602ed5ae17b7de40152b922227c4e4f)
- [ ] [fastNlMeansDenoising](https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#ga4c6b0031f56ea3f98f768881279ffe93)
- [ ] [fastNlMeansDenoisingColored](https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#ga03aa4189fc3e31dafd638d90de335617)
- [ ] [fastNlMeansDenoisingColoredMulti](https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#gaa501e71f52fb2dc17ff8ca5e7d2d3619)
- [ ] [fastNlMeansDenoisingMulti](https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#gaf4421bf068c4d632ea7f0aa38e0bf172)
- [ ] [createAlignMTB](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga2f1fafc885a5d79dbfb3542e08db0244)
- [ ] [createCalibrateDebevec](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga7fed9707ad5f2cc0e633888867109f90)
- [ ] [createCalibrateRobertson](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#gae77813a21cd351a596619e5ff013be5d)
- [ ] [createMergeDebevec](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#gaa8eab36bc764abb2a225db7c945f87f9)
- [ ] [createMergeMertens](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga79d59aa3cb3a7c664e59a4b5acc1ccb6)
- [ ] [createMergeRobertson](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga460d4a1df1a7e8cdcf7445bb87a8fb78)
- [ ] [createTonemap](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#gabcbd653140b93a1fa87ccce94548cd0d)
- [ ] [createTonemapDrago](https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga72bf92bb6b8653ee4be650ac01cf50b6)
@ -211,38 +209,102 @@ Your pull requests will be greatly appreciated!
## CUDA
- [ ] **core. - WORK STARTED** The following functions still need implementation:
- [ ] [cv::cuda::convertFp16](https://docs.opencv.org/master/d8/d40/group__cudacore__init.html#gaa1c52258763197958eb9e6681917f723)
- [ ] [cv::cuda::deviceSupports](https://docs.opencv.org/master/d8/d40/group__cudacore__init.html#ga170b10cc9af4aa8cce8c0afdb4b1d08c)
- [X] [cv::cuda::getDevice](https://docs.opencv.org/master/d8/d40/group__cudacore__init.html#ga6ded4ed8e4fc483a9863d31f34ec9c0e)
- [X] [cv::cuda::resetDevice](https://docs.opencv.org/master/d8/d40/group__cudacore__init.html#ga6153b6f461101374e655a54fc77e725e)
- [X] [cv::cuda::setDevice](https://docs.opencv.org/master/d8/d40/group__cudacore__init.html#gaefa34186b185de47851836dba537828b)
- [ ] **cudaarithm. Operations on Matrices - WORK STARTED** The following functions still need implementation:
- [ ] [cv::cuda::abs](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga54a72bd772494ab34d05406fd76df2b6)
- [ ] [cv::cuda::absdiff](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gac062b283cf46ee90f74a773d3382ab54)
- [ ] [cv::cuda::add](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga5d9794bde97ed23d1c1485249074a8b1)
- [ ] [cv::cuda::addWeighted](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga2cd14a684ea70c6ab2a63ee90ffe6201)
- [ ] [cv::cuda::bitwise_and](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga78d7c1a013877abd4237fbfc4e13bd76)
- [ ] [cv::cuda::bitwise_not](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gae58159a2259ae1acc76b531c171cf06a)
- [ ] [cv::cuda::bitwise_or](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gafd098ee3e51c68daa793999c1da3dfb7)
- [ ] [cv::cuda::bitwise_xor](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga3d95d4faafb099aacf18e8b915a4ad8d)
- [ ] [cv::cuda::cartToPolar](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga82210c7d1c1d42e616e554bf75a53480)
- [ ] [cv::cuda::compare](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga4d41cd679f4a83862a3de71a6057db54)
- [ ] [cv::cuda::divide](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga124315aa226260841e25cc0b9ea99dc3)
- [ ] [cv::cuda::exp](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gac6e51541d3bb0a7a396128e4d5919b61)
- [ ] [cv::cuda::log](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gaae9c60739e2d1a977b4d3250a0be42ca)
- [ ] [cv::cuda::lshift](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gafd072accecb14c9adccdad45e3bf2300)
- [ ] [cv::cuda::magnitude](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga3d17f4fcd79d7c01fadd217969009463)
- [ ] [cv::cuda::magnitudeSqr](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga7613e382d257e150033d0ce4d6098f6a)
- [ ] [cv::cuda::max](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gadb5dd3d870f10c0866035755b929b1e7)
- [ ] [cv::cuda::min](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga74f0b05a65b3d949c237abb5e6c60867)
- [ ] [cv::cuda::multiply](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga497cc0615bf717e1e615143b56f00591)
- [ ] [cv::cuda::phase](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga5b75ec01be06dcd6e27ada09a0d4656a)
- [ ] [cv::cuda::polarToCart](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga01516a286a329c303c2db746513dd9df)
- [ ] [cv::cuda::pow](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga82d04ef4bcc4dfa9bfbe76488007c6c4)
- [ ] [cv::cuda::rshift](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga87af0b66358cc302676f35c1fd56c2ed)
- [ ] [cv::cuda::sqr](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga8aae233da90ce0ffe309ab8004342acb)
- [ ] [cv::cuda::sqrt](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga09303680cb1a5521a922b6d392028d8c)
- [ ] [cv::cuda::subtract](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga6eab60fc250059e2fda79c5636bd067f)
- [ ] **core** The following functions still need implementation:
- [ ] [cv::cuda::copyMakeBorder](https://docs.opencv.org/master/de/d09/group__cudaarithm__core.html#ga5368db7656eacf846b40089c98053a49)
- [ ] [cv::cuda::createLookUpTable](https://docs.opencv.org/master/de/d09/group__cudaarithm__core.html#ga2d9d9780dea8c5cd85d3c19b7e01979c)
- [ ] [cv::cuda::merge](https://docs.opencv.org/master/de/d09/group__cudaarithm__core.html#gaac939dc3b178ee92fb6e7078f342622c)
- [ ] [cv::cuda::split](https://docs.opencv.org/master/de/d09/group__cudaarithm__core.html#gabe5013d55d4ff586b20393913726179e)
- [ ] [cv::cuda::transpose](https://docs.opencv.org/master/de/d09/group__cudaarithm__core.html#ga327b71c3cb811a904ccf5fba37fc29f2)
- [ ] **per-element operations - WORK STARTED** The following functions still need implementation:
- [X] [cv::cuda::absdiff](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gac062b283cf46ee90f74a773d3382ab54)
- [X] [cv::cuda::add](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga5d9794bde97ed23d1c1485249074a8b1)
- [ ] [cv::cuda::addWeighted](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga2cd14a684ea70c6ab2a63ee90ffe6201)
- [X] [cv::cuda::bitwise_and](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga78d7c1a013877abd4237fbfc4e13bd76)
- [X] [cv::cuda::bitwise_not](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gae58159a2259ae1acc76b531c171cf06a)
- [X] [cv::cuda::bitwise_or](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gafd098ee3e51c68daa793999c1da3dfb7)
- [X] [cv::cuda::bitwise_xor](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga3d95d4faafb099aacf18e8b915a4ad8d)
- [ ] [cv::cuda::cartToPolar](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga82210c7d1c1d42e616e554bf75a53480)
- [ ] [cv::cuda::compare](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga4d41cd679f4a83862a3de71a6057db54)
- [X] [cv::cuda::divide](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga124315aa226260841e25cc0b9ea99dc3)
- [X] [cv::cuda::exp](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gac6e51541d3bb0a7a396128e4d5919b61)
- [ ] [cv::cuda::inRange](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gaf611ab6b1d85e951feb6f485b1ed9672)
- [X] [cv::cuda::log](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gaae9c60739e2d1a977b4d3250a0be42ca)
- [ ] [cv::cuda::lshift](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gafd072accecb14c9adccdad45e3bf2300)
- [ ] [cv::cuda::magnitude](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga3d17f4fcd79d7c01fadd217969009463)
- [ ] [cv::cuda::magnitudeSqr](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga7613e382d257e150033d0ce4d6098f6a)
- [X] [cv::cuda::max](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#gadb5dd3d870f10c0866035755b929b1e7)
- [X] [cv::cuda::min](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga74f0b05a65b3d949c237abb5e6c60867)
- [X] [cv::cuda::multiply](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga497cc0615bf717e1e615143b56f00591)
- [ ] [cv::cuda::phase](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga5b75ec01be06dcd6e27ada09a0d4656a)
- [ ] [cv::cuda::polarToCart](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga01516a286a329c303c2db746513dd9df)
- [ ] [cv::cuda::pow](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga82d04ef4bcc4dfa9bfbe76488007c6c4)
- [ ] [cv::cuda::rshift](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga87af0b66358cc302676f35c1fd56c2ed)
- [X] [cv::cuda::sqr](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga8aae233da90ce0ffe309ab8004342acb)
- [X] [cv::cuda::sqrt](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga09303680cb1a5521a922b6d392028d8c)
- [X] [cv::cuda::subtract](https://docs.opencv.org/master/d8/d34/group__cudaarithm__elem.html#ga6eab60fc250059e2fda79c5636bd067f)
- [ ] **matrix reductions** The following functions still need implementation:
- [ ] [cv::cuda::absSum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga690fa79ba4426c53f7d2bebf3d37a32a)
- [ ] [cv::cuda::calcAbsSum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga15c403b76ab2c4d7ed0f5edc09891b7e)
- [ ] [cv::cuda::calcNorm](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga39d2826990d29b7e4b69dbe02bdae2e1)
- [ ] [cv::cuda::calcNormDiff](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga9be3d9a7b6c5760955f37d1039d01265)
- [ ] [cv::cuda::calcSqrSum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#gac998c83597f6c206c78cee16aa87946f)
- [ ] [cv::cuda::calcSum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga98a09144047f09f5cb1d6b6ea8e0856f)
- [ ] [cv::cuda::countNonZero](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga98a09144047f09f5cb1d6b6ea8e0856f)
- [ ] [cv::cuda::findMinMax](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#gae7f5f2aa9f65314470a76fccdff887f2)
- [ ] [cv::cuda::findMinMaxLoc](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga93916bc473a62d215d1130fab84d090a)
- [ ] [cv::cuda::integral](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga07e5104eba4bf45212ac9dbc5bf72ba6)
- [ ] [cv::cuda::meanStdDev](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga990a4db4c6d7e8f0f3a6685ba48fbddc)
- [ ] [cv::cuda::minMax](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga8d7de68c10717cf25e787e3c20d2dfee)
- [ ] [cv::cuda::minMaxLoc](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga5cacbc2a2323c4eaa81e7390c5d9f530)
- [ ] [cv::cuda::norm](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga6c01988a58d92126a7c60a4ab76d8324)
- [ ] [cv::cuda::normalize](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga4da4738b9956a5baaa2f5f8c2fba438a)
- [ ] [cv::cuda::rectStdDev](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#gac311484a4e57cab2ce2cfdc195fda7ee)
- [ ] [cv::cuda::reduce](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga21d57f661db7be093caf2c4378be2007)
- [ ] [cv::cuda::sqrIntegral](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga40c75196202706399a60bf6ba7a052ac)
- [ ] [cv::cuda::sqlSum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga056c804ebf5d2eb9f6f35e3dcb01524c)
- [ ] [cv::cuda::sum](https://docs.opencv.org/master/d5/de6/group__cudaarithm__reduce.html#ga1f582844670199281e8012733b50c582)
- [ ] **Operations on matrices** The following functions still need implementation:
- [ ] [cv::cuda::createConvolution](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#ga2695e05ef624bf3ce03cfbda383a821d)
- [ ] [cv::cuda::createDFT](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#ga0f72d063b73c8bb995678525eb076f10)
- [ ] [cv::cuda::dft](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#gadea99cb15a715c983bcc2870d65a2e78)
- [ ] [cv::cuda::gemm](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#ga42efe211d7a43bbc922da044c4f17130)
- [ ] [cv::cuda::mulAndScaleSpectrums](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#ga5704c25b8be4f19da812e6d98c8ee464)
- [ ] [cv::cuda::mulSpectrums](https://docs.opencv.org/4.5.0/d9/d88/group__cudaarithm__arithm.html#gab3e8900d67c4f59bdc137a0495206cd8)
- [X] **cudabgsegm. Background Segmentation**
- [ ] cudacodec. Video Encoding/Decoding
- [ ] cudafeatures2d. Feature Detection and Description
- [ ] cudafilters. Image Filtering
- [ ] **cudacodec** Video Encoding/Decoding. The following functions still need implementation:
- [ ] [cv::cuda::VideoReader](https://docs.opencv.org/master/db/ded/classcv_1_1cudacodec_1_1VideoReader.html)
- [ ] [cv::cuda::VideoWriter](https://docs.opencv.org/master/df/dde/classcv_1_1cudacodec_1_1VideoWriter.html)
- [ ] **cudafeatures2d** Feature Detection and Description. The following functions still need implementation:
- [ ] [cv::cuda::FastFeatureDetector](https://docs.opencv.org/master/d4/d6a/classcv_1_1cuda_1_1FastFeatureDetector.html)
- [ ] [cv::cuda::ORB](https://docs.opencv.org/master/da/d44/classcv_1_1cuda_1_1ORB.html)
- [ ] **cudafilters. Image Filtering - WORK STARTED** The following functions still need implementation:
- [ ] [cv::cuda::createBoxFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga3113b66e289bad7caef412e6e13ec2be)
- [ ] [cv::cuda::createBoxMaxFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#gaaf4740c51128d23a37f6f1b22cee49e8)
- [ ] [cv::cuda::createBoxMinFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga77fd36949bc8d92aabc120b4b1cfaafa)
- [ ] [cv::cuda::createColumnSumFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#gac13bf7c41a34bfde2a7f33ad8caacfdf)
- [ ] [cv::cuda::createDerivFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga14d76dc6982ce739c67198f52bc16ee1)
- [ ] [cv::cuda::createLaplacianFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga53126e88bb7e6185dcd5628e28e42cd2)
- [ ] [cv::cuda::createLinearFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga57cb1804ad9d1280bf86433858daabf9)
- [ ] [cv::cuda::createMorphologyFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#gae58694e07be6bdbae126f36c75c08ee6)
- [ ] [cv::cuda::createRowSumFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#gaf735de273ccb5072f3c27816fb97a53a)
- [ ] [cv::cuda::createScharrFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#ga4ac8df158e5771ddb0bd5c9091188ce6)
- [ ] [cv::cuda::createSeparableLinearFilter](https://docs.opencv.org/master/dc/d66/group__cudafilters.html#gaf7b79a9a92992044f328dad07a52c4bf)
- [ ] **cudaimgproc. Image Processing - WORK STARTED** The following functions still need implementation:
- [ ] [cv::cuda::TemplateMatching](https://docs.opencv.org/master/d2/d58/classcv_1_1cuda_1_1TemplateMatching.html)
- [ ] [cv::cuda::alphaComp](https://docs.opencv.org/master/db/d8c/group__cudaimgproc__color.html#ga08a698700458d9311390997b57fbf8dc)
@ -256,8 +318,6 @@ Your pull requests will be greatly appreciated!
- [ ] [cv::cuda::histEven](https://docs.opencv.org/master/d8/d0e/group__cudaimgproc__hist.html#gacd3b14279fb77a57a510cb8c89a1856f)
- [ ] [cv::cuda::histRange](https://docs.opencv.org/master/d8/d0e/group__cudaimgproc__hist.html#ga87819085c1059186d9cdeacd92cea783)
- [ ] [cv::cuda::HoughCirclesDetector](https://docs.opencv.org/master/da/d80/classcv_1_1cuda_1_1HoughCirclesDetector.html)
- [ ] [cv::cuda::HoughLinesDetector](https://docs.opencv.org/master/d2/dcd/classcv_1_1cuda_1_1HoughLinesDetector.html)
- [ ] [cv::cuda::HoughSegmentDetector](https://docs.opencv.org/master/d6/df9/classcv_1_1cuda_1_1HoughSegmentDetector.html)
- [ ] [cv::cuda::createGoodFeaturesToTrackDetector](https://docs.opencv.org/master/dc/d6d/group__cudaimgproc__feature.html#ga478b474a598ece101f7e706fee2c8e91)
- [ ] [cv::cuda::createHarrisCorner](https://docs.opencv.org/master/dc/d6d/group__cudaimgproc__feature.html#ga3e5878a803e9bba51added0c10101979)
- [ ] [cv::cuda::createMinEigenValCorner](https://docs.opencv.org/master/dc/d6d/group__cudaimgproc__feature.html#ga7457fd4b53b025f990b1c1dd1b749915)
@ -267,8 +327,8 @@ Your pull requests will be greatly appreciated!
- [ ] [cv::cuda::meanShiftProc](https://docs.opencv.org/master/d0/d05/group__cudaimgproc.html#ga6039dc8ecbe2f912bc83fcc9b3bcca39)
- [ ] [cv::cuda::meanShiftSegmentation](https://docs.opencv.org/master/d0/d05/group__cudaimgproc.html#ga70ed80533a448829dc48cf22b1845c16)
- [ ] cudalegacy. Legacy support
- [X] **cudaobjdetect. Object Detection**
- [ ] **cudaoptflow. Optical Flow - WORK STARTED** The following functions still need implementation:
- [ ] [BroxOpticalFlow](https://docs.opencv.org/master/d7/d18/classcv_1_1cuda_1_1BroxOpticalFlow.html)
- [ ] [DenseOpticalFlow](https://docs.opencv.org/master/d6/d4a/classcv_1_1cuda_1_1DenseOpticalFlow.html)
@ -279,14 +339,22 @@ Your pull requests will be greatly appreciated!
- [ ] [SparseOpticalFlow](https://docs.opencv.org/master/d5/dcf/classcv_1_1cuda_1_1SparseOpticalFlow.html)
- [ ] **[SparsePyrLKOpticalFlow](https://docs.opencv.org/master/d7/d05/classcv_1_1cuda_1_1SparsePyrLKOpticalFlow.html) - WORK STARTED**
- [ ] cudastereo. Stereo Correspondence
- [ ] **cudastereo** Stereo Correspondence
- [ ] [cv::cuda::createDisparityBilateralFilter](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#gaafb5f9902f7a9e74cb2cd4e680569590)
- [ ] [cv::cuda::createStereoBeliefPropagation](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#ga8d22dd80bdfb4e3d7d2ac09e8a07c22b)
- [ ] [cv::cuda::createStereoBM](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#ga77edc901350dd0a7f46ec5aca4138039)
- [ ] [cv::cuda::createStereoConstantSpaceBP](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#gaec3b49c7cf9f7701a6f549a227be4df2)
- [ ] [cv::cuda::createStereoSGM](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#gafb7e5284de5f488d664c3155acb12c93)
- [ ] [cv::cuda::drawColorDisp](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#ga469b23a77938dd7c06861e59cecc08c5)
- [ ] [cv::cuda::reprojectImageTo3D](https://docs.opencv.org/master/dd/d47/group__cudastereo.html#gaff851e3932da0f3e74d1be1d8855f094)
- [X] **cudawarping. Image Warping**
- [ ] cudev. Device layer
## Contrib modules list
- [ ] alphamat. Alpha Matting
- [ ] aruco. ArUco Marker Detection
- [X] **aruco. ArUco Marker Detection - WORK STARTED**
- [ ] barcode. Barcode detecting and decoding methods
- [X] **bgsegm. Improved Background-Foreground Segmentation Methods - WORK STARTED**
- [ ] bioinspired. Biologically inspired vision models and derived tools
- [ ] ccalib. Custom Calibration Pattern for 3D reconstruction
@ -306,11 +374,12 @@ Your pull requests will be greatly appreciated!
- [ ] intensity_transform. The module brings implementations of intensity transformation algorithms to adjust image contrast.
- [ ] line_descriptor. Binary descriptors for lines extracted from an image
- [ ] mcc. Macbeth Chart module
- [ ] matlab. MATLAB Bridge
- [ ] optflow. Optical Flow Algorithms
- [ ] ovis. OGRE 3D Visualiser
- [ ] phase_unwrapping. Phase Unwrapping API
- [ ] plot. Plot function for Mat data
- [ ] quality. Image Quality Analysis (IQA) API
- [ ] rapid. silhouette based 3D object tracking
- [ ] reg. Image Registration
- [ ] rgbd. RGB-Depth Processing
- [ ] saliency. Saliency API
@ -324,7 +393,8 @@ Your pull requests will be greatly appreciated!
- [ ] **tracking. Tracking API - WORK STARTED**
- [ ] videostab. Video Stabilization
- [ ] viz. 3D Visualizer
- [ ] wechat_qrcode. WeChat QR code detector for detecting and parsing QR code.
- [ ] **xfeatures2d. Extra 2D Features Framework - WORK STARTED**
- [ ] ximgproc. Extended Image Processing
- [ ] xobjdetect. Extended object detection
- [ ] xphoto. Additional photo processing algorithms
- [ ] **xphoto. Additional photo processing algorithms - WORK STARTED**

2 vendor/gocv.io/x/gocv/appveyor.yml generated vendored
@ -8,7 +8,7 @@ platform:
environment:
GOPATH: c:\gopath
GOROOT: c:\go
GOVERSION: 1.15
GOVERSION: 1.16
TEST_EXTERNAL: 1
APPVEYOR_SAVE_CACHE_ON_ERROR: true

vendor/gocv.io/x/gocv/win_build_opencv.cmd generated vendored
@ -3,17 +3,17 @@ if not exist "C:\opencv" mkdir "C:\opencv"
if not exist "C:\opencv\build" mkdir "C:\opencv\build"
if not exist "C:\opencv\testdata" mkdir "C:\opencv\testdata"
appveyor DownloadFile https://github.com/opencv/opencv/archive/4.5.1.zip -FileName c:\opencv\opencv-4.5.1.zip
7z x c:\opencv\opencv-4.5.1.zip -oc:\opencv -y
del c:\opencv\opencv-4.5.1.zip /q
appveyor DownloadFile https://github.com/opencv/opencv_contrib/archive/4.5.1.zip -FileName c:\opencv\opencv_contrib-4.5.1.zip
7z x c:\opencv\opencv_contrib-4.5.1.zip -oc:\opencv -y
del c:\opencv\opencv_contrib-4.5.1.zip /q
appveyor DownloadFile https://github.com/opencv/opencv/archive/4.5.4.zip -FileName c:\opencv\opencv-4.5.4.zip
7z x c:\opencv\opencv-4.5.4.zip -oc:\opencv -y
del c:\opencv\opencv-4.5.4.zip /q
appveyor DownloadFile https://github.com/opencv/opencv_contrib/archive/4.5.4.zip -FileName c:\opencv\opencv_contrib-4.5.4.zip
7z x c:\opencv\opencv_contrib-4.5.4.zip -oc:\opencv -y
del c:\opencv\opencv_contrib-4.5.4.zip /q
cd C:\opencv\build
set PATH=C:\Perl\site\bin;C:\Perl\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files\7-Zip;C:\Program Files\Microsoft\Web Platform Installer\;C:\Tools\PsTools;C:\Program Files (x86)\CMake\bin;C:\go\bin;C:\Tools\NuGet;C:\Program Files\LLVM\bin;C:\Tools\curl\bin;C:\ProgramData\chocolatey\bin;C:\Program Files (x86)\Yarn\bin;C:\Users\appveyor\AppData\Local\Yarn\bin;C:\Program Files\AppVeyor\BuildAgent\
set PATH=%PATH%;C:\mingw-w64\x86_64-7.3.0-posix-seh-rt_v5-rev0\mingw64\bin
dir C:\opencv
cmake C:\opencv\opencv-4.5.1 -G "MinGW Makefiles" -BC:\opencv\build -DENABLE_CXX11=ON -DOPENCV_EXTRA_MODULES_PATH=C:\opencv\opencv_contrib-4.5.1\modules -DBUILD_SHARED_LIBS=ON -DWITH_IPP=OFF -DWITH_MSMF=OFF -DBUILD_EXAMPLES=OFF -DBUILD_TESTS=OFF -DBUILD_PERF_TESTS=OFF -DBUILD_opencv_java=OFF -DBUILD_opencv_python=OFF -DBUILD_opencv_python2=OFF -DBUILD_opencv_python3=OFF -DBUILD_DOCS=OFF -DENABLE_PRECOMPILED_HEADERS=OFF -DBUILD_opencv_saliency=OFF -DCPU_DISPATCH= -DBUILD_opencv_gapi=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON -DWITH_OPENCL_D3D11_NV=OFF -DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t -Wno-dev
cmake C:\opencv\opencv-4.5.4 -G "MinGW Makefiles" -BC:\opencv\build -DENABLE_CXX11=ON -DOPENCV_EXTRA_MODULES_PATH=C:\opencv\opencv_contrib-4.5.4\modules -DBUILD_SHARED_LIBS=ON -DWITH_IPP=OFF -DWITH_MSMF=OFF -DBUILD_EXAMPLES=OFF -DBUILD_TESTS=OFF -DBUILD_PERF_TESTS=OFF -DBUILD_opencv_java=OFF -DBUILD_opencv_python=OFF -DBUILD_opencv_python2=OFF -DBUILD_opencv_python3=OFF -DBUILD_DOCS=OFF -DENABLE_PRECOMPILED_HEADERS=OFF -DBUILD_opencv_saliency=OFF -DBUILD_opencv_wechat_qrcode=OFF -DCPU_DISPATCH= -DBUILD_opencv_gapi=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_ENABLE_NONFREE=ON -DWITH_OPENCL_D3D11_NV=OFF -DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t -DWITH_TBB=ON -Wno-dev
mingw32-make -j%NUMBER_OF_PROCESSORS%
mingw32-make install
appveyor DownloadFile https://raw.githubusercontent.com/opencv/opencv_extra/master/testdata/dnn/bvlc_googlenet.prototxt -FileName C:\opencv\testdata\bvlc_googlenet.prototxt
@ -21,5 +21,5 @@ appveyor DownloadFile http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemod
appveyor DownloadFile https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip -FileName C:\opencv\testdata\inception5h.zip
appveyor DownloadFile https://github.com/onnx/models/raw/master/vision/classification/inception_and_googlenet/googlenet/model/googlenet-9.onnx -FileName C:\opencv\testdata\googlenet-9.onnx
7z x C:\opencv\testdata\inception5h.zip -oC:\opencv\testdata tensorflow_inception_graph.pb -y
rmdir c:\opencv\opencv-4.5.1 /s /q
rmdir c:\opencv\opencv_contrib-4.5.1 /s /q
rmdir c:\opencv\opencv-4.5.4 /s /q
rmdir c:\opencv\opencv_contrib-4.5.4 /s /q

31 vendor/gocv.io/x/gocv/calib3d.cpp generated vendored
@ -37,6 +37,10 @@ Mat GetOptimalNewCameraMatrixWithParams(Mat cameraMatrix,Mat distCoeffs,Size siz
return mat;
}
double CalibrateCamera(Points3fVector objectPoints, Points2fVector imagePoints, Size imageSize, Mat cameraMatrix, Mat distCoeffs, Mat rvecs, Mat tvecs, int flag) {
return cv::calibrateCamera(*objectPoints, *imagePoints, cv::Size(imageSize.width, imageSize.height), *cameraMatrix, *distCoeffs, *rvecs, *tvecs, flag);
}
void Undistort(Mat src, Mat dst, Mat cameraMatrix, Mat distCoeffs, Mat newCameraMatrix) {
cv::undistort(*src, *dst, *cameraMatrix, *distCoeffs, *newCameraMatrix);
}
@ -50,20 +54,25 @@ bool FindChessboardCorners(Mat image, Size patternSize, Mat corners, int flags)
return cv::findChessboardCorners(*image, sz, *corners, flags);
}
bool FindChessboardCornersSB(Mat image, Size patternSize, Mat corners, int flags) {
cv::Size sz(patternSize.width, patternSize.height);
return cv::findChessboardCornersSB(*image, sz, *corners, flags);
}
bool FindChessboardCornersSBWithMeta(Mat image, Size patternSize, Mat corners, int flags, Mat meta) {
cv::Size sz(patternSize.width, patternSize.height);
return cv::findChessboardCornersSB(*image, sz, *corners, flags, *meta);
}
void DrawChessboardCorners(Mat image, Size patternSize, Mat corners, bool patternWasFound) {
cv::Size sz(patternSize.width, patternSize.height);
cv::drawChessboardCorners(*image, sz, *corners, patternWasFound);
}
Mat EstimateAffinePartial2D(Contour2f from, Contour2f to) {
std::vector<cv::Point2f> from_pts;
for (size_t i = 0; i < from.length; i++) {
from_pts.push_back(cv::Point2f(from.points[i].x, from.points[i].y));
}
std::vector<cv::Point2f> to_pts;
for (size_t i = 0; i < to.length; i++) {
to_pts.push_back(cv::Point2f(to.points[i].x, to.points[i].y));
}
return new cv::Mat(cv::estimateAffinePartial2D(from_pts, to_pts));
Mat EstimateAffinePartial2D(Point2fVector from, Point2fVector to) {
return new cv::Mat(cv::estimateAffinePartial2D(*from, *to));
}
Mat EstimateAffine2D(Point2fVector from, Point2fVector to) {
return new cv::Mat(cv::estimateAffine2D(*from, *to));
}

64 vendor/gocv.io/x/gocv/calib3d.go generated vendored
@ -5,7 +5,9 @@ package gocv
#include "calib3d.h"
*/
import "C"
import "image"
import (
"image"
)
// Calib is a wrapper around OpenCV's "Camera Calibration and 3D Reconstruction" of
// Fisheye Camera model
@ -122,6 +124,22 @@ func GetOptimalNewCameraMatrixWithParams(cameraMatrix Mat, distCoeffs Mat, image
return newMat(C.GetOptimalNewCameraMatrixWithParams(cameraMatrix.Ptr(), distCoeffs.Ptr(), sz, C.double(alpha), newSize, &rt, C.bool(centerPrincipalPoint))), toRect(rt)
}
// CalibrateCamera finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
//
// For further details, please see:
// https://docs.opencv.org/master/d9/d0c/group__calib3d.html#ga3207604e4b1a1758aa66acb6ed5aa65d
//
func CalibrateCamera(objectPoints Points3fVector, imagePoints Points2fVector, imageSize image.Point,
cameraMatrix *Mat, distCoeffs *Mat, rvecs *Mat, tvecs *Mat, calibFlag CalibFlag) float64 {
sz := C.struct_Size{
width: C.int(imageSize.X),
height: C.int(imageSize.Y),
}
res := C.CalibrateCamera(objectPoints.p, imagePoints.p, sz, cameraMatrix.p, distCoeffs.p, rvecs.p, tvecs.p, C.int(calibFlag))
return float64(res)
}
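A minimal usage sketch for this binding, assuming the caller has already collected the per-view 3D pattern points and detected 2D corners into the `Points3fVector`/`Points2fVector` containers used above (the package name below is arbitrary):

```
package calibration

import (
	"image"

	"gocv.io/x/gocv"
)

// CalibrateFromViews wraps gocv.CalibrateCamera: objPoints/imgPoints hold one
// vector of points per calibration view. It returns the camera matrix, the
// distortion coefficients and the RMS re-projection error; the caller owns
// (and must Close) the returned Mats.
func CalibrateFromViews(objPoints gocv.Points3fVector, imgPoints gocv.Points2fVector, imageSize image.Point) (gocv.Mat, gocv.Mat, float64) {
	cameraMatrix := gocv.NewMat()
	distCoeffs := gocv.NewMat()

	// Per-view rotation/translation vectors, discarded in this sketch.
	rvecs := gocv.NewMat()
	defer rvecs.Close()
	tvecs := gocv.NewMat()
	defer tvecs.Close()

	// CalibFlag(0) keeps the default calibration options.
	rms := gocv.CalibrateCamera(objPoints, imgPoints, imageSize,
		&cameraMatrix, &distCoeffs, &rvecs, &tvecs, gocv.CalibFlag(0))

	return cameraMatrix, distCoeffs, rms
}
```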
func Undistort(src Mat, dst *Mat, cameraMatrix Mat, distCoeffs Mat, newCameraMatrix Mat) {
C.Undistort(src.Ptr(), dst.Ptr(), cameraMatrix.Ptr(), distCoeffs.Ptr(), newCameraMatrix.Ptr())
}
@ -149,9 +167,13 @@ const (
CalibCBFilterQuads
// Run a fast check on the image that looks for chessboard corners, and shortcut the call if none is found. This can drastically speed up the call in the degenerate condition when no chessboard is observed.
CalibCBFastCheck
// Run an exhaustive search to improve detection rate.
CalibCBExhaustive
// Up sample input image to improve sub-pixel accuracy due to aliasing effects.
CalibCBAccuracy
// The detected pattern is allowed to be larger than patternSize (see description).
CalibCBLarger
// The detected pattern must have a marker (see description). This should be used if an accurate camera calibration is required.
CalibCBMarker
)
@ -168,6 +190,32 @@ func FindChessboardCorners(image Mat, patternSize image.Point, corners *Mat, fla
return bool(C.FindChessboardCorners(image.Ptr(), sz, corners.Ptr(), C.int(flags)))
}
// FindChessboardCornersSB finds the positions of internal corners of the chessboard using a sector based approach.
//
// For further details, please see:
// https://docs.opencv.org/master/d9/d0c/group__calib3d.html#gadc5bcb05cb21cf1e50963df26986d7c9
//
func FindChessboardCornersSB(image Mat, patternSize image.Point, corners *Mat, flags CalibCBFlag) bool {
sz := C.struct_Size{
width: C.int(patternSize.X),
height: C.int(patternSize.Y),
}
return bool(C.FindChessboardCornersSB(image.Ptr(), sz, corners.Ptr(), C.int(flags)))
}
// FindChessboardCornersSBWithMeta finds the positions of internal corners of the chessboard using a sector based approach.
//
// For further details, please see:
// https://docs.opencv.org/master/d9/d0c/group__calib3d.html#ga93efa9b0aa890de240ca32b11253dd4a
//
func FindChessboardCornersSBWithMeta(image Mat, patternSize image.Point, corners *Mat, flags CalibCBFlag, meta *Mat) bool {
sz := C.struct_Size{
width: C.int(patternSize.X),
height: C.int(patternSize.Y),
}
return bool(C.FindChessboardCornersSBWithMeta(image.Ptr(), sz, corners.Ptr(), C.int(flags), meta.Ptr()))
}
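A short sketch of driving the sector-based detector from application code (the image path and the 9x6 pattern size are only examples):

```
package main

import (
	"fmt"
	"image"

	"gocv.io/x/gocv"
)

func main() {
	img := gocv.IMRead("chessboard.jpg", gocv.IMReadColor)
	defer img.Close()

	corners := gocv.NewMat()
	defer corners.Close()

	// 9x6 inner corners; CalibCBExhaustive trades speed for detection rate.
	found := gocv.FindChessboardCornersSB(img, image.Pt(9, 6), &corners, gocv.CalibCBExhaustive)
	fmt.Println("pattern found:", found)

	// Overlay the detected corners for a quick visual check.
	gocv.DrawChessboardCorners(&img, image.Pt(9, 6), corners, found)
	gocv.IMWrite("chessboard-corners.jpg", img)
}
```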
// DrawChessboardCorners renders the detected chessboard corners.
//
// For further details, please see:
@ -186,8 +234,14 @@ func DrawChessboardCorners(image *Mat, patternSize image.Point, corners Mat, pat
//
// For further details, please see:
// https://docs.opencv.org/master/d9/d0c/group__calib3d.html#gad767faff73e9cbd8b9d92b955b50062d
func EstimateAffinePartial2D(from, to []Point2f) Mat {
fromPoints := toCPoints2f(from)
toPoints := toCPoints2f(to)
return newMat(C.EstimateAffinePartial2D(fromPoints, toPoints))
func EstimateAffinePartial2D(from, to Point2fVector) Mat {
return newMat(C.EstimateAffinePartial2D(from.p, to.p))
}
// EstimateAffine2D Computes an optimal affine transformation between two 2D point sets.
//
// For further details, please see:
// https://docs.opencv.org/4.0.0/d9/d0c/group__calib3d.html#ga27865b1d26bac9ce91efaee83e94d4dd
func EstimateAffine2D(from, to Point2fVector) Mat {
return newMat(C.EstimateAffine2D(from.p, to.p))
}
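And a small sketch of the new `Point2fVector`-based signature; the `NewPoint2fVectorFromPoints` constructor used below is assumed to be the matching helper in gocv's core package:

```
package main

import (
	"fmt"

	"gocv.io/x/gocv"
)

func main() {
	// Two matching point sets; the second is the first shifted by +10 in X.
	src := gocv.NewPoint2fVectorFromPoints([]gocv.Point2f{
		{X: 0, Y: 0}, {X: 10, Y: 5}, {X: 10, Y: 10}, {X: 5, Y: 10},
	})
	defer src.Close()
	dst := gocv.NewPoint2fVectorFromPoints([]gocv.Point2f{
		{X: 10, Y: 0}, {X: 20, Y: 5}, {X: 20, Y: 10}, {X: 15, Y: 10},
	})
	defer dst.Close()

	// 2x3 affine transform mapping src onto dst.
	m := gocv.EstimateAffine2D(src, dst)
	defer m.Close()
	fmt.Println("affine matrix:", m.Rows(), "x", m.Cols())
}
```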

6 vendor/gocv.io/x/gocv/calib3d.h generated vendored
@ -19,11 +19,15 @@ void Fisheye_EstimateNewCameraMatrixForUndistortRectify(Mat k, Mat d, Size imgSi
void InitUndistortRectifyMap(Mat cameraMatrix,Mat distCoeffs,Mat r,Mat newCameraMatrix,Size size,int m1type,Mat map1,Mat map2);
Mat GetOptimalNewCameraMatrixWithParams(Mat cameraMatrix,Mat distCoeffs,Size size,double alpha,Size newImgSize,Rect* validPixROI,bool centerPrincipalPoint);
double CalibrateCamera(Points3fVector objectPoints, Points2fVector imagePoints, Size imageSize, Mat cameraMatrix, Mat distCoeffs, Mat rvecs, Mat tvecs, int flag);
void Undistort(Mat src, Mat dst, Mat cameraMatrix, Mat distCoeffs, Mat newCameraMatrix);
void UndistortPoints(Mat distorted, Mat undistorted, Mat k, Mat d, Mat r, Mat p);
bool FindChessboardCorners(Mat image, Size patternSize, Mat corners, int flags);
bool FindChessboardCornersSB(Mat image, Size patternSize, Mat corners, int flags);
bool FindChessboardCornersSBWithMeta(Mat image, Size patternSize, Mat corners, int flags, Mat meta);
void DrawChessboardCorners(Mat image, Size patternSize, Mat corners, bool patternWasFound);
Mat EstimateAffinePartial2D(Contour2f from, Contour2f to);
Mat EstimateAffinePartial2D(Point2fVector from, Point2fVector to);
Mat EstimateAffine2D(Point2fVector from, Point2fVector to);
#ifdef __cplusplus
}
#endif

5 vendor/gocv.io/x/gocv/cgo.go generated vendored
@ -1,4 +1,5 @@
// +build !customenv
//go:build !customenv && !static
// +build !customenv,!static
package gocv
@ -8,6 +9,6 @@ package gocv
#cgo !windows pkg-config: opencv4
#cgo CXXFLAGS: --std=c++11
#cgo windows CPPFLAGS: -IC:/opencv/build/install/include
#cgo windows LDFLAGS: -LC:/opencv/build/install/x64/mingw/lib -lopencv_core451 -lopencv_face451 -lopencv_videoio451 -lopencv_imgproc451 -lopencv_highgui451 -lopencv_imgcodecs451 -lopencv_objdetect451 -lopencv_features2d451 -lopencv_video451 -lopencv_dnn451 -lopencv_xfeatures2d451 -lopencv_plot451 -lopencv_tracking451 -lopencv_img_hash451 -lopencv_calib3d451 -lopencv_bgsegm451 -lopencv_photo451
#cgo windows LDFLAGS: -LC:/opencv/build/install/x64/mingw/lib -lopencv_core454 -lopencv_face454 -lopencv_videoio454 -lopencv_imgproc454 -lopencv_highgui454 -lopencv_imgcodecs454 -lopencv_objdetect454 -lopencv_features2d454 -lopencv_video454 -lopencv_dnn454 -lopencv_xfeatures2d454 -lopencv_plot454 -lopencv_tracking454 -lopencv_img_hash454 -lopencv_calib3d454 -lopencv_bgsegm454 -lopencv_photo454 -lopencv_aruco454
*/
import "C"

vendor/gocv.io/x/gocv/cgo_static.go generated vendored Normal file

@ -0,0 +1,14 @@
// +build !customenv,static
package gocv
// Changes here should be mirrored in contrib/cgo_static.go and cuda/cgo_static.go.
/*
#cgo CXXFLAGS: --std=c++11
#cgo !windows CPPFLAGS: -I/usr/local/include -I/usr/local/include/opencv4
#cgo !windows LDFLAGS: -L/usr/local/lib -L/usr/local/lib/opencv4/3rdparty -lopencv_gapi -lopencv_stitching -lopencv_aruco -lopencv_bgsegm -lopencv_bioinspired -lopencv_ccalib -lopencv_dnn_objdetect -lopencv_dpm -lopencv_face -lopencv_fuzzy -lopencv_hfs -lopencv_img_hash -lopencv_line_descriptor -lopencv_quality -lopencv_reg -lopencv_rgbd -lopencv_saliency -lopencv_stereo -lopencv_structured_light -lopencv_phase_unwrapping -lopencv_superres -lopencv_optflow -lopencv_surface_matching -lopencv_tracking -lopencv_datasets -lopencv_text -lopencv_highgui -lopencv_dnn -lopencv_plot -lopencv_videostab -lopencv_video -lopencv_videoio -lopencv_xfeatures2d -lopencv_shape -lopencv_ml -lopencv_ximgproc -lopencv_xobjdetect -lopencv_objdetect -lopencv_calib3d -lopencv_imgcodecs -lopencv_features2d -lopencv_flann -lopencv_xphoto -lopencv_photo -lopencv_imgproc -lopencv_core -littnotify -llibprotobuf -lIlmImf -lquirc -lippiw -lippicv -lade -lz -ljpeg -ldl -lm -lpthread -lrt -lquadmath
#cgo windows CPPFLAGS: -IC:/opencv/build/install/include
#cgo windows LDFLAGS: -LC:/opencv/build/install/x64/mingw/staticlib -lopencv_stereo452 -lopencv_tracking452 -lopencv_superres452 -lopencv_stitching452 -lopencv_optflow452 -lopencv_gapi452 -lopencv_face452 -lopencv_dpm452 -lopencv_dnn_objdetect452 -lopencv_ccalib452 -lopencv_bioinspired452 -lopencv_bgsegm452 -lopencv_aruco452 -lopencv_xobjdetect452 -lopencv_ximgproc452 -lopencv_xfeatures2d452 -lopencv_videostab452 -lopencv_video452 -lopencv_structured_light452 -lopencv_shape452 -lopencv_rgbd452 -lopencv_rapid452 -lopencv_objdetect452 -lopencv_mcc452 -lopencv_highgui452 -lopencv_datasets452 -lopencv_calib3d452 -lopencv_videoio452 -lopencv_text452 -lopencv_line_descriptor452 -lopencv_imgcodecs452 -lopencv_img_hash452 -lopencv_hfs452 -lopencv_fuzzy452 -lopencv_features2d452 -lopencv_dnn_superres452 -lopencv_dnn452 -lopencv_xphoto452 -lopencv_surface_matching452 -lopencv_reg452 -lopencv_quality452 -lopencv_plot452 -lopencv_photo452 -lopencv_phase_unwrapping452 -lopencv_ml452 -lopencv_intensity_transform452 -lopencv_imgproc452 -lopencv_flann452 -lopencv_core452 -lade -lquirc -llibprotobuf -lIlmImf -llibpng -llibopenjp2 -llibwebp -llibtiff -llibjpeg-turbo -lzlib -lkernel32 -lgdi32 -lwinspool -lshell32 -lole32 -loleaut32 -luuid -lcomdlg32 -ladvapi32 -luser32
*/
import "C"

vendor/gocv.io/x/gocv/core.cpp generated vendored

@ -565,12 +565,9 @@ double KMeans(Mat data, int k, Mat bestLabels, TermCriteria criteria, int attemp
return ret;
}
double KMeansPoints(Contour points, int k, Mat bestLabels, TermCriteria criteria, int attempts, int flags, Mat centers) {
double KMeansPoints(PointVector points, int k, Mat bestLabels, TermCriteria criteria, int attempts, int flags, Mat centers) {
std::vector<cv::Point2f> pts;
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point2f(points.points[i].x, points.points[i].y));
}
copyPointVectorToPoint2fVector(points, &pts);
double ret = cv::kmeans(pts, k, *bestLabels, *criteria, attempts, flags, *centers);
return ret;
}
@ -852,6 +849,292 @@ Mat Mat_colRange(Mat m,int startrow,int endrow) {
return new cv::Mat(m->colRange(startrow,endrow));
}
PointVector PointVector_New() {
return new std::vector< cv::Point >;
}
PointVector PointVector_NewFromPoints(Contour points) {
std::vector<cv::Point>* cntr = new std::vector<cv::Point>;
for (size_t i = 0; i < points.length; i++) {
cntr->push_back(cv::Point(points.points[i].x, points.points[i].y));
}
return cntr;
}
PointVector PointVector_NewFromMat(Mat mat) {
std::vector<cv::Point>* pts = new std::vector<cv::Point>;
*pts = (std::vector<cv::Point>) *mat;
return pts;
}
Point PointVector_At(PointVector pv, int idx) {
cv::Point p = pv->at(idx);
return Point{.x = p.x, .y = p.y};
}
void PointVector_Append(PointVector pv, Point p) {
pv->push_back(cv::Point(p.x, p.y));
}
int PointVector_Size(PointVector p) {
return p->size();
}
void PointVector_Close(PointVector p) {
p->clear();
delete p;
}
PointsVector PointsVector_New() {
return new std::vector< std::vector< cv::Point > >;
}
PointsVector PointsVector_NewFromPoints(Contours points) {
std::vector< std::vector< cv::Point > >* pv = new std::vector< std::vector< cv::Point > >;
for (size_t i = 0; i < points.length; i++) {
Contour contour = points.contours[i];
std::vector<cv::Point> cntr;
for (size_t i = 0; i < contour.length; i++) {
cntr.push_back(cv::Point(contour.points[i].x, contour.points[i].y));
}
pv->push_back(cntr);
}
return pv;
}
int PointsVector_Size(PointsVector ps) {
return ps->size();
}
PointVector PointsVector_At(PointsVector ps, int idx) {
std::vector< cv::Point >* p = &(ps->at(idx));
return p;
}
void PointsVector_Append(PointsVector psv, PointVector pv) {
psv->push_back(*pv);
}
void PointsVector_Close(PointsVector ps) {
ps->clear();
delete ps;
}
Point2fVector Point2fVector_New() {
return new std::vector< cv::Point2f >;
}
Point2fVector Point2fVector_NewFromPoints(Contour2f points) {
std::vector<cv::Point2f>* cntr = new std::vector<cv::Point2f>;
for (size_t i = 0; i < points.length; i++) {
cntr->push_back(cv::Point2f(points.points[i].x, points.points[i].y));
}
return cntr;
}
Point2fVector Point2fVector_NewFromMat(Mat mat) {
std::vector<cv::Point2f>* pts = new std::vector<cv::Point2f>;
*pts = (std::vector<cv::Point2f>) *mat;
return pts;
}
Point2f Point2fVector_At(Point2fVector pfv, int idx) {
cv::Point2f p = pfv->at(idx);
return Point2f{.x = p.x, .y = p.y};
}
int Point2fVector_Size(Point2fVector pfv) {
return pfv->size();
}
void Point2fVector_Close(Point2fVector pv) {
pv->clear();
delete pv;
}
void IntVector_Close(struct IntVector ivec) {
delete[] ivec.val;
}
RNG TheRNG() {
return &cv::theRNG();
}
void SetRNGSeed(int seed) {
cv::setRNGSeed(seed);
}
void RNG_Fill(RNG rng, Mat mat, int distType, double a, double b, bool saturateRange) {
rng->fill(*mat, distType, a, b, saturateRange);
}
double RNG_Gaussian(RNG rng, double sigma) {
return rng->gaussian(sigma);
}
unsigned int RNG_Next(RNG rng) {
return rng->next();
}
void RandN(Mat mat, Scalar mean, Scalar stddev) {
cv::Scalar m = cv::Scalar(mean.val1, mean.val2, mean.val3, mean.val4);
cv::Scalar s = cv::Scalar(stddev.val1, stddev.val2, stddev.val3, stddev.val4);
cv::randn(*mat, m, s);
}
void RandShuffle(Mat mat) {
cv::randShuffle(*mat);
}
void RandShuffleWithParams(Mat mat, double iterFactor, RNG rng) {
cv::randShuffle(*mat, iterFactor, rng);
}
void RandU(Mat mat, Scalar low, Scalar high) {
cv::Scalar l = cv::Scalar(low.val1, low.val2, low.val3, low.val4);
cv::Scalar h = cv::Scalar(high.val1, high.val2, high.val3, high.val4);
cv::randn(*mat, l, h);
}
void copyPointVectorToPoint2fVector(PointVector src, Point2fVector dest) {
for (size_t i = 0; i < src->size(); i++) {
dest->push_back(cv::Point2f(src->at(i).x, src->at(i).y));
}
}
void StdByteVectorInitialize(void* data) {
new (data) std::vector<uchar>();
}
void StdByteVectorFree(void *data) {
reinterpret_cast<std::vector<uchar> *>(data)->~vector<uchar>();
}
size_t StdByteVectorLen(void *data) {
return reinterpret_cast<std::vector<uchar> *>(data)->size();
}
uint8_t* StdByteVectorData(void *data) {
return reinterpret_cast<std::vector<uchar> *>(data)->data();
}
Points2fVector Points2fVector_New(){
return new std::vector< std::vector< cv::Point2f > >;
}
Points2fVector Points2fVector_NewFromPoints(Contours2f points) {
Points2fVector pv = Points2fVector_New();
for(size_t i = 0;i<points.length;i++){
Contour2f contour2f = points.contours[i];
Point2fVector cntr = Point2fVector_NewFromPoints(contour2f);
Points2fVector_Append(pv, cntr);
}
return pv;
}
int Points2fVector_Size(Points2fVector ps) {
return ps->size();
}
Point2fVector Points2fVector_At(Points2fVector ps, int idx) {
return &(ps->at(idx));
}
void Points2fVector_Append(Points2fVector psv, Point2fVector pv) {
psv->push_back(*pv);
}
void Points2fVector_Close(Points2fVector ps) {
ps->clear();
delete ps;
}
Point3fVector Point3fVector_New() {
return new std::vector< cv::Point3f >;
}
Point3fVector Point3fVector_NewFromPoints(Contour3f points) {
std::vector<cv::Point3f> *cntr = new std::vector<cv::Point3f>;
for(size_t i = 0;i< points.length;i++) {
cntr->push_back(cv::Point3f(
points.points[i].x,
points.points[i].y,
points.points[i].z
));
}
return cntr;
}
Point3fVector Point3fVector_NewFromMat(Mat mat) {
std::vector<cv::Point3f> *pts = new std::vector<cv::Point3f>;
*pts = (std::vector<cv::Point3f>) *mat;
return pts;
}
Point3f Point3fVector_At(Point3fVector pfv, int idx) {
cv::Point3f p = pfv->at(idx);
return Point3f{
.x = p.x,
.y = p.y,
.z = p.z
};
}
void Point3fVector_Append(Point3fVector pfv, Point3f point) {
pfv->push_back(cv::Point3f(point.x, point.y, point.z));
}
int Point3fVector_Size(Point3fVector pfv) {
return pfv->size();
}
void Point3fVector_Close(Point3fVector pv) {
pv->clear();
delete pv;
}
Points3fVector Points3fVector_New(){
return new std::vector< std::vector< cv::Point3f > >;
}
Points3fVector Points3fVector_NewFromPoints(Contours3f points) {
Points3fVector pv = Points3fVector_New();
for(size_t i = 0;i<points.length;i++){
Contour3f contour3f = points.contours[i];
Point3fVector cntr = Point3fVector_NewFromPoints(contour3f);
Points3fVector_Append(pv, cntr);
}
return pv;
}
int Points3fVector_Size(Points3fVector ps) {
return ps->size();
}
Point3fVector Points3fVector_At(Points3fVector ps, int idx) {
return &(ps->at(idx));
}
void Points3fVector_Append(Points3fVector psv, Point3fVector pv) {
psv->push_back(*pv);
}
void Points3fVector_Close(Points3fVector ps) {
ps->clear();
delete ps;
}

vendor/gocv.io/x/gocv/core.go generated vendored

@ -166,6 +166,10 @@ type Point2f struct {
Y float32
}
func NewPoint2f(x, y float32) Point2f {
return Point2f{x, y}
}
var ErrEmptyByteSlice = errors.New("empty byte array")
// Mat represents an n-dimensional dense numerical single-channel
@ -1372,7 +1376,7 @@ func InsertChannel(src Mat, dst *Mat, coi int) {
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#gad278044679d4ecf20f7622cc151aaaa2
//
func Invert(src Mat, dst *Mat, flags int) float64 {
func Invert(src Mat, dst *Mat, flags SolveDecompositionFlags) float64 {
ret := C.Mat_Invert(src.p, dst.p, C.int(flags))
return float64(ret)
}
@ -1408,9 +1412,8 @@ func KMeans(data Mat, k int, bestLabels *Mat, criteria TermCriteria, attempts in
// For further details, please see:
// https://docs.opencv.org/master/d5/d38/group__core__cluster.html#ga9a34dc06c6ec9460e90860f15bcd2f88
//
func KMeansPoints(points []image.Point, k int, bestLabels *Mat, criteria TermCriteria, attempts int, flags KMeansFlags, centers *Mat) float64 {
cPoints := toCPoints(points)
ret := C.KMeansPoints(cPoints, C.int(k), bestLabels.p, criteria.p, C.int(attempts), C.int(flags), centers.p)
func KMeansPoints(points PointVector, k int, bestLabels *Mat, criteria TermCriteria, attempts int, flags KMeansFlags, centers *Mat) float64 {
ret := C.KMeansPoints(points.p, C.int(k), bestLabels.p, criteria.p, C.int(attempts), C.int(flags), centers.p)
return float64(ret)
}
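A hedged sketch of calling the PointVector-based KMeansPoints; NewTermCriteria, MaxIter, EPS and KMeansRandomCenters are assumed from the public gocv API and are not part of this hunk:
func clusterPoints(pts []image.Point, k int) (labels, centers gocv.Mat) {
	pv := gocv.NewPointVectorFromPoints(pts)
	defer pv.Close()
	labels = gocv.NewMat()
	centers = gocv.NewMat()
	// Stop after 10 iterations or when the centers move by less than 1.0.
	criteria := gocv.NewTermCriteria(gocv.MaxIter+gocv.EPS, 10, 1.0)
	gocv.KMeansPoints(pv, k, &labels, criteria, 3, gocv.KMeansRandomCenters, &centers)
	return labels, centers
}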
@ -2011,6 +2014,278 @@ func (m *Mat) GetVeciAt(row int, col int) Veci {
return v
}
// PointVector is a wrapper around a std::vector< cv::Point >*
// This is needed anytime that you need to pass or receive a collection of points.
type PointVector struct {
p C.PointVector
}
// NewPointVector returns a new empty PointVector.
func NewPointVector() PointVector {
return PointVector{p: C.PointVector_New()}
}
// NewPointVectorFromPoints returns a new PointVector that has been
// initialized to a slice of image.Point.
func NewPointVectorFromPoints(pts []image.Point) PointVector {
p := (*C.struct_Point)(C.malloc(C.size_t(C.sizeof_struct_Point * len(pts))))
defer C.free(unsafe.Pointer(p))
h := &reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(p)),
Len: len(pts),
Cap: len(pts),
}
pa := *(*[]C.Point)(unsafe.Pointer(h))
for j, point := range pts {
pa[j] = C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
}
cpoints := C.struct_Points{
points: (*C.Point)(p),
length: C.int(len(pts)),
}
return PointVector{p: C.PointVector_NewFromPoints(cpoints)}
}
// NewPointVectorFromMat returns a new PointVector that has been
// wrapped around a Mat of type CV_32SC2 with a single column.
func NewPointVectorFromMat(mat Mat) PointVector {
return PointVector{p: C.PointVector_NewFromMat(mat.p)}
}
// IsNil checks the CGo pointer in the PointVector.
func (pv PointVector) IsNil() bool {
return pv.p == nil
}
// Size returns how many Point are in the PointVector.
func (pv PointVector) Size() int {
return int(C.PointVector_Size(pv.p))
}
// At returns the image.Point at the given index of the PointVector.
func (pv PointVector) At(idx int) image.Point {
if idx > pv.Size() {
return image.Point{}
}
cp := C.PointVector_At(pv.p, C.int(idx))
return image.Pt(int(cp.x), int(cp.y))
}
// Append appends an image.Point at the end of the PointVector.
func (pv PointVector) Append(point image.Point) {
p := C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
C.PointVector_Append(pv.p, p)
return
}
// ToPoints returns a slice of image.Point for the data in this PointVector.
func (pv PointVector) ToPoints() []image.Point {
points := make([]image.Point, pv.Size())
for j := 0; j < pv.Size(); j++ {
points[j] = pv.At(j)
}
return points
}
// Close closes and frees memory for this PointVector.
func (pv PointVector) Close() {
C.PointVector_Close(pv.p)
}
// PointsVector is a wrapper around a std::vector< std::vector< cv::Point > >*
type PointsVector struct {
p C.PointsVector
}
// NewPointsVector returns a new empty PointsVector.
func NewPointsVector() PointsVector {
return PointsVector{p: C.PointsVector_New()}
}
// NewPointsVectorFromPoints returns a new PointsVector that has been
// initialized to a slice of slices of image.Point.
func NewPointsVectorFromPoints(pts [][]image.Point) PointsVector {
points := make([]C.struct_Points, len(pts))
for i, pt := range pts {
p := (*C.struct_Point)(C.malloc(C.size_t(C.sizeof_struct_Point * len(pt))))
defer C.free(unsafe.Pointer(p))
h := &reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(p)),
Len: len(pt),
Cap: len(pt),
}
pa := *(*[]C.Point)(unsafe.Pointer(h))
for j, point := range pt {
pa[j] = C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
}
points[i] = C.struct_Points{
points: (*C.Point)(p),
length: C.int(len(pt)),
}
}
cPoints := C.struct_Contours{
contours: (*C.struct_Points)(&points[0]),
length: C.int(len(pts)),
}
return PointsVector{p: C.PointsVector_NewFromPoints(cPoints)}
}
func (pvs PointsVector) P() C.PointsVector {
return pvs.p
}
// ToPoints returns a slice of slices of image.Point for the data in this PointsVector.
func (pvs PointsVector) ToPoints() [][]image.Point {
ppoints := make([][]image.Point, pvs.Size())
for i := 0; i < pvs.Size(); i++ {
pts := pvs.At(i)
points := make([]image.Point, pts.Size())
for j := 0; j < pts.Size(); j++ {
points[j] = pts.At(j)
}
ppoints[i] = points
}
return ppoints
}
// IsNil checks the CGo pointer in the PointsVector.
func (pvs PointsVector) IsNil() bool {
return pvs.p == nil
}
// Size returns how many vectors of Points are in the PointsVector.
func (pvs PointsVector) Size() int {
return int(C.PointsVector_Size(pvs.p))
}
// At returns the PointVector at that index of the PointsVector.
func (pvs PointsVector) At(idx int) PointVector {
if idx > pvs.Size() {
return PointVector{}
}
return PointVector{p: C.PointsVector_At(pvs.p, C.int(idx))}
}
// Append appends a PointVector at the end of the PointsVector.
func (pvs PointsVector) Append(pv PointVector) {
if !pv.IsNil() {
C.PointsVector_Append(pvs.p, pv.p)
}
return
}
// Close closes and frees memory for this PointsVector.
func (pvs PointsVector) Close() {
C.PointsVector_Close(pvs.p)
}
// Point2fVector is a wrapper around a std::vector< cv::Point2f >*
// This is needed anytime that you need to pass or receive a collection of points.
type Point2fVector struct {
p C.Point2fVector
}
// NewPoint2fVector returns a new empty Point2fVector.
func NewPoint2fVector() Point2fVector {
return Point2fVector{p: C.Point2fVector_New()}
}
// NewPoint2fVectorFromPoints returns a new Point2fVector that has been
// initialized to a slice of Point2f.
func NewPoint2fVectorFromPoints(pts []Point2f) Point2fVector {
p := (*C.struct_Point2f)(C.malloc(C.size_t(C.sizeof_struct_Point2f * len(pts))))
defer C.free(unsafe.Pointer(p))
h := &reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(p)),
Len: len(pts),
Cap: len(pts),
}
pa := *(*[]C.Point2f)(unsafe.Pointer(h))
for j, point := range pts {
pa[j] = C.struct_Point2f{
x: C.float(point.X),
y: C.float(point.Y),
}
}
cpoints := C.struct_Points2f{
points: (*C.Point2f)(p),
length: C.int(len(pts)),
}
return Point2fVector{p: C.Point2fVector_NewFromPoints(cpoints)}
}
// NewPoint2fVectorFromMat returns a new Point2fVector that has been
// wrapped around a Mat of type CV_32FC2 with a single column.
func NewPoint2fVectorFromMat(mat Mat) Point2fVector {
return Point2fVector{p: C.Point2fVector_NewFromMat(mat.p)}
}
// IsNil checks the CGo pointer in the Point2fVector.
func (pfv Point2fVector) IsNil() bool {
return pfv.p == nil
}
// Size returns how many Point2f are in the Point2fVector.
func (pfv Point2fVector) Size() int {
return int(C.Point2fVector_Size(pfv.p))
}
// At returns the Point2f at the given index of the Point2fVector.
func (pfv Point2fVector) At(idx int) Point2f {
if idx > pfv.Size() {
return Point2f{}
}
cp := C.Point2fVector_At(pfv.p, C.int(idx))
return Point2f{float32(cp.x), float32(cp.y)}
}
// ToPoints returns a slice of Point2f for the data in this Point2fVector.
func (pfv Point2fVector) ToPoints() []Point2f {
points := make([]Point2f, pfv.Size())
for j := 0; j < pfv.Size(); j++ {
points[j] = pfv.At(j)
}
return points
}
// Close closes and frees memory for this Point2fVector.
func (pfv Point2fVector) Close() {
C.Point2fVector_Close(pfv.p)
}
// GetTickCount returns the number of ticks.
//
// For further details, please see:
@ -2134,3 +2409,401 @@ func (m *Mat) RowRange(start, end int) Mat {
func (m *Mat) ColRange(start, end int) Mat {
return newMat(C.Mat_colRange(m.p, C.int(start), C.int(end)))
}
// RNG Random Number Generator.
// It encapsulates the state (currently, a 64-bit integer) and
// has methods to return scalar random values and to fill arrays
// with random values.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/dd6/classcv_1_1RNG.html
//
type RNG struct {
p C.RNG
}
type RNGDistType int
const (
// Uniform distribution
RNGDistUniform RNGDistType = 0
// Normal distribution
RNGDistNormal RNGDistType = 1
)
// TheRNG Returns the default random number generator.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#ga75843061d150ad6564b5447e38e57722
//
func TheRNG() RNG {
return RNG{
p: C.TheRNG(),
}
}
// SetRNGSeed sets the state of the default random number generator.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#ga757e657c037410d9e19e819569e7de0f
//
func SetRNGSeed(seed int) {
C.SetRNGSeed(C.int(seed))
}
// Fill Fills arrays with random numbers.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/dd6/classcv_1_1RNG.html#ad26f2b09d9868cf108e84c9814aa682d
//
func (r *RNG) Fill(mat *Mat, distType RNGDistType, a, b float64, saturateRange bool) {
C.RNG_Fill(r.p, mat.p, C.int(distType), C.double(a), C.double(b), C.bool(saturateRange))
}
// Gaussian Returns the next random number sampled from
// the Gaussian distribution.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/dd6/classcv_1_1RNG.html#a8df8ce4dc7d15916cee743e5a884639d
//
func (r *RNG) Gaussian(sigma float64) float64 {
return float64(C.RNG_Gaussian(r.p, C.double(sigma)))
}
// Next The method updates the state using the MWC algorithm
// and returns the next 32-bit random number.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/dd6/classcv_1_1RNG.html#a8df8ce4dc7d15916cee743e5a884639d
//
func (r *RNG) Next() uint {
return uint(C.RNG_Next(r.p))
}
// RandN Fills the array with normally distributed random numbers.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#gaeff1f61e972d133a04ce3a5f81cf6808
//
func RandN(mat *Mat, mean, stddev Scalar) {
meanVal := C.struct_Scalar{
val1: C.double(mean.Val1),
val2: C.double(mean.Val2),
val3: C.double(mean.Val3),
val4: C.double(mean.Val4),
}
stddevVal := C.struct_Scalar{
val1: C.double(stddev.Val1),
val2: C.double(stddev.Val2),
val3: C.double(stddev.Val3),
val4: C.double(stddev.Val4),
}
C.RandN(mat.p, meanVal, stddevVal)
}
// RandShuffle Shuffles the array elements randomly.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#ga6a789c8a5cb56c6dd62506179808f763
//
func RandShuffle(mat *Mat) {
C.RandShuffle(mat.p)
}
// RandShuffleWithParams Shuffles the array elements randomly.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#ga6a789c8a5cb56c6dd62506179808f763
//
func RandShuffleWithParams(mat *Mat, iterFactor float64, rng RNG) {
C.RandShuffleWithParams(mat.p, C.double(iterFactor), rng.p)
}
// RandU Generates a single uniformly-distributed random
// number or an array of random numbers.
//
// For further details, please see:
// https://docs.opencv.org/master/d2/de8/group__core__array.html#ga1ba1026dca0807b27057ba6a49d258c0
//
func RandU(mat *Mat, low, high Scalar) {
lowVal := C.struct_Scalar{
val1: C.double(low.Val1),
val2: C.double(low.Val2),
val3: C.double(low.Val3),
val4: C.double(low.Val4),
}
highVal := C.struct_Scalar{
val1: C.double(high.Val1),
val2: C.double(high.Val2),
val3: C.double(high.Val3),
val4: C.double(high.Val4),
}
C.RandU(mat.p, lowVal, highVal)
}
type NativeByteBuffer struct {
// std::vector is built from 3 pointers, and this will not change.
stdVectorOpaq [3]uintptr
}
func newNativeByteBuffer() *NativeByteBuffer {
buffer := &NativeByteBuffer{}
C.StdByteVectorInitialize(buffer.nativePointer())
return buffer
}
func (buffer *NativeByteBuffer) nativePointer() unsafe.Pointer {
return unsafe.Pointer(&buffer.stdVectorOpaq[0])
}
func (buffer *NativeByteBuffer) dataPointer() unsafe.Pointer {
return unsafe.Pointer(C.StdByteVectorData(buffer.nativePointer()))
}
// GetBytes returns a slice of bytes backed by the native buffer.
func (buffer *NativeByteBuffer) GetBytes() []byte {
var result []byte
sliceHeader := (*reflect.SliceHeader)(unsafe.Pointer(&result))
vectorLen := int(C.StdByteVectorLen(buffer.nativePointer()))
sliceHeader.Cap = vectorLen
sliceHeader.Len = vectorLen
sliceHeader.Data = uintptr(buffer.dataPointer())
return result
}
// Len returns the length in bytes of the underlying buffer.
func (buffer *NativeByteBuffer) Len() int {
return int(C.StdByteVectorLen(buffer.nativePointer()))
}
// Close releases the buffer and all of its resources.
func (buffer *NativeByteBuffer) Close() {
C.StdByteVectorFree(buffer.nativePointer())
}
// Points2fVector is a wrapper around a std::vector< std::vector< cv::Point2f > >*
type Points2fVector struct {
p C.Points2fVector
}
// NewPoints2fVector returns a new empty Points2fVector.
func NewPoints2fVector() Points2fVector {
return Points2fVector{p: C.Points2fVector_New()}
}
// NewPoints2fVectorFromPoints returns a new Points2fVector that has been
// initialized to a slice of slices of Point2f.
func NewPoints2fVectorFromPoints(pts [][]Point2f) Points2fVector {
pvf := NewPoints2fVector()
for j := 0; j < len(pts); j++ {
pv := NewPoint2fVectorFromPoints(pts[j])
pvf.Append(pv)
pv.Close()
}
return pvf
}
func (pvs Points2fVector) P() C.Points2fVector {
return pvs.p
}
// ToPoints returns a slice of slices of Point2f for the data in this Points2fVector.
func (pvs Points2fVector) ToPoints() [][]Point2f {
ppoints := make([][]Point2f, pvs.Size())
for j := 0; j < pvs.Size(); j++ {
pts := pvs.At(j)
points := pts.ToPoints()
ppoints[j] = points
}
return ppoints
}
// IsNil checks the CGo pointer in the Points2fVector.
func (pvs Points2fVector) IsNil() bool {
return pvs.p == nil
}
// Size returns how many vectors of Points are in the Points2fVector.
func (pvs Points2fVector) Size() int {
return int(C.Points2fVector_Size(pvs.p))
}
// At returns the Point2fVector at that index of the Points2fVector.
func (pvs Points2fVector) At(idx int) Point2fVector {
if idx > pvs.Size() {
return Point2fVector{}
}
return Point2fVector{p: C.Points2fVector_At(pvs.p, C.int(idx))}
}
// Append appends a Point2fVector at the end of the Points2fVector.
func (pvs Points2fVector) Append(pv Point2fVector) {
if !pv.IsNil() {
C.Points2fVector_Append(pvs.p, pv.p)
}
}
// Close closes and frees memory for this Points2fVector.
func (pvs Points2fVector) Close() {
C.Points2fVector_Close(pvs.p)
}
type Point3f struct {
X float32
Y float32
Z float32
}
func NewPoint3f(x, y, z float32) Point3f {
return Point3f{x, y, z}
}
// Point3fVector is a wrapper around a std::vector< cv::Point3f >*
type Point3fVector struct {
p C.Point3fVector
}
// NewPoint3fVector returns a new empty Point3fVector.
func NewPoint3fVector() Point3fVector {
return Point3fVector{p: C.Point3fVector_New()}
}
// NewPoint3fVectorFromPoints returns a new Point3fVector that has been
// initialized to a slice of Point3f.
func NewPoint3fVectorFromPoints(pts []Point3f) Point3fVector {
p := (*C.struct_Point3f)(C.malloc(C.size_t(C.sizeof_struct_Point3f * len(pts))))
defer C.free(unsafe.Pointer(p))
h := &reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(p)),
Len: len(pts),
Cap: len(pts),
}
pa := *(*[]C.Point3f)(unsafe.Pointer(h))
for j, point := range pts {
pa[j] = C.struct_Point3f{
x: C.float(point.X),
y: C.float(point.Y),
z: C.float(point.Z),
}
}
cPoints := C.struct_Points3f{
points: (*C.Point3f)(p),
length: C.int(len(pts)),
}
return Point3fVector{p: C.Point3fVector_NewFromPoints(cPoints)}
}
// NewPoint3fVectorFromMat returns a new Point3fVector that has been
// wrapped around a Mat of type CV_32FC3 with a single column.
func NewPoint3fVectorFromMat(mat Mat) Point3fVector {
return Point3fVector{p: C.Point3fVector_NewFromMat(mat.p)}
}
// IsNil checks the CGo pointer in the Point3fVector.
func (pfv Point3fVector) IsNil() bool {
return pfv.p == nil
}
// Size returns how many Point3f are in the Point3fVector.
func (pfv Point3fVector) Size() int {
return int(C.Point3fVector_Size(pfv.p))
}
// At returns the Point3f at the given index of the Point3fVector.
func (pfv Point3fVector) At(idx int) Point3f {
if idx > pfv.Size() {
return Point3f{}
}
cp := C.Point3fVector_At(pfv.p, C.int(idx))
return Point3f{X: float32(cp.x), Y: float32(cp.y), Z: float32(cp.z)}
}
func (pfv Point3fVector) Append(point Point3f) {
C.Point3fVector_Append(pfv.p, C.Point3f{
x: C.float(point.X),
y: C.float(point.Y),
z: C.float(point.Z),
})
}
// ToPoints returns a slice of Point3f for the data in this Point3fVector.
func (pfv Point3fVector) ToPoints() []Point3f {
points := make([]Point3f, pfv.Size())
for j := 0; j < pfv.Size(); j++ {
points[j] = pfv.At(j)
}
return points
}
// Close closes and frees memory for this Point3fVector.
func (pfv Point3fVector) Close() {
C.Point3fVector_Close(pfv.p)
}
// Points3fVector is a wrapper around a std::vector< std::vector< cv::Point3f > >*
type Points3fVector struct {
p C.Points3fVector
}
// NewPoints3fVector returns a new empty Points3fVector.
func NewPoints3fVector() Points3fVector {
return Points3fVector{p: C.Points3fVector_New()}
}
// NewPoints3fVectorFromPoints returns a new Points3fVector that has been
// initialized to a slice of slices of Point3f.
func NewPoints3fVectorFromPoints(pts [][]Point3f) Points3fVector {
pvf := NewPoints3fVector()
for j := 0; j < len(pts); j++ {
pv := NewPoint3fVectorFromPoints(pts[j])
pvf.Append(pv)
pv.Close()
}
return pvf
}
// ToPoints returns a slice of slices of Point3f for the data in this Points3fVector.
func (pvs Points3fVector) ToPoints() [][]Point3f {
ppoints := make([][]Point3f, pvs.Size())
for j := 0; j < pvs.Size(); j++ {
pts := pvs.At(j)
points := pts.ToPoints()
ppoints[j] = points
}
return ppoints
}
// IsNil checks the CGo pointer in the Points3fVector.
func (pvs Points3fVector) IsNil() bool {
return pvs.p == nil
}
// Size returns how many vectors of Points are in the Points3fVector.
func (pvs Points3fVector) Size() int {
return int(C.Points3fVector_Size(pvs.p))
}
// At returns the Point3fVector at that index of the Points3fVector.
func (pvs Points3fVector) At(idx int) Point3fVector {
if idx > pvs.Size() {
return Point3fVector{}
}
return Point3fVector{p: C.Points3fVector_At(pvs.p, C.int(idx))}
}
// Append appends a Point3fVector at the end of the Points3fVector.
func (pvs Points3fVector) Append(pv Point3fVector) {
if !pv.IsNil() {
C.Points3fVector_Append(pvs.p, pv.p)
}
}
// Close closes and frees memory for this Points3fVector.
func (pvs Points3fVector) Close() {
C.Points3fVector_Close(pvs.p)
}
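These nested vectors are what the calib3d entry points consume (CalibrateCamera takes Points3fVector object points and Points2fVector image points, per calib3d.h above). A hedged preparation sketch, stopping short of the call itself since its Go signature is not shown in this diff:
func calibrationInputs(object [][]gocv.Point3f, imagePts [][]gocv.Point2f) (gocv.Points3fVector, gocv.Points2fVector) {
	objVec := gocv.NewPoints3fVectorFromPoints(object)   // std::vector<std::vector<cv::Point3f>>
	imgVec := gocv.NewPoints2fVectorFromPoints(imagePts) // std::vector<std::vector<cv::Point2f>>
	// The caller owns both vectors and must Close them after calibration.
	return objVec, imgVec
}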

vendor/gocv.io/x/gocv/core.h generated vendored

@ -44,6 +44,12 @@ typedef struct Point2f {
float y;
} Point2f;
typedef struct Point3f {
float x;
float y;
float z;
} Point3f;
// Wrapper for an individual cv::cvPoint
typedef struct Point {
int x;
@ -62,12 +68,33 @@ typedef struct Points2f {
int length;
} Points2f;
typedef struct Points3f {
Point3f *points;
int length;
} Points3f;
// Contour is an alias for Points
typedef Points Contour;
// Contour2f is an alias for Points2f
typedef Points2f Contour2f;
typedef struct Contours2f {
Contour2f *contours;
int length;
} Contours2f;
// Contour3f is an alias for Points3f
typedef Points3f Contour3f;
// Wrapper for the vector of Points3f vectors aka std::vector< std::vector<Point3f> >
typedef struct Contours3f {
Contour3f *contours;
int length;
} Contours3f;
// Wrapper for the vector of Points vectors aka std::vector< std::vector<Point> >
typedef struct Contours {
Contour* contours;
@ -96,7 +123,7 @@ typedef struct Size {
// Wrapper for an individual cv::RotatedRect
typedef struct RotatedRect {
Contour pts;
Points pts;
Rect boundingRect;
Point center;
Size size;
@ -204,9 +231,23 @@ typedef struct Moment {
#ifdef __cplusplus
typedef cv::Mat* Mat;
typedef cv::TermCriteria* TermCriteria;
typedef cv::RNG* RNG;
typedef std::vector< cv::Point >* PointVector;
typedef std::vector< std::vector< cv::Point > >* PointsVector;
typedef std::vector< cv::Point2f >* Point2fVector;
typedef std::vector< std::vector< cv::Point2f> >* Points2fVector;
typedef std::vector< cv::Point3f >* Point3fVector;
typedef std::vector< std::vector< cv::Point3f > >* Points3fVector;
#else
typedef void* Mat;
typedef void* TermCriteria;
typedef void* RNG;
typedef void* PointVector;
typedef void* PointsVector;
typedef void* Point2fVector;
typedef void* Points2fVector;
typedef void* Point3fVector;
typedef void* Points3fVector;
#endif
// Wrapper for the vector of Mat aka std::vector<Mat>
@ -353,7 +394,7 @@ void Mat_InRangeWithScalar(Mat src, const Scalar lowerb, const Scalar upperb, Ma
void Mat_InsertChannel(Mat src, Mat dst, int coi);
double Mat_Invert(Mat src, Mat dst, int flags);
double KMeans(Mat data, int k, Mat bestLabels, TermCriteria criteria, int attempts, int flags, Mat centers);
double KMeansPoints(Contour points, int k, Mat bestLabels, TermCriteria criteria, int attempts, int flags, Mat centers);
double KMeansPoints(PointVector pts, int k, Mat bestLabels, TermCriteria criteria, int attempts, int flags, Mat centers);
void Mat_Log(Mat src, Mat dst);
void Mat_Magnitude(Mat x, Mat y, Mat magnitude);
void Mat_Max(Mat src1, Mat src2, Mat dst);
@ -398,10 +439,78 @@ double GetTickFrequency();
Mat Mat_rowRange(Mat m,int startrow,int endrow);
Mat Mat_colRange(Mat m,int startrow,int endrow);
PointVector PointVector_New();
PointVector PointVector_NewFromPoints(Contour points);
PointVector PointVector_NewFromMat(Mat mat);
Point PointVector_At(PointVector pv, int idx);
void PointVector_Append(PointVector pv, Point p);
int PointVector_Size(PointVector pv);
void PointVector_Close(PointVector pv);
PointsVector PointsVector_New();
PointsVector PointsVector_NewFromPoints(Contours points);
PointVector PointsVector_At(PointsVector psv, int idx);
void PointsVector_Append(PointsVector psv, PointVector pv);
int PointsVector_Size(PointsVector psv);
void PointsVector_Close(PointsVector psv);
Point2fVector Point2fVector_New();
void Point2fVector_Close(Point2fVector pfv);
Point2fVector Point2fVector_NewFromPoints(Contour2f pts);
Point2fVector Point2fVector_NewFromMat(Mat mat);
Point2f Point2fVector_At(Point2fVector pfv, int idx);
int Point2fVector_Size(Point2fVector pfv);
void IntVector_Close(struct IntVector ivec);
void CStrings_Close(struct CStrings cstrs);
RNG TheRNG();
void SetRNGSeed(int seed);
void RNG_Fill(RNG rng, Mat mat, int distType, double a, double b, bool saturateRange);
double RNG_Gaussian(RNG rng, double sigma);
unsigned int RNG_Next(RNG rng);
void RandN(Mat mat, Scalar mean, Scalar stddev);
void RandShuffle(Mat mat);
void RandShuffleWithParams(Mat mat, double iterFactor, RNG rng);
void RandU(Mat mat, Scalar low, Scalar high);
void copyPointVectorToPoint2fVector(PointVector src, Point2fVector dest);
void StdByteVectorInitialize(void* data);
void StdByteVectorFree(void *data);
size_t StdByteVectorLen(void *data);
uint8_t* StdByteVectorData(void *data);
Points2fVector Points2fVector_New();
Points2fVector Points2fVector_NewFromPoints(Contours2f points);
int Points2fVector_Size(Points2fVector ps);
Point2fVector Points2fVector_At(Points2fVector ps, int idx);
void Points2fVector_Append(Points2fVector psv, Point2fVector pv);
void Points2fVector_Close(Points2fVector ps);
Point3fVector Point3fVector_New();
Point3fVector Point3fVector_NewFromPoints(Contour3f points);
Point3fVector Point3fVector_NewFromMat(Mat mat);
void Point3fVector_Append(Point3fVector pfv, Point3f point);
Point3f Point3fVector_At(Point3fVector pfv, int idx);
int Point3fVector_Size(Point3fVector pfv);
void Point3fVector_Close(Point3fVector pv);
Points3fVector Points3fVector_New();
Points3fVector Points3fVector_NewFromPoints(Contours3f points);
int Points3fVector_Size(Points3fVector ps);
Point3fVector Points3fVector_At(Points3fVector ps, int idx);
void Points3fVector_Append(Points3fVector psv, Point3fVector pv);
void Points3fVector_Close(Points3fVector ps);
#ifdef __cplusplus
}
#endif

vendor/gocv.io/x/gocv/features2d.cpp generated vendored

@ -237,10 +237,13 @@ struct KeyPoints FastFeatureDetector_Detect(FastFeatureDetector f, Mat src) {
}
ORB ORB_Create() {
// TODO: params
return new cv::Ptr<cv::ORB>(cv::ORB::create());
}
ORB ORB_CreateWithParams(int nfeatures, float scaleFactor, int nlevels, int edgeThreshold, int firstLevel, int WTA_K, int scoreType, int patchSize, int fastThreshold) {
return new cv::Ptr<cv::ORB>(cv::ORB::create(nfeatures, scaleFactor, nlevels, edgeThreshold, firstLevel, WTA_K, static_cast<cv::ORB::ScoreType>(scoreType), patchSize, fastThreshold));
}
void ORB_Close(ORB o) {
delete o;
}

vendor/gocv.io/x/gocv/features2d.go generated vendored

@ -321,12 +321,38 @@ type ORB struct {
// NewORB returns a new ORB algorithm
//
// For further details, please see:
// https://docs.opencv.org/master/d7/d19/classcv_1_1AgastFeatureDetector.html
// https://docs.opencv.org/master/db/d95/classcv_1_1ORB.html
//
func NewORB() ORB {
return ORB{p: unsafe.Pointer(C.ORB_Create())}
}
// NewORBWithParams returns a new ORB algorithm with parameters
//
// For further details, please see:
// https://docs.opencv.org/master/db/d95/classcv_1_1ORB.html#aeff0cbe668659b7ca14bb85ff1c4073b
//
func NewORBWithParams(nFeatures int, scaleFactor float32, nLevels int, edgeThreshold int, firstLevel int, WTAK int, scoreType ORBScoreType, patchSize int, fastThreshold int) ORB {
return ORB{p: unsafe.Pointer(C.ORB_CreateWithParams(
C.int(nFeatures),
C.float(scaleFactor),
C.int(nLevels),
C.int(edgeThreshold),
C.int(firstLevel),
C.int(WTAK),
C.int(scoreType),
C.int(patchSize),
C.int(fastThreshold),
))}
}
type ORBScoreType int
const (
ORBScoreTypeHarris ORBScoreType = 0
ORBScoreTypeFAST ORBScoreType = 1
)
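A hedged sketch of the new constructor; the parameter values mirror OpenCV's documented ORB defaults, and the Detect call assumes the existing gocv ORB.Detect method, which is not part of this hunk:
func tunedORBKeypoints(img gocv.Mat) []gocv.KeyPoint {
	orb := gocv.NewORBWithParams(
		500,                     // nFeatures
		1.2,                     // scaleFactor between pyramid levels
		8,                       // nLevels
		31,                      // edgeThreshold
		0,                       // firstLevel
		2,                       // WTAK
		gocv.ORBScoreTypeHarris, // scoreType
		31,                      // patchSize
		20,                      // fastThreshold
	)
	defer orb.Close()
	return orb.Detect(img)
}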
// Close ORB.
func (o *ORB) Close() error {
C.ORB_Close((C.ORB)(o.p))

vendor/gocv.io/x/gocv/features2d.h generated vendored

@ -69,6 +69,7 @@ void MSER_Close(MSER a);
struct KeyPoints MSER_Detect(MSER a, Mat src);
ORB ORB_Create();
ORB ORB_CreateWithParams(int nfeatures, float scaleFactor, int nlevels, int edgeThreshold, int firstLevel, int WTA_K, int scoreType, int patchSize, int fastThreshold);
void ORB_Close(ORB o);
struct KeyPoints ORB_Detect(ORB o, Mat src);
struct KeyPoints ORB_DetectAndCompute(ORB o, Mat src, Mat mask, Mat desc);

vendor/gocv.io/x/gocv/highgui.cpp generated vendored

@ -62,6 +62,10 @@ void Trackbar_Create(const char* winname, const char* trackname, int max) {
cv::createTrackbar(trackname, winname, NULL, max);
}
void Trackbar_CreateWithValue(const char* winname, const char* trackname, int* value, int max) {
cv::createTrackbar(trackname, winname, value, max);
}
int Trackbar_GetPos(const char* winname, const char* trackname) {
return cv::getTrackbarPos(trackname, winname);
}

vendor/gocv.io/x/gocv/highgui.go generated vendored

@ -283,6 +283,23 @@ func (w *Window) CreateTrackbar(name string, max int) *Trackbar {
return &Trackbar{name: name, parent: w}
}
// CreateTrackbarWithValue works like CreateTrackbar but also binds an integer
// variable whose value is kept synchronized with the trackbar position.
//
// For further details, please see:
// https://docs.opencv.org/master/d7/dfc/group__highgui.html#gaf78d2155d30b728fc413803745b67a9b
//
func (w *Window) CreateTrackbarWithValue(name string, value *int, max int) *Trackbar {
cName := C.CString(w.name)
defer C.free(unsafe.Pointer(cName))
tName := C.CString(name)
defer C.free(unsafe.Pointer(tName))
C.Trackbar_CreateWithValue(cName, tName, (*C.int)(unsafe.Pointer(value)), C.int(max))
return &Trackbar{name: name, parent: w}
}
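A hedged usage sketch; NewWindow, IMShow and WaitKey are assumed from the existing gocv highgui API and are not part of this hunk:
func previewWithThreshold(frame gocv.Mat) {
	win := gocv.NewWindow("preview")
	defer win.Close()

	level := 128 // the trackbar reads and writes this variable directly
	win.CreateTrackbarWithValue("threshold", &level, 255)

	for {
		win.IMShow(frame)
		if win.WaitKey(10) == 27 { // Esc
			return
		}
		_ = level // consume the current slider position here
	}
}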
// GetPos returns the trackbar position.
//
// For further details, please see:

vendor/gocv.io/x/gocv/highgui.h generated vendored

@ -23,6 +23,7 @@ struct Rects Window_SelectROIs(const char* winname, Mat img);
// Trackbar
void Trackbar_Create(const char* winname, const char* trackname, int max);
void Trackbar_CreateWithValue(const char* winname, const char* trackname, int* value, int max);
int Trackbar_GetPos(const char* winname, const char* trackname);
void Trackbar_SetPos(const char* winname, const char* trackname, int pos);
void Trackbar_SetMin(const char* winname, const char* trackname, int pos);

vendor/gocv.io/x/gocv/imgcodecs.cpp generated vendored

@ -21,22 +21,20 @@ bool Image_IMWrite_WithParams(const char* filename, Mat img, IntVector params) {
return cv::imwrite(filename, *img, compression_params);
}
struct ByteArray Image_IMEncode(const char* fileExt, Mat img) {
std::vector<uchar> data;
cv::imencode(fileExt, *img, data);
return toByteArray(reinterpret_cast<const char*>(&data[0]), data.size());
void Image_IMEncode(const char* fileExt, Mat img, void* vector) {
auto vectorPtr = reinterpret_cast<std::vector<uchar> *>(vector);
cv::imencode(fileExt, *img, *vectorPtr);
}
struct ByteArray Image_IMEncode_WithParams(const char* fileExt, Mat img, IntVector params) {
std::vector<uchar> data;
void Image_IMEncode_WithParams(const char* fileExt, Mat img, IntVector params, void* vector) {
auto vectorPtr = reinterpret_cast<std::vector<uchar> *>(vector);
std::vector<int> compression_params;
for (int i = 0, *v = params.val; i < params.length; ++v, ++i) {
compression_params.push_back(*v);
}
cv::imencode(fileExt, *img, data, compression_params);
return toByteArray(reinterpret_cast<const char*>(&data[0]), data.size());
cv::imencode(fileExt, *img, *vectorPtr, compression_params);
}
Mat Image_IMDecode(ByteArray buf, int flags) {

vendor/gocv.io/x/gocv/imgcodecs.go generated vendored

@ -197,13 +197,13 @@ const (
// For further details, please see:
// http://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga461f9ac09887e47797a54567df3b8b63
//
func IMEncode(fileExt FileExt, img Mat) (buf []byte, err error) {
func IMEncode(fileExt FileExt, img Mat) (buf *NativeByteBuffer, err error) {
cfileExt := C.CString(string(fileExt))
defer C.free(unsafe.Pointer(cfileExt))
b := C.Image_IMEncode(cfileExt, img.Ptr())
defer C.ByteArray_Release(b)
return toGoBytes(b), nil
buffer := newNativeByteBuffer()
C.Image_IMEncode(cfileExt, img.Ptr(), buffer.nativePointer())
return buffer, nil
}
// IMEncodeWithParams encodes an image Mat into a memory buffer.
@ -216,7 +216,7 @@ func IMEncode(fileExt FileExt, img Mat) (buf []byte, err error) {
// For further details, please see:
// http://docs.opencv.org/master/d4/da8/group__imgcodecs.html#ga461f9ac09887e47797a54567df3b8b63
//
func IMEncodeWithParams(fileExt FileExt, img Mat, params []int) (buf []byte, err error) {
func IMEncodeWithParams(fileExt FileExt, img Mat, params []int) (buf *NativeByteBuffer, err error) {
cfileExt := C.CString(string(fileExt))
defer C.free(unsafe.Pointer(cfileExt))
@ -230,9 +230,9 @@ func IMEncodeWithParams(fileExt FileExt, img Mat, params []int) (buf []byte, err
paramsVector.val = (*C.int)(&cparams[0])
paramsVector.length = (C.int)(len(cparams))
b := C.Image_IMEncode_WithParams(cfileExt, img.Ptr(), paramsVector)
defer C.ByteArray_Release(b)
return toGoBytes(b), nil
b := newNativeByteBuffer()
C.Image_IMEncode_WithParams(cfileExt, img.Ptr(), paramsVector, b.nativePointer())
return b, nil
}
// IMDecode reads an image from a buffer in memory.
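For callers, the signature change means the encoded bytes are now backed by C++ memory until the buffer is closed; a hedged sketch (JPEGFileExt and the copy-out pattern are assumptions, only the IMEncode signature comes from this hunk):
func encodeJPEG(img gocv.Mat) ([]byte, error) {
	buf, err := gocv.IMEncode(gocv.JPEGFileExt, img)
	if err != nil {
		return nil, err
	}
	defer buf.Close() // releases the std::vector<uchar> behind GetBytes
	out := make([]byte, buf.Len())
	copy(out, buf.GetBytes())
	return out, nil
}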

vendor/gocv.io/x/gocv/imgcodecs.h generated vendored

@ -13,8 +13,9 @@ extern "C" {
Mat Image_IMRead(const char* filename, int flags);
bool Image_IMWrite(const char* filename, Mat img);
bool Image_IMWrite_WithParams(const char* filename, Mat img, IntVector params);
struct ByteArray Image_IMEncode(const char* fileExt, Mat img);
struct ByteArray Image_IMEncode_WithParams(const char* fileExt, Mat img, IntVector params);
void Image_IMEncode(const char* fileExt, Mat img, void* vector);
void Image_IMEncode_WithParams(const char* fileExt, Mat img, IntVector params, void* vector);
Mat Image_IMDecode(ByteArray buf, int flags);
#ifdef __cplusplus

vendor/gocv.io/x/gocv/imgproc.cpp generated vendored

@ -1,33 +1,14 @@
#include "imgproc.h"
double ArcLength(Contour curve, bool is_closed) {
std::vector<cv::Point> pts;
for (size_t i = 0; i < curve.length; i++) {
pts.push_back(cv::Point(curve.points[i].x, curve.points[i].y));
}
return cv::arcLength(pts, is_closed);
double ArcLength(PointVector curve, bool is_closed) {
return cv::arcLength(*curve, is_closed);
}
Contour ApproxPolyDP(Contour curve, double epsilon, bool closed) {
std::vector<cv::Point> curvePts;
PointVector ApproxPolyDP(PointVector curve, double epsilon, bool closed) {
PointVector approxCurvePts = new std::vector<cv::Point>;
cv::approxPolyDP(*curve, *approxCurvePts, epsilon, closed);
for (size_t i = 0; i < curve.length; i++) {
curvePts.push_back(cv::Point(curve.points[i].x, curve.points[i].y));
}
std::vector<cv::Point> approxCurvePts;
cv::approxPolyDP(curvePts, approxCurvePts, epsilon, closed);
int length = approxCurvePts.size();
Point* points = new Point[length];
for (size_t i = 0; i < length; i++) {
points[i] = (Point){approxCurvePts[i].x, approxCurvePts[i].y};
}
return (Contour){points, length};
return approxCurvePts;
}
void CvtColor(Mat src, Mat dst, int code) {
@ -95,46 +76,34 @@ double CompareHist(Mat hist1, Mat hist2, int method) {
return cv::compareHist(*hist1, *hist2, method);
}
struct RotatedRect FitEllipse(Points points)
struct RotatedRect FitEllipse(PointVector pts)
{
Point *rpts = new Point[points.length];
std::vector<cv::Point> pts;
cv::RotatedRect bRect = cv::fitEllipse(*pts);
for (size_t i = 0; i < points.length; i++)
{
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
Point pt = {points.points[i].x, points.points[i].y};
rpts[i] = pt;
}
Rect r = {bRect.boundingRect().x, bRect.boundingRect().y, bRect.boundingRect().width, bRect.boundingRect().height};
Point centrpt = {int(lroundf(bRect.center.x)), int(lroundf(bRect.center.y))};
Size szsz = {int(lroundf(bRect.size.width)), int(lroundf(bRect.size.height))};
cv::RotatedRect bRect = cv::fitEllipse(pts);
Rect r = {bRect.boundingRect().x, bRect.boundingRect().y, bRect.boundingRect().width, bRect.boundingRect().height};
Point centrpt = {int(lroundf(bRect.center.x)), int(lroundf(bRect.center.y))};
Size szsz = {int(lroundf(bRect.size.width)), int(lroundf(bRect.size.height))};
RotatedRect rotRect = {(Contour){rpts, 4}, r, centrpt, szsz, bRect.angle};
return rotRect;
}
void ConvexHull(Contour points, Mat hull, bool clockwise, bool returnPoints) {
std::vector<cv::Point> pts;
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
cv::Point2f* pts4 = new cv::Point2f[4];
bRect.points(pts4);
Point* rpts = new Point[4];
for (size_t j = 0; j < 4; j++) {
Point pt = {int(lroundf(pts4[j].x)), int(lroundf(pts4[j].y))};
rpts[j] = pt;
}
cv::convexHull(pts, *hull, clockwise, returnPoints);
delete[] pts4;
RotatedRect rotRect = {Points{rpts, 4}, r, centrpt, szsz, bRect.angle};
return rotRect;
}
void ConvexityDefects(Contour points, Mat hull, Mat result) {
std::vector<cv::Point> pts;
void ConvexHull(PointVector points, Mat hull, bool clockwise, bool returnPoints) {
cv::convexHull(*points, *hull, clockwise, returnPoints);
}
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
}
cv::convexityDefects(pts, *hull, *result);
void ConvexityDefects(PointVector points, Mat hull, Mat result) {
cv::convexityDefects(*points, *hull, *result);
}
void BilateralFilter(Mat src, Mat dst, int d, double sc, double ss) {
@ -160,6 +129,13 @@ void Dilate(Mat src, Mat dst, Mat kernel) {
cv::dilate(*src, *dst, *kernel);
}
void DilateWithParams(Mat src, Mat dst, Mat kernel, Point anchor, int iterations, int borderType, Scalar borderValue) {
cv::Point pt1(anchor.x, anchor.y);
cv::Scalar c = cv::Scalar(borderValue.val1, borderValue.val2, borderValue.val3, borderValue.val4);
cv::dilate(*src, *dst, *kernel, pt1, iterations, borderType, c);
}
void DistanceTransform(Mat src, Mat dst, Mat labels, int distanceType, int maskSize, int labelType) {
cv::distanceTransform(*src, *dst, *labels, distanceType, maskSize, labelType);
}
@ -197,14 +173,8 @@ void PyrUp(Mat src, Mat dst, Size size, int borderType) {
cv::pyrUp(*src, *dst, cvSize, borderType);
}
struct Rect BoundingRect(Contour con) {
std::vector<cv::Point> pts;
for (size_t i = 0; i < con.length; i++) {
pts.push_back(cv::Point(con.points[i].x, con.points[i].y));
}
cv::Rect bRect = cv::boundingRect(pts);
struct Rect BoundingRect(PointVector pts) {
cv::Rect bRect = cv::boundingRect(*pts);
Rect r = {bRect.x, bRect.y, bRect.width, bRect.height};
return r;
}
@ -213,27 +183,15 @@ void BoxPoints(RotatedRect rect, Mat boxPts){
cv::Point2f centerPt(rect.center.x , rect.center.y);
cv::Size2f rSize(rect.size.width, rect.size.height);
cv::RotatedRect rotatedRectangle(centerPt, rSize, rect.angle);
cv::boxPoints(rotatedRectangle, *boxPts);
cv::boxPoints(rotatedRectangle, *boxPts);
}
double ContourArea(Contour con) {
std::vector<cv::Point> pts;
for (size_t i = 0; i < con.length; i++) {
pts.push_back(cv::Point(con.points[i].x, con.points[i].y));
}
return cv::contourArea(pts);
double ContourArea(PointVector pts) {
return cv::contourArea(*pts);
}
struct RotatedRect MinAreaRect(Points points){
std::vector<cv::Point> pts;
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
}
cv::RotatedRect cvrect = cv::minAreaRect(pts);
struct RotatedRect MinAreaRect(PointVector pts){
cv::RotatedRect cvrect = cv::minAreaRect(*pts);
Point* rpts = new Point[4];
cv::Point2f* pts4 = new cv::Point2f[4];
@ -255,38 +213,24 @@ struct RotatedRect MinAreaRect(Points points){
return retrect;
}
void MinEnclosingCircle(Points points, Point2f* center, float* radius){
std::vector<cv::Point> pts;
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
}
void MinEnclosingCircle(PointVector pts, Point2f* center, float* radius){
cv::Point2f center2f;
cv::minEnclosingCircle(pts, center2f, *radius);
cv::minEnclosingCircle(*pts, center2f, *radius);
center->x = center2f.x;
center->y = center2f.y;
}
struct Contours FindContours(Mat src, int mode, int method) {
std::vector<std::vector<cv::Point> > contours;
cv::findContours(*src, contours, mode, method);
PointsVector FindContours(Mat src, Mat hierarchy, int mode, int method) {
PointsVector contours = new std::vector<std::vector<cv::Point> >;
cv::findContours(*src, *contours, *hierarchy, mode, method);
Contour* points = new Contour[contours.size()];
return contours;
}
for (size_t i = 0; i < contours.size(); i++) {
Point* pts = new Point[contours[i].size()];
double PointPolygonTest(PointVector pts, Point pt, bool measureDist) {
cv::Point2f pt1(pt.x, pt.y);
for (size_t j = 0; j < contours[i].size(); j++) {
Point pt = {contours[i][j].x, contours[i][j].y};
pts[j] = pt;
}
points[i] = (Contour){pts, (int)contours[i].size()};
}
Contours cons = {points, (int)contours.size()};
return cons;
return cv::pointPolygonTest(*pts, pt1, measureDist);
}
int ConnectedComponents(Mat src, Mat labels, int connectivity, int ltype, int ccltype){
@ -424,6 +368,13 @@ void Circle(Mat img, Point center, int radius, Scalar color, int thickness) {
cv::circle(*img, p1, radius, c, thickness);
}
void CircleWithParams(Mat img, Point center, int radius, Scalar color, int thickness, int lineType, int shift) {
cv::Point p1(center.x, center.y);
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::circle(*img, p1, radius, c, thickness, lineType, shift);
}
void Ellipse(Mat img, Point center, Point axes, double angle, double
startAngle, double endAngle, Scalar color, int thickness) {
cv::Point p1(center.x, center.y);
@ -433,6 +384,15 @@ void Ellipse(Mat img, Point center, Point axes, double angle, double
cv::ellipse(*img, p1, p2, angle, startAngle, endAngle, c, thickness);
}
void EllipseWithParams(Mat img, Point center, Point axes, double angle, double
startAngle, double endAngle, Scalar color, int thickness, int lineType, int shift) {
cv::Point p1(center.x, center.y);
cv::Point p2(axes.x, axes.y);
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::ellipse(*img, p1, p2, angle, startAngle, endAngle, c, thickness, lineType, shift);
}
void Line(Mat img, Point pt1, Point pt2, Scalar color, int thickness) {
cv::Point p1(pt1.x, pt1.y);
cv::Point p2(pt2.x, pt2.y);
@ -453,44 +413,35 @@ void Rectangle(Mat img, Rect r, Scalar color, int thickness) {
);
}
void FillPoly(Mat img, Contours points, Scalar color) {
std::vector<std::vector<cv::Point> > pts;
for (size_t i = 0; i < points.length; i++) {
Contour contour = points.contours[i];
std::vector<cv::Point> cntr;
for (size_t i = 0; i < contour.length; i++) {
cntr.push_back(cv::Point(contour.points[i].x, contour.points[i].y));
}
pts.push_back(cntr);
}
void RectangleWithParams(Mat img, Rect r, Scalar color, int thickness, int lineType, int shift) {
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::fillPoly(*img, pts, c);
cv::rectangle(
*img,
cv::Point(r.x, r.y),
cv::Point(r.x + r.width, r.y + r.height),
c,
thickness,
lineType,
shift
);
}
void Polylines(Mat img, Contours points, bool isClosed, Scalar color,int thickness) {
std::vector<std::vector<cv::Point> > pts;
for (size_t i = 0; i < points.length; i++) {
Contour contour = points.contours[i];
std::vector<cv::Point> cntr;
for (size_t i = 0; i < contour.length; i++) {
cntr.push_back(cv::Point(contour.points[i].x, contour.points[i].y));
}
pts.push_back(cntr);
}
void FillPoly(Mat img, PointsVector pts, Scalar color) {
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::polylines(*img, pts, isClosed, c, thickness);
cv::fillPoly(*img, *pts, c);
}
void FillPolyWithParams(Mat img, PointsVector pts, Scalar color, int lineType, int shift, Point offset) {
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::fillPoly(*img, *pts, c, lineType, shift, cv::Point(offset.x, offset.y));
}
void Polylines(Mat img, PointsVector pts, bool isClosed, Scalar color,int thickness) {
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::polylines(*img, *pts, isClosed, c, thickness);
}
struct Size GetTextSize(const char* text, int fontFace, double fontScale, int thickness) {
@ -562,85 +513,41 @@ void ApplyCustomColorMap(Mat src, Mat dst, Mat colormap) {
cv::applyColorMap(*src, *dst, *colormap);
}
Mat GetPerspectiveTransform(Contour src, Contour dst) {
std::vector<cv::Point2f> src_pts;
for (size_t i = 0; i < src.length; i++) {
src_pts.push_back(cv::Point2f(src.points[i].x, src.points[i].y));
}
std::vector<cv::Point2f> dst_pts;
for (size_t i = 0; i < dst.length; i++) {
dst_pts.push_back(cv::Point2f(dst.points[i].x, dst.points[i].y));
}
return new cv::Mat(cv::getPerspectiveTransform(src_pts, dst_pts));
}
Mat GetPerspectiveTransform2f(Contour2f src, Contour2f dst) {
Mat GetPerspectiveTransform(PointVector src, PointVector dst) {
std::vector<cv::Point2f> src_pts;
for (size_t i = 0; i < src.length; i++) {
src_pts.push_back(cv::Point2f(src.points[i].x, src.points[i].y));
}
copyPointVectorToPoint2fVector(src, &src_pts);
std::vector<cv::Point2f> dst_pts;
for (size_t i = 0; i < dst.length; i++) {
dst_pts.push_back(cv::Point2f(dst.points[i].x, dst.points[i].y));
}
copyPointVectorToPoint2fVector(dst, &dst_pts);
return new cv::Mat(cv::getPerspectiveTransform(src_pts, dst_pts));
}
Mat GetAffineTransform(Contour src, Contour dst) {
std::vector<cv::Point2f> src_pts;
for (size_t i = 0; i < src.length; i++) {
src_pts.push_back(cv::Point2f(src.points[i].x, src.points[i].y));
}
std::vector<cv::Point2f> dst_pts;
for (size_t i = 0; i < dst.length; i++) {
dst_pts.push_back(cv::Point2f(dst.points[i].x, dst.points[i].y));
}
return new cv::Mat(cv::getAffineTransform(src_pts, dst_pts));
Mat GetPerspectiveTransform2f(Point2fVector src, Point2fVector dst) {
return new cv::Mat(cv::getPerspectiveTransform(*src, *dst));
}
Mat GetAffineTransform2f(Contour2f src, Contour2f dst) {
Mat GetAffineTransform(PointVector src, PointVector dst) {
std::vector<cv::Point2f> src_pts;
for (size_t i = 0; i < src.length; i++) {
src_pts.push_back(cv::Point2f(src.points[i].x, src.points[i].y));
}
copyPointVectorToPoint2fVector(src, &src_pts);
std::vector<cv::Point2f> dst_pts;
for (size_t i = 0; i < dst.length; i++) {
dst_pts.push_back(cv::Point2f(dst.points[i].x, dst.points[i].y));
}
copyPointVectorToPoint2fVector(dst, &dst_pts);
return new cv::Mat(cv::getAffineTransform(src_pts, dst_pts));
}
Mat GetAffineTransform2f(Point2fVector src, Point2fVector dst) {
return new cv::Mat(cv::getAffineTransform(*src, *dst));
}
Mat FindHomography(Mat src, Mat dst, int method, double ransacReprojThreshold, Mat mask, const int maxIters, const double confidence) {
return new cv::Mat(cv::findHomography(*src, *dst, method, ransacReprojThreshold, *mask, maxIters, confidence));
}
void DrawContours(Mat src, Contours contours, int contourIdx, Scalar color, int thickness) {
std::vector<std::vector<cv::Point> > cntrs;
for (size_t i = 0; i < contours.length; i++) {
Contour contour = contours.contours[i];
std::vector<cv::Point> cntr;
for (size_t i = 0; i < contour.length; i++) {
cntr.push_back(cv::Point(contour.points[i].x, contour.points[i].y));
}
cntrs.push_back(cntr);
}
void DrawContours(Mat src, PointsVector contours, int contourIdx, Scalar color, int thickness) {
cv::Scalar c = cv::Scalar(color.val1, color.val2, color.val3, color.val4);
cv::drawContours(*src, cntrs, contourIdx, c, thickness);
cv::drawContours(*src, *contours, contourIdx, c, thickness);
}
void Sobel(Mat src, Mat dst, int ddepth, int dx, int dy, int ksize, double scale, double delta, int borderType) {
@ -672,12 +579,8 @@ void LogPolar(Mat src, Mat dst, Point center, double m, int flags) {
cv::logPolar(*src, *dst, centerPt, m, flags);
}
void FitLine(Contour points, Mat line, int distType, double param, double reps, double aeps) {
std::vector<cv::Point> pts;
for (size_t i = 0; i < points.length; i++) {
pts.push_back(cv::Point(points.points[i].x, points.points[i].y));
}
cv::fitLine(pts, *line, distType, param, reps, aeps);
void FitLine(PointVector pts, Mat line, int distType, double param, double reps, double aeps) {
cv::fitLine(*pts, *line, distType, param, reps, aeps);
}
void LinearPolar(Mat src, Mat dst, Point center, double maxRadius, int flags) {
@ -745,4 +648,4 @@ void Mat_AccumulatedWeighted(Mat src, Mat dst, double alpha) {
void Mat_AccumulatedWeightedWithMask(Mat src, Mat dst, double alpha, Mat mask) {
cv::accumulateWeighted(*src, *dst, alpha, *mask);
}
}

vendor/gocv.io/x/gocv/imgproc.go generated vendored

@ -13,25 +13,14 @@ import (
"unsafe"
)
func getPoints(pts *C.Point, l int) []C.Point {
h := &reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(pts)),
Len: l,
Cap: l,
}
return *(*[]C.Point)(unsafe.Pointer(h))
}
// ArcLength calculates a contour perimeter or a curve length.
//
// For further details, please see:
//
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga8d26483c636be6b35c3ec6335798a47c
//
func ArcLength(curve []image.Point, isClosed bool) float64 {
cPoints := toCPoints(curve)
arcLength := C.ArcLength(cPoints, C.bool(isClosed))
return float64(arcLength)
func ArcLength(curve PointVector, isClosed bool) float64 {
return float64(C.ArcLength(curve.p, C.bool(isClosed)))
}
// ApproxPolyDP approximates a polygonal curve(s) with the specified precision.
@ -40,19 +29,8 @@ func ArcLength(curve []image.Point, isClosed bool) float64 {
//
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga0012a5fdaea70b8a9970165d98722b4c
//
func ApproxPolyDP(curve []image.Point, epsilon float64, closed bool) (approxCurve []image.Point) {
cCurve := toCPoints(curve)
cApproxCurve := C.ApproxPolyDP(cCurve, C.double(epsilon), C.bool(closed))
defer C.Points_Close(cApproxCurve)
cApproxCurvePoints := getPoints(cApproxCurve.points, int(cApproxCurve.length))
approxCurve = make([]image.Point, cApproxCurve.length)
for i, cPoint := range cApproxCurvePoints {
approxCurve[i] = image.Pt(int(cPoint.x), int(cPoint.y))
}
return approxCurve
func ApproxPolyDP(curve PointVector, epsilon float64, closed bool) PointVector {
return PointVector{p: C.ApproxPolyDP(curve.p, C.double(epsilon), C.bool(closed))}
}
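ApproxPolyDP and ArcLength now take a PointVector instead of []image.Point. A minimal sketch of the usual simplification idiom, assuming the NewPointVectorFromPoints constructor from gocv's core package and the standard gocv/image imports:
curve := gocv.NewPointVectorFromPoints([]image.Point{
	image.Pt(0, 0), image.Pt(100, 2), image.Pt(101, 99), image.Pt(1, 100),
})
defer curve.Close()
// Use 1% of the closed-curve perimeter as the approximation tolerance.
epsilon := 0.01 * gocv.ArcLength(curve, true)
approx := gocv.ApproxPolyDP(curve, epsilon, true)
defer approx.Close() // the returned PointVector owns C++ memory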
// ConvexHull finds the convex hull of a point set.
@ -60,9 +38,8 @@ func ApproxPolyDP(curve []image.Point, epsilon float64, closed bool) (approxCurv
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga014b28e56cb8854c0de4a211cb2be656
//
func ConvexHull(points []image.Point, hull *Mat, clockwise bool, returnPoints bool) {
cPoints := toCPoints(points)
C.ConvexHull(cPoints, hull.p, C.bool(clockwise), C.bool(returnPoints))
func ConvexHull(points PointVector, hull *Mat, clockwise bool, returnPoints bool) {
C.ConvexHull(points.p, hull.p, C.bool(clockwise), C.bool(returnPoints))
}
// ConvexityDefects finds the convexity defects of a contour.
@ -70,9 +47,8 @@ func ConvexHull(points []image.Point, hull *Mat, clockwise bool, returnPoints bo
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#gada4437098113fd8683c932e0567f47ba
//
func ConvexityDefects(contour []image.Point, hull Mat, result *Mat) {
cPoints := toCPoints(contour)
C.ConvexityDefects(cPoints, hull.p, result.p)
func ConvexityDefects(contour PointVector, hull Mat, result *Mat) {
C.ConvexityDefects(contour.p, hull.p, result.p)
}
// CvtColor converts an image from one color space to another.
@ -292,6 +268,26 @@ func Dilate(src Mat, dst *Mat, kernel Mat) {
C.Dilate(src.p, dst.p, kernel.p)
}
// DilateWithParams dilates an image by using a specific structuring element.
//
// For further details, please see:
// https://docs.opencv.org/master/d4/d86/group__imgproc__filter.html#ga4ff0f3318642c4f469d0e11f242f3b6c
func DilateWithParams(src Mat, dst *Mat, kernel Mat, anchor image.Point, iterations, borderType BorderType, borderValue color.RGBA) {
cAnchor := C.struct_Point{
x: C.int(anchor.X),
y: C.int(anchor.Y),
}
bv := C.struct_Scalar{
val1: C.double(borderValue.B),
val2: C.double(borderValue.G),
val3: C.double(borderValue.R),
val4: C.double(borderValue.A),
}
C.DilateWithParams(src.p, dst.p, kernel.p, cAnchor, C.int(iterations), C.int(borderType), bv)
}
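DilateWithParams is new in this release. As declared just above, iterations shares the BorderType type with borderType, so an untyped constant is the simplest thing to pass. A hedged sketch, assuming src is an existing binary Mat and the usual gocv/image/color imports:
kernel := gocv.GetStructuringElement(gocv.MorphRect, image.Pt(3, 3))
defer kernel.Close()
dst := gocv.NewMat()
defer dst.Close()
// Anchor (-1,-1) means "kernel center"; 2 iterations; constant (zero) border value.
gocv.DilateWithParams(src, &dst, kernel, image.Pt(-1, -1), 2, gocv.BorderConstant, color.RGBA{})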
// DistanceTransformLabelTypes are the types of the DistanceTransform algorithm flag
type DistanceTransformLabelTypes int
@ -406,9 +402,8 @@ const (
// For further details, please see:
// https://docs.opencv.org/3.3.0/d3/dc0/group__imgproc__shape.html#gacb413ddce8e48ff3ca61ed7cf626a366
//
func BoundingRect(contour []image.Point) image.Rectangle {
cContour := toCPoints(contour)
r := C.BoundingRect(cContour)
func BoundingRect(contour PointVector) image.Rectangle {
r := C.BoundingRect(contour.p)
rect := image.Rect(int(r.x), int(r.y), int(r.x+r.width), int(r.y+r.height))
return rect
}
@ -419,8 +414,7 @@ func BoundingRect(contour []image.Point) image.Rectangle {
// https://docs.opencv.org/3.3.0/d3/dc0/group__imgproc__shape.html#gaf78d467e024b4d7936cf9397185d2f5c
//
func BoxPoints(rect RotatedRect, pts *Mat) {
rPoints := toCPoints(rect.Contour)
rPoints := toCPoints(rect.Points)
rRect := C.struct_Rect{
x: C.int(rect.BoundingRect.Min.X),
@ -455,14 +449,13 @@ func BoxPoints(rect RotatedRect, pts *Mat) {
// For further details, please see:
// https://docs.opencv.org/3.3.0/d3/dc0/group__imgproc__shape.html#ga2c759ed9f497d4a618048a2f56dc97f1
//
func ContourArea(contour []image.Point) float64 {
cContour := toCPoints(contour)
result := C.ContourArea(cContour)
func ContourArea(contour PointVector) float64 {
result := C.ContourArea(contour.p)
return float64(result)
}
type RotatedRect struct {
Contour []image.Point
Points []image.Point
BoundingRect image.Rectangle
Center image.Point
Width int
@ -493,15 +486,14 @@ func toPoints(points C.Contour) []image.Point {
// MinAreaRect finds a rotated rectangle of the minimum area enclosing the input 2D point set.
//
// For further details, please see:
// https://docs.opencv.org/3.3.0/d3/dc0/group__imgproc__shape.html#ga3d476a3417130ae5154aea421ca7ead9
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga3d476a3417130ae5154aea421ca7ead9
//
func MinAreaRect(points []image.Point) RotatedRect {
cPoints := toCPoints(points)
result := C.MinAreaRect(cPoints)
func MinAreaRect(points PointVector) RotatedRect {
result := C.MinAreaRect(points.p)
defer C.Points_Close(result.pts)
return RotatedRect{
Contour: toPoints(result.pts),
Points: toPoints(result.pts),
BoundingRect: image.Rect(int(result.boundingRect.x), int(result.boundingRect.y), int(result.boundingRect.x)+int(result.boundingRect.width), int(result.boundingRect.y)+int(result.boundingRect.height)),
Center: image.Pt(int(result.center.x), int(result.center.y)),
Width: int(result.size.width),
@ -515,13 +507,12 @@ func MinAreaRect(points []image.Point) RotatedRect {
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#gaf259efaad93098103d6c27b9e4900ffa
//
func FitEllipse(points []image.Point) RotatedRect {
cPoints := toCPoints(points)
cRect := C.FitEllipse(cPoints)
func FitEllipse(pts PointVector) RotatedRect {
cRect := C.FitEllipse(pts.p)
defer C.Points_Close(cRect.pts)
return RotatedRect{
Contour: toPoints(cRect.pts),
Points: toPoints(cRect.pts),
BoundingRect: image.Rect(int(cRect.boundingRect.x), int(cRect.boundingRect.y), int(cRect.boundingRect.x)+int(cRect.boundingRect.width), int(cRect.boundingRect.y)+int(cRect.boundingRect.height)),
Center: image.Pt(int(cRect.center.x), int(cRect.center.y)),
Width: int(cRect.size.width),
@ -535,11 +526,10 @@ func FitEllipse(points []image.Point) RotatedRect {
//
// For further details, please see:
// https://docs.opencv.org/3.4/d3/dc0/group__imgproc__shape.html#ga8ce13c24081bbc7151e9326f412190f1
func MinEnclosingCircle(points []image.Point) (x, y, radius float32) {
cPoints := toCPoints(points)
func MinEnclosingCircle(pts PointVector) (x, y, radius float32) {
cCenterPoint := C.struct_Point2f{}
var cRadius C.float
C.MinEnclosingCircle(cPoints, &cCenterPoint, &cRadius)
C.MinEnclosingCircle(pts.p, &cCenterPoint, &cRadius)
x, y = float32(cCenterPoint.x), float32(cCenterPoint.y)
radius = float32(cRadius)
return x, y, radius
@ -548,40 +538,34 @@ func MinEnclosingCircle(points []image.Point) (x, y, radius float32) {
// FindContours finds contours in a binary image.
//
// For further details, please see:
// https://docs.opencv.org/3.3.0/d3/dc0/group__imgproc__shape.html#ga17ed9f5d79ae97bd4c7cf18403e1689a
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga95f5b48d01abc7c2e0732db24689837b
//
func FindContours(src Mat, mode RetrievalMode, method ContourApproximationMode) [][]image.Point {
ret := C.FindContours(src.p, C.int(mode), C.int(method))
defer C.Contours_Close(ret)
func FindContours(src Mat, mode RetrievalMode, method ContourApproximationMode) PointsVector {
hierarchy := NewMat()
defer hierarchy.Close()
return FindContoursWithParams(src, &hierarchy, mode, method)
}
cArray := ret.contours
cLength := int(ret.length)
cHdr := reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(cArray)),
Len: cLength,
Cap: cLength,
// FindContoursWithParams finds contours in a binary image.
//
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga17ed9f5d79ae97bd4c7cf18403e1689a
//
func FindContoursWithParams(src Mat, hierarchy *Mat, mode RetrievalMode, method ContourApproximationMode) PointsVector {
return PointsVector{p: C.FindContours(src.p, hierarchy.p, C.int(mode), C.int(method))}
}
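FindContours now returns a PointsVector backed by C++ memory rather than a [][]image.Point slice, so the result must be Closed and is walked with Size/At. A self-contained sketch, assuming a pre-thresholded mask image on disk (the file name is hypothetical):
package main

import (
	"fmt"

	"gocv.io/x/gocv"
)

func main() {
	img := gocv.IMRead("mask.png", gocv.IMReadGrayScale)
	defer img.Close()

	contours := gocv.FindContours(img, gocv.RetrievalExternal, gocv.ChainApproxSimple)
	defer contours.Close()

	for i := 0; i < contours.Size(); i++ {
		c := contours.At(i) // each contour is a PointVector
		fmt.Printf("contour %d: %d points, area %.1f\n", i, c.Size(), gocv.ContourArea(c))
	}
}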
// PointPolygonTest performs a point-in-contour test.
//
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga1a539e8db2135af2566103705d7a5722
//
func PointPolygonTest(pts PointVector, pt image.Point, measureDist bool) float64 {
cp := C.struct_Point{
x: C.int(pt.X),
y: C.int(pt.Y),
}
sContours := *(*[]C.Points)(unsafe.Pointer(&cHdr))
contours := make([][]image.Point, cLength)
for i, pts := range sContours {
pArray := pts.points
pLength := int(pts.length)
pHdr := reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(pArray)),
Len: pLength,
Cap: pLength,
}
sPoints := *(*[]C.Point)(unsafe.Pointer(&pHdr))
points := make([]image.Point, pLength)
for j, pt := range sPoints {
points[j] = image.Pt(int(pt.x), int(pt.y))
}
contours[i] = points
}
return contours
return float64(C.PointPolygonTest(pts.p, cp, C.bool(measureDist)))
}
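PointPolygonTest is new to the Go API here. With measureDist=false it returns +1 inside, -1 outside and 0 on an edge; with measureDist=true it returns the signed distance to the nearest edge. A short sketch with a hypothetical triangle, assuming the NewPointVectorFromPoints constructor:
tri := gocv.NewPointVectorFromPoints([]image.Point{
	image.Pt(0, 0), image.Pt(100, 0), image.Pt(50, 100),
})
defer tri.Close()
inside := gocv.PointPolygonTest(tri, image.Pt(50, 30), false) // +1: the point lies inside
dist := gocv.PointPolygonTest(tri, image.Pt(50, 30), true)    // signed distance in pixels
fmt.Println(inside, dist)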
//ConnectedComponentsAlgorithmType specifies the type for ConnectedComponents
@ -1227,6 +1211,27 @@ func Circle(img *Mat, center image.Point, radius int, c color.RGBA, thickness in
C.Circle(img.p, pc, C.int(radius), sColor, C.int(thickness))
}
// CircleWithParams draws a circle.
//
// For further details, please see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#gaf10604b069374903dbd0f0488cb43670
//
func CircleWithParams(img *Mat, center image.Point, radius int, c color.RGBA, thickness int, lineType LineType, shift int) {
pc := C.struct_Point{
x: C.int(center.X),
y: C.int(center.Y),
}
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
val3: C.double(c.R),
val4: C.double(c.A),
}
C.CircleWithParams(img.p, pc, C.int(radius), sColor, C.int(thickness), C.int(lineType), C.int(shift))
}
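CircleWithParams and the other *WithParams drawing helpers added in this release expose the lineType and shift arguments of the underlying OpenCV calls. A small sketch drawing an anti-aliased circle on a blank canvas:
canvas := gocv.NewMatWithSize(240, 320, gocv.MatTypeCV8UC3)
defer canvas.Close()
gocv.CircleWithParams(&canvas, image.Pt(160, 120), 40, color.RGBA{G: 255, A: 255}, 2, gocv.LineAA, 0)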
// Ellipse draws a simple or thick elliptic arc or fills an ellipse sector.
//
// For further details, please see:
@ -1252,6 +1257,31 @@ func Ellipse(img *Mat, center, axes image.Point, angle, startAngle, endAngle flo
C.Ellipse(img.p, pc, pa, C.double(angle), C.double(startAngle), C.double(endAngle), sColor, C.int(thickness))
}
// EllipseWithParams draws a simple or thick elliptic arc or fills an ellipse sector.
//
// For further details, please see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#ga28b2267d35786f5f890ca167236cbc69
//
func EllipseWithParams(img *Mat, center, axes image.Point, angle, startAngle, endAngle float64, c color.RGBA, thickness int, lineType LineType, shift int) {
pc := C.struct_Point{
x: C.int(center.X),
y: C.int(center.Y),
}
pa := C.struct_Point{
x: C.int(axes.X),
y: C.int(axes.Y),
}
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
val3: C.double(c.R),
val4: C.double(c.A),
}
C.EllipseWithParams(img.p, pc, pa, C.double(angle), C.double(startAngle), C.double(endAngle), sColor, C.int(thickness), C.int(lineType), C.int(shift))
}
// Line draws a line segment connecting two points.
//
// For further details, please see:
@ -1302,35 +1332,18 @@ func Rectangle(img *Mat, r image.Rectangle, c color.RGBA, thickness int) {
C.Rectangle(img.p, cRect, sColor, C.int(thickness))
}
// FillPoly fills the area bounded by one or more polygons.
// RectangleWithParams draws a simple, thick, or filled up-right rectangle.
// It renders a rectangle with the desired characteristics to the target Mat image.
//
// For more information, see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#gaf30888828337aa4c6b56782b5dfbd4b7
func FillPoly(img *Mat, pts [][]image.Point, c color.RGBA) {
points := make([]C.struct_Points, len(pts))
for i, pt := range pts {
p := (*C.struct_Point)(C.malloc(C.size_t(C.sizeof_struct_Point * len(pt))))
defer C.free(unsafe.Pointer(p))
pa := getPoints(p, len(pt))
for j, point := range pt {
pa[j] = C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
}
points[i] = C.struct_Points{
points: (*C.Point)(p),
length: C.int(len(pt)),
}
}
cPoints := C.struct_Contours{
contours: (*C.struct_Points)(&points[0]),
length: C.int(len(pts)),
// For further details, please see:
// http://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#ga346ac30b5c74e9b5137576c9ee9e0e8c
//
func RectangleWithParams(img *Mat, r image.Rectangle, c color.RGBA, thickness int, lineType LineType, shift int) {
cRect := C.struct_Rect{
x: C.int(r.Min.X),
y: C.int(r.Min.Y),
width: C.int(r.Size().X),
height: C.int(r.Size().Y),
}
sColor := C.struct_Scalar{
@ -1340,40 +1353,49 @@ func FillPoly(img *Mat, pts [][]image.Point, c color.RGBA) {
val4: C.double(c.A),
}
C.FillPoly(img.p, cPoints, sColor)
C.RectangleWithParams(img.p, cRect, sColor, C.int(thickness), C.int(lineType), C.int(shift))
}
// FillPoly fills the area bounded by one or more polygons.
//
// For more information, see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#gaf30888828337aa4c6b56782b5dfbd4b7
func FillPoly(img *Mat, pts PointsVector, c color.RGBA) {
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
val3: C.double(c.R),
val4: C.double(c.A),
}
C.FillPoly(img.p, pts.p, sColor)
}
// FillPolyWithParams fills the area bounded by one or more polygons.
//
// For more information, see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#gaf30888828337aa4c6b56782b5dfbd4b7
func FillPolyWithParams(img *Mat, pts PointsVector, c color.RGBA, lineType LineType, shift int, offset image.Point) {
offsetP := C.struct_Point{
x: C.int(offset.X),
y: C.int(offset.Y),
}
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
val3: C.double(c.R),
val4: C.double(c.A),
}
C.FillPolyWithParams(img.p, pts.p, sColor, C.int(lineType), C.int(shift), offsetP)
}
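FillPoly now takes a PointsVector, which can be built directly from Go points. A hedged sketch, assuming the NewPointsVectorFromPoints constructor and the canvas Mat from the previous snippet:
poly := gocv.NewPointsVectorFromPoints([][]image.Point{
	{image.Pt(10, 10), image.Pt(90, 10), image.Pt(50, 80)},
})
defer poly.Close()
gocv.FillPoly(&canvas, poly, color.RGBA{B: 255, A: 255})
// The WithParams variant adds a line type, a fractional-bit shift and an offset applied to every vertex.
gocv.FillPolyWithParams(&canvas, poly, color.RGBA{R: 255, A: 255}, gocv.LineAA, 0, image.Pt(100, 0))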
// Polylines draws several polygonal curves.
//
// For more information, see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#ga1ea127ffbbb7e0bfc4fd6fd2eb64263c
func Polylines(img *Mat, pts [][]image.Point, isClosed bool, c color.RGBA, thickness int) {
points := make([]C.struct_Points, len(pts))
for i, pt := range pts {
p := (*C.struct_Point)(C.malloc(C.size_t(C.sizeof_struct_Point * len(pt))))
defer C.free(unsafe.Pointer(p))
pa := getPoints(p, len(pt))
for j, point := range pt {
pa[j] = C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
}
points[i] = C.struct_Points{
points: (*C.Point)(p),
length: C.int(len(pt)),
}
}
cPoints := C.struct_Contours{
contours: (*C.struct_Points)(&points[0]),
length: C.int(len(pts)),
}
func Polylines(img *Mat, pts PointsVector, isClosed bool, c color.RGBA, thickness int) {
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
@ -1381,7 +1403,7 @@ func Polylines(img *Mat, pts [][]image.Point, isClosed bool, c color.RGBA, thick
val4: C.double(c.A),
}
C.Polylines(img.p, cPoints, C.bool(isClosed), sColor, C.int(thickness))
C.Polylines(img.p, pts.p, C.bool(isClosed), sColor, C.int(thickness))
}
// HersheyFont are the font libraries included in OpenCV.
@ -1694,10 +1716,8 @@ func ApplyCustomColorMap(src Mat, dst *Mat, customColormap Mat) {
//
// For further details, please see:
// https://docs.opencv.org/master/da/d54/group__imgproc__transform.html#ga8c1ae0e3589a9d77fffc962c49b22043
func GetPerspectiveTransform(src, dst []image.Point) Mat {
srcPoints := toCPoints(src)
dstPoints := toCPoints(dst)
return newMat(C.GetPerspectiveTransform(srcPoints, dstPoints))
func GetPerspectiveTransform(src, dst PointVector) Mat {
return newMat(C.GetPerspectiveTransform(src.p, dst.p))
}
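GetPerspectiveTransform now takes PointVector arguments, and the resulting 3x3 Mat feeds straight into WarpPerspective. A sketch with hypothetical corner correspondences and an existing input Mat named frame:
srcQuad := gocv.NewPointVectorFromPoints([]image.Point{
	image.Pt(0, 0), image.Pt(639, 0), image.Pt(639, 479), image.Pt(0, 479),
})
dstQuad := gocv.NewPointVectorFromPoints([]image.Point{
	image.Pt(60, 0), image.Pt(579, 0), image.Pt(639, 479), image.Pt(0, 479),
})
defer srcQuad.Close()
defer dstQuad.Close()
m := gocv.GetPerspectiveTransform(srcQuad, dstQuad)
defer m.Close()
warped := gocv.NewMat()
defer warped.Close()
gocv.WarpPerspective(frame, &warped, m, image.Pt(640, 480))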
// GetPerspectiveTransform2f returns 3x3 perspective transformation for the
@ -1705,10 +1725,8 @@ func GetPerspectiveTransform(src, dst []image.Point) Mat {
//
// For further details, please see:
// https://docs.opencv.org/master/da/d54/group__imgproc__transform.html#ga8c1ae0e3589a9d77fffc962c49b22043
func GetPerspectiveTransform2f(src, dst []Point2f) Mat {
srcPoints := toCPoints2f(src)
dstPoints := toCPoints2f(dst)
return newMat(C.GetPerspectiveTransform2f(srcPoints, dstPoints))
func GetPerspectiveTransform2f(src, dst Point2fVector) Mat {
return newMat(C.GetPerspectiveTransform2f(src.p, dst.p))
}
// GetAffineTransform returns a 2x3 affine transformation matrix for the
@ -1716,10 +1734,8 @@ func GetPerspectiveTransform2f(src, dst []Point2f) Mat {
//
// For further details, please see:
// https://docs.opencv.org/master/da/d54/group__imgproc__transform.html#ga8f6d378f9f8eebb5cb55cd3ae295a999
func GetAffineTransform(src, dst []image.Point) Mat {
srcPoints := toCPoints(src)
dstPoints := toCPoints(dst)
return newMat(C.GetAffineTransform(srcPoints, dstPoints))
func GetAffineTransform(src, dst PointVector) Mat {
return newMat(C.GetAffineTransform(src.p, dst.p))
}
// GetAffineTransform2f returns a 2x3 affine transformation matrix for the
@ -1727,10 +1743,8 @@ func GetAffineTransform(src, dst []image.Point) Mat {
//
// For further details, please see:
// https://docs.opencv.org/master/da/d54/group__imgproc__transform.html#ga8f6d378f9f8eebb5cb55cd3ae295a999
func GetAffineTransform2f(src, dst []Point2f) Mat {
srcPoints := toCPoints2f(src)
dstPoints := toCPoints2f(dst)
return newMat(C.GetAffineTransform2f(srcPoints, dstPoints))
func GetAffineTransform2f(src, dst Point2fVector) Mat {
return newMat(C.GetAffineTransform2f(src.p, dst.p))
}
type HomographyMethod int
@ -1755,32 +1769,7 @@ func FindHomography(srcPoints Mat, dstPoints *Mat, method HomographyMethod, rans
// For further details, please see:
// https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#ga746c0625f1781f1ffc9056259103edbc
//
func DrawContours(img *Mat, contours [][]image.Point, contourIdx int, c color.RGBA, thickness int) {
cntrs := make([]C.struct_Points, len(contours))
for i, contour := range contours {
p := (*C.struct_Point)(C.malloc(C.size_t(C.sizeof_struct_Point * len(contour))))
pa := getPoints(p, len(contour))
for j, point := range contour {
pa[j] = C.struct_Point{
x: C.int(point.X),
y: C.int(point.Y),
}
}
cntrs[i] = C.struct_Points{
points: (*C.Point)(p),
length: C.int(len(contour)),
}
}
cContours := C.struct_Contours{
contours: (*C.struct_Points)(&cntrs[0]),
length: C.int(len(contours)),
}
func DrawContours(img *Mat, contours PointsVector, contourIdx int, c color.RGBA, thickness int) {
sColor := C.struct_Scalar{
val1: C.double(c.B),
val2: C.double(c.G),
@ -1788,13 +1777,7 @@ func DrawContours(img *Mat, contours [][]image.Point, contourIdx int, c color.RG
val4: C.double(c.A),
}
C.DrawContours(img.p, cContours, C.int(contourIdx), sColor, C.int(thickness))
// now free the contour points
for i := 0; i < len(contours); i++ {
C.free(unsafe.Pointer(cntrs[i].points))
}
C.DrawContours(img.p, contours.p, C.int(contourIdx), sColor, C.int(thickness))
}
// Remap applies a generic geometrical transformation to an image.
@ -1880,9 +1863,8 @@ const (
//
// For further details, please see:
// https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#gaf849da1fdafa67ee84b1e9a23b93f91f
func FitLine(pts []image.Point, line *Mat, distType DistanceTypes, param, reps, aeps float64) {
cPoints := toCPoints(pts)
C.FitLine(cPoints, line.p, C.int(distType), C.double(param), C.double(reps), C.double(aeps))
func FitLine(pts PointVector, line *Mat, distType DistanceTypes, param, reps, aeps float64) {
C.FitLine(pts.p, line.p, C.int(distType), C.double(param), C.double(reps), C.double(aeps))
}
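FitLine also switches to PointVector. The output Mat holds (vx, vy, x0, y0), a unit direction vector plus a point on the fitted line. A short sketch:
pts := gocv.NewPointVectorFromPoints([]image.Point{
	image.Pt(0, 1), image.Pt(1, 3), image.Pt(2, 5), image.Pt(3, 7),
})
defer pts.Close()
line := gocv.NewMat()
defer line.Close()
gocv.FitLine(pts, &line, gocv.DistL2, 0, 0.01, 0.01)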
// CLAHE is a wrapper around the cv::CLAHE algorithm.
@ -1994,6 +1976,50 @@ func (m *Mat) ToImage() (image.Image, error) {
}
}
// ToImageYUV converts a Mat to an image.YCbCr using image.YCbCrSubsampleRatio420 as the default subsampling param.
func (m *Mat) ToImageYUV() (*image.YCbCr, error) {
img, err := m.ToImage()
if err != nil {
return nil, err
}
bounds := img.Bounds()
converted := image.NewYCbCr(bounds, image.YCbCrSubsampleRatio420)
for row := 0; row < bounds.Max.Y; row++ {
for col := 0; col < bounds.Max.X; col++ {
r, g, b, _ := img.At(col, row).RGBA()
y, cb, cr := color.RGBToYCbCr(uint8(r), uint8(g), uint8(b))
converted.Y[converted.YOffset(col, row)] = y
converted.Cb[converted.COffset(col, row)] = cb
converted.Cr[converted.COffset(col, row)] = cr
}
}
return converted, nil
}
// ToImageYUVWithParams converts a Mat to an image.YCbCr using the provided YUV subsample ratio param.
func (m *Mat) ToImageYUVWithParams(ratio image.YCbCrSubsampleRatio) (*image.YCbCr, error) {
img, err := m.ToImage()
if err != nil {
return nil, err
}
bounds := img.Bounds()
converted := image.NewYCbCr(bounds, ratio)
for row := 0; row < bounds.Max.Y; row++ {
for col := 0; col < bounds.Max.X; col++ {
r, g, b, _ := img.At(col, row).RGBA()
y, cb, cr := color.RGBToYCbCr(uint8(r), uint8(g), uint8(b))
converted.Y[converted.YOffset(col, row)] = y
converted.Cb[converted.COffset(col, row)] = cb
converted.Cr[converted.COffset(col, row)] = cr
}
}
return converted, nil
}
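Both ToImageYUV helpers are new; they go through ToImage and re-encode pixel by pixel, so they are convenience rather than performance paths. A minimal sketch on an existing 8-bit color Mat named m, assuming the standard image and log imports:
yuv420, err := m.ToImageYUV() // image.YCbCrSubsampleRatio420 by default
if err != nil {
	log.Fatal(err)
}
yuv422, err := m.ToImageYUVWithParams(image.YCbCrSubsampleRatio422)
if err != nil {
	log.Fatal(err)
}
_, _ = yuv420, yuv422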
// ImageToMatRGBA converts image.Image to gocv.Mat,
// which represents RGBA image having 8bit for each component.
// Type of Mat is gocv.MatTypeCV8UC4.

vendor/gocv.io/x/gocv/imgproc.h generated vendored

@ -16,20 +16,21 @@ typedef void* CLAHE;
#include "core.h"
double ArcLength(Contour curve, bool is_closed);
Contour ApproxPolyDP(Contour curve, double epsilon, bool closed);
double ArcLength(PointVector curve, bool is_closed);
PointVector ApproxPolyDP(PointVector curve, double epsilon, bool closed);
void CvtColor(Mat src, Mat dst, int code);
void EqualizeHist(Mat src, Mat dst);
void CalcHist(struct Mats mats, IntVector chans, Mat mask, Mat hist, IntVector sz, FloatVector rng, bool acc);
void CalcBackProject(struct Mats mats, IntVector chans, Mat hist, Mat backProject, FloatVector rng, bool uniform);
double CompareHist(Mat hist1, Mat hist2, int method);
void ConvexHull(Contour points, Mat hull, bool clockwise, bool returnPoints);
void ConvexityDefects(Contour points, Mat hull, Mat result);
void ConvexHull(PointVector points, Mat hull, bool clockwise, bool returnPoints);
void ConvexityDefects(PointVector points, Mat hull, Mat result);
void BilateralFilter(Mat src, Mat dst, int d, double sc, double ss);
void Blur(Mat src, Mat dst, Size ps);
void BoxFilter(Mat src, Mat dst, int ddepth, Size ps);
void SqBoxFilter(Mat src, Mat dst, int ddepth, Size ps);
void Dilate(Mat src, Mat dst, Mat kernel);
void DilateWithParams(Mat src, Mat dst, Mat kernel, Point anchor, int iterations, int borderType, Scalar borderValue);
void DistanceTransform(Mat src, Mat dst, Mat labels, int distanceType, int maskSize, int labelType);
void Erode(Mat src, Mat dst, Mat kernel);
void ErodeWithParams(Mat src, Mat dst, Mat kernel, Point anchor, int iterations, int borderType);
@ -37,13 +38,14 @@ void MatchTemplate(Mat image, Mat templ, Mat result, int method, Mat mask);
struct Moment Moments(Mat src, bool binaryImage);
void PyrDown(Mat src, Mat dst, Size dstsize, int borderType);
void PyrUp(Mat src, Mat dst, Size dstsize, int borderType);
struct Rect BoundingRect(Contour con);
struct Rect BoundingRect(PointVector pts);
void BoxPoints(RotatedRect rect, Mat boxPts);
double ContourArea(Contour con);
struct RotatedRect MinAreaRect(Points points);
struct RotatedRect FitEllipse(Points points);
void MinEnclosingCircle(Points points, Point2f* center, float* radius);
struct Contours FindContours(Mat src, int mode, int method);
double ContourArea(PointVector pts);
struct RotatedRect MinAreaRect(PointVector pts);
struct RotatedRect FitEllipse(PointVector pts);
void MinEnclosingCircle(PointVector pts, Point2f* center, float* radius);
PointsVector FindContours(Mat src, Mat hierarchy, int mode, int method);
double PointPolygonTest(PointVector pts, Point pt, bool measureDist);
int ConnectedComponents(Mat src, Mat dst, int connectivity, int ltype, int ccltype);
int ConnectedComponentsWithStats(Mat src, Mat labels, Mat stats, Mat centroids, int connectivity, int ltype, int ccltype);
@ -78,12 +80,17 @@ void AdaptiveThreshold(Mat src, Mat dst, double maxValue, int adaptiveTyp, int t
void ArrowedLine(Mat img, Point pt1, Point pt2, Scalar color, int thickness);
void Circle(Mat img, Point center, int radius, Scalar color, int thickness);
void CircleWithParams(Mat img, Point center, int radius, Scalar color, int thickness, int lineType, int shift);
void Ellipse(Mat img, Point center, Point axes, double angle, double
startAngle, double endAngle, Scalar color, int thickness);
void EllipseWithParams(Mat img, Point center, Point axes, double angle, double
startAngle, double endAngle, Scalar color, int thickness, int lineType, int shift);
void Line(Mat img, Point pt1, Point pt2, Scalar color, int thickness);
void Rectangle(Mat img, Rect rect, Scalar color, int thickness);
void FillPoly(Mat img, Contours points, Scalar color);
void Polylines(Mat img, Contours points, bool isClosed, Scalar color, int thickness);
void RectangleWithParams(Mat img, Rect rect, Scalar color, int thickness, int lineType, int shift);
void FillPoly(Mat img, PointsVector points, Scalar color);
void FillPolyWithParams(Mat img, PointsVector points, Scalar color, int lineType, int shift, Point offset);
void Polylines(Mat img, PointsVector points, bool isClosed, Scalar color, int thickness);
struct Size GetTextSize(const char* text, int fontFace, double fontScale, int thickness);
struct Size GetTextSizeWithBaseline(const char* text, int fontFace, double fontScale, int thickness, int* baseline);
void PutText(Mat img, const char* text, Point org, int fontFace, double fontScale,
@ -100,19 +107,19 @@ void WarpPerspective(Mat src, Mat dst, Mat m, Size dsize);
void Watershed(Mat image, Mat markers);
void ApplyColorMap(Mat src, Mat dst, int colormap);
void ApplyCustomColorMap(Mat src, Mat dst, Mat colormap);
Mat GetPerspectiveTransform(Contour src, Contour dst);
Mat GetPerspectiveTransform2f(Contour2f src, Contour2f dst);
Mat GetAffineTransform(Contour src, Contour dst);
Mat GetAffineTransform2f(Contour2f src, Contour2f dst);
Mat GetPerspectiveTransform(PointVector src, PointVector dst);
Mat GetPerspectiveTransform2f(Point2fVector src, Point2fVector dst);
Mat GetAffineTransform(PointVector src, PointVector dst);
Mat GetAffineTransform2f(Point2fVector src, Point2fVector dst);
Mat FindHomography(Mat src, Mat dst, int method, double ransacReprojThreshold, Mat mask, const int maxIters, const double confidence) ;
void DrawContours(Mat src, Contours contours, int contourIdx, Scalar color, int thickness);
void DrawContours(Mat src, PointsVector contours, int contourIdx, Scalar color, int thickness);
void Sobel(Mat src, Mat dst, int ddepth, int dx, int dy, int ksize, double scale, double delta, int borderType);
void SpatialGradient(Mat src, Mat dx, Mat dy, int ksize, int borderType);
void Remap(Mat src, Mat dst, Mat map1, Mat map2, int interpolation, int borderMode, Scalar borderValue);
void Filter2D(Mat src, Mat dst, int ddepth, Mat kernel, Point anchor, double delta, int borderType);
void SepFilter2D(Mat src, Mat dst, int ddepth, Mat kernelX, Mat kernelY, Point anchor, double delta, int borderType);
void LogPolar(Mat src, Mat dst, Point center, double m, int flags);
void FitLine(Contour points, Mat line, int distType, double param, double reps, double aeps);
void FitLine(PointVector pts, Mat line, int distType, double param, double reps, double aeps);
void LinearPolar(Mat src, Mat dst, Point center, double maxRadius, int flags);
bool ClipLine(Size imgSize, Point pt1, Point pt2);
CLAHE CLAHE_Create();


@ -73,8 +73,10 @@ func newMat(p C.Mat) Mat {
// Close the Mat object.
func (m *Mat) Close() error {
C.Mat_Close(m.p)
// NOTE: The pointer must be removed from the profile before it is deleted to
// avoid a data race.
MatProfile.Remove(m.p)
C.Mat_Close(m.p)
m.p = nil
m.d = nil
return nil
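This hunk appears to come from the matprofile build of the Mat wrapper: MatProfile is a pprof profile, enabled with the matprofile build tag, that tracks Mats which have not been Closed, and removing the entry before freeing the C memory avoids a race with concurrent profile reads. A hedged usage sketch (built with go build -tags matprofile):
m := gocv.NewMat()
fmt.Println("open Mats:", gocv.MatProfile.Count())
m.Close()
fmt.Println("open Mats after Close:", gocv.MatProfile.Count())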

vendor/gocv.io/x/gocv/photo.cpp generated vendored

@ -16,3 +16,71 @@ void SeamlessClone(Mat src, Mat dst, Mat mask, Point p, Mat blend, int flags) {
void TextureFlattening(Mat src, Mat mask, Mat dst, float low_threshold, float high_threshold, int kernel_size) {
cv::textureFlattening(*src, *mask, *dst, low_threshold, high_threshold, kernel_size);
}
void FastNlMeansDenoisingColoredMulti( struct Mats src, Mat dst, int imgToDenoiseIndex, int temporalWindowSize){
std::vector<cv::Mat> images;
for (int i = 0; i < src.length; ++i) {
images.push_back(*src.mats[i]);
}
cv::fastNlMeansDenoisingColoredMulti( images, *dst, imgToDenoiseIndex, temporalWindowSize );
}
void FastNlMeansDenoisingColoredMultiWithParams( struct Mats src, Mat dst, int imgToDenoiseIndex, int temporalWindowSize, float h, float hColor, int templateWindowSize, int searchWindowSize ){
std::vector<cv::Mat> images;
for (int i = 0; i < src.length; ++i) {
images.push_back(*src.mats[i]);
}
cv::fastNlMeansDenoisingColoredMulti( images, *dst, imgToDenoiseIndex, temporalWindowSize, h, hColor, templateWindowSize, searchWindowSize );
}
MergeMertens MergeMertens_Create() {
return new cv::Ptr<cv::MergeMertens>(cv::createMergeMertens());
}
MergeMertens MergeMertens_CreateWithParams(float contrast_weight,
float saturation_weight,
float exposure_weight) {
return new cv::Ptr<cv::MergeMertens>(cv::createMergeMertens(
contrast_weight, saturation_weight, exposure_weight));
}
void MergeMertens_Close(MergeMertens b) {
delete b;
}
void MergeMertens_Process(MergeMertens b, struct Mats src, Mat dst) {
std::vector<cv::Mat> images;
for (int i = 0; i < src.length; ++i) {
images.push_back(*src.mats[i]);
}
(*b)->process(images, *dst);
}
AlignMTB AlignMTB_Create() {
return new cv::Ptr<cv::AlignMTB>(cv::createAlignMTB(6,4,false));
}
AlignMTB AlignMTB_CreateWithParams(int max_bits, int exclude_range, bool cut) {
return new cv::Ptr<cv::AlignMTB>(
cv::createAlignMTB(max_bits, exclude_range, cut));
}
void AlignMTB_Close(AlignMTB b) { delete b; }
void AlignMTB_Process(AlignMTB b, struct Mats src, struct Mats *dst) {
std::vector<cv::Mat> srcMats;
for (int i = 0; i < src.length; ++i) {
srcMats.push_back(*src.mats[i]);
}
std::vector<cv::Mat> dstMats;
(*b)->process(srcMats, dstMats);
dst->mats = new Mat[dstMats.size()];
for (size_t i = 0; i < dstMats.size() ; ++i) {
dst->mats[i] = new cv::Mat( dstMats[i] );
}
dst->length = (int)dstMats.size();
}

vendor/gocv.io/x/gocv/photo.go generated vendored

@ -5,11 +5,24 @@ package gocv
#include "photo.h"
*/
import "C"
import "image"
import (
"image"
"unsafe"
)
//SeamlessCloneFlags seamlessClone algorithm flags
type SeamlessCloneFlags int
// MergeMertens is a wrapper around the cv::MergeMertens.
type MergeMertens struct {
p unsafe.Pointer // This unsafe pointer will in fact be a C.MergeMertens
}
// AlignMTB is a wrapper around the cv::AlignMTB.
type AlignMTB struct {
p unsafe.Pointer // This unsafe pointer will in fact be a C.AlignMTB
}
const (
// NormalClone The power of the method is fully expressed when inserting objects with complex outlines into a new background.
NormalClone SeamlessCloneFlags = iota
@ -61,3 +74,154 @@ func IlluminationChange(src, mask Mat, dst *Mat, alpha, beta float32) {
func TextureFlattening(src, mask Mat, dst *Mat, lowThreshold, highThreshold float32, kernelSize int) {
C.TextureFlattening(src.p, mask.p, dst.p, C.float(lowThreshold), C.float(highThreshold), C.int(kernelSize))
}
// FastNlMeansDenoisingColoredMulti denoises the selected images.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#gaa501e71f52fb2dc17ff8ca5e7d2d3619
//
func FastNlMeansDenoisingColoredMulti(src []Mat, dst *Mat, imgToDenoiseIndex int, temporalWindowSize int) {
cMatArray := make([]C.Mat, len(src))
for i, r := range src {
cMatArray[i] = (C.Mat)(r.p)
}
matsVector := C.struct_Mats{
mats: (*C.Mat)(&cMatArray[0]),
length: C.int(len(src)),
}
C.FastNlMeansDenoisingColoredMulti(matsVector, dst.p, C.int(imgToDenoiseIndex), C.int(temporalWindowSize))
}
// FastNlMeansDenoisingColoredMultiWithParams denoises the selected images.
//
// For further details, please see:
// https://docs.opencv.org/master/d1/d79/group__photo__denoise.html#gaa501e71f52fb2dc17ff8ca5e7d2d3619
//
func FastNlMeansDenoisingColoredMultiWithParams(src []Mat, dst *Mat, imgToDenoiseIndex int, temporalWindowSize int, h float32, hColor float32, templateWindowSize int, searchWindowSize int) {
cMatArray := make([]C.Mat, len(src))
for i, r := range src {
cMatArray[i] = (C.Mat)(r.p)
}
matsVector := C.struct_Mats{
mats: (*C.Mat)(&cMatArray[0]),
length: C.int(len(src)),
}
C.FastNlMeansDenoisingColoredMultiWithParams(matsVector, dst.p, C.int(imgToDenoiseIndex), C.int(temporalWindowSize), C.float(h), C.float(hColor), C.int(templateWindowSize), C.int(searchWindowSize))
}
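Both denoising entry points take a slice of equally sized color frames and denoise the frame at imgToDenoiseIndex using temporalWindowSize neighbours. A sketch with three hypothetical consecutive frames f0, f1, f2:
frames := []gocv.Mat{f0, f1, f2}
dst := gocv.NewMat()
defer dst.Close()
// Denoise the middle frame using all three images as the temporal window.
gocv.FastNlMeansDenoisingColoredMulti(frames, &dst, 1, 3)
// The WithParams variant also exposes the filter strengths and window sizes.
gocv.FastNlMeansDenoisingColoredMultiWithParams(frames, &dst, 1, 3, 3, 3, 7, 21)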
// NewMergeMertens returns a new MergeMertens LDR merge algorithm with default parameters.
// The MergeMertens algorithm merges a set of LDR exposures into an HDR-like image.
//
// For further details, please see:
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html
// https://docs.opencv.org/master/d7/dd6/classcv_1_1MergeMertens.html
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga79d59aa3cb3a7c664e59a4b5acc1ccb6
//
func NewMergeMertens() MergeMertens {
return MergeMertens{p: unsafe.Pointer(C.MergeMertens_Create())}
}
// NewMergeMertensWithParams returns a new MergeMertens LDR merge algorithm with customized parameters.
// The MergeMertens algorithm merges a set of LDR exposures into an HDR-like image.
//
// For further details, please see:
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html
// https://docs.opencv.org/master/d7/dd6/classcv_1_1MergeMertens.html
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga79d59aa3cb3a7c664e59a4b5acc1ccb6
//
func NewMergeMertensWithParams(contrast_weight float32, saturation_weight float32, exposure_weight float32) MergeMertens {
return MergeMertens{p: unsafe.Pointer(C.MergeMertens_CreateWithParams(C.float(contrast_weight), C.float(saturation_weight), C.float(exposure_weight)))}
}
// Close MergeMertens.
func (b *MergeMertens) Close() error {
C.MergeMertens_Close((C.MergeMertens)(b.p)) // Here the unsafe pointer is cast into the right type
b.p = nil
return nil
}
// Process merges the given LDR images using the current MergeMertens.
// The result written to dst is an 8-bit, 3-channel image Mat (RGB).
// For further details, please see:
// https://docs.opencv.org/master/d7/dd6/classcv_1_1MergeMertens.html#a2d2254b2aab722c16954de13a663644d
//
func (b *MergeMertens) Process(src []Mat, dst *Mat) {
cMatArray := make([]C.Mat, len(src))
for i, r := range src {
cMatArray[i] = (C.Mat)(r.p)
}
// Convert the Go slice of Mats into the Mats struct understood by the C wrapper.
matsVector := C.struct_Mats{
mats: (*C.Mat)(&cMatArray[0]),
length: C.int(len(src)),
}
C.MergeMertens_Process((C.MergeMertens)(b.p), matsVector, dst.p)
// Rescale the [0.0, 1.0] float result to an 8-bit [0, 255] image.
dst.ConvertToWithParams(dst, MatTypeCV8UC3, 255.0, 0.0)
}
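MergeMertens fuses an exposure bracket without needing exposure times, and as the wrapper above shows, Process already rescales the [0, 1] float result to an 8-bit, 3-channel Mat. A sketch with hypothetical under/normal/over exposure Mats:
exposures := []gocv.Mat{under, normal, over}
mm := gocv.NewMergeMertens()
defer mm.Close()
fused := gocv.NewMat()
defer fused.Close()
mm.Process(exposures, &fused)
gocv.IMWrite("fusion.png", fused)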
// NewAlignMTB returns a new AlignMTB with default parameters.
// AlignMTB converts images to median threshold bitmaps (1 for pixels brighter
// than the median luminance, 0 otherwise) and then aligns the resulting bitmaps
// using bit operations.
// For further details, please see:
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html
// https://docs.opencv.org/master/d7/db6/classcv_1_1AlignMTB.html
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga2f1fafc885a5d79dbfb3542e08db0244
//
func NewAlignMTB() AlignMTB {
return AlignMTB{p: unsafe.Pointer(C.AlignMTB_Create())}
}
// NewAlignMTBWithParams returns a new AlignMTB with customized parameters.
// AlignMTB converts images to median threshold bitmaps (1 for pixels brighter
// than the median luminance, 0 otherwise) and then aligns the resulting bitmaps
// using bit operations.
// For further details, please see:
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html
// https://docs.opencv.org/master/d7/db6/classcv_1_1AlignMTB.html
// https://docs.opencv.org/master/d6/df5/group__photo__hdr.html#ga2f1fafc885a5d79dbfb3542e08db0244
//
func NewAlignMTBWithParams(max_bits int, exclude_range int, cut bool) AlignMTB {
return AlignMTB{p: unsafe.Pointer(C.AlignMTB_CreateWithParams(C.int(max_bits), C.int(exclude_range), C.bool(cut)))}
}
// Close AlignMTB.
func (b *AlignMTB) Close() error {
C.AlignMTB_Close((C.AlignMTB)(b.p))
b.p = nil
return nil
}
// Process computes an alignment using the current AlignMTB.
//
// For further details, please see:
// https://docs.opencv.org/master/d7/db6/classcv_1_1AlignMTB.html#a37b3417d844f362d781f34155cbcb201
//
func (b *AlignMTB) Process(src []Mat, dst *[]Mat) {
cSrcArray := make([]C.Mat, len(src))
for i, r := range src {
cSrcArray[i] = r.p
}
cSrcMats := C.struct_Mats{
mats: (*C.Mat)(&cSrcArray[0]),
length: C.int(len(src)),
}
cDstMats := C.struct_Mats{}
C.AlignMTB_Process((C.AlignMTB)(b.p), cSrcMats, &cDstMats)
// Wrap the C++ output matrices in GoCV Mat values.
for i := C.int(0); i < cDstMats.length; i++ {
var tempdst Mat
tempdst.p = C.Mats_get(cDstMats, i)
*dst = append(*dst, tempdst)
}
return
}
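AlignMTB is typically run before MergeMertens to compensate for handheld camera shift between exposures. Process appends newly allocated Mats to the destination slice, so the caller owns and must close them. A hedged sketch with the same hypothetical exposure bracket:
exposures := []gocv.Mat{under, normal, over}
aligned := []gocv.Mat{}
amtb := gocv.NewAlignMTB()
defer amtb.Close()
amtb.Process(exposures, &aligned)
// ... merge or inspect the aligned frames ...
for i := range aligned {
	aligned[i].Close() // Process allocates the output Mats
}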

vendor/gocv.io/x/gocv/photo.h generated vendored

@ -3,12 +3,23 @@
#ifdef __cplusplus
#include <opencv2/opencv.hpp>
#include <opencv2/photo.hpp>
extern "C" {
#endif
#include "core.h"
#ifdef __cplusplus
// see : https://docs.opencv.org/3.4/d7/dd6/classcv_1_1MergeMertens.html
typedef cv::Ptr<cv::MergeMertens> *MergeMertens;
// see : https://docs.opencv.org/master/d7/db6/classcv_1_1AlignMTB.html
typedef cv::Ptr<cv::AlignMTB> *AlignMTB;
#else
typedef void *MergeMertens;
typedef void *AlignMTB;
#endif
void ColorChange(Mat src, Mat mask, Mat dst, float red_mul, float green_mul, float blue_mul);
void SeamlessClone(Mat src, Mat dst, Mat mask, Point p, Mat blend, int flags);
@ -17,8 +28,22 @@ void IlluminationChange(Mat src, Mat mask, Mat dst, float alpha, float beta);
void TextureFlattening(Mat src, Mat mask, Mat dst, float low_threshold, float high_threshold, int kernel_size);
void FastNlMeansDenoisingColoredMulti(struct Mats src, Mat dst, int imgToDenoiseIndex, int temporalWindowSize);
void FastNlMeansDenoisingColoredMultiWithParams(struct Mats src, Mat dst, int imgToDenoiseIndex, int temporalWindowSize, float h, float hColor, int templateWindowSize, int searchWindowSize );
MergeMertens MergeMertens_Create();
MergeMertens MergeMertens_CreateWithParams(float contrast_weight, float saturation_weight, float exposure_weight);
void MergeMertens_Process(MergeMertens b, struct Mats src, Mat dst);
void MergeMertens_Close(MergeMertens b);
AlignMTB AlignMTB_Create();
AlignMTB AlignMTB_CreateWithParams(int max_bits, int exclude_range, bool cut);
void AlignMTB_Process(AlignMTB b, struct Mats src, struct Mats *dst);
void AlignMTB_Close(AlignMTB b);
#ifdef __cplusplus
}
#endif
#endif //_OPENCV3_PHOTO_H
#endif //_OPENCV3_PHOTO_H

vendor/gocv.io/x/gocv/version.go generated vendored

@ -7,7 +7,7 @@ package gocv
import "C"
// GoCVVersion of this package, for display purposes.
const GoCVVersion = "0.26.0"
const GoCVVersion = "0.29.0"
// Version returns the current golang package version
func Version() string {


@ -11,18 +11,18 @@ echo.
REM This is why there is no progress bar:
REM https://github.com/PowerShell/PowerShell/issues/2138
echo Downloading: opencv-4.5.1.zip [91MB]
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest -Uri https://github.com/opencv/opencv/archive/4.5.1.zip -OutFile c:\opencv\opencv-4.5.1.zip"
echo Downloading: opencv-4.5.4.zip [91MB]
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest -Uri https://github.com/opencv/opencv/archive/4.5.4.zip -OutFile c:\opencv\opencv-4.5.4.zip"
echo Extracting...
powershell -command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path c:\opencv\opencv-4.5.1.zip -DestinationPath c:\opencv"
del c:\opencv\opencv-4.5.1.zip /q
powershell -command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path c:\opencv\opencv-4.5.4.zip -DestinationPath c:\opencv"
del c:\opencv\opencv-4.5.4.zip /q
echo.
echo Downloading: opencv_contrib-4.5.1.zip [58MB]
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest -Uri https://github.com/opencv/opencv_contrib/archive/4.5.1.zip -OutFile c:\opencv\opencv_contrib-4.5.1.zip"
echo Downloading: opencv_contrib-4.5.4.zip [58MB]
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest -Uri https://github.com/opencv/opencv_contrib/archive/4.5.4.zip -OutFile c:\opencv\opencv_contrib-4.5.4.zip"
echo Extracting...
powershell -command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path c:\opencv\opencv_contrib-4.5.1.zip -DestinationPath c:\opencv"
del c:\opencv\opencv_contrib-4.5.1.zip /q
powershell -command "$ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path c:\opencv\opencv_contrib-4.5.4.zip -DestinationPath c:\opencv"
del c:\opencv\opencv_contrib-4.5.4.zip /q
echo.
echo Done with downloading and extracting sources.
@ -32,9 +32,15 @@ echo on
cd /D C:\opencv\build
set PATH=%PATH%;C:\Program Files (x86)\CMake\bin;C:\mingw-w64\x86_64-6.3.0-posix-seh-rt_v5-rev1\mingw64\bin
cmake C:\opencv\opencv-4.5.1 -G "MinGW Makefiles" -BC:\opencv\build -DENABLE_CXX11=ON -DOPENCV_EXTRA_MODULES_PATH=C:\opencv\opencv_contrib-4.5.1\modules -DBUILD_SHARED_LIBS=ON -DWITH_IPP=OFF -DWITH_MSMF=OFF -DBUILD_EXAMPLES=OFF -DBUILD_TESTS=OFF -DBUILD_PERF_TESTS=OFF -DBUILD_opencv_java=OFF -DBUILD_opencv_python=OFF -DBUILD_opencv_python2=OFF -DBUILD_opencv_python3=OFF -DBUILD_DOCS=OFF -DENABLE_PRECOMPILED_HEADERS=OFF -DBUILD_opencv_saliency=OFF -DCPU_DISPATCH= -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_OPENCL_D3D11_NV=OFF -DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t -Wno-dev
if [%1]==[static] (
echo Build static opencv
set enable_shared=OFF
) else (
set enable_shared=ON
)
cmake C:\opencv\opencv-4.5.4 -G "MinGW Makefiles" -BC:\opencv\build -DENABLE_CXX11=ON -DOPENCV_EXTRA_MODULES_PATH=C:\opencv\opencv_contrib-4.5.4\modules -DBUILD_SHARED_LIBS=%enable_shared% -DWITH_IPP=OFF -DWITH_MSMF=OFF -DBUILD_EXAMPLES=OFF -DBUILD_TESTS=OFF -DBUILD_PERF_TESTS=OFF -DBUILD_opencv_java=OFF -DBUILD_opencv_python=OFF -DBUILD_opencv_python2=OFF -DBUILD_opencv_python3=OFF -DBUILD_DOCS=OFF -DENABLE_PRECOMPILED_HEADERS=OFF -DBUILD_opencv_saliency=OFF -DBUILD_opencv_wechat_qrcode=OFF -DCPU_DISPATCH= -DOPENCV_GENERATE_PKGCONFIG=ON -DWITH_OPENCL_D3D11_NV=OFF -DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t -Wno-dev
mingw32-make -j%NUMBER_OF_PROCESSORS%
mingw32-make install
rmdir c:\opencv\opencv-4.5.1 /s /q
rmdir c:\opencv\opencv_contrib-4.5.1 /s /q
rmdir c:\opencv\opencv-4.5.4 /s /q
rmdir c:\opencv\opencv_contrib-4.5.4 /s /q
chdir /D %GOPATH%\src\gocv.io\x\gocv


@ -744,9 +744,6 @@ func (d decoder) skipValue() error {
// Skip items. This will not validate whether skipped values are
// of the same type or not, same behavior as C++
// TextFormat::Parser::AllowUnknownField(true) version 3.8.0.
if err := d.skipValue(); err != nil {
return err
}
}
}
}


@ -263,3 +263,8 @@ func (e *Encoder) Snapshot() encoderState {
func (e *Encoder) Reset(es encoderState) {
e.encoderState = es
}
// AppendString appends the escaped form of the input string to b.
func AppendString(b []byte, s string) []byte {
return appendString(b, s, false)
}


@ -440,6 +440,13 @@ func legacyMerge(in piface.MergeInput) piface.MergeOutput {
if !ok {
return piface.MergeOutput{}
}
if !in.Source.IsValid() {
// Legacy Marshal methods may not function on nil messages.
// Check for a typed nil source only after we confirm that
// legacy Marshal/Unmarshal methods are present, for
// consistency.
return piface.MergeOutput{Flags: piface.MergeComplete}
}
b, err := marshaler.Marshal()
if err != nil {
return piface.MergeOutput{}


@ -52,8 +52,8 @@ import (
// 10. Send out the CL for review and submit it.
const (
Major = 1
Minor = 26
Patch = 0
Minor = 27
Patch = 1
PreRelease = ""
)


@ -1,276 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package protodesc provides functionality for converting
// FileDescriptorProto messages to/from protoreflect.FileDescriptor values.
//
// The google.protobuf.FileDescriptorProto is a protobuf message that describes
// the type information for a .proto file in a form that is easily serializable.
// The protoreflect.FileDescriptor is a more structured representation of
// the FileDescriptorProto message where references and remote dependencies
// can be directly followed.
package protodesc
import (
"google.golang.org/protobuf/internal/errors"
"google.golang.org/protobuf/internal/filedesc"
"google.golang.org/protobuf/internal/pragma"
"google.golang.org/protobuf/internal/strs"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
"google.golang.org/protobuf/types/descriptorpb"
)
// Resolver is the resolver used by NewFile to resolve dependencies.
// The enums and messages provided must belong to some parent file,
// which is also registered.
//
// It is implemented by protoregistry.Files.
type Resolver interface {
FindFileByPath(string) (protoreflect.FileDescriptor, error)
FindDescriptorByName(protoreflect.FullName) (protoreflect.Descriptor, error)
}
// FileOptions configures the construction of file descriptors.
type FileOptions struct {
pragma.NoUnkeyedLiterals
// AllowUnresolvable configures New to permissively allow unresolvable
// file, enum, or message dependencies. Unresolved dependencies are replaced
// by placeholder equivalents.
//
// The following dependencies may be left unresolved:
// • Resolving an imported file.
// • Resolving the type for a message field or extension field.
// If the kind of the field is unknown, then a placeholder is used for both
// the Enum and Message accessors on the protoreflect.FieldDescriptor.
// • Resolving an enum value set as the default for an optional enum field.
// If unresolvable, the protoreflect.FieldDescriptor.Default is set to the
// first value in the associated enum (or zero if the also enum dependency
// is also unresolvable). The protoreflect.FieldDescriptor.DefaultEnumValue
// is populated with a placeholder.
// • Resolving the extended message type for an extension field.
// • Resolving the input or output message type for a service method.
//
// If the unresolved dependency uses a relative name,
// then the placeholder will contain an invalid FullName with a "*." prefix,
// indicating that the starting prefix of the full name is unknown.
AllowUnresolvable bool
}
// NewFile creates a new protoreflect.FileDescriptor from the provided
// file descriptor message. See FileOptions.New for more information.
func NewFile(fd *descriptorpb.FileDescriptorProto, r Resolver) (protoreflect.FileDescriptor, error) {
return FileOptions{}.New(fd, r)
}
// NewFiles creates a new protoregistry.Files from the provided
// FileDescriptorSet message. See FileOptions.NewFiles for more information.
func NewFiles(fd *descriptorpb.FileDescriptorSet) (*protoregistry.Files, error) {
return FileOptions{}.NewFiles(fd)
}
// New creates a new protoreflect.FileDescriptor from the provided
// file descriptor message. The file must represent a valid proto file according
// to protobuf semantics. The returned descriptor is a deep copy of the input.
//
// Any imported files, enum types, or message types referenced in the file are
// resolved using the provided registry. When looking up an import file path,
// the path must be unique. The newly created file descriptor is not registered
// back into the provided file registry.
func (o FileOptions) New(fd *descriptorpb.FileDescriptorProto, r Resolver) (protoreflect.FileDescriptor, error) {
if r == nil {
r = (*protoregistry.Files)(nil) // empty resolver
}
// Handle the file descriptor content.
f := &filedesc.File{L2: &filedesc.FileL2{}}
switch fd.GetSyntax() {
case "proto2", "":
f.L1.Syntax = protoreflect.Proto2
case "proto3":
f.L1.Syntax = protoreflect.Proto3
default:
return nil, errors.New("invalid syntax: %q", fd.GetSyntax())
}
f.L1.Path = fd.GetName()
if f.L1.Path == "" {
return nil, errors.New("file path must be populated")
}
f.L1.Package = protoreflect.FullName(fd.GetPackage())
if !f.L1.Package.IsValid() && f.L1.Package != "" {
return nil, errors.New("invalid package: %q", f.L1.Package)
}
if opts := fd.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.FileOptions)
f.L2.Options = func() protoreflect.ProtoMessage { return opts }
}
f.L2.Imports = make(filedesc.FileImports, len(fd.GetDependency()))
for _, i := range fd.GetPublicDependency() {
if !(0 <= i && int(i) < len(f.L2.Imports)) || f.L2.Imports[i].IsPublic {
return nil, errors.New("invalid or duplicate public import index: %d", i)
}
f.L2.Imports[i].IsPublic = true
}
for _, i := range fd.GetWeakDependency() {
if !(0 <= i && int(i) < len(f.L2.Imports)) || f.L2.Imports[i].IsWeak {
return nil, errors.New("invalid or duplicate weak import index: %d", i)
}
f.L2.Imports[i].IsWeak = true
}
imps := importSet{f.Path(): true}
for i, path := range fd.GetDependency() {
imp := &f.L2.Imports[i]
f, err := r.FindFileByPath(path)
if err == protoregistry.NotFound && (o.AllowUnresolvable || imp.IsWeak) {
f = filedesc.PlaceholderFile(path)
} else if err != nil {
return nil, errors.New("could not resolve import %q: %v", path, err)
}
imp.FileDescriptor = f
if imps[imp.Path()] {
return nil, errors.New("already imported %q", path)
}
imps[imp.Path()] = true
}
for i := range fd.GetDependency() {
imp := &f.L2.Imports[i]
imps.importPublic(imp.Imports())
}
// Handle source locations.
f.L2.Locations.File = f
for _, loc := range fd.GetSourceCodeInfo().GetLocation() {
var l protoreflect.SourceLocation
// TODO: Validate that the path points to an actual declaration?
l.Path = protoreflect.SourcePath(loc.GetPath())
s := loc.GetSpan()
switch len(s) {
case 3:
l.StartLine, l.StartColumn, l.EndLine, l.EndColumn = int(s[0]), int(s[1]), int(s[0]), int(s[2])
case 4:
l.StartLine, l.StartColumn, l.EndLine, l.EndColumn = int(s[0]), int(s[1]), int(s[2]), int(s[3])
default:
return nil, errors.New("invalid span: %v", s)
}
// TODO: Validate that the span information is sensible?
// See https://github.com/protocolbuffers/protobuf/issues/6378.
if false && (l.EndLine < l.StartLine || l.StartLine < 0 || l.StartColumn < 0 || l.EndColumn < 0 ||
(l.StartLine == l.EndLine && l.EndColumn <= l.StartColumn)) {
return nil, errors.New("invalid span: %v", s)
}
l.LeadingDetachedComments = loc.GetLeadingDetachedComments()
l.LeadingComments = loc.GetLeadingComments()
l.TrailingComments = loc.GetTrailingComments()
f.L2.Locations.List = append(f.L2.Locations.List, l)
}
// Step 1: Allocate and derive the names for all declarations.
// This copies all fields from the descriptor proto except:
// google.protobuf.FieldDescriptorProto.type_name
// google.protobuf.FieldDescriptorProto.default_value
// google.protobuf.FieldDescriptorProto.oneof_index
// google.protobuf.FieldDescriptorProto.extendee
// google.protobuf.MethodDescriptorProto.input
// google.protobuf.MethodDescriptorProto.output
var err error
sb := new(strs.Builder)
r1 := make(descsByName)
if f.L1.Enums.List, err = r1.initEnumDeclarations(fd.GetEnumType(), f, sb); err != nil {
return nil, err
}
if f.L1.Messages.List, err = r1.initMessagesDeclarations(fd.GetMessageType(), f, sb); err != nil {
return nil, err
}
if f.L1.Extensions.List, err = r1.initExtensionDeclarations(fd.GetExtension(), f, sb); err != nil {
return nil, err
}
if f.L1.Services.List, err = r1.initServiceDeclarations(fd.GetService(), f, sb); err != nil {
return nil, err
}
// Step 2: Resolve every dependency reference not handled by step 1.
r2 := &resolver{local: r1, remote: r, imports: imps, allowUnresolvable: o.AllowUnresolvable}
if err := r2.resolveMessageDependencies(f.L1.Messages.List, fd.GetMessageType()); err != nil {
return nil, err
}
if err := r2.resolveExtensionDependencies(f.L1.Extensions.List, fd.GetExtension()); err != nil {
return nil, err
}
if err := r2.resolveServiceDependencies(f.L1.Services.List, fd.GetService()); err != nil {
return nil, err
}
// Step 3: Validate every enum, message, and extension declaration.
if err := validateEnumDeclarations(f.L1.Enums.List, fd.GetEnumType()); err != nil {
return nil, err
}
if err := validateMessageDeclarations(f.L1.Messages.List, fd.GetMessageType()); err != nil {
return nil, err
}
if err := validateExtensionDeclarations(f.L1.Extensions.List, fd.GetExtension()); err != nil {
return nil, err
}
return f, nil
}
type importSet map[string]bool
func (is importSet) importPublic(imps protoreflect.FileImports) {
for i := 0; i < imps.Len(); i++ {
if imp := imps.Get(i); imp.IsPublic {
is[imp.Path()] = true
is.importPublic(imp.Imports())
}
}
}
// NewFiles creates a new protoregistry.Files from the provided
// FileDescriptorSet message. The descriptor set must include only
// valid files according to protobuf semantics. The returned descriptors
// are a deep copy of the input.
func (o FileOptions) NewFiles(fds *descriptorpb.FileDescriptorSet) (*protoregistry.Files, error) {
files := make(map[string]*descriptorpb.FileDescriptorProto)
for _, fd := range fds.File {
if _, ok := files[fd.GetName()]; ok {
return nil, errors.New("file appears multiple times: %q", fd.GetName())
}
files[fd.GetName()] = fd
}
r := &protoregistry.Files{}
for _, fd := range files {
if err := o.addFileDeps(r, fd, files); err != nil {
return nil, err
}
}
return r, nil
}
func (o FileOptions) addFileDeps(r *protoregistry.Files, fd *descriptorpb.FileDescriptorProto, files map[string]*descriptorpb.FileDescriptorProto) error {
// Set the entry to nil while descending into a file's dependencies to detect cycles.
files[fd.GetName()] = nil
for _, dep := range fd.Dependency {
depfd, ok := files[dep]
if depfd == nil {
if ok {
return errors.New("import cycle in file: %q", dep)
}
continue
}
if err := o.addFileDeps(r, depfd, files); err != nil {
return err
}
}
// Delete the entry once dependencies are processed.
delete(files, fd.GetName())
f, err := o.New(fd, r)
if err != nil {
return err
}
return r.RegisterFile(f)
}


@ -1,248 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protodesc
import (
"google.golang.org/protobuf/internal/errors"
"google.golang.org/protobuf/internal/filedesc"
"google.golang.org/protobuf/internal/strs"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/descriptorpb"
)
type descsByName map[protoreflect.FullName]protoreflect.Descriptor
func (r descsByName) initEnumDeclarations(eds []*descriptorpb.EnumDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (es []filedesc.Enum, err error) {
es = make([]filedesc.Enum, len(eds)) // allocate up-front to ensure stable pointers
for i, ed := range eds {
e := &es[i]
e.L2 = new(filedesc.EnumL2)
if e.L0, err = r.makeBase(e, parent, ed.GetName(), i, sb); err != nil {
return nil, err
}
if opts := ed.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.EnumOptions)
e.L2.Options = func() protoreflect.ProtoMessage { return opts }
}
for _, s := range ed.GetReservedName() {
e.L2.ReservedNames.List = append(e.L2.ReservedNames.List, protoreflect.Name(s))
}
for _, rr := range ed.GetReservedRange() {
e.L2.ReservedRanges.List = append(e.L2.ReservedRanges.List, [2]protoreflect.EnumNumber{
protoreflect.EnumNumber(rr.GetStart()),
protoreflect.EnumNumber(rr.GetEnd()),
})
}
if e.L2.Values.List, err = r.initEnumValuesFromDescriptorProto(ed.GetValue(), e, sb); err != nil {
return nil, err
}
}
return es, nil
}
func (r descsByName) initEnumValuesFromDescriptorProto(vds []*descriptorpb.EnumValueDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (vs []filedesc.EnumValue, err error) {
vs = make([]filedesc.EnumValue, len(vds)) // allocate up-front to ensure stable pointers
for i, vd := range vds {
v := &vs[i]
if v.L0, err = r.makeBase(v, parent, vd.GetName(), i, sb); err != nil {
return nil, err
}
if opts := vd.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.EnumValueOptions)
v.L1.Options = func() protoreflect.ProtoMessage { return opts }
}
v.L1.Number = protoreflect.EnumNumber(vd.GetNumber())
}
return vs, nil
}
func (r descsByName) initMessagesDeclarations(mds []*descriptorpb.DescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ms []filedesc.Message, err error) {
ms = make([]filedesc.Message, len(mds)) // allocate up-front to ensure stable pointers
for i, md := range mds {
m := &ms[i]
m.L2 = new(filedesc.MessageL2)
if m.L0, err = r.makeBase(m, parent, md.GetName(), i, sb); err != nil {
return nil, err
}
if opts := md.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.MessageOptions)
m.L2.Options = func() protoreflect.ProtoMessage { return opts }
m.L1.IsMapEntry = opts.GetMapEntry()
m.L1.IsMessageSet = opts.GetMessageSetWireFormat()
}
for _, s := range md.GetReservedName() {
m.L2.ReservedNames.List = append(m.L2.ReservedNames.List, protoreflect.Name(s))
}
for _, rr := range md.GetReservedRange() {
m.L2.ReservedRanges.List = append(m.L2.ReservedRanges.List, [2]protoreflect.FieldNumber{
protoreflect.FieldNumber(rr.GetStart()),
protoreflect.FieldNumber(rr.GetEnd()),
})
}
for _, xr := range md.GetExtensionRange() {
m.L2.ExtensionRanges.List = append(m.L2.ExtensionRanges.List, [2]protoreflect.FieldNumber{
protoreflect.FieldNumber(xr.GetStart()),
protoreflect.FieldNumber(xr.GetEnd()),
})
var optsFunc func() protoreflect.ProtoMessage
if opts := xr.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.ExtensionRangeOptions)
optsFunc = func() protoreflect.ProtoMessage { return opts }
}
m.L2.ExtensionRangeOptions = append(m.L2.ExtensionRangeOptions, optsFunc)
}
if m.L2.Fields.List, err = r.initFieldsFromDescriptorProto(md.GetField(), m, sb); err != nil {
return nil, err
}
if m.L2.Oneofs.List, err = r.initOneofsFromDescriptorProto(md.GetOneofDecl(), m, sb); err != nil {
return nil, err
}
if m.L1.Enums.List, err = r.initEnumDeclarations(md.GetEnumType(), m, sb); err != nil {
return nil, err
}
if m.L1.Messages.List, err = r.initMessagesDeclarations(md.GetNestedType(), m, sb); err != nil {
return nil, err
}
if m.L1.Extensions.List, err = r.initExtensionDeclarations(md.GetExtension(), m, sb); err != nil {
return nil, err
}
}
return ms, nil
}
func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (fs []filedesc.Field, err error) {
fs = make([]filedesc.Field, len(fds)) // allocate up-front to ensure stable pointers
for i, fd := range fds {
f := &fs[i]
if f.L0, err = r.makeBase(f, parent, fd.GetName(), i, sb); err != nil {
return nil, err
}
f.L1.IsProto3Optional = fd.GetProto3Optional()
if opts := fd.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.FieldOptions)
f.L1.Options = func() protoreflect.ProtoMessage { return opts }
f.L1.IsWeak = opts.GetWeak()
f.L1.HasPacked = opts.Packed != nil
f.L1.IsPacked = opts.GetPacked()
}
f.L1.Number = protoreflect.FieldNumber(fd.GetNumber())
f.L1.Cardinality = protoreflect.Cardinality(fd.GetLabel())
if fd.Type != nil {
f.L1.Kind = protoreflect.Kind(fd.GetType())
}
if fd.JsonName != nil {
f.L1.StringName.InitJSON(fd.GetJsonName())
}
}
return fs, nil
}
func (r descsByName) initOneofsFromDescriptorProto(ods []*descriptorpb.OneofDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (os []filedesc.Oneof, err error) {
os = make([]filedesc.Oneof, len(ods)) // allocate up-front to ensure stable pointers
for i, od := range ods {
o := &os[i]
if o.L0, err = r.makeBase(o, parent, od.GetName(), i, sb); err != nil {
return nil, err
}
if opts := od.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.OneofOptions)
o.L1.Options = func() protoreflect.ProtoMessage { return opts }
}
}
return os, nil
}
func (r descsByName) initExtensionDeclarations(xds []*descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (xs []filedesc.Extension, err error) {
xs = make([]filedesc.Extension, len(xds)) // allocate up-front to ensure stable pointers
for i, xd := range xds {
x := &xs[i]
x.L2 = new(filedesc.ExtensionL2)
if x.L0, err = r.makeBase(x, parent, xd.GetName(), i, sb); err != nil {
return nil, err
}
if opts := xd.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.FieldOptions)
x.L2.Options = func() protoreflect.ProtoMessage { return opts }
x.L2.IsPacked = opts.GetPacked()
}
x.L1.Number = protoreflect.FieldNumber(xd.GetNumber())
x.L1.Cardinality = protoreflect.Cardinality(xd.GetLabel())
if xd.Type != nil {
x.L1.Kind = protoreflect.Kind(xd.GetType())
}
if xd.JsonName != nil {
x.L2.StringName.InitJSON(xd.GetJsonName())
}
}
return xs, nil
}
func (r descsByName) initServiceDeclarations(sds []*descriptorpb.ServiceDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ss []filedesc.Service, err error) {
ss = make([]filedesc.Service, len(sds)) // allocate up-front to ensure stable pointers
for i, sd := range sds {
s := &ss[i]
s.L2 = new(filedesc.ServiceL2)
if s.L0, err = r.makeBase(s, parent, sd.GetName(), i, sb); err != nil {
return nil, err
}
if opts := sd.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.ServiceOptions)
s.L2.Options = func() protoreflect.ProtoMessage { return opts }
}
if s.L2.Methods.List, err = r.initMethodsFromDescriptorProto(sd.GetMethod(), s, sb); err != nil {
return nil, err
}
}
return ss, nil
}
func (r descsByName) initMethodsFromDescriptorProto(mds []*descriptorpb.MethodDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ms []filedesc.Method, err error) {
ms = make([]filedesc.Method, len(mds)) // allocate up-front to ensure stable pointers
for i, md := range mds {
m := &ms[i]
if m.L0, err = r.makeBase(m, parent, md.GetName(), i, sb); err != nil {
return nil, err
}
if opts := md.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.MethodOptions)
m.L1.Options = func() protoreflect.ProtoMessage { return opts }
}
m.L1.IsStreamingClient = md.GetClientStreaming()
m.L1.IsStreamingServer = md.GetServerStreaming()
}
return ms, nil
}
func (r descsByName) makeBase(child, parent protoreflect.Descriptor, name string, idx int, sb *strs.Builder) (filedesc.BaseL0, error) {
if !protoreflect.Name(name).IsValid() {
return filedesc.BaseL0{}, errors.New("descriptor %q has an invalid nested name: %q", parent.FullName(), name)
}
// Derive the full name of the child.
// Note that enum values are a sibling to the enum parent in the namespace.
var fullName protoreflect.FullName
if _, ok := parent.(protoreflect.EnumDescriptor); ok {
fullName = sb.AppendFullName(parent.FullName().Parent(), protoreflect.Name(name))
} else {
fullName = sb.AppendFullName(parent.FullName(), protoreflect.Name(name))
}
if _, ok := r[fullName]; ok {
return filedesc.BaseL0{}, errors.New("descriptor %q already declared", fullName)
}
r[fullName] = child
// TODO: Verify that the full name does not already exist in the resolver?
// This is not as critical since most usages of NewFile will register
// the created file back into the registry, which will perform this check.
return filedesc.BaseL0{
FullName: fullName,
ParentFile: parent.ParentFile().(*filedesc.File),
Parent: parent,
Index: idx,
}, nil
}


@ -1,286 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protodesc
import (
"google.golang.org/protobuf/internal/encoding/defval"
"google.golang.org/protobuf/internal/errors"
"google.golang.org/protobuf/internal/filedesc"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
"google.golang.org/protobuf/types/descriptorpb"
)
// resolver is a wrapper around a local registry of declarations within the file
// and the remote resolver. The remote resolver is restricted to only return
// descriptors that have been imported.
type resolver struct {
local descsByName
remote Resolver
imports importSet
allowUnresolvable bool
}
func (r *resolver) resolveMessageDependencies(ms []filedesc.Message, mds []*descriptorpb.DescriptorProto) (err error) {
for i, md := range mds {
m := &ms[i]
for j, fd := range md.GetField() {
f := &m.L2.Fields.List[j]
if f.L1.Cardinality == protoreflect.Required {
m.L2.RequiredNumbers.List = append(m.L2.RequiredNumbers.List, f.L1.Number)
}
if fd.OneofIndex != nil {
k := int(fd.GetOneofIndex())
if !(0 <= k && k < len(md.GetOneofDecl())) {
return errors.New("message field %q has an invalid oneof index: %d", f.FullName(), k)
}
o := &m.L2.Oneofs.List[k]
f.L1.ContainingOneof = o
o.L1.Fields.List = append(o.L1.Fields.List, f)
}
if f.L1.Kind, f.L1.Enum, f.L1.Message, err = r.findTarget(f.Kind(), f.Parent().FullName(), partialName(fd.GetTypeName()), f.IsWeak()); err != nil {
return errors.New("message field %q cannot resolve type: %v", f.FullName(), err)
}
if fd.DefaultValue != nil {
v, ev, err := unmarshalDefault(fd.GetDefaultValue(), f, r.allowUnresolvable)
if err != nil {
return errors.New("message field %q has invalid default: %v", f.FullName(), err)
}
f.L1.Default = filedesc.DefaultValue(v, ev)
}
}
if err := r.resolveMessageDependencies(m.L1.Messages.List, md.GetNestedType()); err != nil {
return err
}
if err := r.resolveExtensionDependencies(m.L1.Extensions.List, md.GetExtension()); err != nil {
return err
}
}
return nil
}
func (r *resolver) resolveExtensionDependencies(xs []filedesc.Extension, xds []*descriptorpb.FieldDescriptorProto) (err error) {
for i, xd := range xds {
x := &xs[i]
if x.L1.Extendee, err = r.findMessageDescriptor(x.Parent().FullName(), partialName(xd.GetExtendee()), false); err != nil {
return errors.New("extension field %q cannot resolve extendee: %v", x.FullName(), err)
}
if x.L1.Kind, x.L2.Enum, x.L2.Message, err = r.findTarget(x.Kind(), x.Parent().FullName(), partialName(xd.GetTypeName()), false); err != nil {
return errors.New("extension field %q cannot resolve type: %v", x.FullName(), err)
}
if xd.DefaultValue != nil {
v, ev, err := unmarshalDefault(xd.GetDefaultValue(), x, r.allowUnresolvable)
if err != nil {
return errors.New("extension field %q has invalid default: %v", x.FullName(), err)
}
x.L2.Default = filedesc.DefaultValue(v, ev)
}
}
return nil
}
func (r *resolver) resolveServiceDependencies(ss []filedesc.Service, sds []*descriptorpb.ServiceDescriptorProto) (err error) {
for i, sd := range sds {
s := &ss[i]
for j, md := range sd.GetMethod() {
m := &s.L2.Methods.List[j]
m.L1.Input, err = r.findMessageDescriptor(m.Parent().FullName(), partialName(md.GetInputType()), false)
if err != nil {
return errors.New("service method %q cannot resolve input: %v", m.FullName(), err)
}
m.L1.Output, err = r.findMessageDescriptor(s.FullName(), partialName(md.GetOutputType()), false)
if err != nil {
return errors.New("service method %q cannot resolve output: %v", m.FullName(), err)
}
}
}
return nil
}
// findTarget finds an enum or message descriptor if k is an enum, message,
// group, or unknown. If unknown, and the name could be resolved, the
// returned kind is set based on the type of the resolved descriptor.
func (r *resolver) findTarget(k protoreflect.Kind, scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.Kind, protoreflect.EnumDescriptor, protoreflect.MessageDescriptor, error) {
switch k {
case protoreflect.EnumKind:
ed, err := r.findEnumDescriptor(scope, ref, isWeak)
if err != nil {
return 0, nil, nil, err
}
return k, ed, nil, nil
case protoreflect.MessageKind, protoreflect.GroupKind:
md, err := r.findMessageDescriptor(scope, ref, isWeak)
if err != nil {
return 0, nil, nil, err
}
return k, nil, md, nil
case 0:
// Handle unspecified kinds (possible with parsers that operate
// on a per-file basis without knowledge of dependencies).
d, err := r.findDescriptor(scope, ref)
if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) {
return k, filedesc.PlaceholderEnum(ref.FullName()), filedesc.PlaceholderMessage(ref.FullName()), nil
} else if err == protoregistry.NotFound {
return 0, nil, nil, errors.New("%q not found", ref.FullName())
} else if err != nil {
return 0, nil, nil, err
}
switch d := d.(type) {
case protoreflect.EnumDescriptor:
return protoreflect.EnumKind, d, nil, nil
case protoreflect.MessageDescriptor:
return protoreflect.MessageKind, nil, d, nil
default:
return 0, nil, nil, errors.New("unknown kind")
}
default:
if ref != "" {
return 0, nil, nil, errors.New("target name cannot be specified for %v", k)
}
if !k.IsValid() {
return 0, nil, nil, errors.New("invalid kind: %d", k)
}
return k, nil, nil, nil
}
}
// findDescriptor finds the descriptor by name,
// which may be a relative name within some scope.
//
// Suppose the scope was "fizz.buzz" and the reference was "Foo.Bar",
// then the following full names are searched:
// * fizz.buzz.Foo.Bar
// * fizz.Foo.Bar
// * Foo.Bar
func (r *resolver) findDescriptor(scope protoreflect.FullName, ref partialName) (protoreflect.Descriptor, error) {
if !ref.IsValid() {
return nil, errors.New("invalid name reference: %q", ref)
}
if ref.IsFull() {
scope, ref = "", ref[1:]
}
var foundButNotImported protoreflect.Descriptor
for {
// Derive the full name to search.
s := protoreflect.FullName(ref)
if scope != "" {
s = scope + "." + s
}
// Check the current file for the descriptor.
if d, ok := r.local[s]; ok {
return d, nil
}
// Check the remote registry for the descriptor.
d, err := r.remote.FindDescriptorByName(s)
if err == nil {
// Only allow descriptors covered by one of the imports.
if r.imports[d.ParentFile().Path()] {
return d, nil
}
foundButNotImported = d
} else if err != protoregistry.NotFound {
return nil, errors.Wrap(err, "%q", s)
}
// Continue on at a higher level of scoping.
if scope == "" {
if d := foundButNotImported; d != nil {
return nil, errors.New("resolved %q, but %q is not imported", d.FullName(), d.ParentFile().Path())
}
return nil, protoregistry.NotFound
}
scope = scope.Parent()
}
}
func (r *resolver) findEnumDescriptor(scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.EnumDescriptor, error) {
d, err := r.findDescriptor(scope, ref)
if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) {
return filedesc.PlaceholderEnum(ref.FullName()), nil
} else if err == protoregistry.NotFound {
return nil, errors.New("%q not found", ref.FullName())
} else if err != nil {
return nil, err
}
ed, ok := d.(protoreflect.EnumDescriptor)
if !ok {
return nil, errors.New("resolved %q, but it is not an enum", d.FullName())
}
return ed, nil
}
func (r *resolver) findMessageDescriptor(scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.MessageDescriptor, error) {
d, err := r.findDescriptor(scope, ref)
if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) {
return filedesc.PlaceholderMessage(ref.FullName()), nil
} else if err == protoregistry.NotFound {
return nil, errors.New("%q not found", ref.FullName())
} else if err != nil {
return nil, err
}
md, ok := d.(protoreflect.MessageDescriptor)
if !ok {
return nil, errors.New("resolved %q, but it is not an message", d.FullName())
}
return md, nil
}
// partialName is the partial name. A leading dot means that the name is full,
// otherwise the name is relative to some current scope.
// See google.protobuf.FieldDescriptorProto.type_name.
type partialName string
func (s partialName) IsFull() bool {
return len(s) > 0 && s[0] == '.'
}
func (s partialName) IsValid() bool {
if s.IsFull() {
return protoreflect.FullName(s[1:]).IsValid()
}
return protoreflect.FullName(s).IsValid()
}
const unknownPrefix = "*."
// FullName converts the partial name to a full name on a best-effort basis.
// If relative, it creates an invalid full name, using a "*." prefix
// to indicate that the start of the full name is unknown.
func (s partialName) FullName() protoreflect.FullName {
if s.IsFull() {
return protoreflect.FullName(s[1:])
}
return protoreflect.FullName(unknownPrefix + s)
}
func unmarshalDefault(s string, fd protoreflect.FieldDescriptor, allowUnresolvable bool) (protoreflect.Value, protoreflect.EnumValueDescriptor, error) {
var evs protoreflect.EnumValueDescriptors
if fd.Enum() != nil {
evs = fd.Enum().Values()
}
v, ev, err := defval.Unmarshal(s, fd.Kind(), evs, defval.Descriptor)
if err != nil && allowUnresolvable && evs != nil && protoreflect.Name(s).IsValid() {
v = protoreflect.ValueOfEnum(0)
if evs.Len() > 0 {
v = protoreflect.ValueOfEnum(evs.Get(0).Number())
}
ev = filedesc.PlaceholderEnumValue(fd.Enum().FullName().Parent().Append(protoreflect.Name(s)))
} else if err != nil {
return v, ev, err
}
if fd.Syntax() == protoreflect.Proto3 {
return v, ev, errors.New("cannot be specified under proto3 semantics")
}
if fd.Kind() == protoreflect.MessageKind || fd.Kind() == protoreflect.GroupKind || fd.Cardinality() == protoreflect.Repeated {
return v, ev, errors.New("cannot be specified on composite types")
}
return v, ev, nil
}
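The allowUnresolvable flag carried by this resolver is exposed publicly as FileOptions.AllowUnresolvable: unresolvable enum and message references become placeholder descriptors instead of hard errors. A hedged sketch under that assumption; all file, message, and field names below are made up for illustration:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/reflect/protoregistry"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fd := &descriptorpb.FileDescriptorProto{
		Name:   proto.String("example.proto"), // hypothetical
		Syntax: proto.String("proto3"),
		MessageType: []*descriptorpb.DescriptorProto{{
			Name: proto.String("Holder"),
			Field: []*descriptorpb.FieldDescriptorProto{{
				Name:     proto.String("payload"),
				Number:   proto.Int32(1),
				Label:    descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
				Type:     descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum(),
				TypeName: proto.String(".missing.Payload"), // not registered anywhere
			}},
		}},
	}

	// Strict mode: resolution fails because .missing.Payload cannot be found.
	if _, err := protodesc.NewFile(fd, protoregistry.GlobalFiles); err != nil {
		fmt.Println("strict:", err)
	}

	// Lenient mode: the unresolved reference becomes a placeholder descriptor.
	f, err := protodesc.FileOptions{AllowUnresolvable: true}.NewFile(fd, protoregistry.GlobalFiles)
	if err != nil {
		panic(err)
	}
	payload := f.Messages().Get(0).Fields().Get(0).Message()
	fmt.Println("lenient placeholder:", payload.FullName(), payload.IsPlaceholder())
}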


@ -1,374 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protodesc
import (
"strings"
"unicode"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/internal/errors"
"google.golang.org/protobuf/internal/filedesc"
"google.golang.org/protobuf/internal/flags"
"google.golang.org/protobuf/internal/genid"
"google.golang.org/protobuf/internal/strs"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/descriptorpb"
)
func validateEnumDeclarations(es []filedesc.Enum, eds []*descriptorpb.EnumDescriptorProto) error {
for i, ed := range eds {
e := &es[i]
if err := e.L2.ReservedNames.CheckValid(); err != nil {
return errors.New("enum %q reserved names has %v", e.FullName(), err)
}
if err := e.L2.ReservedRanges.CheckValid(); err != nil {
return errors.New("enum %q reserved ranges has %v", e.FullName(), err)
}
if len(ed.GetValue()) == 0 {
return errors.New("enum %q must contain at least one value declaration", e.FullName())
}
allowAlias := ed.GetOptions().GetAllowAlias()
foundAlias := false
for i := 0; i < e.Values().Len(); i++ {
v1 := e.Values().Get(i)
if v2 := e.Values().ByNumber(v1.Number()); v1 != v2 {
foundAlias = true
if !allowAlias {
return errors.New("enum %q has conflicting non-aliased values on number %d: %q with %q", e.FullName(), v1.Number(), v1.Name(), v2.Name())
}
}
}
if allowAlias && !foundAlias {
return errors.New("enum %q allows aliases, but none were found", e.FullName())
}
if e.Syntax() == protoreflect.Proto3 {
if v := e.Values().Get(0); v.Number() != 0 {
return errors.New("enum %q using proto3 semantics must have zero number for the first value", v.FullName())
}
// Verify that value names in proto3 do not conflict if the
// case-insensitive prefix is removed.
// See protoc v3.8.0: src/google/protobuf/descriptor.cc:4991-5055
names := map[string]protoreflect.EnumValueDescriptor{}
prefix := strings.Replace(strings.ToLower(string(e.Name())), "_", "", -1)
for i := 0; i < e.Values().Len(); i++ {
v1 := e.Values().Get(i)
s := strs.EnumValueName(strs.TrimEnumPrefix(string(v1.Name()), prefix))
if v2, ok := names[s]; ok && v1.Number() != v2.Number() {
return errors.New("enum %q using proto3 semantics has conflict: %q with %q", e.FullName(), v1.Name(), v2.Name())
}
names[s] = v1
}
}
for j, vd := range ed.GetValue() {
v := &e.L2.Values.List[j]
if vd.Number == nil {
return errors.New("enum value %q must have a specified number", v.FullName())
}
if e.L2.ReservedNames.Has(v.Name()) {
return errors.New("enum value %q must not use reserved name", v.FullName())
}
if e.L2.ReservedRanges.Has(v.Number()) {
return errors.New("enum value %q must not use reserved number %d", v.FullName(), v.Number())
}
}
}
return nil
}
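These enum checks surface through the public constructors rather than being callable directly. A sketch showing the proto3 "first value must be zero" rule tripping; the file, package, and enum names are invented:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/reflect/protoregistry"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fd := &descriptorpb.FileDescriptorProto{
		Name:    proto.String("example.proto"), // hypothetical
		Package: proto.String("example"),
		Syntax:  proto.String("proto3"),
		EnumType: []*descriptorpb.EnumDescriptorProto{{
			Name: proto.String("Color"),
			Value: []*descriptorpb.EnumValueDescriptorProto{{
				Name:   proto.String("COLOR_RED"),
				Number: proto.Int32(1), // proto3 requires the first value to be 0
			}},
		}},
	}
	_, err := protodesc.NewFile(fd, protoregistry.GlobalFiles)
	fmt.Println(err) // expect: "... using proto3 semantics must have zero number for the first value"
}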
func validateMessageDeclarations(ms []filedesc.Message, mds []*descriptorpb.DescriptorProto) error {
for i, md := range mds {
m := &ms[i]
// Handle the message descriptor itself.
isMessageSet := md.GetOptions().GetMessageSetWireFormat()
if err := m.L2.ReservedNames.CheckValid(); err != nil {
return errors.New("message %q reserved names has %v", m.FullName(), err)
}
if err := m.L2.ReservedRanges.CheckValid(isMessageSet); err != nil {
return errors.New("message %q reserved ranges has %v", m.FullName(), err)
}
if err := m.L2.ExtensionRanges.CheckValid(isMessageSet); err != nil {
return errors.New("message %q extension ranges has %v", m.FullName(), err)
}
if err := (*filedesc.FieldRanges).CheckOverlap(&m.L2.ReservedRanges, &m.L2.ExtensionRanges); err != nil {
return errors.New("message %q reserved and extension ranges has %v", m.FullName(), err)
}
for i := 0; i < m.Fields().Len(); i++ {
f1 := m.Fields().Get(i)
if f2 := m.Fields().ByNumber(f1.Number()); f1 != f2 {
return errors.New("message %q has conflicting fields: %q with %q", m.FullName(), f1.Name(), f2.Name())
}
}
if isMessageSet && !flags.ProtoLegacy {
return errors.New("message %q is a MessageSet, which is a legacy proto1 feature that is no longer supported", m.FullName())
}
if isMessageSet && (m.Syntax() != protoreflect.Proto2 || m.Fields().Len() > 0 || m.ExtensionRanges().Len() == 0) {
return errors.New("message %q is an invalid proto1 MessageSet", m.FullName())
}
if m.Syntax() == protoreflect.Proto3 {
if m.ExtensionRanges().Len() > 0 {
return errors.New("message %q using proto3 semantics cannot have extension ranges", m.FullName())
}
// Verify that field names in proto3 do not conflict if lowercased
// with all underscores removed.
// See protoc v3.8.0: src/google/protobuf/descriptor.cc:5830-5847
names := map[string]protoreflect.FieldDescriptor{}
for i := 0; i < m.Fields().Len(); i++ {
f1 := m.Fields().Get(i)
s := strings.Replace(strings.ToLower(string(f1.Name())), "_", "", -1)
if f2, ok := names[s]; ok {
return errors.New("message %q using proto3 semantics has conflict: %q with %q", m.FullName(), f1.Name(), f2.Name())
}
names[s] = f1
}
}
for j, fd := range md.GetField() {
f := &m.L2.Fields.List[j]
if m.L2.ReservedNames.Has(f.Name()) {
return errors.New("message field %q must not use reserved name", f.FullName())
}
if !f.Number().IsValid() {
return errors.New("message field %q has an invalid number: %d", f.FullName(), f.Number())
}
if !f.Cardinality().IsValid() {
return errors.New("message field %q has an invalid cardinality: %d", f.FullName(), f.Cardinality())
}
if m.L2.ReservedRanges.Has(f.Number()) {
return errors.New("message field %q must not use reserved number %d", f.FullName(), f.Number())
}
if m.L2.ExtensionRanges.Has(f.Number()) {
return errors.New("message field %q with number %d in extension range", f.FullName(), f.Number())
}
if fd.Extendee != nil {
return errors.New("message field %q may not have extendee: %q", f.FullName(), fd.GetExtendee())
}
if f.L1.IsProto3Optional {
if f.Syntax() != protoreflect.Proto3 {
return errors.New("message field %q under proto3 optional semantics must be specified in the proto3 syntax", f.FullName())
}
if f.Cardinality() != protoreflect.Optional {
return errors.New("message field %q under proto3 optional semantics must have optional cardinality", f.FullName())
}
if f.ContainingOneof() != nil && f.ContainingOneof().Fields().Len() != 1 {
return errors.New("message field %q under proto3 optional semantics must be within a single element oneof", f.FullName())
}
}
if f.IsWeak() && !flags.ProtoLegacy {
return errors.New("message field %q is a weak field, which is a legacy proto1 feature that is no longer supported", f.FullName())
}
if f.IsWeak() && (f.Syntax() != protoreflect.Proto2 || !isOptionalMessage(f) || f.ContainingOneof() != nil) {
return errors.New("message field %q may only be weak for an optional message", f.FullName())
}
if f.IsPacked() && !isPackable(f) {
return errors.New("message field %q is not packable", f.FullName())
}
if err := checkValidGroup(f); err != nil {
return errors.New("message field %q is an invalid group: %v", f.FullName(), err)
}
if err := checkValidMap(f); err != nil {
return errors.New("message field %q is an invalid map: %v", f.FullName(), err)
}
if f.Syntax() == protoreflect.Proto3 {
if f.Cardinality() == protoreflect.Required {
return errors.New("message field %q using proto3 semantics cannot be required", f.FullName())
}
if f.Enum() != nil && !f.Enum().IsPlaceholder() && f.Enum().Syntax() != protoreflect.Proto3 {
return errors.New("message field %q using proto3 semantics may only depend on a proto3 enum", f.FullName())
}
}
}
seenSynthetic := false // synthetic oneofs for proto3 optional must come after real oneofs
for j := range md.GetOneofDecl() {
o := &m.L2.Oneofs.List[j]
if o.Fields().Len() == 0 {
return errors.New("message oneof %q must contain at least one field declaration", o.FullName())
}
if n := o.Fields().Len(); n-1 != (o.Fields().Get(n-1).Index() - o.Fields().Get(0).Index()) {
return errors.New("message oneof %q must have consecutively declared fields", o.FullName())
}
if o.IsSynthetic() {
seenSynthetic = true
continue
}
if !o.IsSynthetic() && seenSynthetic {
return errors.New("message oneof %q must be declared before synthetic oneofs", o.FullName())
}
for i := 0; i < o.Fields().Len(); i++ {
f := o.Fields().Get(i)
if f.Cardinality() != protoreflect.Optional {
return errors.New("message field %q belongs in a oneof and must be optional", f.FullName())
}
if f.IsWeak() {
return errors.New("message field %q belongs in a oneof and must not be a weak reference", f.FullName())
}
}
}
if err := validateEnumDeclarations(m.L1.Enums.List, md.GetEnumType()); err != nil {
return err
}
if err := validateMessageDeclarations(m.L1.Messages.List, md.GetNestedType()); err != nil {
return err
}
if err := validateExtensionDeclarations(m.L1.Extensions.List, md.GetExtension()); err != nil {
return err
}
}
return nil
}
func validateExtensionDeclarations(xs []filedesc.Extension, xds []*descriptorpb.FieldDescriptorProto) error {
for i, xd := range xds {
x := &xs[i]
// NOTE: Avoid using the IsValid method since extensions to MessageSet
// may have a field number higher than normal. This check only verifies
// that the number is not negative or reserved. We check again later
// if we know that the extendee is definitely not a MessageSet.
if n := x.Number(); n < 0 || (protowire.FirstReservedNumber <= n && n <= protowire.LastReservedNumber) {
return errors.New("extension field %q has an invalid number: %d", x.FullName(), x.Number())
}
if !x.Cardinality().IsValid() || x.Cardinality() == protoreflect.Required {
return errors.New("extension field %q has an invalid cardinality: %d", x.FullName(), x.Cardinality())
}
if xd.JsonName != nil {
// A bug in older versions of protoc would always populate the
// "json_name" option for extensions when it is meaningless.
// When it did so, it would always use the camel-cased field name.
if xd.GetJsonName() != strs.JSONCamelCase(string(x.Name())) {
return errors.New("extension field %q may not have an explicitly set JSON name: %q", x.FullName(), xd.GetJsonName())
}
}
if xd.OneofIndex != nil {
return errors.New("extension field %q may not be part of a oneof", x.FullName())
}
if md := x.ContainingMessage(); !md.IsPlaceholder() {
if !md.ExtensionRanges().Has(x.Number()) {
return errors.New("extension field %q extends %q with non-extension field number: %d", x.FullName(), md.FullName(), x.Number())
}
isMessageSet := md.Options().(*descriptorpb.MessageOptions).GetMessageSetWireFormat()
if isMessageSet && !isOptionalMessage(x) {
return errors.New("extension field %q extends MessageSet and must be an optional message", x.FullName())
}
if !isMessageSet && !x.Number().IsValid() {
return errors.New("extension field %q has an invalid number: %d", x.FullName(), x.Number())
}
}
if xd.GetOptions().GetWeak() {
return errors.New("extension field %q cannot be a weak reference", x.FullName())
}
if x.IsPacked() && !isPackable(x) {
return errors.New("extension field %q is not packable", x.FullName())
}
if err := checkValidGroup(x); err != nil {
return errors.New("extension field %q is an invalid group: %v", x.FullName(), err)
}
if md := x.Message(); md != nil && md.IsMapEntry() {
return errors.New("extension field %q cannot be a map entry", x.FullName())
}
if x.Syntax() == protoreflect.Proto3 {
switch x.ContainingMessage().FullName() {
case (*descriptorpb.FileOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.EnumOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.EnumValueOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.MessageOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.FieldOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.OneofOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.ExtensionRangeOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.ServiceOptions)(nil).ProtoReflect().Descriptor().FullName():
case (*descriptorpb.MethodOptions)(nil).ProtoReflect().Descriptor().FullName():
default:
return errors.New("extension field %q cannot be declared in proto3 unless extended descriptor options", x.FullName())
}
}
}
return nil
}
// isOptionalMessage reports whether this is an optional message.
// If the kind is unknown, it is assumed to be a message.
func isOptionalMessage(fd protoreflect.FieldDescriptor) bool {
return (fd.Kind() == 0 || fd.Kind() == protoreflect.MessageKind) && fd.Cardinality() == protoreflect.Optional
}
// isPackable checks whether the pack option can be specified.
func isPackable(fd protoreflect.FieldDescriptor) bool {
switch fd.Kind() {
case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind:
return false
}
return fd.IsList()
}
// checkValidGroup reports whether fd is a valid group according to the same
// rules that protoc imposes.
func checkValidGroup(fd protoreflect.FieldDescriptor) error {
md := fd.Message()
switch {
case fd.Kind() != protoreflect.GroupKind:
return nil
case fd.Syntax() != protoreflect.Proto2:
return errors.New("invalid under proto2 semantics")
case md == nil || md.IsPlaceholder():
return errors.New("message must be resolvable")
case fd.FullName().Parent() != md.FullName().Parent():
return errors.New("message and field must be declared in the same scope")
case !unicode.IsUpper(rune(md.Name()[0])):
return errors.New("message name must start with an uppercase")
case fd.Name() != protoreflect.Name(strings.ToLower(string(md.Name()))):
return errors.New("field name must be lowercased form of the message name")
}
return nil
}
// checkValidMap checks whether the field is a valid map according to the same
// rules that protoc imposes.
// See protoc v3.8.0: src/google/protobuf/descriptor.cc:6045-6115
func checkValidMap(fd protoreflect.FieldDescriptor) error {
md := fd.Message()
switch {
case md == nil || !md.IsMapEntry():
return nil
case fd.FullName().Parent() != md.FullName().Parent():
return errors.New("message and field must be declared in the same scope")
case md.Name() != protoreflect.Name(strs.MapEntryName(string(fd.Name()))):
return errors.New("incorrect implicit map entry name")
case fd.Cardinality() != protoreflect.Repeated:
return errors.New("field must be repeated")
case md.Fields().Len() != 2:
return errors.New("message must have exactly two fields")
case md.ExtensionRanges().Len() > 0:
return errors.New("message must not have any extension ranges")
case md.Enums().Len()+md.Messages().Len()+md.Extensions().Len() > 0:
return errors.New("message must not have any nested declarations")
}
kf := md.Fields().Get(0)
vf := md.Fields().Get(1)
switch {
case kf.Name() != genid.MapEntry_Key_field_name || kf.Number() != genid.MapEntry_Key_field_number || kf.Cardinality() != protoreflect.Optional || kf.ContainingOneof() != nil || kf.HasDefault():
return errors.New("invalid key field")
case vf.Name() != genid.MapEntry_Value_field_name || vf.Number() != genid.MapEntry_Value_field_number || vf.Cardinality() != protoreflect.Optional || vf.ContainingOneof() != nil || vf.HasDefault():
return errors.New("invalid value field")
}
switch kf.Kind() {
case protoreflect.BoolKind: // bool
case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind: // int32
case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: // int64
case protoreflect.Uint32Kind, protoreflect.Fixed32Kind: // uint32
case protoreflect.Uint64Kind, protoreflect.Fixed64Kind: // uint64
case protoreflect.StringKind: // string
default:
return errors.New("invalid key kind: %v", kf.Kind())
}
if e := vf.Enum(); e != nil && e.Values().Len() > 0 && e.Values().Get(0).Number() != 0 {
return errors.New("map enum value must have zero number for the first value")
}
return nil
}


@ -1,252 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protodesc
import (
"fmt"
"strings"
"google.golang.org/protobuf/internal/encoding/defval"
"google.golang.org/protobuf/internal/strs"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/descriptorpb"
)
// ToFileDescriptorProto copies a protoreflect.FileDescriptor into a
// google.protobuf.FileDescriptorProto message.
func ToFileDescriptorProto(file protoreflect.FileDescriptor) *descriptorpb.FileDescriptorProto {
p := &descriptorpb.FileDescriptorProto{
Name: proto.String(file.Path()),
Options: proto.Clone(file.Options()).(*descriptorpb.FileOptions),
}
if file.Package() != "" {
p.Package = proto.String(string(file.Package()))
}
for i, imports := 0, file.Imports(); i < imports.Len(); i++ {
imp := imports.Get(i)
p.Dependency = append(p.Dependency, imp.Path())
if imp.IsPublic {
p.PublicDependency = append(p.PublicDependency, int32(i))
}
if imp.IsWeak {
p.WeakDependency = append(p.WeakDependency, int32(i))
}
}
for i, locs := 0, file.SourceLocations(); i < locs.Len(); i++ {
loc := locs.Get(i)
l := &descriptorpb.SourceCodeInfo_Location{}
l.Path = append(l.Path, loc.Path...)
if loc.StartLine == loc.EndLine {
l.Span = []int32{int32(loc.StartLine), int32(loc.StartColumn), int32(loc.EndColumn)}
} else {
l.Span = []int32{int32(loc.StartLine), int32(loc.StartColumn), int32(loc.EndLine), int32(loc.EndColumn)}
}
l.LeadingDetachedComments = append([]string(nil), loc.LeadingDetachedComments...)
if loc.LeadingComments != "" {
l.LeadingComments = proto.String(loc.LeadingComments)
}
if loc.TrailingComments != "" {
l.TrailingComments = proto.String(loc.TrailingComments)
}
if p.SourceCodeInfo == nil {
p.SourceCodeInfo = &descriptorpb.SourceCodeInfo{}
}
p.SourceCodeInfo.Location = append(p.SourceCodeInfo.Location, l)
}
for i, messages := 0, file.Messages(); i < messages.Len(); i++ {
p.MessageType = append(p.MessageType, ToDescriptorProto(messages.Get(i)))
}
for i, enums := 0, file.Enums(); i < enums.Len(); i++ {
p.EnumType = append(p.EnumType, ToEnumDescriptorProto(enums.Get(i)))
}
for i, services := 0, file.Services(); i < services.Len(); i++ {
p.Service = append(p.Service, ToServiceDescriptorProto(services.Get(i)))
}
for i, exts := 0, file.Extensions(); i < exts.Len(); i++ {
p.Extension = append(p.Extension, ToFieldDescriptorProto(exts.Get(i)))
}
if syntax := file.Syntax(); syntax != protoreflect.Proto2 {
p.Syntax = proto.String(file.Syntax().String())
}
return p
}
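The To* helpers below all follow the same shape: clone the options message and copy each child descriptor recursively. A small usage sketch, using timestamppb only because it is already present in this vendor tree:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	// Rich reflection descriptor of a generated file...
	fd := timestamppb.File_google_protobuf_timestamp_proto

	// ...converted back to its wire-level FileDescriptorProto form.
	fdp := protodesc.ToFileDescriptorProto(fd)
	fmt.Println(fdp.GetName())             // google/protobuf/timestamp.proto
	fmt.Println(fdp.GetSyntax())           // proto3
	fmt.Println(len(fdp.GetMessageType())) // 1 (Timestamp)
}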
// ToDescriptorProto copies a protoreflect.MessageDescriptor into a
// google.protobuf.DescriptorProto message.
func ToDescriptorProto(message protoreflect.MessageDescriptor) *descriptorpb.DescriptorProto {
p := &descriptorpb.DescriptorProto{
Name: proto.String(string(message.Name())),
Options: proto.Clone(message.Options()).(*descriptorpb.MessageOptions),
}
for i, fields := 0, message.Fields(); i < fields.Len(); i++ {
p.Field = append(p.Field, ToFieldDescriptorProto(fields.Get(i)))
}
for i, exts := 0, message.Extensions(); i < exts.Len(); i++ {
p.Extension = append(p.Extension, ToFieldDescriptorProto(exts.Get(i)))
}
for i, messages := 0, message.Messages(); i < messages.Len(); i++ {
p.NestedType = append(p.NestedType, ToDescriptorProto(messages.Get(i)))
}
for i, enums := 0, message.Enums(); i < enums.Len(); i++ {
p.EnumType = append(p.EnumType, ToEnumDescriptorProto(enums.Get(i)))
}
for i, xranges := 0, message.ExtensionRanges(); i < xranges.Len(); i++ {
xrange := xranges.Get(i)
p.ExtensionRange = append(p.ExtensionRange, &descriptorpb.DescriptorProto_ExtensionRange{
Start: proto.Int32(int32(xrange[0])),
End: proto.Int32(int32(xrange[1])),
Options: proto.Clone(message.ExtensionRangeOptions(i)).(*descriptorpb.ExtensionRangeOptions),
})
}
for i, oneofs := 0, message.Oneofs(); i < oneofs.Len(); i++ {
p.OneofDecl = append(p.OneofDecl, ToOneofDescriptorProto(oneofs.Get(i)))
}
for i, ranges := 0, message.ReservedRanges(); i < ranges.Len(); i++ {
rrange := ranges.Get(i)
p.ReservedRange = append(p.ReservedRange, &descriptorpb.DescriptorProto_ReservedRange{
Start: proto.Int32(int32(rrange[0])),
End: proto.Int32(int32(rrange[1])),
})
}
for i, names := 0, message.ReservedNames(); i < names.Len(); i++ {
p.ReservedName = append(p.ReservedName, string(names.Get(i)))
}
return p
}
// ToFieldDescriptorProto copies a protoreflect.FieldDescriptor into a
// google.protobuf.FieldDescriptorProto message.
func ToFieldDescriptorProto(field protoreflect.FieldDescriptor) *descriptorpb.FieldDescriptorProto {
p := &descriptorpb.FieldDescriptorProto{
Name: proto.String(string(field.Name())),
Number: proto.Int32(int32(field.Number())),
Label: descriptorpb.FieldDescriptorProto_Label(field.Cardinality()).Enum(),
Options: proto.Clone(field.Options()).(*descriptorpb.FieldOptions),
}
if field.IsExtension() {
p.Extendee = fullNameOf(field.ContainingMessage())
}
if field.Kind().IsValid() {
p.Type = descriptorpb.FieldDescriptorProto_Type(field.Kind()).Enum()
}
if field.Enum() != nil {
p.TypeName = fullNameOf(field.Enum())
}
if field.Message() != nil {
p.TypeName = fullNameOf(field.Message())
}
if field.HasJSONName() {
// A bug in older versions of protoc would always populate the
// "json_name" option for extensions when it is meaningless.
// When it did so, it would always use the camel-cased field name.
if field.IsExtension() {
p.JsonName = proto.String(strs.JSONCamelCase(string(field.Name())))
} else {
p.JsonName = proto.String(field.JSONName())
}
}
if field.Syntax() == protoreflect.Proto3 && field.HasOptionalKeyword() {
p.Proto3Optional = proto.Bool(true)
}
if field.HasDefault() {
def, err := defval.Marshal(field.Default(), field.DefaultEnumValue(), field.Kind(), defval.Descriptor)
if err != nil && field.DefaultEnumValue() != nil {
def = string(field.DefaultEnumValue().Name()) // occurs for unresolved enum values
} else if err != nil {
panic(fmt.Sprintf("%v: %v", field.FullName(), err))
}
p.DefaultValue = proto.String(def)
}
if oneof := field.ContainingOneof(); oneof != nil {
p.OneofIndex = proto.Int32(int32(oneof.Index()))
}
return p
}
// ToOneofDescriptorProto copies a protoreflect.OneofDescriptor into a
// google.protobuf.OneofDescriptorProto message.
func ToOneofDescriptorProto(oneof protoreflect.OneofDescriptor) *descriptorpb.OneofDescriptorProto {
return &descriptorpb.OneofDescriptorProto{
Name: proto.String(string(oneof.Name())),
Options: proto.Clone(oneof.Options()).(*descriptorpb.OneofOptions),
}
}
// ToEnumDescriptorProto copies a protoreflect.EnumDescriptor into a
// google.protobuf.EnumDescriptorProto message.
func ToEnumDescriptorProto(enum protoreflect.EnumDescriptor) *descriptorpb.EnumDescriptorProto {
p := &descriptorpb.EnumDescriptorProto{
Name: proto.String(string(enum.Name())),
Options: proto.Clone(enum.Options()).(*descriptorpb.EnumOptions),
}
for i, values := 0, enum.Values(); i < values.Len(); i++ {
p.Value = append(p.Value, ToEnumValueDescriptorProto(values.Get(i)))
}
for i, ranges := 0, enum.ReservedRanges(); i < ranges.Len(); i++ {
rrange := ranges.Get(i)
p.ReservedRange = append(p.ReservedRange, &descriptorpb.EnumDescriptorProto_EnumReservedRange{
Start: proto.Int32(int32(rrange[0])),
End: proto.Int32(int32(rrange[1])),
})
}
for i, names := 0, enum.ReservedNames(); i < names.Len(); i++ {
p.ReservedName = append(p.ReservedName, string(names.Get(i)))
}
return p
}
// ToEnumValueDescriptorProto copies a protoreflect.EnumValueDescriptor into a
// google.protobuf.EnumValueDescriptorProto message.
func ToEnumValueDescriptorProto(value protoreflect.EnumValueDescriptor) *descriptorpb.EnumValueDescriptorProto {
return &descriptorpb.EnumValueDescriptorProto{
Name: proto.String(string(value.Name())),
Number: proto.Int32(int32(value.Number())),
Options: proto.Clone(value.Options()).(*descriptorpb.EnumValueOptions),
}
}
// ToServiceDescriptorProto copies a protoreflect.ServiceDescriptor into a
// google.protobuf.ServiceDescriptorProto message.
func ToServiceDescriptorProto(service protoreflect.ServiceDescriptor) *descriptorpb.ServiceDescriptorProto {
p := &descriptorpb.ServiceDescriptorProto{
Name: proto.String(string(service.Name())),
Options: proto.Clone(service.Options()).(*descriptorpb.ServiceOptions),
}
for i, methods := 0, service.Methods(); i < methods.Len(); i++ {
p.Method = append(p.Method, ToMethodDescriptorProto(methods.Get(i)))
}
return p
}
// ToMethodDescriptorProto copies a protoreflect.MethodDescriptor into a
// google.protobuf.MethodDescriptorProto message.
func ToMethodDescriptorProto(method protoreflect.MethodDescriptor) *descriptorpb.MethodDescriptorProto {
p := &descriptorpb.MethodDescriptorProto{
Name: proto.String(string(method.Name())),
InputType: fullNameOf(method.Input()),
OutputType: fullNameOf(method.Output()),
Options: proto.Clone(method.Options()).(*descriptorpb.MethodOptions),
}
if method.IsStreamingClient() {
p.ClientStreaming = proto.Bool(true)
}
if method.IsStreamingServer() {
p.ServerStreaming = proto.Bool(true)
}
return p
}
func fullNameOf(d protoreflect.Descriptor) *string {
if d == nil {
return nil
}
if strings.HasPrefix(string(d.FullName()), unknownPrefix) {
return proto.String(string(d.FullName()[len(unknownPrefix):]))
}
return proto.String("." + string(d.FullName()))
}


@ -94,7 +94,8 @@ type Files struct {
// Note that enum values are in the top-level since they are in the same
// scope as the parent enum.
descsByName map[protoreflect.FullName]interface{}
filesByPath map[string]protoreflect.FileDescriptor
filesByPath map[string][]protoreflect.FileDescriptor
numFiles int
}
type packageDescriptor struct {
@ -117,17 +118,16 @@ func (r *Files) RegisterFile(file protoreflect.FileDescriptor) error {
r.descsByName = map[protoreflect.FullName]interface{}{
"": &packageDescriptor{},
}
r.filesByPath = make(map[string]protoreflect.FileDescriptor)
r.filesByPath = make(map[string][]protoreflect.FileDescriptor)
}
path := file.Path()
if prev := r.filesByPath[path]; prev != nil {
if prev := r.filesByPath[path]; len(prev) > 0 {
r.checkGenProtoConflict(path)
err := errors.New("file %q is already registered", file.Path())
err = amendErrorWithCaller(err, prev, file)
if r == GlobalFiles && ignoreConflict(file, err) {
err = nil
err = amendErrorWithCaller(err, prev[0], file)
if !(r == GlobalFiles && ignoreConflict(file, err)) {
return err
}
return err
}
for name := file.Package(); name != ""; name = name.Parent() {
@ -168,7 +168,8 @@ func (r *Files) RegisterFile(file protoreflect.FileDescriptor) error {
rangeTopLevelDescriptors(file, func(d protoreflect.Descriptor) {
r.descsByName[d.FullName()] = d
})
r.filesByPath[path] = file
r.filesByPath[path] = append(r.filesByPath[path], file)
r.numFiles++
return nil
}
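This duplicate handling is easy to observe from the outside: a non-global registry still rejects a second registration of the same path, while GlobalFiles may tolerate it for known generated-code conflicts. A sketch, reusing timestamppb only because it is already vendored here:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	files := &protoregistry.Files{}

	// First registration succeeds.
	if err := files.RegisterFile(timestamppb.File_google_protobuf_timestamp_proto); err != nil {
		panic(err)
	}

	// Registering the same path again is rejected for non-global registries.
	err := files.RegisterFile(timestamppb.File_google_protobuf_timestamp_proto)
	fmt.Println(err) // expect an "already registered" error
}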
@ -308,6 +309,7 @@ func (s *nameSuffix) Pop() (name protoreflect.Name) {
// FindFileByPath looks up a file by the path.
//
// This returns (nil, NotFound) if not found.
// This returns an error if multiple files have the same path.
func (r *Files) FindFileByPath(path string) (protoreflect.FileDescriptor, error) {
if r == nil {
return nil, NotFound
@ -316,13 +318,19 @@ func (r *Files) FindFileByPath(path string) (protoreflect.FileDescriptor, error)
globalMutex.RLock()
defer globalMutex.RUnlock()
}
if fd, ok := r.filesByPath[path]; ok {
return fd, nil
fds := r.filesByPath[path]
switch len(fds) {
case 0:
return nil, NotFound
case 1:
return fds[0], nil
default:
return nil, errors.New("multiple files named %q", path)
}
return nil, NotFound
}
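A usage sketch for the lookup path above; importing a generated package is what populates GlobalFiles, and unknown paths come back as NotFound:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/known/timestamppb" // side effect: registers its file descriptor
)

func main() {
	fd, err := protoregistry.GlobalFiles.FindFileByPath("google/protobuf/timestamp.proto")
	if err != nil {
		panic(err)
	}
	fmt.Println(fd.Package()) // google.protobuf

	_, err = protoregistry.GlobalFiles.FindFileByPath("does/not/exist.proto")
	fmt.Println(err == protoregistry.NotFound) // true
}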
// NumFiles reports the number of registered files.
// NumFiles reports the number of registered files,
// including duplicate files with the same name.
func (r *Files) NumFiles() int {
if r == nil {
return 0
@ -331,10 +339,11 @@ func (r *Files) NumFiles() int {
globalMutex.RLock()
defer globalMutex.RUnlock()
}
return len(r.filesByPath)
return r.numFiles
}
// RangeFiles iterates over all registered files while f returns true.
// If multiple files have the same name, RangeFiles iterates over all of them.
// The iteration order is undefined.
func (r *Files) RangeFiles(f func(protoreflect.FileDescriptor) bool) {
if r == nil {
@ -344,9 +353,11 @@ func (r *Files) RangeFiles(f func(protoreflect.FileDescriptor) bool) {
globalMutex.RLock()
defer globalMutex.RUnlock()
}
for _, file := range r.filesByPath {
if !f(file) {
return
for _, files := range r.filesByPath {
for _, file := range files {
if !f(file) {
return
}
}
}
}
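And the matching iteration-side sketch; with the change above, RangeFiles visits duplicate paths as well, so the visited count matches NumFiles:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/known/timestamppb" // populate the global registry
)

func main() {
	n := 0
	protoregistry.GlobalFiles.RangeFiles(func(fd protoreflect.FileDescriptor) bool {
		n++
		return true // keep iterating
	})
	fmt.Println(n == protoregistry.GlobalFiles.NumFiles()) // true
}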

File diff suppressed because it is too large

11
vendor/modules.txt vendored

@ -1,8 +1,8 @@
# github.com/cyrilix/robocar-base v0.1.5
# github.com/cyrilix/robocar-base v0.1.6
## explicit; go 1.17
github.com/cyrilix/robocar-base/cli
github.com/cyrilix/robocar-base/service
# github.com/cyrilix/robocar-protobuf/go v1.0.3
# github.com/cyrilix/robocar-protobuf/go v1.0.4
## explicit; go 1.17
github.com/cyrilix/robocar-protobuf/go/events
# github.com/eclipse/paho.mqtt.golang v1.3.5
@ -11,7 +11,6 @@ github.com/eclipse/paho.mqtt.golang
github.com/eclipse/paho.mqtt.golang/packets
# github.com/golang/protobuf v1.5.2
## explicit; go 1.9
github.com/golang/protobuf/proto
github.com/golang/protobuf/ptypes/timestamp
# github.com/gorilla/websocket v1.4.2
## explicit; go 1.12
@ -30,14 +29,14 @@ go.uber.org/zap/internal/bufferpool
go.uber.org/zap/internal/color
go.uber.org/zap/internal/exit
go.uber.org/zap/zapcore
# gocv.io/x/gocv v0.26.0
# gocv.io/x/gocv v0.29.0
## explicit; go 1.13
gocv.io/x/gocv
# golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4
## explicit; go 1.11
golang.org/x/net/internal/socks
golang.org/x/net/proxy
# google.golang.org/protobuf v1.26.0
# google.golang.org/protobuf v1.27.1
## explicit; go 1.9
google.golang.org/protobuf/encoding/prototext
google.golang.org/protobuf/encoding/protowire
@ -60,10 +59,8 @@ google.golang.org/protobuf/internal/set
google.golang.org/protobuf/internal/strs
google.golang.org/protobuf/internal/version
google.golang.org/protobuf/proto
google.golang.org/protobuf/reflect/protodesc
google.golang.org/protobuf/reflect/protoreflect
google.golang.org/protobuf/reflect/protoregistry
google.golang.org/protobuf/runtime/protoiface
google.golang.org/protobuf/runtime/protoimpl
google.golang.org/protobuf/types/descriptorpb
google.golang.org/protobuf/types/known/timestamppb