refactor: new road detection implementation

Cyrille Nofficial 2024-04-14 12:09:06 +02:00
parent 322e6a65ae
commit a66e481df4
7 changed files with 786 additions and 3 deletions

25
.gitignore vendored Normal file

@@ -0,0 +1,25 @@
### Go template
# If you prefer the allow list template instead of the deny list, see community template:
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
#
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
# Go workspace file
go.work
road/testdata/cam-image_array_*
rc-road


@@ -3,21 +3,27 @@ package main
import (
"flag"
"github.com/cyrilix/robocar-base/cli"
"github.com/cyrilix/robocar-road/pkg/part"
"github.com/cyrilix/robocar-road/road"
"go.uber.org/zap"
"gocv.io/x/gocv"
"image"
"log"
"math"
"os"
)
const (
DefaultClientId = "robocar-road"
DefaultHorizon = 20
DefaultHorizon = 110
)
func main() {
var mqttBroker, username, password, clientId string
var cameraTopic, roadTopic string
var horizon int
var whiteThresholdLow, whiteThresholdHigh int
var cannyThresholdLow, cannyThresholdHigh int
var imgWidth, imgHeight int
err := cli.SetIntDefaultValueFromEnv(&horizon, "HORIZON", DefaultHorizon)
if err != nil {
@@ -33,6 +39,23 @@ func main() {
flag.StringVar(&cameraTopic, "mqtt-topic-camera", os.Getenv("MQTT_TOPIC_CAMERA"), "Mqtt topic that contains camera frame values, use MQTT_TOPIC_CAMERA if args not set")
flag.IntVar(&horizon, "horizon", horizon, "Limit horizon in pixels from top, use HORIZON if args not set")
flag.IntVar(&imgWidth, "image-width", 160, "Video pixels width")
flag.IntVar(&imgHeight, "image-height", 128, "Video pixels height")
flag.IntVar(&whiteThresholdLow, "white-threshold-low", 20, "White pixels threshold, low limit")
flag.IntVar(&whiteThresholdHigh, "white-threshold-high", 255, "White pixels threshold, high limit")
flag.IntVar(&cannyThresholdLow, "canny-threshold-low", 100, "Canny threshold, low limit")
flag.IntVar(&cannyThresholdHigh, "canny-threshold-high", 250, "Canny threshold, high limit")
var houghLinesRho, houghLinesThreshold, houghLinesMinLineLength, houghLinesMaxLineGap int
var houghLinesTheta float64
flag.IntVar(&houghLinesRho, "hough-lines-rho", 2, "distance resolution in pixels of the Hough grid")
flag.Float64Var(&houghLinesTheta, "hough-lines-theta", 1*math.Pi/180, "angular resolution in radians of the Hough grid, default Pi/180")
flag.IntVar(&houghLinesThreshold, "hough-lines-threshold", 15, "minimum number of votes (intersections in Hough grid cell)")
flag.IntVar(&houghLinesMinLineLength, "hough-lines-min-line-length", 10, "minimum number of pixels making up a line")
flag.IntVar(&houghLinesMaxLineGap, "hough-lines-max-line-gap", 20, "maximum gap in pixels between connectable line segments")
logLevel := zap.LevelFlag("log", zap.InfoLevel, "log level")
flag.Parse()
@@ -60,7 +83,21 @@
}
defer client.Disconnect(50)
p := part.NewRoadPart(client, horizon, cameraTopic, roadTopic)
p := road.NewPart(client,
cameraTopic, roadTopic,
road.NewDetector(
road.WithWhiteFilter(whiteThresholdLow, whiteThresholdHigh),
road.WithYellowFilter(
gocv.NewMatFromScalar(gocv.Scalar{Val1: 90., Val2: 100., Val3: 100.}, gocv.MatTypeCV8U),
gocv.NewMatFromScalar(gocv.Scalar{Val1: 110., Val2: 255., Val3: 255.}, gocv.MatTypeCV8U),
),
road.WithCanny(cannyThresholdLow, cannyThresholdHigh),
road.WithGaussianBlur(3),
road.WithRegionOfInterest(imgWidth, imgHeight, horizon),
road.WithPointOnRoad(image.Point{X: imgWidth / 2, Y: imgHeight - 30}),
road.WithHoughLines(houghLinesRho, float32(houghLinesTheta), houghLinesThreshold, houghLinesMinLineLength, houghLinesMaxLineGap),
),
)
defer p.Stop()
cli.HandleExit(p)


@@ -0,0 +1,65 @@
package main
import (
"flag"
"github.com/cyrilix/robocar-road/road"
"go.uber.org/zap"
"gocv.io/x/gocv"
"image"
"image/color"
"log"
"os"
)
func main() {
var imgName string
flag.StringVar(&imgName, "image", "", "path to image file")
logLevel := zap.LevelFlag("log", zap.InfoLevel, "log level")
flag.Parse()
if imgName == "" {
zap.S().Errorf("bad image value")
flag.PrintDefaults()
os.Exit(1)
}
if len(os.Args) <= 1 {
flag.PrintDefaults()
os.Exit(1)
}
config := zap.NewDevelopmentConfig()
config.Level = zap.NewAtomicLevelAt(*logLevel)
lgr, err := config.Build()
if err != nil {
log.Fatalf("unable to init logger: %v", err)
}
defer func() {
if err := lgr.Sync(); err != nil {
log.Printf("unable to Sync logger: %v\n", err)
}
}()
zap.ReplaceGlobals(lgr)
d := road.NewDetector()
defer d.Close()
img := gocv.IMRead(imgName, gocv.IMReadColor)
defer func(img *gocv.Mat) {
err := img.Close()
if err != nil {
zap.S().Warnf("unable to close image: %v", err)
}
}(&img)
if img.Empty() {
zap.S().Errorf("image %s is not a valid image", imgName)
os.Exit(1)
}
roadLimits, _ := d.Detect(&img)
// Detect returns protobuf points, convert them to image.Point before drawing the road polygon
points := make([]image.Point, 0, len(roadLimits))
for _, pt := range roadLimits {
points = append(points, image.Point{X: int(pt.X), Y: int(pt.Y)})
}
gocv.FillPoly(&img, gocv.NewPointsVectorFromPoints([][]image.Point{points}), color.RGBA{0, 0, 255, 128})
window := gocv.NewWindow("Road")
window.IMShow(img)
window.WaitKey(0)
}

487
road/detect.go Normal file

@@ -0,0 +1,487 @@
package road
import (
"github.com/cyrilix/robocar-protobuf/go/events"
"go.uber.org/zap"
"gocv.io/x/gocv"
"image"
"image/color"
"math"
)
type DetectorOption func(*Detector)
func WithWhiteFilter(lowThreshold int, highThreshold int) DetectorOption {
return func(d *Detector) {
err := d.lowerWhite.Close()
if err != nil {
zap.S().Errorf("unable to close lower white filter: %v", err)
}
err = d.upperWhite.Close()
if err != nil {
zap.S().Errorf("unable to close upper white filter: %v", err)
}
d.lowerWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(lowThreshold), Val2: float64(lowThreshold), Val3: float64(lowThreshold), Val4: float64(lowThreshold)}, gocv.MatTypeCV8U)
d.upperWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(highThreshold), Val2: float64(highThreshold), Val3: float64(highThreshold), Val4: float64(highThreshold)}, gocv.MatTypeCV8U)
}
}
func WithYellowFilter(lower gocv.Mat, upper gocv.Mat) DetectorOption {
return func(d *Detector) {
err := d.lowerYellow.Close()
if err != nil {
zap.S().Errorf("unable to close lower yellow filter: %v", err)
}
err = d.upperYellow.Close()
if err != nil {
zap.S().Errorf("unable to close upper yellow filter: %v", err)
}
d.lowerYellow = lower
d.upperYellow = upper
}
}
func WithRegionOfInterest(imgWidth int, imgHeight int, horizon int) DetectorOption {
roi := buildRegionOfInterest(imgWidth, imgHeight, horizon)
return func(d *Detector) {
err := d.roiMask.Close()
if err != nil {
zap.S().Errorf("unable to close roi mask: %v", err)
}
d.roiMask = *roi
}
}
func WithPointOnRoad(pt image.Point) DetectorOption {
return func(d *Detector) {
d.pointOnRoad = pt
}
}
func WithCanny(lowThreshold int, highThreshold int) DetectorOption {
return func(d *Detector) {
d.cannyLowThreshold = float32(lowThreshold)
d.cannyHighThreshold = float32(highThreshold)
}
}
func WithGaussianBlur(kernelSize int) DetectorOption {
return func(d *Detector) {
d.gaussianBlurKernelSize = kernelSize
}
}
func WithHoughLines(rho int, theta float32, threshold int, minLineLength int, maxLineGap int) DetectorOption {
return func(d *Detector) {
d.houghLinesRho = float32(rho)
d.houghLinesTheta = theta
d.houghLinesThreshold = threshold
d.houghLinesMinLineLength = minLineLength
d.houghLinesMaxLineGap = maxLineGap
}
}
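// NewDetector builds a Detector with defaults tuned for 160x120 frames; DetectorOption
// functions override these defaults.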
func NewDetector(options ...DetectorOption) *Detector {
whiteThreshold := 20.
roiMask := buildRegionOfInterest(160, 120, 110)
pointOnRoad := image.Point{X: 160 / 2, Y: 120 - 30}
d := Detector{
lowerWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: whiteThreshold, Val2: whiteThreshold, Val3: whiteThreshold, Val4: whiteThreshold}, gocv.MatTypeCV8U),
upperWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: 255., Val2: 255., Val3: 255., Val4: 255.}, gocv.MatTypeCV8U),
lowerYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 90., Val2: 100., Val3: 100.}, gocv.MatTypeCV8U),
upperYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 110., Val2: 255., Val3: 255.}, gocv.MatTypeCV8U),
cannyLowThreshold: 100.,
cannyHighThreshold: 250.,
gaussianBlurKernelSize: 3,
// Hough Transform
houghLinesRho: 2, // distance resolution in pixels of the Hough grid
houghLinesTheta: 1 * math.Pi / 180, // angular resolution in radians of the Hough grid
houghLinesThreshold: 15, // minimum number of votes (intersections in Hough grid cell)
houghLinesMinLineLength: 10, // minimum number of pixels making up a line
houghLinesMaxLineGap: 20, // maximum gap in pixels between connectable line segments
roiMask: *roiMask,
pointOnRoad: pointOnRoad,
}
for _, option := range options {
option(&d)
}
return &d
}
func buildRegionOfInterest(imgWidth int, imgHeight int, horizon int) *gocv.Mat {
// RegionOfInterest
// defining a blank roiMask to start with
roiMask := gocv.Zeros(imgHeight, imgWidth, gocv.MatTypeCV8U)
vertices := gocv.NewPointsVectorFromPoints(
[][]image.Point{
{
{0, imgHeight - horizon},
{imgWidth, imgHeight - horizon},
{imgWidth, imgHeight},
{0, imgHeight},
},
},
)
defer vertices.Close()
ignoreMaskColor := color.RGBA{R: 255, G: 255, B: 255, A: 255}
gocv.FillPoly(&roiMask, vertices, ignoreMaskColor)
return &roiMask
}
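// Detector extracts the road area from a camera frame by keeping white and yellow pixels,
// detecting edges with Canny and grouping Hough line segments into a contour.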
type Detector struct {
whiteThreshold float64
lowerWhite gocv.Mat
upperWhite gocv.Mat
lowerYellow gocv.Mat
upperYellow gocv.Mat
cannyLowThreshold, cannyHighThreshold float32
gaussianBlurKernelSize int
// Hough lines parameters
houghLinesRho, houghLinesTheta float32
houghLinesThreshold int
houghLinesMinLineLength, houghLinesMaxLineGap int
// Region Of interest
roiMask gocv.Mat
pointOnRoad image.Point
}
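// Close releases the OpenCV Mats held by the detector.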
func (d *Detector) Close() {
defer func(lowerWhite *gocv.Mat) {
err := lowerWhite.Close()
if err != nil {
zap.S().Errorf("unable to close lowerWhite: %v", err)
}
}(&d.lowerWhite)
defer func(upperWhite *gocv.Mat) {
err := upperWhite.Close()
if err != nil {
zap.S().Errorf("unable to close upperWhite: %v", err)
}
}(&d.upperWhite)
defer func(lowerYellow *gocv.Mat) {
err := lowerYellow.Close()
if err != nil {
zap.S().Errorf("unable to close lowerYellow: %v", err)
}
}(&d.lowerYellow)
defer func(upperYellow *gocv.Mat) {
err := upperYellow.Close()
if err != nil {
zap.S().Errorf("unable to close upperYellow: %v", err)
}
}(&d.upperYellow)
defer func(roiMask *gocv.Mat) {
err := roiMask.Close()
if err != nil {
zap.S().Errorf("unable to close roiMask: %v", err)
}
}(&d.roiMask)
}
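// Detect runs the detection pipeline on img: color filtering (white and yellow), grayscale,
// Gaussian blur, Canny edge detection, region-of-interest masking and Hough line grouping.
// It returns the contour of the detected road and the ellipse fitted on it.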
func (d *Detector) Detect(img *gocv.Mat) ([]*events.Point, *events.Ellipse) {
// Only keep white and yellow pixels in the image, all other pixels become black
imgFiltered := d.filterColors(img)
defer func(imgFiltered *gocv.Mat) {
err := imgFiltered.Close()
if err != nil {
zap.S().Errorf("unable to close imgFiltered: %v", err)
}
}(imgFiltered)
// # Read in and grayscale the image
imgGray := d.grayscale(imgFiltered)
defer func(imgGray *gocv.Mat) {
err := imgGray.Close()
if err != nil {
zap.S().Errorf("unable to close imgGray: %v", err)
}
}(imgGray)
// Apply Gaussian smoothing
blurGray := d.gaussianBlur(imgGray)
defer func(blurGray *gocv.Mat) {
err := blurGray.Close()
if err != nil {
zap.S().Errorf("unable to close blurGray: %v", err)
}
}(blurGray)
// Apply Canny Edge Detector
edges := d.canny(blurGray)
defer func(edges *gocv.Mat) {
err := edges.Close()
if err != nil {
zap.S().Errorf("unable to close edges: %v", err)
}
}(edges)
maskedEdges := d.applyRegionOfInterest(edges)
defer func(maskedEdges *gocv.Mat) {
err := maskedEdges.Close()
if err != nil {
zap.S().Errorf("unable to close maskedEdges: %v", err)
}
}(maskedEdges)
// Run Hough on edge detected image
road, ellipsis := d.getRoadShapeWithHoughLines(maskedEdges)
return road, ellipsis
}
// Filter the image to include only yellow and white pixels
func (d *Detector) filterColors(img *gocv.Mat) *gocv.Mat {
// Filter white pixels
whiteMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8UC3)
defer func(whiteMask *gocv.Mat) {
err := whiteMask.Close()
if err != nil {
zap.S().Errorf("unable to close whiteMask: %v", err)
}
}(&whiteMask)
gocv.InRange(*img, d.lowerWhite, d.upperWhite, &whiteMask)
// Convert one channel result to 3 channel mask
gocv.Merge([]gocv.Mat{whiteMask, whiteMask, whiteMask}, &whiteMask)
whiteImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
defer func(whiteImage *gocv.Mat) {
err := whiteImage.Close()
if err != nil {
zap.S().Errorf("unable to close whiteImage: %v", err)
}
}(&whiteImage)
gocv.BitwiseAnd(*img, whiteMask, &whiteImage)
// Filter yellow pixels
hsv := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
defer func(hsv *gocv.Mat) {
err := hsv.Close()
if err != nil {
zap.S().Errorf("unable to close hsv: %v", err)
}
}(&hsv)
gocv.CvtColor(*img, &hsv, gocv.ColorBGRToHSV)
yellowMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
defer func(yellowMask *gocv.Mat) {
err := yellowMask.Close()
if err != nil {
zap.S().Errorf("unable to close yellowMask: %v", err)
}
}(&yellowMask)
gocv.InRange(hsv, d.lowerYellow, d.upperYellow, &yellowMask)
// Convert one channel result to 3 channel mask
gocv.Merge([]gocv.Mat{yellowMask, yellowMask, yellowMask}, &yellowMask)
yellowImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
defer func(yellowImage *gocv.Mat) {
err := yellowImage.Close()
if err != nil {
zap.S().Errorf("unable to close yellowImage: %v", err)
}
}(&yellowImage)
gocv.BitwiseAnd(*img, yellowMask, &yellowImage)
// Combine the two above images
image2 := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
gocv.AddWeighted(whiteImage, 1., yellowImage, 1., 0., &image2)
return &image2
}
/*
Applies the Grayscale transform.
This returns an image with a single color channel (CV_8U).
*/
func (d *Detector) grayscale(img *gocv.Mat) *gocv.Mat {
grayImg := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
gocv.CvtColor(*img, &grayImg, gocv.ColorBGRToGray)
return &grayImg
}
/* Applies the Canny transform */
func (d *Detector) canny(img *gocv.Mat) *gocv.Mat {
edges := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
gocv.Canny(*img, &edges, d.cannyLowThreshold, d.cannyHighThreshold)
return &edges
}
func (d *Detector) gaussianBlur(img *gocv.Mat) *gocv.Mat {
blur := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
gocv.GaussianBlur(*img, &blur, image.Point{X: d.gaussianBlurKernelSize, Y: d.gaussianBlurKernelSize}, 0., 0., gocv.BorderDefault)
return &blur
}
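// getRoadShapeWithHoughLines draws the detected Hough line segments on a blank image,
// closes small gaps with a dilate/erode pass, then returns the approximated contour that
// contains the reference point on the road, together with its fitted ellipse.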
func (d *Detector) getRoadShapeWithHoughLines(img *gocv.Mat) ([]*events.Point, *events.Ellipse) {
lines := gocv.NewMat()
defer func(lines *gocv.Mat) {
err := lines.Close()
if err != nil {
zap.S().Errorf("unable to close lines mar: %v", err)
}
}(&lines)
gocv.HoughLinesPWithParams(*img, &lines, d.houghLinesRho, d.houghLinesTheta, d.houghLinesThreshold, float32(d.houghLinesMinLineLength), float32(d.houghLinesMaxLineGap))
// Generate new image with detected edges
imgLines := gocv.Zeros(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
defer func(imgLines *gocv.Mat) {
err := imgLines.Close()
if err != nil {
zap.S().Errorf("unable to close imgLines: %v", err)
}
}(&imgLines)
for i := 0; i < lines.Rows(); i++ {
x1 := lines.GetIntAt(i, 0)
y1 := lines.GetIntAt(i, 1)
x2 := lines.GetIntAt(i, 2)
y2 := lines.GetIntAt(i, 3)
gocv.Line(&imgLines, image.Point{X: int(x1), Y: int(y1)}, image.Point{X: int(x2), Y: int(y2)}, color.RGBA{
R: 255,
G: 255,
B: 255,
A: 255,
}, 3)
}
// Draw rectangle to add limit on image border
gocv.Rectangle(&imgLines, image.Rectangle{
Min: image.Point{X: 0, Y: 0},
Max: image.Point{X: img.Cols() - 1, Y: img.Rows() - 1},
}, color.RGBA{R: 255, G: 255, B: 255, A: 255}, 1)
kernel := gocv.Ones(8, 8, gocv.MatTypeCV8U)
defer func(kernel *gocv.Mat) {
err := kernel.Close()
if err != nil {
zap.S().Errorf("unable to close kernel: %v", err)
}
}(&kernel)
gocv.Dilate(imgLines, &imgLines, kernel)
gocv.Erode(imgLines, &imgLines, kernel)
cnts := gocv.FindContours(imgLines, gocv.RetrievalList, gocv.ChainApproxSimple)
defer cnts.Close()
for i := 0; i < cnts.Size(); i++ {
cnt := cnts.At(i)
pv := gocv.ApproxPolyDP(cnt, 0.01*gocv.ArcLength(cnt, true), true)
if gocv.PointPolygonTest(pv, d.pointOnRoad, false) > 0 {
ellipse := d.computeEllipsis(&pv)
contour := make([]*events.Point, 0, pv.Size())
for j := 0; j < pv.Size(); j++ {
pt := pv.At(j)
contour = append(contour, &events.Point{X: int32(pt.X), Y: int32(pt.Y)})
}
return contour, ellipse
}
}
return []*events.Point{}, &EllipseNotFound
}
/*
Applies an image mask.
Only keeps the region of the image defined by the polygon
formed from `vertices`. The rest of the image is set to black.
*/
func (d *Detector) applyRegionOfInterest(img *gocv.Mat) *gocv.Mat {
// returning the image only where mask pixels are nonzero
maskedImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
gocv.BitwiseAnd(*img, d.roiMask, &maskedImage)
return &maskedImage
}
var EllipseNotFound = events.Ellipse{Confidence: 0.}
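// computeEllipsis fits an ellipse on the road contour and derives a confidence from its center.
// FitEllipse requires at least 5 points, otherwise EllipseNotFound is returned.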
func (d *Detector) computeEllipsis(road *gocv.PointVector) *events.Ellipse {
if road.Size() < 5 {
return &EllipseNotFound
}
rotatedRect := gocv.FitEllipse(*road)
trust := d.computeTrustFromCenter(&rotatedRect.Center)
zap.S().Debugf("Trust: %v", trust)
return &events.Ellipse{
Center: &events.Point{
X: int32(rotatedRect.Center.X),
Y: int32(rotatedRect.Center.Y),
},
Width: int32(rotatedRect.Width),
Height: int32(rotatedRect.Height),
Angle: float32(rotatedRect.Angle),
Confidence: trust,
}
}
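// computeTrustFromCenter returns a confidence between 0 and 1: 1.0 when the ellipse center
// lies inside the safe zone, decreasing with the distance to that zone otherwise.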
func (d *Detector) computeTrustFromCenter(ellipsisCenter *image.Point) float32 {
safeMinX := 48
safeMaxX := 115
safeMinY := 69
safeMaxY := 119
if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX && safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY {
return 1.0
}
if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX {
return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y)
}
if safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY {
return d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X)
}
return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y) * d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X)
}
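// computeTrustOnAxis decreases the confidence as 10/distance outside the [safeMin, safeMax]
// range, capped at 0.9 and floored at 0.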
func (d *Detector) computeTrustOnAxis(safeMax, safeMin, value int) float32 {
trust := 1.
if value > safeMax {
trust = 1. / float64(value-safeMax)
} else if value < safeMin {
trust = 1. / float64(safeMin-value)
}
trust = trust * 10.
if trust > 0.9 {
trust = 0.9
}
if trust < 0. {
trust = 0.
}
return float32(trust)
}

50
road/detect_test.go Normal file

@@ -0,0 +1,50 @@
package road
import (
"github.com/cyrilix/robocar-protobuf/go/events"
"gocv.io/x/gocv"
"reflect"
"testing"
)
func TestDetector_Detect(t *testing.T) {
type fields struct {
}
type args struct {
imgName string
}
tests := []struct {
name string
fields fields
args args
want []*events.Point
}{
{
name: "Straight ahead",
fields: fields{},
args: args{imgName: "testdata/input.jpg"},
want: []*events.Point{{X: 2, Y: 53}, {X: 48, Y: 37}, {X: 54, Y: 27}, {X: 72, Y: 26}, {X: 156, Y: 55}, {X: 159, Y: 118}, {X: 20, Y: 114}, {X: 3, Y: 119}},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
d := NewDetector()
defer d.Close()
img := gocv.IMRead(tt.args.imgName, gocv.IMReadColor)
defer func(img *gocv.Mat) {
err := img.Close()
if err != nil {
t.Errorf("unable to close image: %v", err)
}
}(&img)
if img.Empty() {
t.Errorf("unable to open image %v", tt.args.imgName)
t.Fail()
}
if got, _ := d.Detect(&img); !reflect.DeepEqual(got, tt.want) {
t.Errorf("Detect() = %v, want %v", got, tt.want)
}
})
}
}

119
road/part.go Normal file

@@ -0,0 +1,119 @@
package road
import (
"github.com/cyrilix/robocar-base/service"
"github.com/cyrilix/robocar-protobuf/go/events"
mqtt "github.com/eclipse/paho.mqtt.golang"
"go.uber.org/zap"
"gocv.io/x/gocv"
"google.golang.org/protobuf/proto"
"log"
)
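// RoadPart subscribes to camera frames over MQTT, runs road detection on each frame
// and publishes the resulting road message.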
type RoadPart struct {
client mqtt.Client
frameChan chan frameToProcess
readyForNext chan interface{}
cancel chan interface{}
detector *Detector
cameraTopic, roadTopic string
}
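// NewPart creates a RoadPart bound to the given MQTT client, topics and detector.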
func NewPart(client mqtt.Client, cameraTopic, roadTopic string, detector *Detector) *RoadPart {
return &RoadPart{
client: client,
frameChan: make(chan frameToProcess),
readyForNext: make(chan interface{}),
cancel: make(chan interface{}),
detector: detector,
cameraTopic: cameraTopic,
roadTopic: roadTopic,
}
}
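// Start registers the MQTT callback and processes incoming frames until Stop is called.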
func (r *RoadPart) Start() error {
registerCallBacks(r)
var frame = frameToProcess{}
defer func() {
if err := frame.Close(); err != nil {
zap.S().Errorf("unable to close msg: %v", err)
}
}()
for {
select {
case f := <-r.frameChan:
oldFrame := frame
frame = f
if err := oldFrame.Close(); err != nil {
zap.S().Errorf("unable to close msg: %v", err)
}
go r.processFrame(&frame)
case <-r.cancel:
zap.S().Info("Stop service")
return nil
}
}
}
var registerCallBacks = func(r *RoadPart) {
err := service.RegisterCallback(r.client, r.cameraTopic, r.OnFrame)
if err != nil {
log.Panicf("unable to register callback to topic %v:%v", r.cameraTopic, err)
}
}
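// Stop shuts the processing loop down, notifies the road topic and releases detector resources.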
func (r *RoadPart) Stop() {
defer r.detector.Close()
close(r.readyForNext)
close(r.cancel)
service.StopService("road", r.client, r.roadTopic)
}
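// OnFrame decodes the incoming protobuf FrameMessage into a gocv.Mat and queues it for processing.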
func (r *RoadPart) OnFrame(_ mqtt.Client, msg mqtt.Message) {
var frameMsg events.FrameMessage
err := proto.Unmarshal(msg.Payload(), &frameMsg)
if err != nil {
zap.S().Errorf("unable to unmarshal %T message: %v", frameMsg, err)
return
}
img, err := gocv.IMDecode(frameMsg.GetFrame(), gocv.IMReadUnchanged)
if err != nil {
zap.S().Errorf("unable to decode image: %v", err)
return
}
frame := frameToProcess{
ref: frameMsg.GetId(),
Mat: img,
}
r.frameChan <- frame
}
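// frameToProcess bundles a decoded frame with the reference of the originating message.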
type frameToProcess struct {
ref *events.FrameRef
gocv.Mat
}
func (r *RoadPart) processFrame(frame *frameToProcess) {
img := frame.Mat
cntr, ellipse := r.detector.Detect(&img)
msg := events.RoadMessage{
Contour: cntr,
Ellipse: ellipse,
FrameRef: frame.ref,
}
payload, err := proto.Marshal(&msg)
if err != nil {
zap.S().Errorf("unable to marshal %T to protobuf: %err", msg, err)
return
}
publish(r.client, r.roadTopic, &payload)
}
var publish = func(client mqtt.Client, topic string, payload *[]byte) {
client.Publish(topic, 0, false, *payload)
}

BIN
road/testdata/input.jpg vendored Executable file

Binary file not shown.

Size: 9.3 KiB