From a66e481df4a7b9e52f81290776900a761a0d5ca4 Mon Sep 17 00:00:00 2001 From: Cyrille Nofficial Date: Sun, 14 Apr 2024 12:09:06 +0200 Subject: [PATCH] refactor: new road detection implementation --- .gitignore | 25 ++ cmd/rc-road/rc-road.go | 43 +++- cmd/road-debug/road-debug.go | 65 +++++ road/detect.go | 487 +++++++++++++++++++++++++++++++++++ road/detect_test.go | 50 ++++ road/part.go | 119 +++++++++ road/testdata/input.jpg | Bin 0 -> 9560 bytes 7 files changed, 786 insertions(+), 3 deletions(-) create mode 100644 .gitignore create mode 100644 cmd/road-debug/road-debug.go create mode 100644 road/detect.go create mode 100644 road/detect_test.go create mode 100644 road/part.go create mode 100755 road/testdata/input.jpg diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2f29b55 --- /dev/null +++ b/.gitignore @@ -0,0 +1,25 @@ +### Go template +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work + +road/testdata/cam-image_array_* +rc-road diff --git a/cmd/rc-road/rc-road.go b/cmd/rc-road/rc-road.go index 655c937..391d071 100644 --- a/cmd/rc-road/rc-road.go +++ b/cmd/rc-road/rc-road.go @@ -3,21 +3,27 @@ package main import ( "flag" "github.com/cyrilix/robocar-base/cli" - "github.com/cyrilix/robocar-road/pkg/part" + "github.com/cyrilix/robocar-road/road" "go.uber.org/zap" + "gocv.io/x/gocv" + "image" "log" + "math" "os" ) const ( DefaultClientId = "robocar-road" - DefaultHorizon = 20 + DefaultHorizon = 110 ) func main() { var mqttBroker, username, password, clientId string var cameraTopic, roadTopic string var horizon int + var whiteThresholdLow, whiteThresholdHigh int + var cannyThresholdLow, cannyThresholdHigh int + var imgWidth, imgHeight int err := cli.SetIntDefaultValueFromEnv(&horizon, "HORIZON", DefaultHorizon) if err != nil { @@ -33,6 +39,23 @@ func main() { flag.StringVar(&cameraTopic, "mqtt-topic-camera", os.Getenv("MQTT_TOPIC_CAMERA"), "Mqtt topic that contains camera frame values, use MQTT_TOPIC_CAMERA if args not set") flag.IntVar(&horizon, "horizon", horizon, "Limit horizon in pixels from top, use HORIZON if args not set") + flag.IntVar(&imgWidth, "image-width", 160, "Video pixels width") + flag.IntVar(&imgHeight, "image-height", 128, "Video pixels height") + + flag.IntVar(&whiteThresholdLow, "white-threshold-low", 20, "White pixels threshold, low limit") + flag.IntVar(&whiteThresholdHigh, "white-threshold-high", 255, "White pixels threshold, high limit") + + flag.IntVar(&cannyThresholdLow, "canny-threshold-low", 100, "White pixels threshold, low limit") + flag.IntVar(&cannyThresholdHigh, "canny-threshold-high", 250, "White pixels threshold, high limit") + + var houghLinesRho, houghLinesThreshold, houghLinesMinLineLength, houghLinesMaxLineGap int + var houghLinesTheta float64 + flag.IntVar(&houghLinesRho, "hough-lines-rho", 2, "distance resolution in pixels of the Hough grid") + flag.Float64Var(&houghLinesTheta, "hough-lines-theta", 1*math.Pi/180, "angular resolution in radians of the Hough grid, default Pi/180") + flag.IntVar(&houghLinesThreshold, "hough-lines-threshold", 15, "minimum number of votes (intersections in 
Hough grid cell)") + flag.IntVar(&houghLinesMinLineLength, "hough-lines-min-line-length", 10, "minimum number of pixels making up a line") + flag.IntVar(&houghLinesMaxLineGap, "hough-lines-max-lines-gap", 20, "maximum gap in pixels between connectable line segments") + logLevel := zap.LevelFlag("log", zap.InfoLevel, "log level") flag.Parse() @@ -60,7 +83,21 @@ func main() { } defer client.Disconnect(50) - p := part.NewRoadPart(client, horizon, cameraTopic, roadTopic) + p := road.NewPart(client, + cameraTopic, roadTopic, + road.NewDetector( + road.WithWhiteFilter(whiteThresholdLow, whiteThresholdHigh), + road.WithYellowFilter( + gocv.NewMatFromScalar(gocv.Scalar{Val1: 90., Val2: 100., Val3: 100.}, gocv.MatTypeCV8U), + gocv.NewMatFromScalar(gocv.Scalar{Val1: 110., Val2: 255., Val3: 255.}, gocv.MatTypeCV8U), + ), + road.WithCanny(cannyThresholdLow, cannyThresholdHigh), + road.WithGaussianBlur(3), + road.WithRegionOfInterest(imgWidth, imgHeight, horizon), + road.WithPointOnRoad(image.Point{X: imgWidth / 2, Y: imgHeight - 30}), + road.WithHoughLines(houghLinesRho, float32(houghLinesTheta), houghLinesThreshold, houghLinesMinLineLength, houghLinesMaxLineGap), + ), + ) defer p.Stop() cli.HandleExit(p) diff --git a/cmd/road-debug/road-debug.go b/cmd/road-debug/road-debug.go new file mode 100644 index 0000000..cee144b --- /dev/null +++ b/cmd/road-debug/road-debug.go @@ -0,0 +1,65 @@ +package main + +import ( + "flag" + "github.com/cyrilix/robocar-road/road" + "go.uber.org/zap" + "gocv.io/x/gocv" + "image" + "image/color" + "log" + "os" +) + +func main() { + var imgName string + flag.StringVar(&imgName, "image", "", "path to image file") + + logLevel := zap.LevelFlag("log", zap.InfoLevel, "log level") + flag.Parse() + + if imgName == "" { + zap.S().Errorf("bad image value") + flag.PrintDefaults() + os.Exit(1) + } + + if len(os.Args) <= 1 { + flag.PrintDefaults() + os.Exit(1) + } + + config := zap.NewDevelopmentConfig() + config.Level = zap.NewAtomicLevelAt(*logLevel) + lgr, err := config.Build() + if err != nil { + log.Fatalf("unable to init logger: %v", err) + } + defer func() { + if err := lgr.Sync(); err != nil { + log.Printf("unable to Sync logger: %v\n", err) + } + }() + zap.ReplaceGlobals(lgr) + + d := road.NewDetector(160, 120) + + img := gocv.IMRead(imgName, gocv.IMReadColor) + defer func(img *gocv.Mat) { + err := img.Close() + if err != nil { + zap.S().Warnf("unable to close image: %v", err) + } + }(&img) + if img.Empty() { + zap.S().Errorf("image %s is not a valid image", imgName) + os.Exit(1) + } + roadLimits := d.Detect(&img) + + gocv.FillPoly(&img, gocv.NewPointsVectorFromPoints([][]image.Point{roadLimits}), color.RGBA{0, 0, 255, 128}) + window := gocv.NewWindow("Road") + window.IMShow(img) + window.WaitKey(0) + +} diff --git a/road/detect.go b/road/detect.go new file mode 100644 index 0000000..f4c581a --- /dev/null +++ b/road/detect.go @@ -0,0 +1,487 @@ +package road + +import ( + "github.com/cyrilix/robocar-protobuf/go/events" + "go.uber.org/zap" + "gocv.io/x/gocv" + "image" + "image/color" + "math" +) + +type DetectorOption func(*Detector) + +func WithWhiteFilter(lowThreshold int, highThreshold int) DetectorOption { + return func(d *Detector) { + err := d.lowerWhite.Close() + if err != nil { + zap.S().Errorf("unable to close lower white filter: %v", err) + } + err = d.upperWhite.Close() + if err != nil { + zap.S().Errorf("unable to close upper white filter: %v", err) + } + + d.lowerWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(lowThreshold), Val2: float64(lowThreshold), 
Val3: float64(lowThreshold), Val4: float64(lowThreshold)}, gocv.MatTypeCV8U) + d.upperWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(highThreshold), Val2: float64(highThreshold), Val3: float64(highThreshold), Val4: float64(highThreshold)}, gocv.MatTypeCV8U) + } +} + +func WithYellowFilter(lower gocv.Mat, upper gocv.Mat) DetectorOption { + return func(d *Detector) { + err := d.lowerYellow.Close() + if err != nil { + zap.S().Errorf("unable to close lower yellow filter: %v", err) + } + err = d.upperYellow.Close() + if err != nil { + zap.S().Errorf("unable to close upper yellow filter: %v", err) + } + + d.lowerYellow = lower + d.upperYellow = upper + } +} + +func WithRegionOfInterest(imgWidth int, imgHeight int, horizon int) DetectorOption { + roi := buildRegionOfInterest(imgWidth, imgWidth, horizon) + return func(d *Detector) { + err := d.roiMask.Close() + if err != nil { + zap.S().Errorf("unable to close roi mask: %v", err) + } + d.roiMask = *roi + } +} + +func WithPointOnRoad(pt image.Point) DetectorOption { + return func(d *Detector) { + d.pointOnRoad = pt + } +} + +func WithCanny(lowThreshold int, highThreshold int) DetectorOption { + return func(d *Detector) { + d.cannyLowThreshold = float32(lowThreshold) + d.cannyHighThreshold = float32(highThreshold) + } +} + +func WithGaussianBlur(kernelSize int) DetectorOption { + return func(d *Detector) { + d.gaussianBlurKernelSize = kernelSize + } +} + +func WithHoughLines(rho int, theta float32, threshold int, minLineLength int, maxLineGap int) DetectorOption { + return func(d *Detector) { + d.houghLinesRho = float32(rho) + d.houghLinesTheta = theta + d.houghLinesThreshold = threshold + d.houghLinesMinLineLength = minLineLength + d.houghLinesMaxLineGap = maxLineGap + } +} + +func NewDetector(options ...DetectorOption) *Detector { + whiteThreshold := 20. 
+ + roiMask := buildRegionOfInterest(160, 120, 110) + + pointOnRoad := image.Point{X: 160 / 2, Y: 120 - 30} + + d := Detector{ + lowerWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: whiteThreshold, Val2: whiteThreshold, Val3: whiteThreshold, Val4: whiteThreshold}, gocv.MatTypeCV8U), + upperWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: 255., Val2: 255., Val3: 255., Val4: 255.}, gocv.MatTypeCV8U), + + lowerYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 90., Val2: 100., Val3: 100.}, gocv.MatTypeCV8U), + upperYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 110., Val2: 255., Val3: 255.}, gocv.MatTypeCV8U), + + cannyLowThreshold: 100., + cannyHighThreshold: 250., + + gaussianBlurKernelSize: 3, + + // Hough Transform + houghLinesRho: 2, // distance resolution in pixels of the Hough grid + houghLinesTheta: 1 * math.Pi / 180, // angular resolution in radians of the Hough grid + houghLinesThreshold: 15, // minimum number of votes (intersections in Hough grid cell) + houghLinesMinLineLength: 10, // minimum number of pixels making up a line + houghLinesMaxLineGap: 20, // maximum gap in pixels between connectable line segments + + roiMask: *roiMask, + + pointOnRoad: pointOnRoad, + } + + for _, option := range options { + option(&d) + } + return &d +} + +func buildRegionOfInterest(imgWidth int, imgHeight int, horizon int) *gocv.Mat { + // RegionOfInterest + // defining a blank roiMask to start with + roiMask := gocv.Zeros(imgHeight, imgWidth, gocv.MatTypeCV8U) + + vertices := gocv.NewPointsVectorFromPoints( + [][]image.Point{ + { + {0, imgHeight - horizon}, + {imgWidth, imgHeight - horizon}, + {imgWidth, imgHeight}, + {0, imgHeight}, + }, + }, + ) + defer vertices.Close() + ignoreMaskColor := color.RGBA{R: 255, G: 255, B: 255, A: 255} + gocv.FillPoly(&roiMask, vertices, ignoreMaskColor) + return &roiMask +} + +type Detector struct { + whiteThreshold float64 + lowerWhite gocv.Mat + upperWhite gocv.Mat + + lowerYellow gocv.Mat + upperYellow gocv.Mat + + cannyLowThreshold, cannyHighThreshold float32 + + gaussianBlurKernelSize int + + // Hough lines parameters + houghLinesRho, houghLinesTheta float32 + houghLinesThreshold int + houghLinesMinLineLength, houghLinesMaxLineGap int + + // Region Of interest + roiMask gocv.Mat + + pointOnRoad image.Point +} + +func (d *Detector) Close() { + defer func(lowerWhite *gocv.Mat) { + err := lowerWhite.Close() + if err != nil { + zap.S().Errorf("unable to close lowerWhite: %v", err) + } + }(&d.lowerWhite) + + defer func(upperWhite *gocv.Mat) { + err := upperWhite.Close() + if err != nil { + zap.S().Errorf("unable to close upperWhite: %v", err) + } + }(&d.upperWhite) + + defer func(lowerYellow *gocv.Mat) { + err := lowerYellow.Close() + if err != nil { + zap.S().Errorf("unable to close lowerYellow: %v", err) + } + }(&d.lowerYellow) + + defer func(upperYellow *gocv.Mat) { + err := upperYellow.Close() + if err != nil { + zap.S().Errorf("unable to close upperYellow: %v", err) + } + }(&d.upperYellow) + + defer func(roiMask *gocv.Mat) { + err := roiMask.Close() + if err != nil { + zap.S().Errorf("unable to close roiMask: %v", err) + } + }(&d.roiMask) +} + +func (d *Detector) Detect(img *gocv.Mat) ([]*events.Point, *events.Ellipse) { + // Only keep white and yellow pixels in the image, all other pixels become black + imgFiltered := d.filterColors(img) + defer func(imgFiltered *gocv.Mat) { + err := imgFiltered.Close() + if err != nil { + zap.S().Errorf("unable to close imgFiltered: %v", err) + } + }(imgFiltered) + + // # Read in and grayscale the image + imgGray := 
d.grayscale(imgFiltered) + defer func(imgGray *gocv.Mat) { + err := imgGray.Close() + if err != nil { + zap.S().Errorf("unable to close imgGray: %v", err) + } + }(imgGray) + + // Apply Gaussian smoothing + blurGray := d.gaussianBlur(imgGray) + defer func(blurGray *gocv.Mat) { + err := blurGray.Close() + if err != nil { + zap.S().Errorf("unable to close blurGray: %v", err) + } + }(blurGray) + + // Apply Canny Edge Detector + edges := d.canny(blurGray) + defer func(edges *gocv.Mat) { + err := edges.Close() + if err != nil { + zap.S().Errorf("unable to close edges: %v", err) + } + }(edges) + + maskedEdges := d.applyRegionOfInterest(edges) + defer func(maskedEdges *gocv.Mat) { + err := maskedEdges.Close() + if err != nil { + zap.S().Errorf("unable to close maskedEdges: %v", err) + } + }(maskedEdges) + + // Run Hough on edge detected image + road, ellipsis := d.getRoadShapeWithHoughLines(maskedEdges) + return road, ellipsis +} + +// Filter the image to include only yellow and white pixels +func (d *Detector) filterColors(img *gocv.Mat) *gocv.Mat { + + // Filter white pixels + whiteMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8UC3) + defer func(whiteMask *gocv.Mat) { + err := whiteMask.Close() + if err != nil { + zap.S().Errorf("unable to close whiteMask: %v", err) + } + }(&whiteMask) + gocv.InRange(*img, d.lowerWhite, d.upperWhite, &whiteMask) + // Convert one channel result to 3 channel mask + gocv.Merge([]gocv.Mat{whiteMask, whiteMask, whiteMask}, &whiteMask) + + whiteImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type()) + defer func(whiteImage *gocv.Mat) { + err := whiteImage.Close() + if err != nil { + zap.S().Errorf("unable to close whiteImage: %v", err) + } + }(&whiteImage) + gocv.BitwiseAnd(*img, whiteMask, &whiteImage) + + // Filter yellow pixels + hsv := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type()) + defer func(hsv *gocv.Mat) { + err := hsv.Close() + if err != nil { + zap.S().Errorf("unable to close hsv: %v", err) + } + }(&hsv) + gocv.CvtColor(*img, &hsv, gocv.ColorBGRToHSV) + + yellowMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type()) + defer func(yellowMask *gocv.Mat) { + err := yellowMask.Close() + if err != nil { + zap.S().Errorf("unable to close yellowMask: %v", err) + } + }(&yellowMask) + gocv.InRange(hsv, d.lowerYellow, d.upperYellow, &yellowMask) + // Convert one channel result to 3 channel mask + gocv.Merge([]gocv.Mat{yellowMask, yellowMask, yellowMask}, &yellowMask) + + yellowImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type()) + defer func(yellowImage *gocv.Mat) { + err := yellowImage.Close() + if err != nil { + zap.S().Errorf("unable to close yellowImage: %v", err) + } + }(&yellowImage) + gocv.BitwiseAnd(*img, yellowMask, &yellowImage) + + // Combine the two above images + image2 := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type()) + gocv.AddWeighted(whiteImage, 1., yellowImage, 1., 0., &image2) + return &image2 +} + +/* + Applies the Grayscale transform + +This will return an image with only one color channel +but NOTE: to see the returned image as grayscale +you should call plt.imshow(gray, cmap='gray') +*/ +func (d *Detector) grayscale(img *gocv.Mat) *gocv.Mat { + grayImg := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U) + gocv.CvtColor(*img, &grayImg, gocv.ColorBGRToGray) + + return &grayImg +} + +/* Applies the Canny transform */ +func (d *Detector) canny(img *gocv.Mat) *gocv.Mat { + edges := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U) + gocv.Canny(*img, &edges, 
d.cannyLowThreshold, d.cannyHighThreshold) + + return &edges +} + +func (d *Detector) gaussianBlur(img *gocv.Mat) *gocv.Mat { + blur := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U) + gocv.GaussianBlur(*img, &blur, image.Point{X: d.gaussianBlurKernelSize, Y: d.gaussianBlurKernelSize}, 0., 0., gocv.BorderDefault) + return &blur +} + +func (d *Detector) getRoadShapeWithHoughLines(img *gocv.Mat) ([]*events.Point, *events.Ellipse) { + lines := gocv.NewMat() + defer func(lines *gocv.Mat) { + err := lines.Close() + if err != nil { + zap.S().Errorf("unable to close lines mar: %v", err) + } + }(&lines) + + gocv.HoughLinesP(*img, &lines, d.houghLinesRho, d.houghLinesTheta, d.houghLinesThreshold) + + // Generate new image with detected edges + imgLines := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U) + defer func(imgLines *gocv.Mat) { + err := imgLines.Close() + if err != nil { + zap.S().Errorf("unable to close imgLines: %v", err) + } + }(&imgLines) + + for i := 0; i < lines.Rows(); i++ { + x1 := lines.GetIntAt(i, 0) + y1 := lines.GetIntAt(i, 1) + x2 := lines.GetIntAt(i, 2) + y2 := lines.GetIntAt(i, 3) + + gocv.Line(&imgLines, image.Point{X: int(x1), Y: int(y1)}, image.Point{X: int(x2), Y: int(y2)}, color.RGBA{ + R: 255, + G: 255, + B: 255, + A: 255, + }, 3) + } + + // Draw rectangle to add limit on image border + gocv.Rectangle(&imgLines, image.Rectangle{ + Min: image.Point{X: 0, Y: 0}, + Max: image.Point{X: img.Cols() - 1, Y: img.Rows() - 1}, + }, color.RGBA{R: 255, G: 255, B: 255, A: 255}, 1) + + kernel := gocv.Ones(8, 8, gocv.MatTypeCV8U) + defer func(kernel *gocv.Mat) { + err := kernel.Close() + if err != nil { + zap.S().Errorf("unable to close kernel: %v", err) + } + }(&kernel) + gocv.Dilate(imgLines, &imgLines, kernel) + gocv.Erode(imgLines, &imgLines, kernel) + + cnts := gocv.FindContours(imgLines, gocv.RetrievalList, gocv.ChainApproxSimple) + defer cnts.Close() + + for i := 0; i < cnts.Size(); i++ { + cnt := cnts.At(i) + pv := gocv.ApproxPolyDP(cnt, 0.01*gocv.ArcLength(cnt, true), true) + if gocv.PointPolygonTest(pv, d.pointOnRoad, false) > 0 { + ellipse := d.computeEllipsis(&pv) + cntr_result := make([]*events.Point, 0, pv.Size()) + for i := 0; i < pv.Size(); i++ { + pt := pv.At(i) + cntr_result = append(cntr_result, &events.Point{X: int32(pt.X), Y: int32(pt.Y)}) + } + return cntr_result, ellipse + } + } + return []*events.Point{}, &EllipseNotFound +} + +/* +Applies an image mask. + + Only keeps the region of the image defined by the polygon + formed from `vertices`. The rest of the image is set to black. 
+*/ +func (d *Detector) applyRegionOfInterest(img *gocv.Mat) *gocv.Mat { + + // returning the image only where mask pixels are nonzero + maskedImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U) + + gocv.BitwiseAnd(*img, d.roiMask, &maskedImage) + return &maskedImage +} + +var EllipseNotFound = events.Ellipse{Confidence: 0.} + +func (d *Detector) computeEllipsis(road *gocv.PointVector) *events.Ellipse { + if road.Size() < 5 { + return &EllipseNotFound + } + + rotatedRect := gocv.FitEllipse(*road) + + trust := d.computeTrustFromCenter(&rotatedRect.Center) + zap.S().Debugf("Trust: %v", trust) + + return &events.Ellipse{ + Center: &events.Point{ + X: int32(rotatedRect.Center.X), + Y: int32(rotatedRect.Center.Y), + }, + Width: int32(rotatedRect.Width), + Height: int32(rotatedRect.Height), + Angle: float32(rotatedRect.Angle), + Confidence: d.computeTrustFromCenter(&rotatedRect.Center), + } +} + +func (d *Detector) computeTrustFromCenter(ellipsisCenter *image.Point) float32 { + safeMinX := 48 + safeMaxX := 115 + safeMinY := 69 + safeMaxY := 119 + + if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX && safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY { + return 1.0 + } + + if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX { + return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y) + } + + if safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY { + return d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X) + } + + return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y) * d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X) +} + +func (d *Detector) computeTrustOnAxis(safeMax, safeMin, value int) float32 { + trust := 1. + if value > safeMax { + trust = 1. / float64(value-safeMax) + } else if value < safeMin { + trust = 1. / float64(safeMin-value) + } + trust = trust * 10. + if trust > 0.9 { + trust = 0.9 + } + if trust < 0. { + trust = 0. 
+ } + return float32(trust) + +} diff --git a/road/detect_test.go b/road/detect_test.go new file mode 100644 index 0000000..9062239 --- /dev/null +++ b/road/detect_test.go @@ -0,0 +1,50 @@ +package road + +import ( + "github.com/cyrilix/robocar-protobuf/go/events" + "gocv.io/x/gocv" + "reflect" + "testing" +) + +func TestDetector_Detect(t *testing.T) { + type fields struct { + } + type args struct { + imgName string + } + tests := []struct { + name string + fields fields + args args + want []*events.Point + }{ + { + name: "Straight ahead", + fields: fields{}, + args: args{imgName: "testdata/input.jpg"}, + want: []*events.Point{{X: 2, Y: 53}, {X: 48, Y: 37}, {X: 54, Y: 27}, {X: 72, Y: 26}, {X: 156, Y: 55}, {X: 159, Y: 118}, {X: 20, Y: 114}, {X: 3, Y: 119}}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := NewDetector() + defer d.Close() + img := gocv.IMRead(tt.args.imgName, gocv.IMReadColor) + defer func(img *gocv.Mat) { + err := img.Close() + if err != nil { + t.Errorf("unable to close image: %v", err) + } + }(&img) + if img.Empty() { + t.Errorf("unable to open image %v", tt.args.imgName) + t.Fail() + } + + if got, _ := d.Detect(&img); !reflect.DeepEqual(got, tt.want) { + t.Errorf("Detect() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/road/part.go b/road/part.go new file mode 100644 index 0000000..7b0aa05 --- /dev/null +++ b/road/part.go @@ -0,0 +1,119 @@ +package road + +import ( + "github.com/cyrilix/robocar-base/service" + "github.com/cyrilix/robocar-protobuf/go/events" + mqtt "github.com/eclipse/paho.mqtt.golang" + "go.uber.org/zap" + "gocv.io/x/gocv" + "google.golang.org/protobuf/proto" + "log" +) + +type RoadPart struct { + client mqtt.Client + frameChan chan frameToProcess + readyForNext chan interface{} + cancel chan interface{} + detector *Detector + cameraTopic, roadTopic string +} + +func NewPart(client mqtt.Client, cameraTopic, roadTopic string, detector *Detector) *RoadPart { + return &RoadPart{ + client: client, + frameChan: make(chan frameToProcess), + cancel: make(chan interface{}), + detector: detector, + cameraTopic: cameraTopic, + roadTopic: roadTopic, + } +} + +func (r *RoadPart) Start() error { + registerCallBacks(r) + + var frame = frameToProcess{} + defer func() { + if err := frame.Close(); err != nil { + zap.S().Errorf("unable to close msg: %v", err) + } + }() + + for { + select { + case f := <-r.frameChan: + oldFrame := frame + frame = f + if err := oldFrame.Close(); err != nil { + zap.S().Errorf("unable to close msg: %v", err) + } + go r.processFrame(&frame) + case <-r.cancel: + zap.S().Info("Stop service") + return nil + } + } +} + +var registerCallBacks = func(r *RoadPart) { + err := service.RegisterCallback(r.client, r.cameraTopic, r.OnFrame) + if err != nil { + log.Panicf("unable to register callback to topic %v:%v", r.cameraTopic, err) + } +} + +func (r *RoadPart) Stop() { + defer r.detector.Close() + close(r.readyForNext) + close(r.cancel) + service.StopService("road", r.client, r.roadTopic) +} + +func (r *RoadPart) OnFrame(_ mqtt.Client, msg mqtt.Message) { + var frameMsg events.FrameMessage + err := proto.Unmarshal(msg.Payload(), &frameMsg) + if err != nil { + zap.S().Errorf("unable to unmarshal %T message: %v", frameMsg, err) + return + } + + img, err := gocv.IMDecode(frameMsg.GetFrame(), gocv.IMReadUnchanged) + if err != nil { + zap.S().Errorf("unable to decode image: %v", err) + return + } + frame := frameToProcess{ + ref: frameMsg.GetId(), + Mat: img, + } + r.frameChan <- frame +} + +type frameToProcess 
struct {
+	ref *events.FrameRef
+	gocv.Mat
+}
+
+func (r *RoadPart) processFrame(frame *frameToProcess) {
+	img := frame.Mat
+
+	cntr, ellipse := r.detector.Detect(&img)
+
+	msg := events.RoadMessage{
+		Contour:  cntr,
+		Ellipse:  ellipse,
+		FrameRef: frame.ref,
+	}
+
+	payload, err := proto.Marshal(&msg)
+	if err != nil {
+		zap.S().Errorf("unable to marshal %T to protobuf: %v", msg, err)
+		return
+	}
+	publish(r.client, r.roadTopic, &payload)
+}
+
+var publish = func(client mqtt.Client, topic string, payload *[]byte) {
+	client.Publish(topic, 0, false, *payload)
+}
diff --git a/road/testdata/input.jpg b/road/testdata/input.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..5c56d923cda0c781571694e63b88776dc5e925fc
GIT binary patch
literal 9560
(binary JPEG payload omitted)
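A worked example of the confidence computed by computeTrustFromCenter above: the safe window is x in [48, 115] and y in [69, 119]. An ellipse centred inside that window scores 1.0. At (135, 90), y is in range but x overshoots the window by 20 px, so the confidence is min(10/20, 0.9) = 0.5; at (120, 90) the overshoot is only 5 px, so 10/5 = 2 is clamped to 0.9. When both axes are out of range, the two per-axis values are multiplied.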
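For quick experiments outside MQTT, the detector introduced in this patch can also be driven directly. The sketch below is not part of the patch: it builds a Detector with the same option values as the cmd/rc-road defaults (assuming a 160x120 frame and the committed test image path; the output file name is illustrative), runs Detect once, and writes the detected road contour as an overlay image.

package main

import (
	"image"
	"image/color"
	"log"
	"math"

	"github.com/cyrilix/robocar-road/road"
	"gocv.io/x/gocv"
)

func main() {
	// Option values mirror the defaults wired up in cmd/rc-road above.
	d := road.NewDetector(
		road.WithWhiteFilter(20, 255),
		road.WithCanny(100, 250),
		road.WithGaussianBlur(3),
		road.WithRegionOfInterest(160, 120, 110),
		road.WithPointOnRoad(image.Point{X: 80, Y: 90}),
		road.WithHoughLines(2, float32(1*math.Pi/180), 15, 10, 20),
	)
	defer d.Close()

	img := gocv.IMRead("road/testdata/input.jpg", gocv.IMReadColor)
	defer img.Close()
	if img.Empty() {
		log.Fatal("unable to read input image")
	}

	// Detect returns the road contour and the ellipse fitted on it.
	contour, ellipse := d.Detect(&img)
	if len(contour) == 0 {
		log.Fatal("no road detected")
	}
	log.Printf("contour points: %d, ellipse confidence: %.2f", len(contour), ellipse.Confidence)

	// Convert the protobuf points back to image.Point to draw the detected area.
	pts := make([]image.Point, 0, len(contour))
	for _, p := range contour {
		pts = append(pts, image.Point{X: int(p.X), Y: int(p.Y)})
	}
	pv := gocv.NewPointsVectorFromPoints([][]image.Point{pts})
	defer pv.Close()
	gocv.FillPoly(&img, pv, color.RGBA{B: 255, A: 128})
	if ok := gocv.IMWrite("road-overlay.jpg", img); !ok {
		log.Fatal("unable to write overlay image")
	}
}

With a display attached, the overlay could instead be shown in a gocv.NewWindow, as cmd/road-debug does.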