package road

import (
	"github.com/cyrilix/robocar-protobuf/go/events"
	"go.uber.org/zap"
	"gocv.io/x/gocv"
	"image"
	"image/color"
	"math"
)

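// DetectorOption configures a Detector built by NewDetector.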
type DetectorOption func(*Detector)
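
// WithWhiteFilter overrides the default thresholds of the white pixel filter.
// Both thresholds are applied to every channel of the input image.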
func WithWhiteFilter(lowThreshold int, highThreshold int) DetectorOption {
	return func(d *Detector) {
		err := d.lowerWhite.Close()
		if err != nil {
			zap.S().Errorf("unable to close lower white filter: %v", err)
		}
		err = d.upperWhite.Close()
		if err != nil {
			zap.S().Errorf("unable to close upper white filter: %v", err)
		}

		d.lowerWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(lowThreshold), Val2: float64(lowThreshold), Val3: float64(lowThreshold), Val4: float64(lowThreshold)}, gocv.MatTypeCV8U)
		d.upperWhite = gocv.NewMatFromScalar(gocv.Scalar{Val1: float64(highThreshold), Val2: float64(highThreshold), Val3: float64(highThreshold), Val4: float64(highThreshold)}, gocv.MatTypeCV8U)
	}
}

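// WithYellowFilter overrides the default HSV bounds used to detect yellow
// pixels. The detector takes ownership of the lower and upper mats and closes
// them in Close.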
func WithYellowFilter(lower gocv.Mat, upper gocv.Mat) DetectorOption {
	return func(d *Detector) {
		err := d.lowerYellow.Close()
		if err != nil {
			zap.S().Errorf("unable to close lower yellow filter: %v", err)
		}
		err = d.upperYellow.Close()
		if err != nil {
			zap.S().Errorf("unable to close upper yellow filter: %v", err)
		}

		d.lowerYellow = lower
		d.upperYellow = upper
	}
}

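// WithRegionOfInterest rebuilds the region-of-interest mask for the given
// image size and horizon.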
func WithRegionOfInterest(imgWidth int, imgHeight int, horizon int) DetectorOption {
	roi := buildRegionOfInterest(imgWidth, imgHeight, horizon)
	return func(d *Detector) {
		err := d.roiMask.Close()
		if err != nil {
			zap.S().Errorf("unable to close roi mask: %v", err)
		}
		d.roiMask = *roi
	}
}

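// WithPointOnRoad overrides the reference point expected to lie on the road
// surface.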
func WithPointOnRoad(pt image.Point) DetectorOption {
	return func(d *Detector) {
		d.pointOnRoad = pt
	}
}

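// WithCanny overrides the low and high thresholds of the Canny edge detector.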
func WithCanny(lowThreshold int, highThreshold int) DetectorOption {
	return func(d *Detector) {
		d.cannyLowThreshold = float32(lowThreshold)
		d.cannyHighThreshold = float32(highThreshold)
	}
}

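// WithGaussianBlur overrides the kernel size used for Gaussian smoothing.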
func WithGaussianBlur(kernelSize int) DetectorOption {
	return func(d *Detector) {
		d.gaussianBlurKernelSize = kernelSize
	}
}

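// WithHoughLines overrides the parameters of the probabilistic Hough
// transform: rho (pixels), theta (radians), the accumulator threshold, the
// minimum segment length and the maximum gap between connectable segments.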
func WithHoughLines(rho int, theta float32, threshold int, minLineLength int, maxLineGap int) DetectorOption {
	return func(d *Detector) {
		d.houghLinesRho = float32(rho)
		d.houghLinesTheta = theta
		d.houghLinesThreshold = threshold
		d.houghLinesMinLineLength = minLineLength
		d.houghLinesMaxLineGap = maxLineGap
	}
}

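// WithMonitor registers the monitor whose Increment method is called for
// each processed frame.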
func WithMonitor(monitor Monitor) DetectorOption {
	return func(d *Detector) {
		d.monitor = monitor
	}
}

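// NewDetector builds a road Detector with defaults tuned for 160x120 frames
// and then applies the given options.
//
// A minimal usage sketch (the option values and the img variable, a gocv.Mat
// holding the camera frame, are only illustrative):
//
//	detector := road.NewDetector(
//		road.WithRegionOfInterest(160, 120, 110),
//		road.WithCanny(100, 250),
//	)
//	defer detector.Close()
//	contour, ellipse := detector.Detect(&img)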
func NewDetector(options ...DetectorOption) *Detector {
	whiteThreshold := 20.

	roiMask := buildRegionOfInterest(160, 120, 110)

	pointOnRoad := image.Point{X: 160 / 2, Y: 120 - 30}

	d := Detector{
		lowerWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: whiteThreshold, Val2: whiteThreshold, Val3: whiteThreshold, Val4: whiteThreshold}, gocv.MatTypeCV8U),
		upperWhite: gocv.NewMatFromScalar(gocv.Scalar{Val1: 255., Val2: 255., Val3: 255., Val4: 255.}, gocv.MatTypeCV8U),

		lowerYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 90., Val2: 100., Val3: 100.}, gocv.MatTypeCV8U),
		upperYellow: gocv.NewMatFromScalar(gocv.Scalar{Val1: 110., Val2: 255., Val3: 255.}, gocv.MatTypeCV8U),

		cannyLowThreshold:  100.,
		cannyHighThreshold: 250.,

		gaussianBlurKernelSize: 3,

		// Hough Transform
		houghLinesRho:           2,                 // distance resolution in pixels of the Hough grid
		houghLinesTheta:         1 * math.Pi / 180, // angular resolution in radians of the Hough grid
		houghLinesThreshold:     15,                // minimum number of votes (intersections in Hough grid cell)
		houghLinesMinLineLength: 10,                // minimum number of pixels making up a line
		houghLinesMaxLineGap:    20,                // maximum gap in pixels between connectable line segments

		roiMask: *roiMask,

		pointOnRoad: pointOnRoad,

		monitor: &FakeMonitor{},
	}

	for _, option := range options {
		option(&d)
	}
	return &d
}

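// buildRegionOfInterest returns a binary mask that keeps only the bottom of
// the frame, from row imgHeight-horizon down to the lower edge.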
func buildRegionOfInterest(imgWidth int, imgHeight int, horizon int) *gocv.Mat {
	// RegionOfInterest
	// defining a blank roiMask to start with
	roiMask := gocv.Zeros(imgHeight, imgWidth, gocv.MatTypeCV8U)

	vertices := gocv.NewPointsVectorFromPoints(
		[][]image.Point{
			{
				{0, imgHeight - horizon},
				{imgWidth, imgHeight - horizon},
				{imgWidth, imgHeight},
				{0, imgHeight},
			},
		},
	)
	defer vertices.Close()
	ignoreMaskColor := color.RGBA{R: 255, G: 255, B: 255, A: 255}
	gocv.FillPoly(&roiMask, vertices, ignoreMaskColor)
	return &roiMask
}

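// Detector extracts the road shape from camera frames with a classic OpenCV
// pipeline: color filtering, grayscale conversion, Gaussian blur, Canny edge
// detection, region-of-interest masking and a probabilistic Hough transform.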
type Detector struct {
	whiteThreshold float64
	lowerWhite     gocv.Mat
	upperWhite     gocv.Mat

	lowerYellow gocv.Mat
	upperYellow gocv.Mat

	cannyLowThreshold, cannyHighThreshold float32

	gaussianBlurKernelSize int

	// Hough lines parameters
	houghLinesRho, houghLinesTheta                float32
	houghLinesThreshold                           int
	houghLinesMinLineLength, houghLinesMaxLineGap int

	// Region of interest
	roiMask gocv.Mat

	pointOnRoad image.Point

	monitor Monitor
}

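// Close releases the OpenCV resources held by the detector.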
func (d *Detector) Close() {
	defer func(lowerWhite *gocv.Mat) {
		err := lowerWhite.Close()
		if err != nil {
			zap.S().Errorf("unable to close lowerWhite: %v", err)
		}
	}(&d.lowerWhite)

	defer func(upperWhite *gocv.Mat) {
		err := upperWhite.Close()
		if err != nil {
			zap.S().Errorf("unable to close upperWhite: %v", err)
		}
	}(&d.upperWhite)

	defer func(lowerYellow *gocv.Mat) {
		err := lowerYellow.Close()
		if err != nil {
			zap.S().Errorf("unable to close lowerYellow: %v", err)
		}
	}(&d.lowerYellow)

	defer func(upperYellow *gocv.Mat) {
		err := upperYellow.Close()
		if err != nil {
			zap.S().Errorf("unable to close upperYellow: %v", err)
		}
	}(&d.upperYellow)

	defer func(roiMask *gocv.Mat) {
		err := roiMask.Close()
		if err != nil {
			zap.S().Errorf("unable to close roiMask: %v", err)
		}
	}(&d.roiMask)
}

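// Detect returns the contour of the road found in img and an ellipse fitted
// on this contour. When no suitable contour is found, it returns an empty
// contour and EllipseNotFound.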
func (d *Detector) Detect(img *gocv.Mat) ([]*events.Point, *events.Ellipse) {
	// Only keep white and yellow pixels in the image, all other pixels become black
	imgFiltered := d.filterColors(img)
	defer func(imgFiltered *gocv.Mat) {
		err := imgFiltered.Close()
		if err != nil {
			zap.S().Errorf("unable to close imgFiltered: %v", err)
		}
	}(imgFiltered)

	// Convert the filtered image to grayscale
	imgGray := d.grayscale(imgFiltered)
	defer func(imgGray *gocv.Mat) {
		err := imgGray.Close()
		if err != nil {
			zap.S().Errorf("unable to close imgGray: %v", err)
		}
	}(imgGray)

	// Apply Gaussian smoothing
	blurGray := d.gaussianBlur(imgGray)
	defer func(blurGray *gocv.Mat) {
		err := blurGray.Close()
		if err != nil {
			zap.S().Errorf("unable to close blurGray: %v", err)
		}
	}(blurGray)

	// Apply Canny Edge Detector
	edges := d.canny(blurGray)
	defer func(edges *gocv.Mat) {
		err := edges.Close()
		if err != nil {
			zap.S().Errorf("unable to close edges: %v", err)
		}
	}(edges)

	// Keep only the region of interest
	maskedEdges := d.applyRegionOfInterest(edges)
	defer func(maskedEdges *gocv.Mat) {
		err := maskedEdges.Close()
		if err != nil {
			zap.S().Errorf("unable to close maskedEdges: %v", err)
		}
	}(maskedEdges)

	// Run Hough on edge detected image
	road, ellipse := d.getRoadShapeWithHoughLines(maskedEdges)

	d.monitor.Increment()

	return road, ellipse
}

// filterColors filters the image to keep only the yellow and white pixels.
func (d *Detector) filterColors(img *gocv.Mat) *gocv.Mat {

	// Filter white pixels
	whiteMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8UC3)
	defer func(whiteMask *gocv.Mat) {
		err := whiteMask.Close()
		if err != nil {
			zap.S().Errorf("unable to close whiteMask: %v", err)
		}
	}(&whiteMask)
	gocv.InRange(*img, d.lowerWhite, d.upperWhite, &whiteMask)
	// Convert one channel result to 3 channel mask
	gocv.Merge([]gocv.Mat{whiteMask, whiteMask, whiteMask}, &whiteMask)

	whiteImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
	defer func(whiteImage *gocv.Mat) {
		err := whiteImage.Close()
		if err != nil {
			zap.S().Errorf("unable to close whiteImage: %v", err)
		}
	}(&whiteImage)
	gocv.BitwiseAnd(*img, whiteMask, &whiteImage)

	// Filter yellow pixels
	hsv := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
	defer func(hsv *gocv.Mat) {
		err := hsv.Close()
		if err != nil {
			zap.S().Errorf("unable to close hsv: %v", err)
		}
	}(&hsv)
	gocv.CvtColor(*img, &hsv, gocv.ColorBGRToHSV)

	yellowMask := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
	defer func(yellowMask *gocv.Mat) {
		err := yellowMask.Close()
		if err != nil {
			zap.S().Errorf("unable to close yellowMask: %v", err)
		}
	}(&yellowMask)
	gocv.InRange(hsv, d.lowerYellow, d.upperYellow, &yellowMask)
	// Convert one channel result to 3 channel mask
	gocv.Merge([]gocv.Mat{yellowMask, yellowMask, yellowMask}, &yellowMask)

	yellowImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
	defer func(yellowImage *gocv.Mat) {
		err := yellowImage.Close()
		if err != nil {
			zap.S().Errorf("unable to close yellowImage: %v", err)
		}
	}(&yellowImage)
	gocv.BitwiseAnd(*img, yellowMask, &yellowImage)

	// Combine the two above images
	image2 := gocv.NewMatWithSize(img.Rows(), img.Cols(), img.Type())
	gocv.AddWeighted(whiteImage, 1., yellowImage, 1., 0., &image2)
	return &image2
}

// grayscale applies the grayscale transform and returns an image with a
// single color channel.
func (d *Detector) grayscale(img *gocv.Mat) *gocv.Mat {
	grayImg := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
	gocv.CvtColor(*img, &grayImg, gocv.ColorBGRToGray)

	return &grayImg
}

// canny applies the Canny edge detector with the configured thresholds.
func (d *Detector) canny(img *gocv.Mat) *gocv.Mat {
	edges := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
	gocv.Canny(*img, &edges, d.cannyLowThreshold, d.cannyHighThreshold)

	return &edges
}

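// gaussianBlur applies Gaussian smoothing with the configured kernel size.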
func (d *Detector) gaussianBlur(img *gocv.Mat) *gocv.Mat {
	blur := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
	gocv.GaussianBlur(*img, &blur, image.Point{X: d.gaussianBlurKernelSize, Y: d.gaussianBlurKernelSize}, 0., 0., gocv.BorderDefault)
	return &blur
}

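// getRoadShapeWithHoughLines runs the probabilistic Hough transform on the
// edge image, merges the detected segments into closed shapes and returns the
// contour containing the configured point on road, together with an ellipse
// fitted on it.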
func (d *Detector) getRoadShapeWithHoughLines(img *gocv.Mat) ([]*events.Point, *events.Ellipse) {
	lines := gocv.NewMat()
	defer func(lines *gocv.Mat) {
		err := lines.Close()
		if err != nil {
			zap.S().Errorf("unable to close lines mat: %v", err)
		}
	}(&lines)

	gocv.HoughLinesPWithParams(*img, &lines, d.houghLinesRho, d.houghLinesTheta, d.houghLinesThreshold, float32(d.houghLinesMinLineLength), float32(d.houghLinesMaxLineGap))

	// Generate new image with detected edges
	imgLines := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)
	defer func(imgLines *gocv.Mat) {
		err := imgLines.Close()
		if err != nil {
			zap.S().Errorf("unable to close imgLines: %v", err)
		}
	}(&imgLines)

	// Draw each detected segment as a thick white line
	for i := 0; i < lines.Rows(); i++ {
		x1 := lines.GetIntAt(i, 0)
		y1 := lines.GetIntAt(i, 1)
		x2 := lines.GetIntAt(i, 2)
		y2 := lines.GetIntAt(i, 3)

		gocv.Line(&imgLines, image.Point{X: int(x1), Y: int(y1)}, image.Point{X: int(x2), Y: int(y2)}, color.RGBA{
			R: 255,
			G: 255,
			B: 255,
			A: 255,
		}, 3)
	}

	// Draw rectangle to add limit on image border
	gocv.Rectangle(&imgLines, image.Rectangle{
		Min: image.Point{X: 0, Y: 0},
		Max: image.Point{X: img.Cols() - 1, Y: img.Rows() - 1},
	}, color.RGBA{R: 255, G: 255, B: 255, A: 255}, 1)

	// Close small gaps between segments with a dilate/erode pass
	kernel := gocv.Ones(8, 8, gocv.MatTypeCV8U)
	defer func(kernel *gocv.Mat) {
		err := kernel.Close()
		if err != nil {
			zap.S().Errorf("unable to close kernel: %v", err)
		}
	}(&kernel)
	gocv.Dilate(imgLines, &imgLines, kernel)
	gocv.Erode(imgLines, &imgLines, kernel)

	cnts := gocv.FindContours(imgLines, gocv.RetrievalList, gocv.ChainApproxSimple)
	defer cnts.Close()

	// Return the first contour that contains the configured point on road
	for i := 0; i < cnts.Size(); i++ {
		cnt := cnts.At(i)
		pv := gocv.ApproxPolyDP(cnt, 0.01*gocv.ArcLength(cnt, true), true)
		defer pv.Close()
		if gocv.PointPolygonTest(pv, d.pointOnRoad, false) > 0 {
			ellipse := d.computeEllipsis(&pv)
			cntrResult := make([]*events.Point, 0, pv.Size())
			for j := 0; j < pv.Size(); j++ {
				pt := pv.At(j)
				cntrResult = append(cntrResult, &events.Point{X: int32(pt.X), Y: int32(pt.Y)})
			}
			return cntrResult, ellipse
		}
	}
	return []*events.Point{}, &EllipseNotFound
}

// applyRegionOfInterest applies the region-of-interest mask: only the region
// of the image covered by the mask built by buildRegionOfInterest is kept,
// the rest of the image is set to black.
func (d *Detector) applyRegionOfInterest(img *gocv.Mat) *gocv.Mat {

	// returning the image only where mask pixels are nonzero
	maskedImage := gocv.NewMatWithSize(img.Rows(), img.Cols(), gocv.MatTypeCV8U)

	gocv.BitwiseAnd(*img, d.roiMask, &maskedImage)
	return &maskedImage
}

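// EllipseNotFound is returned when no road contour containing the point on
// road has been detected.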
var EllipseNotFound = events.Ellipse{Confidence: 0.}
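
// computeEllipsis fits an ellipse on the road contour and scores its
// confidence from the position of the ellipse center.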
func (d *Detector) computeEllipsis(road *gocv.PointVector) *events.Ellipse {
	if road.Size() < 5 {
		return &EllipseNotFound
	}

	rotatedRect := gocv.FitEllipse(*road)

	trust := d.computeTrustFromCenter(&rotatedRect.Center)
	zap.S().Debugf("Trust: %v", trust)

	return &events.Ellipse{
		Center: &events.Point{
			X: int32(rotatedRect.Center.X),
			Y: int32(rotatedRect.Center.Y),
		},
		Width:      int32(rotatedRect.Width),
		Height:     int32(rotatedRect.Height),
		Angle:      float32(rotatedRect.Angle),
		Confidence: trust,
	}
}

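// computeTrustFromCenter scores how close the ellipse center is to a safe
// zone of the frame: 1.0 inside the zone, decreasing with the distance to it
// outside.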
func (d *Detector) computeTrustFromCenter(ellipsisCenter *image.Point) float32 {
	safeMinX := 48
	safeMaxX := 115
	safeMinY := 69
	safeMaxY := 119

	if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX && safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY {
		return 1.0
	}

	if safeMinX <= ellipsisCenter.X && ellipsisCenter.X <= safeMaxX {
		return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y)
	}

	if safeMinY <= ellipsisCenter.Y && ellipsisCenter.Y <= safeMaxY {
		return d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X)
	}

	return d.computeTrustOnAxis(safeMaxY, safeMinY, ellipsisCenter.Y) * d.computeTrustOnAxis(safeMaxX, safeMinX, ellipsisCenter.X)
}

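// computeTrustOnAxis returns a confidence between 0 and 0.9 for a single
// coordinate, decreasing as the value moves away from the [safeMin, safeMax]
// range.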
func (d *Detector) computeTrustOnAxis(safeMax, safeMin, value int) float32 {
	trust := 1.
	if value > safeMax {
		trust = 1. / float64(value-safeMax)
	} else if value < safeMin {
		trust = 1. / float64(safeMin-value)
	}
	trust = trust * 10.
	if trust > 0.9 {
		trust = 0.9
	}
	if trust < 0. {
		trust = 0.
	}
	return float32(trust)
}
|