OpenCV Support

Tags: think and control
Personhours: 8
By: Arjun

Task: Add OpenCV support to the vision pipeline

We recently refactored our vision code so that we can easily swap out vision implementations. We had already written a TensorFlow provider, but not an OpenCV one. Using the GRIP pipeline we designed earlier, we wrote a class called OpenCVIntegration, which implements VisionProvider and lets us use OpenCV in place of TensorFlow.
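For reference, here is roughly what the VisionProvider interface looks like. This is a sketch reconstructed from the methods OpenCVIntegration implements, not the exact source:

// Sketch of VisionProvider, inferred from the methods OpenCVIntegration
// implements; the actual interface in our repo may differ slightly.
public interface VisionProvider {
    void initializeVision(HardwareMap hardwareMap, Telemetry telemetry);
    void shutdownVision();
    GoldPos detect();
}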
Our code for OpenCVIntegration is shown below.

// Imports shown for context; RC, VisionUtils, GoldPos, VisionProvider, and
// RoverRuckusGripPipeline are our own team classes.
import android.graphics.Bitmap;

import com.qualcomm.robotcore.hardware.HardwareMap;
import com.vuforia.Image;
import com.vuforia.PIXEL_FORMAT;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;

import java.util.List;
import java.util.Queue;

public class OpenCVIntegration implements VisionProvider {

    private VuforiaLocalizer vuforia;
    private Queue<VuforiaLocalizer.CloseableFrame> q;
    // detect() is a state machine; state tracks which step runs on the next call.
    private int state = -3;
    private Mat mat;
    private List<MatOfPoint> contours;
    private Point lowest;

    private void initVuforia() {
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
        parameters.vuforiaLicenseKey = RC.VUFORIA_LICENSE_KEY;
        parameters.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
        vuforia = ClassFactory.getInstance().createVuforia(parameters);
    }

    public void initializeVision(HardwareMap hardwareMap, Telemetry telemetry) {
        initVuforia();
        q = vuforia.getFrameQueue();
        state = -2;
    }

    public void shutdownVision() {}

    public GoldPos detect() {
        if (state == -2) {
            // Step 1: grab a frame from Vuforia and convert it to an OpenCV Mat.
            if (q.isEmpty())
                return GoldPos.HOLD_STATE;
            VuforiaLocalizer.CloseableFrame frame = q.poll();
            Image img = VisionUtils.getImageFromFrame(frame, PIXEL_FORMAT.RGB565);
            Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(img.getPixels());
            frame.close(); // release the frame once its pixels are copied out
            mat = VisionUtils.bitmapToMat(bm, CvType.CV_8UC3);
        } else if (state == -1) {
            // Step 2: run the GRIP-generated pipeline to find candidate contours.
            RoverRuckusGripPipeline pipeline = new RoverRuckusGripPipeline();
            pipeline.process(mat);
            contours = pipeline.filterContoursOutput();
        } else if (state == 0) {
            // Step 3: seed the search with the first contour's centroid.
            if (contours.size() == 0)
                return GoldPos.NONE_FOUND;
            lowest = centroidish(contours.get(0));
        } else if (state < contours.size()) {
            // Steps 4..n: check one contour per call, keeping the smallest-y centroid.
            Point centroid = centroidish(contours.get(state));
            if (lowest.y > centroid.y)
                lowest = centroid;
        } else if (state == contours.size()) {
            // Final step: classify the winning centroid's x position into thirds.
            if (lowest.x < 320d / 3)
                return GoldPos.LEFT;
            else if (lowest.x < 640d / 3)
                return GoldPos.MIDDLE;
            else
                return GoldPos.RIGHT;
        } else {
            return GoldPos.ERROR2;
        }
        state++;
        return GoldPos.HOLD_STATE;
    }

    // Approximate a contour's centroid as the center of its bounding box.
    private static Point centroidish(MatOfPoint matOfPoint) {
        Rect br = Imgproc.boundingRect(matOfPoint);
        return new Point(br.x + br.width / 2, br.y + br.height / 2);
    }
}
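Note that detect() is written as a state machine: each call does one small piece of work (grab a frame, run the pipeline, examine one contour) and returns GoldPos.HOLD_STATE until a final answer is ready, so the robot's main loop never blocks on vision. A caller would poll it roughly like this (hypothetical opmode snippet; the loop structure is assumed for illustration, not taken from our actual code):

// Hypothetical usage from a LinearOpMode; loop structure is illustrative.
VisionProvider provider = new OpenCVIntegration();
provider.initializeVision(hardwareMap, telemetry);

GoldPos result = GoldPos.HOLD_STATE;
while (opModeIsActive() && result == GoldPos.HOLD_STATE) {
    result = provider.detect(); // advances one state per iteration
}
telemetry.addData("Gold position", result);
telemetry.update();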

Date | December 31, 2018