That’s what I originally anticipated more advanced teams would gravitate towards. I’m attaching a sample from our internal repository that shows how to use the TFOD VisionProcessor from EOCV; you could use it as a reference for doing the same with the AprilTag one (see the sketch after the sample).

That being said, we still have not confirmed whether the issue is actually with your custom processor or not.
/*
* Copyright (c) 2023 FIRST
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior
* written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.tests;

import android.graphics.Canvas;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibrationHelper;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibrationIdentity;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

import org.opencv.core.Mat;

import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
import org.openftc.easyopencv.OpenCvCameraRotation;
import org.openftc.easyopencv.OpenCvInternalCamera2;
import org.openftc.easyopencv.OpenCvWebcam;
import org.openftc.easyopencv.TimestampedOpenCvPipeline;
@TeleOp
@Disabled
public class TestTfodProcessorWithEocv extends LinearOpMode
{
    boolean USE_WEBCAM = false;

    OpenCvCamera camera;
    TfodProcessor tfProcessor;

    private static final String TFOD_MODEL_ASSET = "CenterStage.tflite";
    private static final String[] TFOD_LABELS = {
            "Pixel"
    };
    @Override
    public void runOpMode()
    {
        // Build the TFOD processor exactly as you would when using the VisionPortal
        tfProcessor = new TfodProcessor.Builder()
                .setModelAssetName(TFOD_MODEL_ASSET)
                .setModelLabels(TFOD_LABELS)
                .setIsModelTensorFlow2(true)
                .setIsModelQuantized(true)
                .setModelInputSize(300)
                .setModelAspectRatio(16.0 / 9.0)
                .build();
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());

        if (USE_WEBCAM)
        {
            camera = OpenCvCameraFactory.getInstance().createWebcam(hardwareMap.get(WebcamName.class, "Webcam 1"), cameraMonitorViewId);
        }
        else
        {
            camera = OpenCvCameraFactory.getInstance().createInternalCamera2(OpenCvInternalCamera2.CameraDirection.BACK, cameraMonitorViewId);
        }

        camera.setViewportRenderer(OpenCvCamera.ViewportRenderer.NATIVE_VIEW);
        camera.setViewportRenderingPolicy(OpenCvCamera.ViewportRenderingPolicy.OPTIMIZE_VIEW);

        camera.openCameraDeviceAsync(new OpenCvCamera.AsyncCameraOpenListener()
        {
            @Override
            public void onOpened()
            {
                // Wrap the processor in an EOCV pipeline adapter
                MyPipeline myPipeline = new MyPipeline(tfProcessor);

                // Webcams have a calibration identity we can use to look up lens intrinsics
                if (camera instanceof OpenCvWebcam)
                {
                    myPipeline.noteCalibrationIdentity(((OpenCvWebcam) camera).getCalibrationIdentity());
                }

                camera.startStreaming(640, 480, OpenCvCameraRotation.SENSOR_NATIVE);
                camera.setPipeline(myPipeline);
            }

            @Override
            public void onError(int errorCode)
            {
            }
        });
        waitForStart();

        while (opModeIsActive())
        {
            for (Recognition recognition : tfProcessor.getRecognitions())
            {
                telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
            }

            // Push telemetry every loop iteration so recognitions actually show up
            telemetry.update();
        }
    }
    static class MyPipeline extends TimestampedOpenCvPipeline
    {
        private VisionProcessor processor;
        private CameraCalibrationIdentity ident;

        public MyPipeline(VisionProcessor processor)
        {
            this.processor = processor;
        }

        public void noteCalibrationIdentity(CameraCalibrationIdentity ident)
        {
            this.ident = ident;
        }

        @Override
        public void init(Mat firstFrame)
        {
            // Look up the camera calibration (may be null for the internal camera)
            // and initialize the processor with the stream resolution
            CameraCalibration calibration = CameraCalibrationHelper.getInstance().getCalibration(ident, firstFrame.width(), firstFrame.height());
            processor.init(firstFrame.width(), firstFrame.height(), calibration);
        }

        @Override
        public Mat processFrame(Mat input, long captureTimeNanos)
        {
            // Forward the frame to the processor and stash its user context
            // so it can be handed back in onDrawFrame()
            Object drawCtx = processor.processFrame(input, captureTimeNanos);
            requestViewportDrawHook(drawCtx);
            return input;
        }

        @Override
        public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext)
        {
            // Let the processor draw its annotations on the live viewport
            processor.onDrawFrame(canvas, onscreenWidth, onscreenHeight, scaleBmpPxToCanvasPx, scaleCanvasDensity, userContext);
        }
    }
}
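For the AprilTag case, MyPipeline should work as-is, since it only depends on the VisionProcessor interface; only the processor construction and the polling loop change. A minimal, untested sketch of those two pieces inside runOpMode (the builder options shown are just examples, and the default tag library is assumed):

// Requires:
//   import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
//   import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;

// Build the AprilTag processor instead of the TFOD one
AprilTagProcessor aprilTagProcessor = new AprilTagProcessor.Builder()
        .setDrawTagOutline(true)
        .setDrawAxes(true)
        .build();

// ...open the camera and attach new MyPipeline(aprilTagProcessor) exactly as above...

while (opModeIsActive())
{
    // Poll tag detections instead of TFOD recognitions
    for (AprilTagDetection detection : aprilTagProcessor.getDetections())
    {
        telemetry.addData("Tag", "ID %d", detection.id);
    }

    telemetry.update();
}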