package org.firstinspires.ftc.teamcode.Vision;

import org.firstinspires.ftc.robotcore.external.Telemetry;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.openftc.easyopencv.OpenCvPipeline;

//@Disabled
public class RedSightPipeline extends OpenCvPipeline {
    /*
     * An enum to define the skystone position
     */
    public enum SkystonePosition
    {
        LEFT,
        CENTER,
        RIGHT
    }

    /*
     * Some color constants
     */
    public final Scalar BLUE = new Scalar(0, 0, 255);
    public final Scalar GREEN = new Scalar(0, 255, 0);

    /*
     * The core values which define the location and size of the sample regions
     */
    static final Point REGION1_TOPLEFT_ANCHOR_POINT = new Point(0 / 2, 300 / 2);
    static final Point REGION2_TOPLEFT_ANCHOR_POINT = new Point(277 / 2, 300 / 2);
    static final Point REGION3_TOPLEFT_ANCHOR_POINT = new Point(275, 300 / 2);
    static final int REGION_WIDTH = 45;
    static final int REGION_HEIGHT = 45;
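
    // NOTE: these anchor values appear to assume a 320x240 stream (region 3 spans
    // x = 275..320, and the "/ 2" terms read like halved 640x480 coordinates).
    // This is an inference, not stated in the original; re-tune the anchors if the
    // camera streams at a different resolution.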

    /*
     * Points which actually define the sample region rectangles, derived from above values
     *
     * Example of how points A and B work to define a rectangle
     *
     *   ------------------------------------
     *   | (0,0) Point A                    |
     *   |                                  |
     *   |                                  |
     *   |                                  |
     *   |                                  |
     *   |                                  |
     *   |                                  |
     *   |                  Point B (70,50) |
     *   ------------------------------------
     *
     */
    Point region1_pointA = new Point(
            REGION1_TOPLEFT_ANCHOR_POINT.x,
            REGION1_TOPLEFT_ANCHOR_POINT.y);
    Point region1_pointB = new Point(
            REGION1_TOPLEFT_ANCHOR_POINT.x + REGION_WIDTH,
            REGION1_TOPLEFT_ANCHOR_POINT.y + REGION_HEIGHT);
    Point region2_pointA = new Point(
            REGION2_TOPLEFT_ANCHOR_POINT.x,
            REGION2_TOPLEFT_ANCHOR_POINT.y);
    Point region2_pointB = new Point(
            REGION2_TOPLEFT_ANCHOR_POINT.x + REGION_WIDTH,
            REGION2_TOPLEFT_ANCHOR_POINT.y + REGION_HEIGHT);
    Point region3_pointA = new Point(
            REGION3_TOPLEFT_ANCHOR_POINT.x,
            REGION3_TOPLEFT_ANCHOR_POINT.y);
    Point region3_pointB = new Point(
            REGION3_TOPLEFT_ANCHOR_POINT.x + REGION_WIDTH,
            REGION3_TOPLEFT_ANCHOR_POINT.y + REGION_HEIGHT);

    /*
     * Working variables
     */
    Mat region1_Cb, region2_Cb, region3_Cb;
    Mat YCrCb = new Mat();
    Mat Cb = new Mat();
    int avg1, avg2, avg3;

    // Volatile since accessed by OpMode thread w/o synchronization
    private volatile SkystonePosition position = SkystonePosition.LEFT;

    private Telemetry telemetry;

    public RedSightPipeline(Telemetry telemetry) {
        this.telemetry = telemetry;
    }

    /*
     * This function takes the RGB frame, converts it to YCrCb,
     * and extracts one chroma channel into the 'Cb' variable.
     *
     * Note: in OpenCV's YCrCb ordering, channel 0 is Y (luma, essentially a B&W
     * image), channel 1 is Cr (red difference), and channel 2 is Cb (blue
     * difference). This pipeline extracts channel 1 (Cr); the field keeps the
     * 'Cb' name from the SkyStone example it was adapted from.
     */
    void inputToCb(Mat input)
    {
        Imgproc.cvtColor(input, YCrCb, Imgproc.COLOR_RGB2YCrCb);
        Core.extractChannel(YCrCb, Cb, 1); // channel 1 = Cr (red difference)
    }

    @Override
    public void init(Mat firstFrame)
    {
        /*
         * We need to call this in order to make sure the 'Cb'
         * object is initialized, so that the submats we make
         * will still be linked to it on subsequent frames. (If
         * the object were to only be initialized in processFrame,
         * then the submats would become delinked because the backing
         * buffer would be re-allocated the first time a real frame
         * was crunched)
         */
        inputToCb(firstFrame);

        /*
         * Submats are a persistent reference to a region of the parent
         * buffer. Any changes to the child affect the parent, and the
         * reverse also holds true.
         */
        region1_Cb = Cb.submat(new Rect(region1_pointA, region1_pointB));
        region2_Cb = Cb.submat(new Rect(region2_pointA, region2_pointB));
        region3_Cb = Cb.submat(new Rect(region3_pointA, region3_pointB));
    }

    @Override
    public Mat processFrame(Mat input)
    {
        /*
         * Overview of what we're doing:
         *
         * We first convert to YCrCb color space, from RGB color space.
         * Why do we do this? Well, in the RGB color space, chroma and
         * luma are intertwined. In YCrCb, chroma and luma are separated.
         * YCrCb is a 3-channel color space, just like RGB. YCrCb's 3 channels
         * are Y, the luma channel (which is essentially just a B&W image), the
         * Cr channel, which records the difference from red, and the Cb channel,
         * which records the difference from blue. Because chroma and luma are
         * not related in YCrCb, vision code written to look for certain values
         * in the Cr/Cb channels will not be severely affected by differing
         * light intensity, since that difference would most likely just be
         * reflected in the Y channel.
         *
         * After we've converted to YCrCb, we extract a single chroma channel:
         * channel index 1, i.e. Cr, the red-difference channel (the working
         * variable is still named 'Cb'). Red objects contrast strongly on this
         * channel against everything else.
         *
         * We then take the average pixel value of 3 different regions on that
         * channel, one positioned over each possible location. The region with
         * the LOWEST average is the one this pipeline selects (the comparison
         * below uses Math.min).
         *
         * We also draw rectangles on the screen showing where the sample regions
         * are, as well as drawing a solid rectangle over top of the sample region
         * that was selected.
         *
         * In order for this whole process to work correctly, each sample region
         * should be positioned in the center of one of the three possible
         * locations, and be small enough such that only the target is sampled,
         * and not any of the surroundings.
         */

        /*
         * Get the Cr channel (stored in 'Cb') of the input frame after conversion to YCrCb
         */
        inputToCb(input);

        /*
         * Compute the average pixel value of each submat region. We're
         * taking the average of a single-channel buffer, so the value
         * we need is at index 0. (We could also have taken the mean of
         * the full 3-channel YCrCb image and read the extracted channel's
         * index there instead.)
         */
        avg1 = (int) Core.mean(region1_Cb).val[0];
        avg2 = (int) Core.mean(region2_Cb).val[0];
        avg3 = (int) Core.mean(region3_Cb).val[0];

        /*
         * Draw a rectangle showing sample region 1 on the screen.
         * Simply a visual aid. Serves no functional purpose.
         */
        Imgproc.rectangle(
                input, // Buffer to draw on
                region1_pointA, // First point which defines the rectangle
                region1_pointB, // Second point which defines the rectangle
                BLUE, // The color the rectangle is drawn in
                2); // Thickness of the rectangle lines

        /*
         * Draw a rectangle showing sample region 2 on the screen.
         * Simply a visual aid. Serves no functional purpose.
         */
        Imgproc.rectangle(
                input, // Buffer to draw on
                region2_pointA, // First point which defines the rectangle
                region2_pointB, // Second point which defines the rectangle
                BLUE, // The color the rectangle is drawn in
                2); // Thickness of the rectangle lines

        /*
         * Draw a rectangle showing sample region 3 on the screen.
         * Simply a visual aid. Serves no functional purpose.
         */
        Imgproc.rectangle(
                input, // Buffer to draw on
                region3_pointA, // First point which defines the rectangle
                region3_pointB, // Second point which defines the rectangle
                BLUE, // The color the rectangle is drawn in
                2); // Thickness of the rectangle lines

        /*
         * Find the min of the 3 averages
         */
        int minOneTwo = Math.min(avg1, avg2);
        int min = Math.min(minOneTwo, avg3);

        /*
         * Now that we found the min, we actually need to go and
         * figure out which sample region that value was from
         */
        if(min == avg1) // Was it from region 1?
        {
            position = SkystonePosition.LEFT; // Record our analysis

            /*
             * Draw a solid rectangle on top of the chosen region.
             * Simply a visual aid. Serves no functional purpose.
             */
            Imgproc.rectangle(
                    input, // Buffer to draw on
                    region1_pointA, // First point which defines the rectangle
                    region1_pointB, // Second point which defines the rectangle
                    GREEN, // The color the rectangle is drawn in
                    -1); // Negative thickness means solid fill
        }
        else if(min == avg2) // Was it from region 2?
        {
            position = SkystonePosition.CENTER; // Record our analysis

            /*
             * Draw a solid rectangle on top of the chosen region.
             * Simply a visual aid. Serves no functional purpose.
             */
            Imgproc.rectangle(
                    input, // Buffer to draw on
                    region2_pointA, // First point which defines the rectangle
                    region2_pointB, // Second point which defines the rectangle
                    GREEN, // The color the rectangle is drawn in
                    -1); // Negative thickness means solid fill
        }
        else if(min == avg3) // Was it from region 3?
        {
            position = SkystonePosition.RIGHT; // Record our analysis

            /*
             * Draw a solid rectangle on top of the chosen region.
             * Simply a visual aid. Serves no functional purpose.
             */
            Imgproc.rectangle(
                    input, // Buffer to draw on
                    region3_pointA, // First point which defines the rectangle
                    region3_pointB, // Second point which defines the rectangle
                    GREEN, // The color the rectangle is drawn in
                    -1); // Negative thickness means solid fill
        }

        telemetry.addData("[Pattern]", position);
        telemetry.addData("avg1", avg1);
        telemetry.addData("avg2", avg2);
        telemetry.addData("avg3", avg3);
        telemetry.update();

        /*
         * Render the 'input' buffer to the viewport. But note this is not
         * simply rendering the raw camera feed, because we called functions
         * to add some annotations to this buffer earlier up.
         */
        return input;
    }

    /*
     * Call this from the OpMode thread to obtain the latest analysis
     */
    public SkystonePosition getAnalysis()
    {
        return position;
    }
}
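
For reference, below is a minimal sketch of how an OpMode might attach this pipeline to a camera and read getAnalysis(). It is not part of the original file. The op-mode class name RedSightPipelineTest, the webcam configuration name "Webcam 1", and the 320x240 streaming resolution are assumptions; the camera setup calls (createWebcam, setPipeline, openCameraDeviceAsync, startStreaming) are the standard EasyOpenCV API.

package org.firstinspires.ftc.teamcode.Vision;

import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
import org.openftc.easyopencv.OpenCvCameraRotation;

@Autonomous(name = "RedSightPipelineTest") // hypothetical op-mode, for illustration only
public class RedSightPipelineTest extends LinearOpMode {
    @Override
    public void runOpMode() {
        // Live preview on the Robot Controller screen (handy for tuning the sample regions)
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
                "cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());

        // "Webcam 1" is an assumed configuration name; match your robot configuration
        OpenCvCamera camera = OpenCvCameraFactory.getInstance().createWebcam(
                hardwareMap.get(WebcamName.class, "Webcam 1"), cameraMonitorViewId);

        RedSightPipeline pipeline = new RedSightPipeline(telemetry);
        camera.setPipeline(pipeline);

        camera.openCameraDeviceAsync(new OpenCvCamera.AsyncCameraOpenListener() {
            @Override
            public void onOpened() {
                // Resolution assumed to match the pipeline's hard-coded sample regions
                camera.startStreaming(320, 240, OpenCvCameraRotation.UPRIGHT);
            }

            @Override
            public void onError(int errorCode) {
                telemetry.addData("Camera error", errorCode);
                telemetry.update();
            }
        });

        waitForStart();

        while (opModeIsActive()) {
            // getAnalysis() is safe to call from the OpMode thread (position is volatile)
            RedSightPipeline.SkystonePosition position = pipeline.getAnalysis();
            telemetry.addData("Detected position", position);
            telemetry.update();
            sleep(50);
        }
    }
}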