Commit 3b726dc

Fixed regression introduced with refactoring the image loading process
1 parent ac22036 commit 3b726dc

File tree

2 files changed: 96 additions & 93 deletions

lib/provider/native/robotjs-screen-action.class.ts

Lines changed: 14 additions & 14 deletions
@@ -4,14 +4,24 @@ import { Region } from "../../region.class";
 import { ScreenActionProvider } from "./screen-action-provider.interface";
 
 export class ScreenAction implements ScreenActionProvider {
+
+  private static determinePixelDensity(
+    screen: Region,
+    screenShot: robot.Bitmap,
+  ): { scaleX: number; scaleY: number } {
+    return {
+      scaleX: screenShot.width / screen.width,
+      scaleY: screenShot.height / screen.height,
+    };
+  }
   constructor() {}
 
   public grabScreen(): Promise<Image> {
     return new Promise((resolve, reject) => {
       const screenShot = robot.screen.capture();
       if (screenShot) {
         const screenSize = robot.getScreenSize();
-        const pixelScaling = this.determinePixelDensity(
+        const pixelScaling = ScreenAction.determinePixelDensity(
           new Region(0, 0, screenSize.width, screenSize.height),
           screenShot,
         );
@@ -20,7 +30,7 @@ export class ScreenAction implements ScreenActionProvider {
            screenShot.width,
            screenShot.height,
            screenShot.image,
-           3,
+           4,
            pixelScaling,
          ),
        );
@@ -39,13 +49,13 @@ export class ScreenAction implements ScreenActionProvider {
        region.height,
      );
      if (screenShot) {
-       const pixelScaling = this.determinePixelDensity(region, screenShot);
+       const pixelScaling = ScreenAction.determinePixelDensity(region, screenShot);
        resolve(
          new Image(
            screenShot.width,
            screenShot.height,
            screenShot.image,
-           3,
+           4,
            pixelScaling,
          ),
        );
@@ -67,14 +77,4 @@ export class ScreenAction implements ScreenActionProvider {
     const screenSize = robot.getScreenSize();
     return new Region(0, 0, screenSize.width, screenSize.height);
   }
-
-  private determinePixelDensity(
-    screen: Region,
-    screenShot: robot.Bitmap,
-  ): { scaleX: number; scaleY: number } {
-    return {
-      scaleX: screenShot.width / screen.width,
-      scaleY: screenShot.height / screen.height,
-    };
-  }
 }
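
Note on the changes above: robot.screen.capture() can return a bitmap that is larger than the logical screen size on HiDPI displays, and determinePixelDensity (now static) derives the horizontal and vertical scale factors from that difference. The channel count passed to Image changes from 3 to 4, matching the 4 bytes per pixel (BGRA) carried by the captured bitmap; the finder in the second file converts that data to BGR before matching. A minimal sketch of the scale computation, using hypothetical HiDPI sizes that are not part of this commit:

// Illustrative sketch only, not part of the commit.
// Mirrors determinePixelDensity: the scale factors are the ratio of captured
// bitmap size to logical screen size.
interface Size {
  width: number;
  height: number;
}

function pixelDensity(screen: Size, screenShot: Size): { scaleX: number; scaleY: number } {
  return {
    scaleX: screenShot.width / screen.width,
    scaleY: screenShot.height / screen.height,
  };
}

// Hypothetical HiDPI example: a 1440x900 logical screen captured as a 2880x1800 bitmap.
const scaling = pixelDensity({ width: 1440, height: 900 }, { width: 2880, height: 1800 });
console.log(scaling); // { scaleX: 2, scaleY: 2 }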

lib/provider/opencv/template-matching-finder.class.ts

Lines changed: 82 additions & 79 deletions
@@ -8,13 +8,89 @@ import { FinderInterface } from "./finder.interface";
 export class TemplateMatchingFinder implements FinderInterface {
   private static scaleStep = 0.5;
 
+  private static async match(haystack: cv.Mat, needle: cv.Mat): Promise<MatchResult> {
+    const match = await haystack.matchTemplateAsync(
+      needle,
+      cv.TM_SQDIFF_NORMED,
+    );
+    const minMax = await match.minMaxLocAsync();
+    return new MatchResult(
+      1.0 - minMax.minVal,
+      new Region(
+        minMax.minLoc.x,
+        minMax.minLoc.y,
+        Math.min(needle.cols, haystack.cols),
+        Math.min(needle.rows, haystack.rows),
+      ),
+    );
+  }
+
+  private static async scale(image: cv.Mat, scaleFactor: number): Promise<cv.Mat> {
+    const scaledRows = Math.max(Math.floor(image.rows * scaleFactor), 1.0);
+    const scaledCols = Math.max(Math.floor(image.cols * scaleFactor), 1.0);
+    return image.resizeAsync(scaledRows, scaledCols, 0, 0, cv.INTER_AREA);
+  }
+
+  private static async scaleAndMatchNeedle(
+    haystack: cv.Mat,
+    needle: cv.Mat,
+  ): Promise<MatchResult> {
+    const scaledNeedle = await TemplateMatchingFinder.scale(
+      needle,
+      TemplateMatchingFinder.scaleStep,
+    );
+    const matchResult = await TemplateMatchingFinder.match(haystack, scaledNeedle);
+    // cv.imwriteAsync(`${"scaled_needle.png"}`, scaledNeedle);
+    console.log(`Scaled needle: ${matchResult.confidence}`);
+    return new MatchResult(
+      matchResult.confidence,
+      new Region(
+        matchResult.location.left,
+        matchResult.location.top,
+        scaledNeedle.cols,
+        scaledNeedle.rows,
+      ),
+    );
+  }
+
+  private static determineScaledSearchRegion(matchRequest: MatchRequest): Region {
+    const searchRegion = matchRequest.searchRegion;
+    searchRegion.width *= matchRequest.haystack.pixelDensity.scaleX;
+    searchRegion.height *= matchRequest.haystack.pixelDensity.scaleY;
+    return searchRegion;
+  }
+
+  private static async scaleAndMatchHaystack(
+    haystack: cv.Mat,
+    needle: cv.Mat,
+  ): Promise<MatchResult> {
+    const scaledHaystack = await TemplateMatchingFinder.scale(
+      haystack,
+      TemplateMatchingFinder.scaleStep,
+    );
+    const matchResult = await TemplateMatchingFinder.match(scaledHaystack, needle);
+    // cv.imwriteAsync(`${"scaled_haystack.png"}`, scaledHaystack);
+    console.log(`Scaled haystack: ${matchResult.confidence}`);
+    return new MatchResult(
+      matchResult.confidence,
+      new Region(
+        matchResult.location.left / TemplateMatchingFinder.scaleStep,
+        matchResult.location.top / TemplateMatchingFinder.scaleStep,
+        needle.cols,
+        needle.rows,
+      ),
+    );
+  }
+
   constructor() {
   }
 
   public async findMatches(matchRequest: MatchRequest): Promise<MatchResult[]> {
     let needle = await this.loadImage(matchRequest.pathToNeedle);
     if (needle.empty) {
-      throw new Error(`Failed to load ${matchRequest.pathToNeedle}, got empty image.`);
+      throw new Error(
+        `Failed to load ${matchRequest.pathToNeedle}, got empty image.`,
+      );
     }
     let haystack = await this.loadHaystack(matchRequest);
 
@@ -26,10 +102,10 @@ export class TemplateMatchingFinder implements FinderInterface {
     // cv.imwriteAsync(`${"input_haystack.png"}`, haystack);
 
     const matchResults = [];
-    matchResults.push(await this.match(haystack, needle));
+    matchResults.push(await TemplateMatchingFinder.match(haystack, needle));
     if (matchRequest.searchMultipleScales) {
-      matchResults.push(await this.scaleAndMatchHaystack(haystack, needle));
-      matchResults.push(await this.scaleAndMatchNeedle(haystack, needle));
+      matchResults.push(await TemplateMatchingFinder.scaleAndMatchHaystack(haystack, needle));
+      matchResults.push(await TemplateMatchingFinder.scaleAndMatchNeedle(haystack, needle));
     }
 
     // Compensate pixel density
@@ -63,7 +139,7 @@ export class TemplateMatchingFinder implements FinderInterface {
     img: Image,
     roi?: Region,
   ): Promise<cv.Mat> {
-    const mat = new cv.Mat(img.data, img.height, img.width, cv.CV_8UC4);
+    const mat = await new cv.Mat(img.data, img.height, img.width, cv.CV_8UC4).cvtColorAsync(cv.COLOR_BGRA2BGR);
     if (roi) {
       return Promise.resolve(
         mat.getRegion(new cv.Rect(roi.left, roi.top, roi.width, roi.height)),
@@ -92,7 +168,7 @@ export class TemplateMatchingFinder implements FinderInterface {
   }
 
   private async loadHaystack(matchRequest: MatchRequest): Promise<cv.Mat> {
-    const searchRegion = this.determineScaledSearchRegion(matchRequest);
+    const searchRegion = TemplateMatchingFinder.determineScaledSearchRegion(matchRequest);
     if (matchRequest.haystack.hasAlphaChannel) {
       return await this.fromImageWithAlphaChannel(
         matchRequest.haystack,
@@ -105,77 +181,4 @@ export class TemplateMatchingFinder implements FinderInterface {
       );
     }
   }
-
-  private determineScaledSearchRegion(matchRequest: MatchRequest): Region {
-    const searchRegion = matchRequest.searchRegion;
-    searchRegion.width *= matchRequest.haystack.pixelDensity.scaleX;
-    searchRegion.height *= matchRequest.haystack.pixelDensity.scaleY;
-    return searchRegion;
-  }
-
-  private async scaleAndMatchHaystack(
-    haystack: cv.Mat,
-    needle: cv.Mat,
-  ): Promise<MatchResult> {
-    const scaledHaystack = await this.scale(
-      haystack,
-      TemplateMatchingFinder.scaleStep,
-    );
-    const matchResult = await this.match(scaledHaystack, needle);
-    // cv.imwriteAsync(`${"scaled_haystack.png"}`, scaledHaystack);
-    console.log(`Scaled haystack: ${matchResult.confidence}`);
-    return new MatchResult(
-      matchResult.confidence,
-      new Region(
-        matchResult.location.left / TemplateMatchingFinder.scaleStep,
-        matchResult.location.top / TemplateMatchingFinder.scaleStep,
-        needle.cols,
-        needle.rows,
-      ),
-    );
-  }
-
-  private async scaleAndMatchNeedle(
-    haystack: cv.Mat,
-    needle: cv.Mat,
-  ): Promise<MatchResult> {
-    const scaledNeedle = await this.scale(
-      needle,
-      TemplateMatchingFinder.scaleStep,
-    );
-    const matchResult = await this.match(haystack, scaledNeedle);
-    // cv.imwriteAsync(`${"scaled_needle.png"}`, scaledNeedle);
-    console.log(`Scaled needle: ${matchResult.confidence}`);
-    return new MatchResult(
-      matchResult.confidence,
-      new Region(
-        matchResult.location.left,
-        matchResult.location.top,
-        scaledNeedle.cols,
-        scaledNeedle.rows,
-      ),
-    );
-  }
-
-  private async match(haystack: cv.Mat, needle: cv.Mat): Promise<MatchResult> {
-    const match = await haystack.matchTemplateAsync(
-      needle,
-      cv.TM_SQDIFF_NORMED,
-    );
-    const minMax = await match.minMaxLocAsync();
-    return new MatchResult(
-      1.0 - minMax.minVal,
-      new Region(
-        minMax.minLoc.x,
-        minMax.minLoc.y,
-        Math.min(needle.cols, haystack.cols),
-        Math.min(needle.rows, haystack.rows)),
-    );
-  }
-
-  private async scale(image: cv.Mat, scaleFactor: number): Promise<cv.Mat> {
-    const scaledRows = Math.max(Math.floor(image.rows * scaleFactor), 1.0);
-    const scaledCols = Math.max(Math.floor(image.cols * scaleFactor), 1.0);
-    return image.resizeAsync(scaledRows, scaledCols, 0, 0, cv.INTER_AREA);
-  }
 }
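
Note on the matching code above: cv.TM_SQDIFF_NORMED reports lower values for better matches, so match() converts minVal into a confidence via 1.0 - minVal, and a hit found in a haystack shrunk by scaleStep is projected back to full-resolution coordinates by dividing by scaleStep (see scaleAndMatchHaystack). A minimal standalone sketch of that bookkeeping; the helper names and numbers below are illustrative only, not part of the commit:

// Illustrative sketch only, not part of the commit.
const scaleStep = 0.5; // same value as TemplateMatchingFinder.scaleStep

// TM_SQDIFF_NORMED: 0 means a perfect match, 1 the worst, hence the inversion.
function toConfidence(minVal: number): number {
  return 1.0 - minVal;
}

// A location found in a haystack scaled down by scaleStep maps back to the
// original resolution by dividing both coordinates by scaleStep.
function toOriginalCoordinates(left: number, top: number): { left: number; top: number } {
  return { left: left / scaleStep, top: top / scaleStep };
}

// Hypothetical values: minVal of 0.05 found at (120, 80) in the half-sized haystack.
console.log(toConfidence(0.05));             // 0.95
console.log(toOriginalCoordinates(120, 80)); // { left: 240, top: 160 }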

0 commit comments
