@@ -30,11 +30,25 @@ struct SQLTests {
 
   let regexID = /#\d+L?/
   let regexPlanId = /plan_id=\d+/
+  let regexLocation = /file:[a-zA-Z0-9\.\-\/ \\]+/
+  let regexOwner = /(runner|185)/
+
+  private func cleanUp(_ str: String) -> String {
+    return removeOwner(removeID(removeLocation(str)))
+  }
 
   private func removeID(_ str: String) -> String {
     return str.replacing(regexPlanId, with: "plan_id=").replacing(regexID, with: "#")
   }
 
+  private func removeLocation(_ str: String) -> String {
+    return str.replacing(regexLocation, with: "*")
+  }
+
+  private func removeOwner(_ str: String) -> String {
+    return str.replacing(regexOwner, with: "*")
+  }
+
   @Test
   func testRemoveID() {
     #expect(removeID("123") == "123")
@@ -44,6 +58,17 @@ struct SQLTests {
     #expect(removeID("plan_id=123") == "plan_id=")
   }
 
+  @Test
+  func removeLocation() {
+    #expect(removeLocation("file:/abc") == "*")
+  }
+
+  @Test
+  func removeOwner() {
+    #expect(removeOwner("runner") == "*")
+    #expect(removeOwner("185") == "*")
+  }
+
 #if !os(Linux)
   @Test
   func runAll() async throws {
@@ -54,8 +79,8 @@ struct SQLTests {
 
       let sql = try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name)"), encoding: .utf8)
       let jsonData = try encoder.encode(try await spark.sql(sql).collect())
-      let answer = removeID(String(data: jsonData, encoding: .utf8)!)
-      let expected = removeID(try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name).json"), encoding: .utf8))
+      let answer = cleanUp(String(data: jsonData, encoding: .utf8)!)
+      let expected = cleanUp(try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name).json"), encoding: .utf8))
       #expect(answer == expected.trimmingCharacters(in: .whitespacesAndNewlines))
     }
     await spark.stop()
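
Note: the snippet below is a minimal, standalone sketch of the normalization this change introduces, not code from the repository. It assumes Swift 5.7+ (bare regex literals and `String.replacing(_:with:)`), inlines the three helpers behind `cleanUp`, and feeds it a hypothetical input string to show how run-specific plan IDs, file locations, and owner names collapse to stable placeholders before the golden-file comparison.

```swift
import Foundation

// Same patterns as the test suite above.
let regexID = /#\d+L?/
let regexPlanId = /plan_id=\d+/
let regexLocation = /file:[a-zA-Z0-9\.\-\/ \\]+/
let regexOwner = /(runner|185)/

// One-pass-per-pattern normalization; the order mirrors
// removeOwner(removeID(removeLocation(_:))) from the diff.
func cleanUp(_ str: String) -> String {
  str
    .replacing(regexLocation, with: "*")
    .replacing(regexPlanId, with: "plan_id=")
    .replacing(regexID, with: "#")
    .replacing(regexOwner, with: "*")
}

// Hypothetical sample: environment-dependent fragments become placeholders,
// so a captured answer compares equal across machines and CI runs.
let raw = ##"{"plan_id=42","#123L","file:/tmp/warehouse","runner"}"##
print(cleanUp(raw))
// Prints: {"plan_id=","#","*","*"}
```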