Skip to content

Commit 4be4918

Browse files
committed
formatting fixed
1 parent 31aec59 commit 4be4918

File tree

1 file changed

+69
-58
lines changed

1 file changed

+69
-58
lines changed
Lines changed: 69 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
// This test checks whether certain operations in virtual memory extension work as expected.
1+
// This test checks whether certain operations in virtual memory extension work
2+
// as expected.
23

34
// RUN: %{build} -o %t.out
45
// RUN: %{run} %t.out
@@ -8,103 +9,113 @@
89
int main() {
910

1011
constexpr size_t NumberOfIterations = 3;
11-
std::array<size_t, NumberOfIterations> NumberOfElementsPerIteration{10,100,1000};
12-
12+
std::array<size_t, NumberOfIterations> NumberOfElementsPerIteration{10, 100,
13+
1000};
14+
1315
sycl::queue Q;
1416
sycl::context Context = Q.get_context();
15-
sycl::device Device = Q.get_device();
16-
17-
//A check should be performed that we can successfully perform and immediately release a valid reservation.
18-
for(const size_t RequiredNumElements: NumberOfElementsPerIteration){
17+
sycl::device Device = Q.get_device();
18+
19+
// A check should be performed that we can successfully perform and
20+
// immediately release a valid reservation.
21+
for (const size_t RequiredNumElements : NumberOfElementsPerIteration) {
1922
size_t BytesRequired = RequiredNumElements * sizeof(int);
2023
size_t UsedGranularity = GetLCMGranularity(Device, Context);
2124
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
22-
uintptr_t VirtualMemoryPtr = syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
25+
uintptr_t VirtualMemoryPtr =
26+
syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
2327
syclext::free_virtual_mem(VirtualMemoryPtr, AlignedByteSize, Context);
2428
}
2529

26-
27-
// A check should be performed that we can successfully map and immediately unmap a virtual memory range.
28-
for(const size_t RequiredNumElements: NumberOfElementsPerIteration){
30+
// A check should be performed that we can successfully map and immediately
31+
// unmap a virtual memory range.
32+
for (const size_t RequiredNumElements : NumberOfElementsPerIteration) {
2933
size_t BytesRequired = RequiredNumElements * sizeof(int);
3034
size_t UsedGranularity = GetLCMGranularity(Device, Context);
3135
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
32-
uintptr_t VirtualMemoryPtr = syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
36+
uintptr_t VirtualMemoryPtr =
37+
syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
3338
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
34-
void* MappedPtr =
35-
PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
36-
syclext::address_access_mode::read_write);
39+
void *MappedPtr = PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
40+
syclext::address_access_mode::read_write);
3741
syclext::unmap(MappedPtr, AlignedByteSize, Context);
3842
syclext::free_virtual_mem(VirtualMemoryPtr, AlignedByteSize, Context);
3943
}
4044

4145
{
42-
// Check should be performed that methods get_context(), get_device() and size() return correct values (i.e. ones which were passed to physical_mem constructor).
43-
size_t BytesRequired = NumberOfElementsPerIteration[2] * sizeof(int);
44-
size_t UsedGranularity = GetLCMGranularity(Device, Context);
45-
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
46-
47-
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
46+
// Check should be performed that methods get_context(), get_device() and
47+
// size() return correct values (i.e. ones which were passed to physical_mem
48+
// constructor).
49+
size_t BytesRequired = NumberOfElementsPerIteration[2] * sizeof(int);
50+
size_t UsedGranularity = GetLCMGranularity(Device, Context);
51+
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
52+
53+
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
4854

49-
PhysicalMem.get_context();
50-
PhysicalMem.get_device();
51-
PhysicalMem.size();
55+
PhysicalMem.get_context();
56+
PhysicalMem.get_device();
57+
PhysicalMem.size();
5258

53-
assert(PhysicalMem.get_device() == Device &&
54-
"device passed to physical_mem must be the same as returned from get_device()");
59+
assert(PhysicalMem.get_device() == Device &&
60+
"device passed to physical_mem must be the same as returned from "
61+
"get_device()");
5562

56-
assert(PhysicalMem.get_context() == Context &&
57-
"context passed to physical_mem must be the same as returned from get_context()");
63+
assert(PhysicalMem.get_context() == Context &&
64+
"context passed to physical_mem must be the same as returned from "
65+
"get_context()");
5866

59-
assert(PhysicalMem.size() == AlignedByteSize &&
60-
"size in bytes passed to physical_mem must be the same as returned from size()");
67+
assert(PhysicalMem.size() == AlignedByteSize &&
68+
"size in bytes passed to physical_mem must be the same as returned "
69+
"from size()");
6170
}
6271

6372
{
64-
// Check to see if value returned from a valid call to map() is the same as reinterpret_cast<void *>(ptr).
65-
size_t BytesRequired = NumberOfElementsPerIteration[2] * sizeof(int);
66-
size_t UsedGranularity = GetLCMGranularity(Device, Context);
67-
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
73+
// Check to see if value returned from a valid call to map() is the same as
74+
// reinterpret_cast<void *>(ptr).
75+
size_t BytesRequired = NumberOfElementsPerIteration[2] * sizeof(int);
76+
size_t UsedGranularity = GetLCMGranularity(Device, Context);
77+
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
6878

69-
uintptr_t VirtualMemoryPtr =
70-
syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
79+
uintptr_t VirtualMemoryPtr =
80+
syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
7181

72-
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
82+
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
7383

74-
void *MappedPtr =
75-
PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
76-
syclext::address_access_mode::read_write);
84+
void *MappedPtr = PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
85+
syclext::address_access_mode::read_write);
7786

78-
assert(MappedPtr == reinterpret_cast<void*>(VirtualMemoryPtr) &&
79-
"ptrs arent equal");
87+
assert(MappedPtr == reinterpret_cast<void *>(VirtualMemoryPtr) &&
88+
"ptrs arent equal");
8089

81-
syclext::unmap(MappedPtr, AlignedByteSize, Context);
82-
syclext::free_virtual_mem(VirtualMemoryPtr, AlignedByteSize, Context);
90+
syclext::unmap(MappedPtr, AlignedByteSize, Context);
91+
syclext::free_virtual_mem(VirtualMemoryPtr, AlignedByteSize, Context);
8392
}
84-
85-
// Check to see if can change access mode of a virtual memory range and immediately see it changed.
86-
for(const size_t RequiredNumElements: NumberOfElementsPerIteration){
93+
94+
// Check to see if can change access mode of a virtual memory range and
95+
// immediately see it changed.
96+
for (const size_t RequiredNumElements : NumberOfElementsPerIteration) {
8797
size_t BytesRequired = RequiredNumElements * sizeof(int);
8898
size_t UsedGranularity = GetLCMGranularity(Device, Context);
8999
size_t AlignedByteSize = GetAlignedByteSize(BytesRequired, UsedGranularity);
90-
uintptr_t VirtualMemoryPtr = syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
100+
uintptr_t VirtualMemoryPtr =
101+
syclext::reserve_virtual_mem(0, AlignedByteSize, Context);
91102
syclext::physical_mem PhysicalMem{Device, Context, AlignedByteSize};
92-
void* MappedPtr =
93-
PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
94-
syclext::address_access_mode::read_write);
95-
96-
103+
void *MappedPtr = PhysicalMem.map(VirtualMemoryPtr, AlignedByteSize,
104+
syclext::address_access_mode::read_write);
105+
97106
syclext::set_access_mode(MappedPtr, AlignedByteSize,
98-
syclext::address_access_mode::read, Context);
99-
100-
syclext::address_access_mode CurrentAccessMode = syclext::get_access_mode(MappedPtr, AlignedByteSize, Context);
107+
syclext::address_access_mode::read, Context);
108+
109+
syclext::address_access_mode CurrentAccessMode =
110+
syclext::get_access_mode(MappedPtr, AlignedByteSize, Context);
101111

102112
assert(CurrentAccessMode == syclext::address_access_mode::read &&
103-
"access mode must be address_access_mode::read after change with set_access_mode()");
113+
"access mode must be address_access_mode::read after change with "
114+
"set_access_mode()");
104115

105116
syclext::unmap(MappedPtr, AlignedByteSize, Context);
106117
syclext::free_virtual_mem(VirtualMemoryPtr, AlignedByteSize, Context);
107118
}
108-
119+
109120
return 0;
110121
}

0 commit comments

Comments
 (0)