Skip to content

Commit 66b31b4

Browse files
authored
Clean up input file parsers and add system tests for JSON input files (#121)
* Enable direct joint node associations by using the jointNode options variable when the model is created. This uses joint node associations defined in JSON input files. The legacy serializer behavior is preserved -- instead of using the listed joint node, we use sequential nodes from the node list to correspond to the joints. That's how createAndRunModel used to extract the node associations. * Additional clean up of options: (1) eliminate direct global population from legacy serializer. The output options are (for now) part of the options struct... until we can house them elsewhere. This frees the options from interactions with any global variables. (2) Updates to the json serializer: actually use node associations now that the model will utilize them. Also include outputType and vtkOutputType until we move them. * replace hardcoded requirements on binary location in system tests (test_integration.py) with an option for the binary location passed to pytest. That enables simple running of pytest regardless of the particular location of the binary directory. For example: pytest Tests/SystemTests --relativeExePath="../svOneDSolver_build/bin/OneDSolver" Also: make the system tests actually execute tests in the temporary directory so that we don't have artifacts generated in whatever directory we run the tests from. * add system tests for conversion to JSON input files and simulation using JSON input files. Add documentation for the executable usage and running unit and system tests
1 parent d98fd52 commit 66b31b4

17 files changed

+442
-231
lines changed

.github/workflows/test.yml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,12 @@ jobs:
1515
make
1616
- name: install dependencies
1717
run: |
18-
pip install pytest pytest-cov pytest-mock
18+
pip install pytest pytest-cov pytest-mock pytest-xdist
1919
pip install numpy
20-
- name: result tests
20+
- name: result simulation system tests
2121
run: |
22-
pytest Tests/SystemTests
22+
# We're providing the path to the EXE based on the prior build solver step.
23+
pytest Tests/SystemTests -n auto --relativeExePath="build_skyline/bin/OneDSolver"
2324
- name: result unit tests
2425
run: |
2526
cd build_skyline

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,3 +41,6 @@ build
4141

4242
# Vscode
4343
.vscode/
44+
45+
# Python
46+
venv/

CMakeLists.txt

Lines changed: 12 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11

2-
CMAKE_MINIMUM_REQUIRED(VERSION 3.11)
2+
CMAKE_MINIMUM_REQUIRED(VERSION 3.18)
33

44
# COMMON SETTINGS
55
PROJECT(OneDSolver)
@@ -71,6 +71,17 @@ ELSEIF(sparseSolverType STREQUAL "csparse")
7171
"${CMAKE_CURRENT_SOURCE_DIR}/Code/Source/sparse/csparse/*.h")
7272
ENDIF()
7373

74+
# NLOHMANN JSON FOR SERIALIZATION
75+
include(FetchContent)
76+
FetchContent_Declare(
77+
nlohmann_json
78+
GIT_REPOSITORY https://github.com/nlohmann/json.git
79+
GIT_TAG v3.11.2 # You can specify a version or use the latest tag
80+
)
81+
FetchContent_MakeAvailable(nlohmann_json)
82+
# Global include for nlohmann headers
83+
include_directories(${nlohmann_json_SOURCE_DIR}/single_include)
84+
7485
# COPY DATASET FOLDER
7586
# FILE(COPY "${CMAKE_CURRENT_SOURCE_DIR}/tests" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}")
7687

@@ -111,16 +122,6 @@ if (BUILD_SV_INSTALLER)
111122
add_subdirectory("${CMAKE_SOURCE_DIR}/Distribution")
112123
ENDIF()
113124

114-
# NLOHMANN JSON FOR SERIALIZATION
115-
include(FetchContent)
116-
FetchContent_Declare(
117-
nlohmann_json
118-
GIT_REPOSITORY https://github.com/nlohmann/json.git
119-
GIT_TAG v3.11.2 # You can specify a version or use the latest tag
120-
)
121-
FetchContent_MakeAvailable(nlohmann_json)
122-
target_include_directories(${PROJECT_NAME} PUBLIC ${nlohmann_json_SOURCE_DIR}/single_include)
123-
124125
# Unit tests and Google Test
125126
if(ENABLE_UNIT_TEST)
126127

@@ -174,13 +175,6 @@ if(ENABLE_UNIT_TEST)
174175
# handle both use cases.
175176
target_link_libraries(UnitTestsExecutable gtest gtest_main pthread)
176177

177-
# We need to include Nlohmann when compiling the unit test executable
178-
# A better strategy would be to build up a list of what we're going to
179-
# include and reuse it.
180-
# Another option: we could simply build the library of cvOneD functionality
181-
# and link against that both here and in the main executable.
182-
target_include_directories(UnitTestsExecutable PRIVATE ${nlohmann_json_SOURCE_DIR}/single_include)
183-
184178
# It's likely we'll need to include additional directories for some
185179
# versions of the unit tests. That can be updated when/if we update
186180
# the general strategy.

Code/Source/cvOneDModelManager.cxx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -188,7 +188,7 @@ int cvOneDModelManager::CreateNode(char * nodeName,double x,double y,double z){
188188
}
189189

190190

191-
int cvOneDModelManager::CreateJoint(char * jointName,double x,double y,double z,
191+
int cvOneDModelManager::CreateJoint(const char * jointName,double x,double y,double z,
192192
int numInSegs,int numOutSegs,
193193
int *InSegs,int *OutSegs){
194194

Code/Source/cvOneDModelManager.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ class cvOneDModelManager{
6969

7070

7171
// CREATE JOINT
72-
int CreateJoint(char* jointName,double x,double y,double z,
72+
int CreateJoint(const char* jointName,double x,double y,double z,
7373
int numInSegs,int numOutSegs,
7474
int *InSegs,int *OutSegs);
7575

Code/Source/cvOneDOptions.h

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
#include <string>
3636
#include <math.h>
3737
#include <memory>
38+
#include <optional>
3839
#include "cvOneDTypes.h"
3940
#include "cvOneDException.h"
4041

@@ -49,21 +50,14 @@ struct options{
4950

5051
// NODE DATA
5152
cvStringVec nodeName;
52-
// Note that these coordinates are also used
53-
// for the joints through an implicit mapping
5453
cvDoubleVec nodeXcoord;
5554
cvDoubleVec nodeYcoord;
5655
cvDoubleVec nodeZcoord;
5756

5857
// JOINT DATA
5958
cvStringVec jointName;
60-
61-
// "jointNode" isn't used currently -- but we want
62-
// to start integrating it. It can be used to make
63-
// the mapping from joint->node explicit. Right
64-
// now, the mapping is implicit: each joint is
65-
// associated with the node of the same index in
66-
// the list of nodes.
59+
// jointNode: name of the associated node that we will use
60+
// to get the XYZ coordinates when creating the joint.
6761
cvStringVec jointNode;
6862

6963
cvStringVec jointInletName;
@@ -121,7 +115,12 @@ struct options{
121115
double convergenceTolerance;
122116
int useIV;
123117
int useStab;
124-
string outputType;
118+
119+
// These are to preserve legacy behavior and are
120+
// expected to be eventually migrated into a
121+
// post-processing step.
122+
string outputType = "TEXT";
123+
std::optional<int> vtkOutputType = std::nullopt;
125124
};
126125

127126
void validateOptions(options const& opts);

Code/Source/cvOneDOptionsJsonParser.cxx

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -206,8 +206,13 @@ void parseSolverOptions(const nlohmann::ordered_json& jsonData, options& opts) t
206206
opts.useIV = solverOptions.at("useIV").get<int>();
207207
opts.useStab = solverOptions.at("useStab").get<int>();
208208

209-
// Doesn't seem like this is used...but we'll provide a default anyway
210-
opts.outputType = solverOptions.value("outputType", "None");
209+
// Until this is migrated elsewhere, we'll (optionally) store the solver options.
210+
if(solverOptions.contains("outputType")){
211+
opts.outputType = solverOptions.at("outputType").get<std::string>();
212+
}
213+
if(solverOptions.contains("vtkOutputType")){
214+
opts.vtkOutputType = solverOptions.at("vtkOutputType").get<int>();
215+
}
211216

212217
} catch (const std::exception& e) {
213218
throw std::runtime_error("Error parsing 'solverOptions': " + std::string(e.what()));

Code/Source/cvOneDOptionsJsonSerializer.cxx

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -81,29 +81,22 @@ nlohmann::ordered_json serialize_joint_data(const options& opts) {
8181
nlohmann::ordered_json j = nlohmann::ordered_json::array();
8282

8383
// Check consistency of the joint inlet and outlet vectors
84-
check_consistent_size("joints", opts.jointName,
84+
check_consistent_size("joints", opts.jointName, opts.jointNode,
8585
opts.jointInletListNames, opts.jointInletListNumber,
8686
opts.jointOutletListNames, opts.jointOutletListNumber,
8787
opts.jointInletList, opts.jointOutletList);
8888

8989
size_t jointCount = opts.jointName.size();
9090

91-
// There must be enough nodes for us to make associations. This is
92-
// an implicit assumption made in the current joint creation code.
93-
if( opts.nodeName.size() < jointCount ){
94-
throw std::runtime_error("The implicit joint-node association requires that there be at least as many nodes as there are joints.");
95-
}
96-
9791
for (size_t i = 0; i < jointCount; ++i) {
9892
nlohmann::ordered_json joint;
9993

10094
// Add JOINT data
101-
// We're adding the node based on the index so we
102-
// can start incorporating the explicit association
103-
// from the implicit one currently used in the code.
95+
// Now that we're actually populating the joint node associations
96+
// when deserializing the legacy format, we can utilize them here.
10497
joint["joint"] = {
105-
{"id", "J" + std::to_string(i + 1)},
106-
{"associatedNode", opts.nodeName.at(i)},
98+
{"id", opts.jointName.at(i)},
99+
{"associatedNode", opts.jointNode.at(i)},
107100
{"inletName", opts.jointInletListNames.at(i)},
108101
{"outletName", opts.jointOutletListNames.at(i)}
109102
};
@@ -225,7 +218,13 @@ nlohmann::ordered_json serializeSolverOptions(options const& opts) {
225218
solverOptions["convergenceTolerance"] = opts.convergenceTolerance;
226219
solverOptions["useIV"] = opts.useIV;
227220
solverOptions["useStab"] = opts.useStab;
221+
222+
// For now, we're serializing the output data.
223+
// In the future, we'll want to migrate these.
228224
solverOptions["outputType"] = opts.outputType;
225+
if(opts.vtkOutputType){
226+
solverOptions["vtkOutputType"] = *opts.vtkOutputType;
227+
}
229228

230229
return solverOptions;
231230
}

Code/Source/cvOneDOptionsLegacySerializer.cxx

Lines changed: 33 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,6 @@
3636
#include <string.h>
3737

3838
#include "cvOneDUtility.h"
39-
#include "cvOneDGlobal.h"
4039

4140
namespace cvOneD{
4241

@@ -126,10 +125,25 @@ void readOptionsLegacyFormat(string inputFile, options* opts){
126125
try{
127126
// Get Joint Name
128127
opts->jointName.push_back(tokenizedString[1]);
129-
// opts->jointNode.push_back(atof(tokenizedString[2].c_str()));
130-
opts->jointNode.push_back(tokenizedString[2]);
128+
129+
// The legacy input files included a "joint node", but it is
130+
// never utilized in simulation. The legacy behavior was, instead
131+
// to simply associate each joint with the sequentially listed
132+
// nodeXcoord, nodeYcoord, and nodeZcoord.
133+
//
134+
// To preserve this legacy behavior, we're not going to deserialize
135+
// the jointNode. Instead, we're going to replace "jointNode" with
136+
// the sequential node item name. That's **wrong** -- we should be
137+
// using "jointNode"!
138+
// But! it preserves the legacy behavior of the existing legacy
139+
// options files when we refactor the actual utilization of the
140+
// options to use the jointNode.
141+
//
142+
// The actual population of the jointNode option is done in a
143+
// post-processing step after we finish processing the file.
131144
opts->jointInletName.push_back(tokenizedString[3]);
132145
opts->jointOutletName.push_back(tokenizedString[4]);
146+
133147
}catch(...){
134148
throw cvException(string("ERROR: Invalid JOINT Format. Line " + to_string(lineCount) + "\n").c_str());
135149
}
@@ -258,22 +272,13 @@ void readOptionsLegacyFormat(string inputFile, options* opts){
258272
}else if(tokenizedString.size() < 2){
259273
throw cvException(string("ERROR: Not enough parameters for OUTPUT token. Line " + to_string(lineCount) + "\n").c_str());
260274
}
261-
// Output Type
262-
if(upper_string(tokenizedString[1]) == "TEXT"){
263-
cvOneDGlobal::outputType = OutputTypeScope::OUTPUT_TEXT;
264-
}else if(upper_string(tokenizedString[1]) == "VTK"){
265-
cvOneDGlobal::outputType = OutputTypeScope::OUTPUT_VTK;
266-
}else if(upper_string(tokenizedString[1]) == "BOTH"){
267-
cvOneDGlobal::outputType = OutputTypeScope::OUTPUT_BOTH;
268-
}else{
269-
throw cvException("ERROR: Invalid OUTPUT Type.\n");
270-
}
275+
276+
// Collect and store the output type data
277+
opts->outputType = tokenizedString[1];
271278
if(tokenizedString.size() > 2){
272-
cvOneDGlobal::vtkOutputType = atoi(tokenizedString[2].c_str());
273-
if(cvOneDGlobal::vtkOutputType > 1){
274-
throw cvException("ERROR: Invalid OUTPUT VTK Type.\n");
275-
}
279+
opts->vtkOutputType = atoi(tokenizedString[2].c_str());
276280
}
281+
277282
}else if(upper_string(tokenizedString[0]) == std::string("DATATABLE")){
278283
// printf("Found Data Table.\n");
279284
try{
@@ -361,6 +366,17 @@ void readOptionsLegacyFormat(string inputFile, options* opts){
361366
// Increment Line Count
362367
lineCount++;
363368
}
369+
370+
// Post-process: to preserve legacy behavior, we're
371+
// making each joint node the corresponding sequential
372+
// node from the node array. See comment above for additional details.
373+
if(opts->jointName.size() > opts->nodeName.size()){
374+
throw cvException("ERROR: there must be at least as many nodes as there are joints.\n");
375+
}
376+
for(size_t k = 0; k < opts->jointName.size(); ++k){
377+
opts->jointNode.push_back(opts->nodeName.at(k));
378+
}
379+
364380
// Close File
365381
infile.close();
366382
}

0 commit comments

Comments
 (0)