Skip to content
Closed

Staging #1865

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ building with static dependencies is not completely possible because some system
libraries must be shared libraries for DataFed to be interoperable. If this
setting is turned on DataFed will build its libraries as shared and try to
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

issue (typo): Fix spelling of "dependencies" and possessive "its" in this option description.

In this description, please change "depencies" to "dependencies" and "build it's libraries" to "build its libraries" (possessive).

link to shared libraries." OFF)
OPTION(ENABLE_END_TO_END_API_TESTS "Enable end-to-end API testing" FALSE)
OPTION(ENABLE_END_TO_END_API_TESTS "Enable end-to-end API testing" FALSE)
OPTION(ENABLE_END_TO_END_WEB_TESTS "Enable end-to-end web testing with Playwright" FALSE)
OPTION(ENABLE_FOXX_TESTS "Enable Foxx testing, off by default because it
will overwrite the test database." FALSE)
Expand Down Expand Up @@ -88,7 +88,7 @@ endif()

set(CMAKE_PREFIX_PATH "${DEPENDENCY_INSTALL_PATH}")

if ( BUILD_TESTS )
if ( BUILD_TESTS )
include(CTest)
ENABLE_TESTING()
set(CMAKE_CTEST_ARGUMENTS "--output-on-failure")
Expand All @@ -112,13 +112,13 @@ if( NOT DEFINED ENABLE_END_TO_END_TESTS )
endif()

if(CMAKE_MAKE_PROGRAM MATCHES "(make|gmake)")
add_definitions( -Wall -Wextra )
add_definitions( -Wall -Wextra )
endif()


if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON OR BUILD_PYTHON_CLIENT OR BUILD_WEB_SERVER)
if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON OR BUILD_PYTHON_CLIENT OR BUILD_WEB_SERVER)



# Create the file glob here because it needs to be made visible here as well
file(GLOB_RECURSE ProtoFiles "${PROJECT_SOURCE_DIR}/common/proto3/common/*.proto")
include(./cmake/Protobuf.cmake)
Expand All @@ -128,7 +128,7 @@ endif()
if( BUILD_WEB_SERVER )
include(./cmake/Web.cmake)

file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/common/proto3/common/"
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/common/proto3/common/"
DESTINATION "${CMAKE_CURRENT_SOURCE_DIR}/web/proto3/")

if( ENABLE_UNIT_TESTS )
Expand All @@ -143,7 +143,7 @@ if( BUILD_AUTHZ_TESTS )
include(./cmake/GlobusCommon.cmake)
endif()

if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON)
if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON)

include_directories( "/usr/include/globus" )

Expand Down Expand Up @@ -171,7 +171,7 @@ if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON)

endif()

if( BUILD_COMMON )
if( BUILD_COMMON )
add_subdirectory( common )
endif()

Expand All @@ -190,7 +190,7 @@ endif()
# Must occur before building authz, the location of the mock keys is defined here
# ENV DATAFED_MOCK_CORE_PUB_KEY
if (ENABLE_INTEGRATION_TESTS OR ENABLE_END_TO_END_TESTS)
add_subdirectory(tests)
add_subdirectory(tests)
endif()

if( BUILD_REPO_SERVER OR BUILD_AUTHZ)
Expand Down Expand Up @@ -233,7 +233,7 @@ if( INSTALL_WEB_SERVER )
install( FILES ${PROJECT_SOURCE_DIR}/web/version.js DESTINATION ${DATAFED_INSTALL_PATH}/web )
endif()

if( INSTALL_FOXX )
if( INSTALL_FOXX )
install(CODE "execute_process(COMMAND ${DataFed_SOURCE_DIR}/scripts/install_foxx.sh
OUTPUT_VARIABLE _out
ERROR_VARIABLE _err
Expand Down
36 changes: 18 additions & 18 deletions cmake/Version.cmake
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@


set(DATAFED_RELEASE_YEAR 2025)
set(DATAFED_RELEASE_MONTH 11)
set(DATAFED_RELEASE_DAY 17)
set(DATAFED_RELEASE_HOUR 14)
set(DATAFED_RELEASE_MONTH 12)
set(DATAFED_RELEASE_DAY 9)
set(DATAFED_RELEASE_HOUR 10)
set(DATAFED_RELEASE_MINUTE 0)

set(DATAFED_COMMON_LIB_MAJOR 1)
set(DATAFED_COMMON_LIB_MAJOR 2)
set(DATAFED_COMMON_LIB_MINOR 0)
set(DATAFED_COMMON_LIB_PATCH 3)
set(DATAFED_COMMON_LIB_PATCH 0)

set(DATAFED_COMMON_PROTOCOL_API_MAJOR 1)
set(DATAFED_COMMON_PROTOCOL_API_MINOR 2)
set(DATAFED_COMMON_PROTOCOL_API_MAJOR 2)
set(DATAFED_COMMON_PROTOCOL_API_MINOR 0)
set(DATAFED_COMMON_PROTOCOL_API_PATCH 0)

set(DATAFED_CORE_MAJOR 1)
set(DATAFED_CORE_MINOR 1)
set(DATAFED_CORE_MAJOR 2)
set(DATAFED_CORE_MINOR 0)
set(DATAFED_CORE_PATCH 0)

set(DATAFED_FOXX_MAJOR 1)
Expand All @@ -26,20 +26,20 @@ set(DATAFED_FOXX_API_MAJOR 1)
set(DATAFED_FOXX_API_MINOR 2)
set(DATAFED_FOXX_API_PATCH 0)

set(DATAFED_WEB_MAJOR 1)
set(DATAFED_WEB_MINOR 1)
set(DATAFED_WEB_PATCH 1)
set(DATAFED_WEB_MAJOR 2)
set(DATAFED_WEB_MINOR 0)
set(DATAFED_WEB_PATCH 0)

set(DATAFED_REPO_MAJOR 1)
set(DATAFED_REPO_MAJOR 2)
set(DATAFED_REPO_MINOR 0)
set(DATAFED_REPO_PATCH 1)
set(DATAFED_REPO_PATCH 0)

set(DATAFED_AUTHZ_MAJOR 1)
set(DATAFED_AUTHZ_MAJOR 2)
set(DATAFED_AUTHZ_MINOR 0)
set(DATAFED_AUTHZ_PATCH 2)
set(DATAFED_AUTHZ_PATCH 0)

set(DATAFED_PYTHON_CLIENT_MAJOR 3)
set(DATAFED_PYTHON_CLIENT_MINOR 1)
set(DATAFED_PYTHON_CLIENT_MAJOR 4)
set(DATAFED_PYTHON_CLIENT_MINOR 0)
set(DATAFED_PYTHON_CLIENT_PATCH 0)
set(DATAFED_PYTHON_CLIENT_RELEASE_TYPE "")
set(DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER "")
3 changes: 2 additions & 1 deletion common/source/ServerFactory.cpp
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@

// Local private includes
#include "servers/Proxy.hpp"
#include "servers/ProxyBasicZMQ.hpp"

// Local public includes
#include "common/IServer.hpp"
Expand Down Expand Up @@ -39,7 +40,7 @@ std::unique_ptr<IServer> ServerFactory::create(
"been provided that will never be used!");
}
return std::unique_ptr<IServer>(
new Proxy(socket_options, socket_credentials, m_log_context));
new ProxyBasicZMQ(socket_options, socket_credentials, m_log_context));
}

EXCEPT_PARAM(1, "Error Server type unsupported");
Expand Down
3 changes: 2 additions & 1 deletion common/source/communicators/ZeroMQCommunicator.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -373,10 +373,11 @@ void receiveBody(IMessage &msg, Buffer &buffer, ProtoBufFactory &factory,
if (frame_size > 0) {

if (zmq_msg_size(&zmq_msg) != frame_size) {
size_t msg_size = zmq_msg_size(&zmq_msg);
zmq_msg_close(&zmq_msg);
EXCEPT_PARAM(1, "RCV Invalid message body received. Expected: "
<< frame_size
<< ", got: " << zmq_msg_size(&zmq_msg));
<< ", got: " << msg_size);
}

// Deserialize wire bytes into Envelope
Expand Down
16 changes: 5 additions & 11 deletions core/database/foxx/api/query_router.js
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,6 @@ router

g_lib.procInputParam(req.body, "title", false, obj);

//console.log("qry/create filter:",obj.qry_filter);

var qry = g_db.q.save(obj, {
returnNew: true,
}).new;
Expand Down Expand Up @@ -123,7 +121,7 @@ router
qry_begin: joi.string().required(),
qry_end: joi.string().required(),
qry_filter: joi.string().allow("").required(),
params: joi.any().required(),
params: joi.object().required(),
limit: joi.number().integer().required(),
query: joi.any().required(),
})
Expand Down Expand Up @@ -167,6 +165,7 @@ router
qry.qry_begin = req.body.qry_begin;
qry.qry_end = req.body.qry_end;
qry.qry_filter = req.body.qry_filter;

qry.params = req.body.params;
qry.limit = req.body.limit;
qry.query = req.body.query;
Expand All @@ -176,7 +175,6 @@ router
qry.params.cols = null;
}*/

//console.log("qry/upd filter:",obj.qry_filter);
qry = g_db._update(qry._id, qry, {
mergeObjects: false,
returnNew: true,
Expand Down Expand Up @@ -229,7 +227,7 @@ router
qry_begin: joi.string().required(),
qry_end: joi.string().required(),
qry_filter: joi.string().allow("").required(),
params: joi.any().required(),
params: joi.object().required(),
limit: joi.number().integer().required(),
query: joi.any().required(),
})
Expand Down Expand Up @@ -593,10 +591,6 @@ function execQuery(client, mode, published, orig_query) {

qry += query.qry_end;

//console.log( "execqry" );
//console.log( "qry", qry );
//console.log( "params", query.params );

// Enforce query paging limits
if (query.params.cnt > g_lib.MAX_PAGE_SIZE) {
query.params.cnt = g_lib.MAX_PAGE_SIZE;
Expand Down Expand Up @@ -720,7 +714,7 @@ router

const query = {
...req.body,
params: JSON.parse(req.body.params),
params: req.body.params,
};
results = execQuery(client, req.body.mode, req.body.published, query);

Expand Down Expand Up @@ -761,7 +755,7 @@ router
qry_begin: joi.string().required(),
qry_end: joi.string().required(),
qry_filter: joi.string().optional().allow(""),
params: joi.string().required(),
params: joi.object().required(),
limit: joi.number().integer().required(),
})
.required(),
Expand Down
17 changes: 10 additions & 7 deletions core/database/foxx/api/user_router.js
Original file line number Diff line number Diff line change
Expand Up @@ -394,6 +394,7 @@ router
result = [user.new];
},
});
res.send(result);
logger.logRequestSuccess({
client: req.queryParams.client,
correlationId: req.headers["x-correlation-id"],
Expand Down Expand Up @@ -1160,32 +1161,34 @@ router

router
.get("/token/get/expiring", function (req, res) {
let result = null;
let extra_log_info = [];
const desc = `User access tokens expiring in ${req.queryParams.expires_in} seconds`;
try {
logger.logRequestStarted({
client: req.queryParams.client,
correlationId: req.headers["x-correlation-id"],
httpVerb: "GET",
routePath: basePath + "/token/get/expiring",
status: "Started",
description: "Getting expiring user access token",
description: desc,
});

results = g_db._query(
const results = g_db._query(
"for i in u filter i.expiration != Null && i.expiration < @exp return {id:i._id,access:i.access,refresh:i.refresh,expiration:i.expiration}",
{
exp: Math.floor(Date.now() / 1000) + req.queryParams.expires_in,
},
);
res.send(results);
extra_log_info = results.toArray();
logger.logRequestSuccess({
client: req.queryParams.client,
correlationId: req.headers["x-correlation-id"],
httpVerb: "GET",
routePath: basePath + "/token/get/expiring",
status: "Success",
description: "Getting expiring user access token",
extra: results,
description: desc,
extra: { expiring_token_count: extra_log_info.length },
});
} catch (e) {
logger.logRequestFailure({
Expand All @@ -1194,8 +1197,8 @@ router
httpVerb: "GET",
routePath: basePath + "/token/get/expiring",
status: "Failure",
description: "Getting expiring user access token",
extra: result,
description: desc,
extra: { expiring_token_count: extra_log_info.length },
error: e,
});
g_lib.handleException(e, res);
Expand Down
4 changes: 2 additions & 2 deletions core/server/CoreServer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ void Server::msgRouter(LogContext log_context, int thread_count) {
}

ServerFactory server_factory(log_context);
auto proxy = server_factory.create(ServerType::PROXY_BASIC_ZMQ,
auto proxy = server_factory.create(ServerType::PROXY_CUSTOM,
socket_options, socket_credentials);

// Create worker threads
Expand Down Expand Up @@ -439,7 +439,7 @@ void Server::ioInsecure(LogContext log_context, int thread_count) {
}

ServerFactory server_factory(log_context);
auto proxy = server_factory.create(ServerType::PROXY_BASIC_ZMQ,
auto proxy = server_factory.create(ServerType::PROXY_CUSTOM,
socket_options, socket_credentials);

proxy->run();
Expand Down
4 changes: 2 additions & 2 deletions core/server/DatabaseAPI.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1309,7 +1309,7 @@ void DatabaseAPI::generalSearch(const SDMS::SearchRequest &a_request,
payload["qry_begin"] = qry_begin;
payload["qry_end"] = qry_end;
payload["qry_filter"] = qry_filter;
payload["params"] = "{" + params + "}";
payload["params"] = params;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

issue (bug_risk): The params field is still sent as a string while the Foxx API now expects a JSON object, which can break validation and query execution.

On the Foxx side, params is now validated as joi.object() and is no longer JSON.parsed from a string, but here payload["params"] is still assigned from a std::string. That will serialize as a JSON string value (e.g. "params": "{...}") instead of an object ("params": { ... }), causing Joi validation and execQuery to receive the wrong shape. Please ensure params is sent as a JSON object, e.g. by parsing it (payload["params"] = nlohmann::json::parse(params);) or constructing the JSON object directly.

payload["limit"] = to_string(cnt);

string body = payload.dump(-1, ' ', true);
Expand Down Expand Up @@ -3943,7 +3943,7 @@ uint32_t DatabaseAPI::parseSearchRequest(const SDMS::SearchRequest &a_request,
a_qry_begin = a_qry_begin;
a_qry_end = a_qry_end;
a_qry_filter = a_qry_filter;

a_params = "{" + a_params + "}";
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

question: Wrapping a_params with braces at this layer risks double-wrapping or inconsistent formatting depending on how callers construct a_params.

Previously, this function left a_params untouched and generalSearch added the braces. Now the braces are added here while generalSearch passes params through. This only works if all callers always pass a raw fragment (e.g. "cnt:10, offset:0") and never include braces.

If any caller already passes a braced string, this will yield "{{...}}" and break parsing. Consider standardizing a_params (always raw fragment vs. always full JSON) and ensuring that exactly one well-defined layer converts it to a JSON object (potentially by using nlohmann::json instead of string concatenation). Also double-check existing call sites for any that already include braces.

return cnt;
}

Expand Down
3 changes: 3 additions & 0 deletions tests/end-to-end/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ if( ENABLE_END_TO_END_API_TESTS )
add_test(NAME end_to_end_alloc COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/test_api_alloc.py")
add_test(NAME end_to_end_collection COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/test_api_collection.py")
add_test(NAME end_to_end_record COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/test_api_record.py")
add_test(NAME end_to_end_query COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/test_api_query.py")

# Note because these tests are all using the same database we cannot run most of them concurrently
# They must be run sequentially so that concurrent API calls do not create problems
Expand All @@ -26,5 +27,7 @@ if( ENABLE_END_TO_END_API_TESTS )
set_tests_properties(end_to_end_alloc PROPERTIES FIXTURES_SETUP FIX_ALLOC)
set_tests_properties(end_to_end_collection PROPERTIES FIXTURES_REQUIRED FIX_ALLOC)
set_tests_properties(end_to_end_record PROPERTIES FIXTURES_REQUIRED FIX_ALLOC)
set_tests_properties(end_to_end_record PROPERTIES FIXTURES_SETUP FIX_RECORD)
set_tests_properties(end_to_end_query PROPERTIES FIXTURES_REQUIRED FIX_RECORD)

endif()
Loading
Loading