Skip to content

Commit 55cbafc

Browse files
committed
fix: set the conf for VFS at the export level itself
Changes: - Based on the Enable ACL and Security Label params, update the conf beforehand rather than updating it after bring-up. Signed-off-by: Manimaran-MM <manim@redhat.com>
1 parent 60eff4d commit 55cbafc

File tree

4 files changed

+484
-482
lines changed

4 files changed

+484
-482
lines changed

ci_utils/vfs/vfs_setup.py

Lines changed: 23 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -47,11 +47,11 @@ def configure_export(self):
4747
Path = "/{self.vfs_volume}";
4848
Pseudo = "/{self.vfs_volume}";
4949
Access_type = RW;
50-
Disable_ACL = False;
50+
Disable_ACL = {str(not self.enable_acl).capitalize()};
5151
Protocols = "3","4";
5252
Transports = "UDP","TCP";
5353
SecType = "sys";
54-
Security_Label = False;
54+
Security_Label = {str(self.security_label).capitalize()};
5555
FSAL {{
5656
Name = VFS;
5757
}}
@@ -78,11 +78,14 @@ def validate_export(self):
7878
run_cmd(self.session, "grep --with-filename -e '' /etc/ganesha/exports/*.conf", check=False)
7979
raise RuntimeError(f"Export {self.vfs_volume} not found!")
8080

81+
# Commenting out below enablement as the changes are already done in the export configuration
82+
# Retaining the code for future reference for enabling other features
83+
8184
# -------------------------------
8285
# Enable ACL if required
8386
# -------------------------------
84-
def enable_acl_if_required(self):
85-
logger.info("[TEST]: Checking if ACL needs to be enabled")
87+
# def enable_acl_if_required(self):
88+
# logger.info("[TEST]: Checking if ACL needs to be enabled")
8689
# if self.enable_acl:
8790
# # logger.info("Enabling ACL for volume...")
8891
# # run_cmd(self.session, f"sed -i s/'Disable_ACL = .*'/'Disable_ACL = false;'/g {self.export_conf}")
@@ -99,20 +102,20 @@ def enable_acl_if_required(self):
99102
# -------------------------------
100103
# Enable Security_Label if required
101104
# -------------------------------
102-
def enable_security_label_if_required(self):
103-
logger.info("[TEST]: Checking if Security_Label needs to be enabled")
104-
if self.security_label:
105-
logger.info("Enabling Security_Label for volume...")
106-
run_cmd(self.session, f"sed -i s/'Security_Label = .*'/'Security_Label = True;'/g {self.export_conf}")
107-
run_cmd(self.session, f"cat {self.export_conf}")
108-
export_id, _ = run_cmd(self.session, f"grep 'Export_Id' {self.export_conf} | sed 's/^[[:space:]]*Export_Id.*=[[:space:]]*\\([0-9]*\\).*/\\1/'")
109-
run_cmd(
110-
self.session,
111-
f"dbus-send --type=method_call --print-reply --system "
112-
f"--dest=org.ganesha.nfsd /org/ganesha/nfsd/ExportMgr "
113-
f"org.ganesha.nfsd.exportmgr.UpdateExport string:{self.export_conf} "
114-
f"string:\"EXPORT(Export_Id = {export_id})\""
115-
)
105+
# def enable_security_label_if_required(self):
106+
# logger.info("[TEST]: Checking if Security_Label needs to be enabled")
107+
# if self.security_label:
108+
# logger.info("Enabling Security_Label for volume...")
109+
# run_cmd(self.session, f"sed -i s/'Security_Label = .*'/'Security_Label = True;'/g {self.export_conf}")
110+
# run_cmd(self.session, f"cat {self.export_conf}")
111+
# export_id, _ = run_cmd(self.session, f"grep 'Export_Id' {self.export_conf} | sed 's/^[[:space:]]*Export_Id.*=[[:space:]]*\\([0-9]*\\).*/\\1/'")
112+
# run_cmd(
113+
# self.session,
114+
# f"dbus-send --type=method_call --print-reply --system "
115+
# f"--dest=org.ganesha.nfsd /org/ganesha/nfsd/ExportMgr "
116+
# f"org.ganesha.nfsd.exportmgr.UpdateExport string:{self.export_conf} "
117+
# f"string:\"EXPORT(Export_Id = {export_id})\""
118+
# )
116119

117120
# -------------------------------
118121
# Main export workflow
@@ -123,6 +126,6 @@ def export_volume(self):
123126
self.configure_export()
124127
run_cmd(self.session, "sleep 5")
125128
self.validate_export()
126-
self.enable_acl_if_required()
127-
self.enable_security_label_if_required()
129+
# self.enable_acl_if_required()
130+
# self.enable_security_label_if_required()
128131
logger.info("Export completed successfully.")

ci_utils/virtual_machine/vm_setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def setup_network(self):
7979
)
8080
run_cmd(self.session, f"virsh net-define {remote_xml}", check=False)
8181
run_cmd(self.session, "virsh net-start default")
82-
run_cmd(self.session, "virsh net-autostart default"
82+
run_cmd(self.session, "virsh net-autostart default")
8383
self.network = "default"
8484

8585
# -----------------------

jobs/Jenkinsfile.gatecheck

Lines changed: 105 additions & 105 deletions
Original file line numberDiff line numberDiff line change
@@ -165,116 +165,116 @@ pipeline {
165165
}
166166
}
167167

168-
// stage('Pre-Check Patchset - Initial') {
169-
// steps {
170-
// script {
171-
// preCheckGerritPatchset()
172-
// }
173-
// }
174-
// }
175-
176-
// stage('Run Checkpatch, Clang and FSAL Tests') {
177-
// steps {
178-
// sh """
179-
// export GIT_CLONE_DEPTH=2
180-
// export NODE_COUNT=1
181-
182-
// echo "[PIPELINE]: Reserving nodes for tests..."
183-
// pytest -v -s ci-tests/tests/test_reserve_nodes.py
184-
185-
// echo "[PIPELINE]: Setting up common test environment..."
186-
// pytest -c ci-tests/ci_utils/pytest.ini -v -s -m checkpatch_fsal ci-tests/tests/test_ci_pre_req.py
187-
188-
// echo "[PIPELINE]: Running tests..."
189-
// pytest -v -n 6 --capture=no --junitxml=report-checkpatch-fsal.xml ci-tests/tests/test_checkpatch_fsal.py
190-
// """
191-
// }
168+
stage('Pre-Check Patchset - Initial') {
169+
steps {
170+
script {
171+
preCheckGerritPatchset()
172+
}
173+
}
174+
}
175+
176+
stage('Run Checkpatch, Clang and FSAL Tests') {
177+
steps {
178+
sh """
179+
export GIT_CLONE_DEPTH=2
180+
export NODE_COUNT=1
181+
182+
echo "[PIPELINE]: Reserving nodes for tests..."
183+
pytest -v -s ci-tests/tests/test_reserve_nodes.py
184+
185+
echo "[PIPELINE]: Setting up common test environment..."
186+
pytest -c ci-tests/ci_utils/pytest.ini -v -s -m checkpatch_fsal ci-tests/tests/test_ci_pre_req.py
187+
188+
echo "[PIPELINE]: Running tests..."
189+
pytest -v -n 6 --capture=no --junitxml=report-checkpatch-fsal.xml ci-tests/tests/test_checkpatch_fsal.py
190+
"""
191+
}
192192

193-
// post {
194-
// always {
195-
// echo "Deleting reserved nodes..."
196-
// sh 'pytest -v -s ci-tests/tests/test_delete_nodes.py'
193+
post {
194+
always {
195+
echo "Deleting reserved nodes..."
196+
sh 'pytest -v -s ci-tests/tests/test_delete_nodes.py'
197197

198-
// script {
199-
// def filesToCat = [
200-
// "${WORKSPACE}/failures/checkpatch_logs.json",
201-
// "${WORKSPACE}/failures/clang_logs.json",
202-
// "${WORKSPACE}/summary_checkpatch_fsal.txt",
203-
// "${WORKSPACE}/summary_status.txt"
204-
// ]
205-
206-
// filesToCat.each { file ->
207-
// if (fileExists(file)) {
208-
// sh "cat ${file}"
209-
// }
210-
// }
211-
212-
213-
// def notifyFlag = '--notify NONE'
214-
// def verifiedFlag = ''
215-
// def postCheckpatchFailure = false
216-
// def postClangFailure = false
217-
// def statusFile = "${WORKSPACE}/summary_status.txt"
218-
// def checkpatchJSONFile = "${WORKSPACE}/failures/checkpatch_logs.json"
219-
// def clangJSONFile = "${WORKSPACE}/failures/clang_logs.json"
220-
221-
// if (fileExists(statusFile)) {
222-
// def content = readFile(statusFile).trim()
223-
// echo "Summary status: ${content}"
224-
225-
// if (content.contains('Failed')) {
226-
// notifyFlag = '--notify ALL'
227-
// echo "Test failed → setting notifyFlag=${notifyFlag}"
228-
// }
229-
// } else {
230-
// echo "summary_status.txt not found, using default flags"
231-
// }
232-
233-
// // Check for Clang-format & Checkpatch failures in the JSON file to post to gerrit as inline comments
234-
// if (fileExists(checkpatchJSONFile)) {
235-
// def checkpatch_content = readFile(checkpatchJSONFile).trim()
236-
// echo "Checkpatch JSON content: ${checkpatch_content}"
198+
script {
199+
def filesToCat = [
200+
"${WORKSPACE}/failures/checkpatch_logs.json",
201+
"${WORKSPACE}/failures/clang_logs.json",
202+
"${WORKSPACE}/summary_checkpatch_fsal.txt",
203+
"${WORKSPACE}/summary_status.txt"
204+
]
205+
206+
filesToCat.each { file ->
207+
if (fileExists(file)) {
208+
sh "cat ${file}"
209+
}
210+
}
211+
212+
213+
def notifyFlag = '--notify NONE'
214+
def verifiedFlag = ''
215+
def postCheckpatchFailure = false
216+
def postClangFailure = false
217+
def statusFile = "${WORKSPACE}/summary_status.txt"
218+
def checkpatchJSONFile = "${WORKSPACE}/failures/checkpatch_logs.json"
219+
def clangJSONFile = "${WORKSPACE}/failures/clang_logs.json"
220+
221+
if (fileExists(statusFile)) {
222+
def content = readFile(statusFile).trim()
223+
echo "Summary status: ${content}"
224+
225+
if (content.contains('Failed')) {
226+
notifyFlag = '--notify ALL'
227+
echo "Test failed → setting notifyFlag=${notifyFlag}"
228+
}
229+
} else {
230+
echo "summary_status.txt not found, using default flags"
231+
}
232+
233+
// Check for Clang-format & Checkpatch failures in the JSON file to post to gerrit as inline comments
234+
if (fileExists(checkpatchJSONFile)) {
235+
def checkpatch_content = readFile(checkpatchJSONFile).trim()
236+
echo "Checkpatch JSON content: ${checkpatch_content}"
237237

238-
// if (!checkpatch_content.contains("Checkpatch OK")) {
239-
// postCheckpatchFailure = true
240-
// echo "Checkpatch failure detected → setting postCheckpatchFailure=${postCheckpatchFailure}"
241-
// }
242-
// }
238+
if (!checkpatch_content.contains("Checkpatch OK")) {
239+
postCheckpatchFailure = true
240+
echo "Checkpatch failure detected → setting postCheckpatchFailure=${postCheckpatchFailure}"
241+
}
242+
}
243243

244-
// if (fileExists(clangJSONFile)) {
245-
// def clang_content = readFile(clangJSONFile).trim()
246-
// echo "Clang JSON content: ${clang_content}"
244+
if (fileExists(clangJSONFile)) {
245+
def clang_content = readFile(clangJSONFile).trim()
246+
echo "Clang JSON content: ${clang_content}"
247247

248-
// if (!clang_content.contains("clang-format OK")) {
249-
// postClangFailure = true
250-
// echo "Clang-format failure detected → setting postClangFailure=${postClangFailure}"
251-
// }
252-
// }
253-
254-
// postToGerrit("${WORKSPACE}/summary_checkpatch_fsal.txt", notifyFlag, verifiedFlag, postCheckpatchFailure, "${WORKSPACE}/failures/checkpatch_logs.json", postClangFailure, "${WORKSPACE}/failures/clang_logs.json")
255-
256-
// def failuresFolder = "${WORKSPACE}/failures"
257-
// def failuresFiles = findFiles(glob: 'failures/**')
258-
259-
// if (fileExists("${WORKSPACE}/failures")) {
260-
// archiveArtifacts artifacts: 'failures/**', allowEmptyArchive: true
261-
// } else {
262-
// echo "No failures folder found, skipping artifact archive."
263-
// }
264-
// }
265-
// // Publish test results
266-
// junit 'report-checkpatch-fsal.xml'
267-
// }
268-
// }
269-
// }
270-
271-
// stage('Pre-Check Patchset - Intermediate') {
272-
// steps {
273-
// script {
274-
// preCheckGerritPatchset()
275-
// }
276-
// }
277-
// }
248+
if (!clang_content.contains("clang-format OK")) {
249+
postClangFailure = true
250+
echo "Clang-format failure detected → setting postClangFailure=${postClangFailure}"
251+
}
252+
}
253+
254+
postToGerrit("${WORKSPACE}/summary_checkpatch_fsal.txt", notifyFlag, verifiedFlag, postCheckpatchFailure, "${WORKSPACE}/failures/checkpatch_logs.json", postClangFailure, "${WORKSPACE}/failures/clang_logs.json")
255+
256+
def failuresFolder = "${WORKSPACE}/failures"
257+
def failuresFiles = findFiles(glob: 'failures/**')
258+
259+
if (fileExists("${WORKSPACE}/failures")) {
260+
archiveArtifacts artifacts: 'failures/**', allowEmptyArchive: true
261+
} else {
262+
echo "No failures folder found, skipping artifact archive."
263+
}
264+
}
265+
// Publish test results
266+
junit 'report-checkpatch-fsal.xml'
267+
}
268+
}
269+
}
270+
271+
stage('Pre-Check Patchset - Intermediate') {
272+
steps {
273+
script {
274+
preCheckGerritPatchset()
275+
}
276+
}
277+
}
278278

279279
stage('Run Pynfs and Cthon Tests') {
280280
steps {

0 commit comments

Comments
 (0)