@@ -16,7 +16,8 @@ class TestScalarSubQuery4a1:
     fileIdx = 0
     saved_count = 0 # total number of queries saved so far
     maxFileQueryNum = 10000000 # max number of queries to save in a single file
-    tableNames = ["tb1"] #["tb1", "tb3", "tbe", "st1"]
+    # tableNames = ["tb1"] #["tb1", "tb3", "tbe", "st1"]
+    tableNames = ["tb1", "tb3", "tbe", "st1"]

     subSqls = [
         # select
@@ -182,11 +183,10 @@ def execCase(self):
         self.querySql = self.subSqls[self.mainIdx].replace("{scalarSql}", "(" + self.subSqls[self.secondIdx] + ")")
         self.querySql = self.querySql.replace("{scalarSql}", self.scalarSqls[self.subIdx])
         self.querySql = self.querySql.replace("{tableName}", self.tableNames[self.tableIdx])
-
         self.generated_queries_file.write("explain " + self.querySql.strip() + "\G;\n")
         self.generated_queries_file.write("explain verbose true " + self.querySql.strip() + "\G;\n")
-        #self.generated_queries_file.write("explain analyze " + self.querySql.strip() + "\G\n")
-        #self.generated_queries_file.write("explain analyze verbose true " + self.querySql.strip() + "\G;\n")
+        self.generated_queries_file.write("explain analyze " + self.querySql.strip() + "\G\n")
+        self.generated_queries_file.write("explain analyze verbose true " + self.querySql.strip() + "\G;\n")
         self.generated_queries_file.flush()

         self.generated_queries_file.close()
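For context, a minimal standalone sketch (not part of the PR) of how the three-level template substitution in execCase composes a final query; the subSqls and scalarSqls entries below are toy placeholders, not the actual lists from the test:

    subSqls = ["select c1 from {tableName} where c1 > {scalarSql}"]   # hypothetical template
    scalarSqls = ["(select avg(c1) from {tableName})"]                # hypothetical template
    tableNames = ["tb1", "tb3", "tbe", "st1"]

    querySql = subSqls[0].replace("{scalarSql}", scalarSqls[0])
    querySql = querySql.replace("{tableName}", tableNames[0])  # replaces both occurrences
    print("explain analyze " + querySql.strip() + "\\G;")
    # explain analyze select c1 from tb1 where c1 > (select avg(c1) from tb1)\G;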
18 changes: 9 additions & 9 deletions test/ci/cases.task
@@ -312,14 +312,14 @@
 ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub3b.py
 ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub3c.py
 ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub3d.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4a1.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4a2.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4b1.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4b2.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4c1.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4c2.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4d1.py
-,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4d2.py
+90,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4a1.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4a2.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4b1.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4b2.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4c1.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4c2.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4d1.py
+# ,,n,.,pytest cases/09-DataQuerying/08-SubQuery/test_scalar_sub4d2.py
 ,,y,.,./ci/pytest.sh pytest cases/09-DataQuerying/08-SubQuery/test_query_sub_interval.py
 ,,y,.,./ci/pytest.sh pytest cases/09-DataQuerying/08-SubQuery/test_nestedQuery.py
 ,,y,.,./ci/pytest.sh pytest cases/09-DataQuerying/08-SubQuery/test_nestedQuery_str.py
@@ -845,7 +845,7 @@
 ,,y,.,./ci/pytest.sh pytest cases/24-Users/test_user_token.py
 ,,y,.,./ci/pytest.sh pytest cases/24-Users/test_user_totp.py
 ,,y,.,./ci/pytest.sh pytest cases/24-Users/test_user_whitelist.py
-# ,,n,.,pytest cases/24-Users/test_user_passwd.py
+,,n,.,pytest cases/24-Users/test_user_passwd.py

 # 25-Privileges
 ,,y,.,./ci/pytest.sh pytest cases/25-Privileges/test_priv_basic.py -N 3
4 changes: 2 additions & 2 deletions test/ci/container_build.sh
@@ -93,7 +93,7 @@ if [ "$build_no_asan" = "true" ]; then
 -v ${REP_REAL_PATH}/community/tools/taosadapter:${REP_DIR}/community/tools/taosadapter \
 -v ${REP_REAL_PATH}/community/tools/taosws-rs:${REP_DIR}/community/tools/taosws-rs \
 -v ${REP_REAL_PATH}/community/tools/taosws-rs/target:${REP_DIR}/community/tools/taosws-rs/target \
---rm --ulimit core=-1 tdengine-ci:0.1 sh -c "cd $REP_DIR;apt update -y && apt install groff -y;mv /root/.cargo/config /root/.cargo/config_bak;rm -rf debug;mkdir -p debug;cd debug;cmake .. $BUILD_HTTP_OPT -DCOVER=true -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DCMAKE_EXPORT_COMPILE_COMMANDS=1 -DBUILD_CONTRIB=false ;make -j|| exit 1"
+--rm --ulimit core=-1 tdengine-ci:0.1 sh -c "cd $REP_DIR;apt update -y && apt install groff -y;rm -rf debug;mkdir -p debug;cd debug;cmake .. $BUILD_HTTP_OPT -DCOVER=true -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DCMAKE_EXPORT_COMPILE_COMMANDS=1 -DBUILD_CONTRIB=false ;make -j|| exit 1"
Copilot AI (Feb 6, 2026):

The removal of "mv /root/.cargo/config /root/.cargo/config_bak;" from the docker run command appears unrelated to the PR's stated purpose of enhancing SQL execution concurrency. This change is not explained in the PR title or description. If this change is intentional and necessary, it should be documented. If it's accidental, it should be reverted. This could potentially affect the Rust build process for taosws-rs.
 # -v ${REP_REAL_PATH}/community/contrib/jemalloc/:${REP_DIR}/community/contrib/jemalloc \


@@ -118,7 +118,7 @@ docker run \
 -v ${REP_REAL_PATH}/community/tools/taosadapter:${REP_DIR}/community/tools/taosadapter \
 -v ${REP_REAL_PATH}/community/tools/taosws-rs:${REP_DIR}/community/tools/taosws-rs \
 -v ${REP_REAL_PATH}/community/tools/taosws-rs/target:${REP_DIR}/community/tools/taosws-rs/target \
---rm --ulimit core=-1 tdengine-ci:0.1 sh -c "cd $REP_DIR;apt update -y && apt install groff -y;mv /root/.cargo/config /root/.cargo/config_bak;rm -rf debug;mkdir -p debug;cd debug;cmake .. $BUILD_HTTP_OPT -DCOVER=true -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true -DCMAKE_BUILD_TYPE=Debug -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DBUILD_CONTRIB=false;make -j|| exit 1 "
+--rm --ulimit core=-1 tdengine-ci:0.1 sh -c "cd $REP_DIR;apt update -y && apt install groff -y;rm -rf debug;mkdir -p debug;cd debug;cmake .. $BUILD_HTTP_OPT -DCOVER=true -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true -DCMAKE_BUILD_TYPE=Debug -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DBUILD_CONTRIB=false;make -j|| exit 1 "
Copilot AI (Feb 6, 2026):

Similar to line 96, the removal of "mv /root/.cargo/config /root/.cargo/config_bak;" from this docker run command is unrelated to the PR's stated purpose. This change should either be documented or reverted.

 mv ${REP_REAL_PATH}/debug ${WORKDIR}/debugSan
 date
37 changes: 29 additions & 8 deletions test/new_test_framework/utils/common.py
@@ -13,6 +13,7 @@

 import random
 import string
+import concurrent
Copilot AI (Feb 6, 2026):

The import statement should be import concurrent.futures instead of just import concurrent. The code at line 3006 uses concurrent.futures.ThreadPoolExecutor which requires the full module path to be imported. Other files in the codebase consistently import concurrent.futures or use from concurrent.futures import ThreadPoolExecutor.

Suggested change:
-import concurrent
+import concurrent.futures
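As a standalone illustration of what this reviewer is flagging (a sketch assuming a fresh interpreter, not code from the PR): importing a package does not automatically import its submodules, so concurrent.futures is only reachable if some module has imported it explicitly.

    import concurrent
    # concurrent.futures.ThreadPoolExecutor   # AttributeError: module 'concurrent' has
    #                                         # no attribute 'futures', unless another
    #                                         # module already imported concurrent.futures

    import concurrent.futures                 # binds 'futures' as an attribute of the package

    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as pool:
        print(list(pool.map(str.upper, ["a", "b"])))   # ['A', 'B']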
 import requests
 import time
 import socket
@@ -2973,17 +2974,37 @@ def generate_query_result_file(self, test_case, idx, sql):
         # print(f"taosCmd:{taosCmd}, currentPath:{os.getcwd()}")
         os.system(taosCmd)
         return self.query_result_file

+    def run_sql(self, sql, db):
+        tdsql = self.newTdSql()
+        if db:
+            try:
+                tdsql.execute(f"USE {db};")
+            except Exception as e:
+                tdLog.error(f"USE数据库失败: {db}\n{e}")
+        try:
+            tdsql.execute_ignore_error(sql)
+        except Exception as e:
+            tdLog.error(f"SQL执行失败: {sql}\n{e}")
Comment on lines +2984 to +2988
Copilot AI (Feb 6, 2026):

The error messages contain Chinese characters ("USE数据库失败" and "SQL执行失败"). While the rest of the codebase appears to use English for log messages, these Chinese messages are inconsistent and may cause issues with character encoding or readability for international developers. Consider using English error messages for consistency.

Suggested change:
-                tdLog.error(f"USE数据库失败: {db}\n{e}")
+                tdLog.error(f"Failed to execute USE for database '{db}':\n{e}")
         try:
             tdsql.execute_ignore_error(sql)
         except Exception as e:
-            tdLog.error(f"SQL执行失败: {sql}\n{e}")
+            tdLog.error(f"Failed to execute SQL '{sql}':\n{e}")
Comment on lines +2980 to +2988
Copilot AI (Feb 6, 2026):

The run_sql method creates a new database connection for each SQL statement via self.newTdSql(). In concurrent execution with max_workers=8, this could create many simultaneous connections, potentially exhausting connection pools or causing performance issues. Consider reusing connections or implementing connection pooling. Additionally, the connections created here are never explicitly closed, which may lead to resource leaks.

Suggested change:
        try:
            if db:
                try:
                    tdsql.execute(f"USE {db};")
                except Exception as e:
                    tdLog.error(f"USE数据库失败: {db}\n{e}")
            try:
                tdsql.execute_ignore_error(sql)
            except Exception as e:
                tdLog.error(f"SQL执行失败: {sql}\n{e}")
        finally:
            # Ensure that any resources associated with this tdsql instance are released
            try:
                if hasattr(tdsql, "close") and callable(getattr(tdsql, "close")):
                    tdsql.close()
            except Exception as e:
                tdLog.error(f"关闭数据库连接失败: {e}")
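One possible shape for the connection-reuse idea, sketched under the assumption that newTdSql() returns a reusable handle: keep one connection per worker thread via threading.local, so a pool of max_workers threads opens at most max_workers connections instead of one per statement.

    import threading

    class ConnCache:
        # Minimal sketch: lazily create one connection per worker thread and reuse it.
        def __init__(self, factory):
            self._tls = threading.local()
            self._factory = factory              # e.g. self.newTdSql (assumed API)

        def get(self):
            if not hasattr(self._tls, "tdsql"):
                self._tls.tdsql = self._factory()   # first use on this thread
            return self._tls.tdsql

    # Hypothetical usage inside run_sql:
    #   tdsql = self._conn_cache.get()   # instead of self.newTdSql() per statement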
Comment on lines +2985 to +2988
Copilot AI (Feb 6, 2026):

The try-except block around execute_ignore_error is unnecessary because execute_ignore_error already catches all exceptions internally. This creates redundant error handling that will never be triggered.

Suggested change:
-        try:
-            tdsql.execute_ignore_error(sql)
-        except Exception as e:
-            tdLog.error(f"SQL执行失败: {sql}\n{e}")
+        tdsql.execute_ignore_error(sql)

-    def execute_query_file(self, inputfile):
+    def execute_query_file(self, inputfile, max_workers=8):
         if not os.path.exists(inputfile):
             tdLog.exit(f"Input file '{inputfile}' does not exist.")
         else:
-            cfgPath = self.getClientCfgPath()
-            tdLog.info(f"Executing query file: {inputfile}")
-            if platform.system().lower() == "windows":
-                os.system(f"taos -c {cfgPath} -f {inputfile} > nul 2>&1")
-            else:
-                os.system(f"taos -c {cfgPath} -f {inputfile} > /dev/null 2>&1")
-            return

+            tdLog.info(f"Executing query file: {inputfile}")
+
+            with open(inputfile, 'r') as f:
+                lines = [line.strip() for line in f if line.strip()]
+            # 假设第一行是 use 语句
Copilot AI (Feb 6, 2026):

Comments in the code are in Chinese ("假设第一行是 use 语句" means "Assume the first line is a use statement"). This is inconsistent with the rest of the codebase which uses English comments. Consider translating to English for consistency and international collaboration.

Suggested change:
-            # 假设第一行是 use 语句
+            # Assume the first line is a USE statement
+            db = lines[0].split()[1].rstrip(';')
+            sql_lines = [
+                line.replace('\\G', '').rstrip(';') + ';'
+                for line in lines[1:]
Comment on lines +2999 to +3003
Copilot AI (Feb 6, 2026):

The SQL parsing assumes the first line is a USE statement and blindly splits on spaces and takes the second element. This will fail or produce incorrect results if: 1) the first line has leading/trailing whitespace with the database name not in position [1], 2) the first line is a comment, 3) the first line is empty after stripping (though filtered by line.strip()), 4) the USE statement has different formatting (e.g., multiple spaces, tabs). Consider using a regex pattern or more robust parsing, and validate that the first line is actually a USE statement.

Suggested change:
            # 查找并解析 USE 语句以获取数据库名
            use_line = None
            db = None
            for line in lines:
                match = re.match(r'(?i)^use\s+([^\s;]+)', line)
                if match:
                    use_line = line
                    db = match.group(1)
                    break
            if db is None:
                tdLog.exit("No valid USE statement found in input file.")
                return
            sql_lines = [
                line.replace('\\G', '').rstrip(';') + ';'
                for line in lines
                if line != use_line
+            ]
Comment on lines +3001 to +3004
Copilot AI (Feb 6, 2026):

The SQL line processing strips and re-adds semicolons, and removes '\G' markers. However, this processing doesn't handle: 1) multi-line SQL statements, 2) SQL comments (-- or /* */), 3) string literals that might contain ';' or '\G', 4) empty lines between statements (already filtered but no separator logic). The original implementation using 'taos -f' would handle all these cases correctly. Consider more robust SQL parsing or document these limitations.
+            with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+                executor.map(lambda sql: self.run_sql(sql, db), sql_lines)
Comment on lines +3006 to +3007
Copilot AI (Feb 6, 2026):

The concurrent execution fundamentally changes the semantics of execute_query_file. The original implementation executed SQL statements sequentially using the taos CLI (via os.system), which would properly handle dependencies between statements. The new implementation executes all statements concurrently, which will break if statements have dependencies (e.g., CREATE TABLE followed by INSERT INTO that table). This is a breaking API change that will cause existing tests to fail or behave incorrectly. The change should either: 1) be behind a feature flag or new parameter, 2) parse and group dependent statements, or 3) be documented as a breaking change with migration guidance.
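A sketch of the reviewer's first option, a flag that preserves the old sequential semantics (the concurrent_exec parameter name is hypothetical):

    def execute_query_file(self, inputfile, max_workers=8, concurrent_exec=True):
        ...  # file reading and sql_lines parsing as above
        if not concurrent_exec:
            # Sequential path: preserves statement order and inter-statement dependencies.
            for sql in sql_lines:
                self.run_sql(sql, db)
        else:
            with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
                executor.map(lambda sql: self.run_sql(sql, db), sql_lines)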
Copilot AI (Feb 6, 2026):

The executor.map() call doesn't collect or handle return values, and any exceptions raised in worker threads will be silently ignored unless the map iterator is consumed. Since the return value isn't being used, exceptions won't propagate. Consider using list(executor.map(...)) to force execution and exception propagation, or use executor.submit() with as_completed() to properly handle errors from individual tasks.

Suggested change:
-                executor.map(lambda sql: self.run_sql(sql, db), sql_lines)
+                list(executor.map(lambda sql: self.run_sql(sql, db), sql_lines))
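For the submit()/as_completed() alternative the comment mentions, a sketch that surfaces per-statement errors instead of silently dropping them:

    from concurrent.futures import ThreadPoolExecutor, as_completed

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(self.run_sql, sql, db): sql for sql in sql_lines}
        for fut in as_completed(futures):
            try:
                fut.result()                 # re-raises any worker-thread exception
            except Exception as e:
                tdLog.error(f"query failed: {futures[fut]}\n{e}")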

     def generate_query_result(self, inputfile, test_case):
         if not os.path.exists(inputfile):
13 changes: 13 additions & 0 deletions test/new_test_framework/utils/sql.py
@@ -432,6 +432,19 @@ def executeTimes(self, sql, times):
                 time.sleep(1)
                 continue

+    def execute_ignore_error(self, sql, show=False):
+        """
+        Executes a SQL statement, ignore all errors, no retry.
+        """
+        self.sql = sql
+        if show:
+            tdLog.info(sql)
+        try:
+            self.affectedRows = self.cursor.execute(sql)
+            return self.affectedRows
+        except Exception as e:
Copilot AI (Feb 6, 2026):

The execute_ignore_error method catches exceptions but doesn't log them, making debugging difficult. Since the method's purpose is to ignore errors, it should at least log the exception at debug or info level so users can see what errors occurred. This is especially important in concurrent execution scenarios where multiple errors may occur simultaneously.

Suggested change:
         except Exception as e:
+            tdLog.info("Ignoring error while executing sql: %s, %s" % (sql, repr(e)))
+            return None

     def execute(self, sql, queryTimes=10, show=False):
         """
         Executes a SQL statement.
2 changes: 1 addition & 1 deletion tests/ci/dockerfile_ci
@@ -15,7 +15,7 @@ COPY .gitconfig /root/.gitconfig
 RUN apt-get update \
     && apt-get install -y --no-install-recommends \
     locales psmisc sudo tree libgeos-dev libgflags-dev ruby-full \
-    libgoogle-glog-dev libsnappy-dev liblzma-dev libz-dev \
+    libgoogle-glog-dev libsnappy-dev liblzma-dev zlib1g-dev \
     libjansson-dev zlib1g pkg-config build-essential valgrind rsync vim \
     libjemalloc-dev openssh-server screen sshpass net-tools dirmngr gnupg \
     apt-transport-https ca-certificates software-properties-common iputils-ping \