diff --git a/.github/workflows/cluster-it-1c1d1a.yml b/.github/workflows/cluster-it-1c1d1a.yml
index f1b583f8efe51..d4c40fa7ad889 100644
--- a/.github/workflows/cluster-it-1c1d1a.yml
+++ b/.github/workflows/cluster-it-1c1d1a.yml
@@ -59,5 +59,5 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: cluster-log-ainode-${{ matrix.os }}
- path: integration-test/target/ainode-logs
+ path: integration-test/target/*-logs
retention-days: 30
diff --git a/integration-test/src/assembly/mpp-test.xml b/integration-test/src/assembly/mpp-test.xml
index 4bcd32c7ee0e2..3915c4593a946 100644
--- a/integration-test/src/assembly/mpp-test.xml
+++ b/integration-test/src/assembly/mpp-test.xml
@@ -63,7 +63,7 @@
lib
- ${project.basedir}/../iotdb-core/ainode/dist/
+ ${project.basedir}/../iotdb-core/ainode/dist/ainode/
0755
diff --git a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/node/AINodeWrapper.java b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/node/AINodeWrapper.java
index 20c31f4614666..e118d6c3a98ff 100644
--- a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/node/AINodeWrapper.java
+++ b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/node/AINodeWrapper.java
@@ -25,9 +25,9 @@
import org.apache.tsfile.external.commons.io.file.PathUtils;
import org.slf4j.Logger;
-import java.io.BufferedWriter;
import java.io.File;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
@@ -37,6 +37,7 @@
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
+import java.util.Properties;
import java.util.stream.Stream;
import static org.apache.iotdb.it.env.cluster.ClusterConstant.AI_NODE_NAME;
@@ -62,15 +63,19 @@ public class AINodeWrapper extends AbstractNodeWrapper {
public static final String CACHE_BUILT_IN_MODEL_PATH = "/data/ainode/models/weights";
private void replaceAttribute(String[] keys, String[] values, String filePath) {
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(filePath, true))) {
- for (int i = 0; i < keys.length; i++) {
- String line = keys[i] + "=" + values[i];
- writer.newLine();
- writer.write(line);
- }
+ Properties props = new Properties();
+ try (FileInputStream in = new FileInputStream(filePath)) {
+ props.load(in);
+ } catch (IOException e) {
+ logger.warn("Failed to load existing AINode properties from {}, because: ", filePath, e);
+ }
+ for (int i = 0; i < keys.length; i++) {
+ props.setProperty(keys[i], values[i]);
+ }
+ try (FileOutputStream out = new FileOutputStream(filePath)) {
+ props.store(out, "Updated by AINode integration-test env");
} catch (IOException e) {
- logger.error(
- "Failed to set attribute for AINode in file: {} because {}", filePath, e.getMessage());
+ logger.error("Failed to save properties to {}, because:", filePath, e);
}
}
diff --git a/integration-test/src/test/java/org/apache/iotdb/ainode/it/AINodeConcurrentInferenceIT.java b/integration-test/src/test/java/org/apache/iotdb/ainode/it/AINodeConcurrentInferenceIT.java
index 42fcf1d30d6c6..a08990d472fe6 100644
--- a/integration-test/src/test/java/org/apache/iotdb/ainode/it/AINodeConcurrentInferenceIT.java
+++ b/integration-test/src/test/java/org/apache/iotdb/ainode/it/AINodeConcurrentInferenceIT.java
@@ -90,33 +90,6 @@ private static void prepareDataForTableModel() throws SQLException {
}
}
- // @Test
- public void concurrentCPUCallInferenceTest() throws SQLException, InterruptedException {
- concurrentCPUCallInferenceTest("timer_xl");
- concurrentCPUCallInferenceTest("sundial");
- }
-
- private void concurrentCPUCallInferenceTest(String modelId)
- throws SQLException, InterruptedException {
- try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TREE_SQL_DIALECT);
- Statement statement = connection.createStatement()) {
- final int threadCnt = 4;
- final int loop = 10;
- final int predictLength = 96;
- statement.execute(String.format("LOAD MODEL %s TO DEVICES 'cpu'", modelId));
- checkModelOnSpecifiedDevice(statement, modelId, "cpu");
- concurrentInference(
- statement,
- String.format(
- "CALL INFERENCE(%s, 'SELECT s FROM root.AI', predict_length=%d)",
- modelId, predictLength),
- threadCnt,
- loop,
- predictLength);
- statement.execute(String.format("UNLOAD MODEL %s FROM DEVICES 'cpu'", modelId));
- }
- }
-
// @Test
public void concurrentGPUCallInferenceTest() throws SQLException, InterruptedException {
concurrentGPUCallInferenceTest("timer_xl");
@@ -150,39 +123,6 @@ private void concurrentGPUCallInferenceTest(String modelId)
String forecastUDTFSql =
"SELECT forecast(s, 'MODEL_ID'='%s', 'PREDICT_LENGTH'='%d') FROM root.AI";
- @Test
- public void concurrentCPUForecastTest() throws SQLException, InterruptedException {
- concurrentCPUForecastTest("timer_xl", forecastUDTFSql);
- concurrentCPUForecastTest("sundial", forecastUDTFSql);
- concurrentCPUForecastTest("timer_xl", forecastTableFunctionSql);
- concurrentCPUForecastTest("sundial", forecastTableFunctionSql);
- }
-
- private void concurrentCPUForecastTest(String modelId, String selectSQL)
- throws SQLException, InterruptedException {
- try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT);
- Statement statement = connection.createStatement()) {
- final int threadCnt = 4;
- final int loop = 10;
- final int predictLength = 96;
- statement.execute(String.format("LOAD MODEL %s TO DEVICES 'cpu'", modelId));
- checkModelOnSpecifiedDevice(statement, modelId, "cpu");
- long startTime = System.currentTimeMillis();
- concurrentInference(
- statement,
- String.format(selectSQL, modelId, predictLength),
- threadCnt,
- loop,
- predictLength);
- long endTime = System.currentTimeMillis();
- LOGGER.info(
- String.format(
- "Model %s concurrent inference %d reqs (%d threads, %d loops) in CPU takes time: %dms",
- modelId, threadCnt * loop, threadCnt, loop, endTime - startTime));
- statement.execute(String.format("UNLOAD MODEL %s FROM DEVICES 'cpu'", modelId));
- }
- }
-
@Test
public void concurrentGPUForecastTest() throws SQLException, InterruptedException {
concurrentGPUForecastTest("timer_xl", forecastUDTFSql);
@@ -221,7 +161,7 @@ private void checkModelOnSpecifiedDevice(Statement statement, String modelId, St
throws SQLException, InterruptedException {
Set targetDevices = ImmutableSet.copyOf(device.split(","));
LOGGER.info("Checking model: {} on target devices: {}", modelId, targetDevices);
- for (int retry = 0; retry < 20; retry++) {
+ for (int retry = 0; retry < 200; retry++) {
Set foundDevices = new HashSet<>();
try (final ResultSet resultSet =
statement.executeQuery(String.format("SHOW LOADED MODELS '%s'", device))) {
diff --git a/iotdb-core/ainode/.gitignore b/iotdb-core/ainode/.gitignore
index 8cc2098c3fd85..bdb2698ec782c 100644
--- a/iotdb-core/ainode/.gitignore
+++ b/iotdb-core/ainode/.gitignore
@@ -14,8 +14,6 @@
# generated by maven
/iotdb/ainode/conf/
-# .whl of ainode, generated by Poetry
+# generated by pyinstaller
/dist/
-
-# the config to build ainode, it will be generated automatically
-pyproject.toml
+/build/
diff --git a/iotdb-core/ainode/ainode.spec b/iotdb-core/ainode/ainode.spec
new file mode 100644
index 0000000000000..a131b2bcff217
--- /dev/null
+++ b/iotdb-core/ainode/ainode.spec
@@ -0,0 +1,199 @@
+# -*- mode: python ; coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under this License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from pathlib import Path
+
+# Get project root directory
+project_root = Path(SPECPATH).parent
+
+block_cipher = None
+
+# Auto-collect all submodules of large dependency libraries
+# Using collect_all automatically includes all dependencies and avoids manual maintenance of hiddenimports
+from PyInstaller.utils.hooks import collect_all, collect_submodules, collect_data_files
+
+# Collect only essential data files and binaries for large libraries
+# Using collect_all for all submodules slows down startup significantly.
+# However, for certain libraries with many dynamic imports (e.g., torch, transformers, safetensors),
+# collect_all is necessary to ensure all required modules are included.
+# For other libraries, we use lighter-weight collection methods to improve startup time.
+all_datas = []
+all_binaries = []
+all_hiddenimports = []
+
+# Only collect essential data files and binaries for critical libraries
+# This reduces startup time by avoiding unnecessary module imports
+essential_libraries = {
+ 'torch': True, # Keep collect_all for torch as it has many dynamic imports
+ 'transformers': True, # Keep collect_all for transformers
+ 'safetensors': True, # Keep collect_all for safetensors
+}
+
+# For other libraries, use selective collection to speed up startup
+other_libraries = ['sktime', 'scipy', 'pandas', 'sklearn', 'statsmodels', 'optuna']
+
+for lib in essential_libraries:
+ try:
+ lib_datas, lib_binaries, lib_hiddenimports = collect_all(lib)
+ all_datas.extend(lib_datas)
+ all_binaries.extend(lib_binaries)
+ all_hiddenimports.extend(lib_hiddenimports)
+ except Exception:
+ pass
+
+# For other libraries, only collect submodules (lighter weight)
+# This relies on PyInstaller's dependency analysis to include what's actually used
+for lib in other_libraries:
+ try:
+ submodules = collect_submodules(lib)
+ all_hiddenimports.extend(submodules)
+ # Only collect essential data files and binaries, not all submodules
+ # This significantly reduces startup time
+ try:
+ lib_datas, lib_binaries, _ = collect_all(lib)
+ all_datas.extend(lib_datas)
+ all_binaries.extend(lib_binaries)
+ except Exception:
+ # If collect_all fails, try collect_data_files for essential data only
+ try:
+ lib_datas = collect_data_files(lib)
+ all_datas.extend(lib_datas)
+ except Exception:
+ pass
+ except Exception:
+ pass
+
+# Project-specific packages that need their submodules collected
+# Only list top-level packages - collect_submodules will recursively collect all submodules
+TOP_LEVEL_PACKAGES = [
+ 'iotdb.ainode.core', # This will include all sub-packages: manager, model, inference, etc.
+ 'iotdb.thrift', # This will include all thrift sub-packages
+]
+
+# Collect all submodules for project packages automatically
+# Using top-level packages avoids duplicate collection
+for package in TOP_LEVEL_PACKAGES:
+ try:
+ submodules = collect_submodules(package)
+ all_hiddenimports.extend(submodules)
+ except Exception:
+ # If package doesn't exist or collection fails, add the package itself
+ all_hiddenimports.append(package)
+
+# Add parent packages to ensure they are included
+all_hiddenimports.extend(['iotdb', 'iotdb.ainode'])
+
+# Multiprocessing support for PyInstaller
+# When using multiprocessing with PyInstaller, we need to ensure proper handling
+multiprocessing_modules = [
+ 'multiprocessing',
+ 'multiprocessing.spawn',
+ 'multiprocessing.popen_spawn_posix',
+ 'multiprocessing.popen_spawn_win32',
+ 'multiprocessing.popen_fork',
+ 'multiprocessing.popen_forkserver',
+ 'multiprocessing.context',
+ 'multiprocessing.reduction',
+ 'multiprocessing.util',
+ 'torch.multiprocessing',
+ 'torch.multiprocessing.spawn',
+]
+
+# Additional dependencies that may need explicit import
+# These are external libraries that might use dynamic imports
+external_dependencies = [
+ 'huggingface_hub',
+ 'tokenizers',
+ 'hf_xet',
+ 'einops',
+ 'dynaconf',
+ 'tzlocal',
+ 'thrift',
+ 'psutil',
+ 'requests',
+]
+
+all_hiddenimports.extend(multiprocessing_modules)
+all_hiddenimports.extend(external_dependencies)
+
+# Analyze main entry file
+# Note: Do NOT add virtual environment site-packages to pathex manually.
+# When PyInstaller is run from the virtual environment's Python, it automatically
+# detects and uses the virtual environment's site-packages.
+a = Analysis(
+ ['iotdb/ainode/core/script.py'],
+ pathex=[str(project_root)],
+ binaries=all_binaries,
+ datas=all_datas,
+ hiddenimports=all_hiddenimports,
+ hookspath=[],
+ hooksconfig={},
+ runtime_hooks=[],
+ excludes=[
+ # Exclude unnecessary modules to reduce size and improve startup time
+ # Note: Do not exclude unittest, as torch and other libraries require it
+ # Only exclude modules that are definitely not used and not required by dependencies
+ 'matplotlib',
+ 'IPython',
+ 'jupyter',
+ 'notebook',
+ 'pytest',
+ 'test',
+ 'tests'
+ ],
+ win_no_prefer_redirects=False,
+ win_private_assemblies=False,
+ cipher=block_cipher,
+ noarchive=True, # Set to True to speed up startup - files are not archived into PYZ
+)
+
+# Package all PYZ files
+pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
+
+# Create executable (onedir mode for faster startup)
+exe = EXE(
+ pyz,
+ a.scripts,
+ [],
+ exclude_binaries=True,
+ name='ainode',
+ debug=False,
+ bootloader_ignore_signals=False,
+ strip=False,
+ upx=True,
+ console=True,
+ disable_windowed_traceback=False,
+ argv_emulation=False,
+ target_arch=None,
+ codesign_identity=None,
+ entitlements_file=None,
+)
+
+# Collect all files into a directory (onedir mode)
+coll = COLLECT(
+ exe,
+ a.binaries,
+ a.zipfiles,
+ a.datas,
+ strip=False,
+ upx=True,
+ upx_exclude=[],
+ name='ainode',
+)
\ No newline at end of file
diff --git a/iotdb-core/ainode/ainode.xml b/iotdb-core/ainode/ainode.xml
index beab4b69c01dd..6de635c133854 100644
--- a/iotdb-core/ainode/ainode.xml
+++ b/iotdb-core/ainode/ainode.xml
@@ -42,6 +42,10 @@
+
+ iotdb/ainode/conf
+ conf
+
resources/conf
conf
@@ -52,19 +56,8 @@
0755
- dist
+ dist/ainode
lib
-
- *.whl
-
-
-
- ${project.basedir}/../../scripts/conf
- conf
-
- ainode-env.*
- **/ainode-env.*
-
0755
diff --git a/iotdb-core/ainode/build_binary.py b/iotdb-core/ainode/build_binary.py
new file mode 100644
index 0000000000000..1b5f99da21493
--- /dev/null
+++ b/iotdb-core/ainode/build_binary.py
@@ -0,0 +1,596 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+"""
+PyInstaller build script (Python version)
+"""
+
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+
+def get_venv_base_dir():
+ """
+ Get the base directory for virtual environments outside the project.
+
+ Returns:
+ Path: Base directory path
+ - Linux/macOS: ~/.cache/iotdb-ainode-build/
+ - Windows: %LOCALAPPDATA%\\iotdb-ainode-build\\
+ """
+ if sys.platform == "win32":
+ localappdata = os.environ.get("LOCALAPPDATA") or os.environ.get(
+ "APPDATA", os.path.expanduser("~")
+ )
+ base_dir = Path(localappdata) / "iotdb-ainode-build"
+ else:
+ base_dir = Path.home() / ".cache" / "iotdb-ainode-build"
+
+ return base_dir
+
+
+def setup_venv():
+ """
+ Create virtual environment outside the project directory.
+
+ The virtual environment is created in a platform-specific location:
+ - Linux/macOS: ~/.cache/iotdb-ainode-build/<project-dir-name>/
+ - Windows: %LOCALAPPDATA%\\iotdb-ainode-build\\<project-dir-name>\\
+
+ The same venv is reused across multiple builds of the same project.
+
+ Returns:
+ Path: Path to the virtual environment directory
+ """
+ script_dir = Path(__file__).parent
+ venv_base_dir = get_venv_base_dir()
+ venv_dir = venv_base_dir / script_dir.name
+
+ if venv_dir.exists():
+ print(f"Virtual environment already exists at: {venv_dir}")
+ return venv_dir
+
+ venv_base_dir.mkdir(parents=True, exist_ok=True)
+ print(f"Creating virtual environment at: {venv_dir}")
+ subprocess.run([sys.executable, "-m", "venv", str(venv_dir)], check=True)
+ print("Virtual environment created successfully")
+ return venv_dir
+
+
+def get_venv_python(venv_dir):
+ """Get Python executable path in virtual environment"""
+ if sys.platform == "win32":
+ return venv_dir / "Scripts" / "python.exe"
+ else:
+ return venv_dir / "bin" / "python"
+
+
+def update_pip(venv_python):
+ """Update pip in the virtual environment to the latest version."""
+ print("Updating pip...")
+ subprocess.run(
+ [str(venv_python), "-m", "pip", "install", "--upgrade", "pip"], check=True
+ )
+ print("pip updated successfully")
+
+
+def install_poetry(venv_python):
+ """Install poetry 2.2.1 in the virtual environment."""
+ print("Installing poetry 2.2.1...")
+ subprocess.run(
+ [
+ str(venv_python),
+ "-m",
+ "pip",
+ "install",
+ "poetry==2.2.1",
+ ],
+ check=True,
+ )
+ # Get installed version
+ version_result = subprocess.run(
+ [str(venv_python), "-m", "poetry", "--version"],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ print(f"Poetry installed: {version_result.stdout.strip()}")
+
+
+def get_venv_env(venv_dir):
+ """
+ Get environment variables configured for the virtual environment.
+
+ Sets VIRTUAL_ENV and prepends the venv's bin/Scripts directory to PATH
+ so that tools installed in the venv take precedence.
+ Also sets POETRY_VIRTUALENVS_PATH to force poetry to use our venv.
+
+ Returns:
+ dict: Environment variables dictionary
+ """
+ env = os.environ.copy()
+ env["VIRTUAL_ENV"] = str(venv_dir.absolute())
+
+ venv_bin = str(venv_dir / ("Scripts" if sys.platform == "win32" else "bin"))
+ env["PATH"] = f"{venv_bin}{os.pathsep}{env.get('PATH', '')}"
+
+ # Force poetry to use our virtual environment by setting POETRY_VIRTUALENVS_PATH
+ # This tells poetry where to look for/create virtual environments
+ env["POETRY_VIRTUALENVS_PATH"] = str(venv_dir.parent.absolute())
+
+ return env
+
+
+def get_poetry_executable(venv_dir):
+ """Get poetry executable path in the virtual environment."""
+ if sys.platform == "win32":
+ return venv_dir / "Scripts" / "poetry.exe"
+ else:
+ return venv_dir / "bin" / "poetry"
+
+
+def install_dependencies(venv_python, venv_dir, script_dir):
+ """
+ Install project dependencies using poetry.
+
+ Configures poetry to use the external virtual environment and installs
+ all dependencies from pyproject.toml.
+ """
+ print("Installing dependencies with poetry...")
+ venv_env = get_venv_env(venv_dir)
+ poetry_exe = get_poetry_executable(venv_dir)
+
+ # Configure poetry settings
+ print("Configuring poetry settings...")
+ try:
+ # Set poetry to not create venvs in project directory
+ subprocess.run(
+ [str(poetry_exe), "config", "virtualenvs.in-project", "false"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ # Set poetry virtualenvs path to our venv directory's parent
+ # This forces poetry to look for/create venvs in the same location as our venv
+ subprocess.run(
+ [
+ str(poetry_exe),
+ "config",
+ "virtualenvs.path",
+ str(venv_dir.parent.absolute()),
+ ],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ # Ensure poetry can use virtual environments
+ subprocess.run(
+ [str(poetry_exe), "config", "virtualenvs.create", "true"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ except Exception as e:
+ print(f"Warning: Failed to configure poetry settings: {e}")
+ # Continue anyway, as these may not be critical
+
+ # Remove any existing poetry virtual environments for this project
+ # This ensures poetry will use our specified virtual environment
+ print("Removing any existing poetry virtual environments...")
+ remove_result = subprocess.run(
+ [str(poetry_exe), "env", "remove", "--all"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=False, # Don't fail if no venv exists
+ capture_output=True,
+ text=True,
+ )
+ if remove_result.stdout:
+ print(remove_result.stdout.strip())
+ if remove_result.stderr:
+ stderr = remove_result.stderr.strip()
+ # Ignore "No virtualenv has been activated" error
+ if "no virtualenv" not in stderr.lower():
+ print(remove_result.stderr.strip())
+
+ # Verify the virtual environment Python is valid before configuring poetry
+ print(f"Verifying virtual environment Python at: {venv_python}")
+ if not venv_python.exists():
+ print(f"ERROR: Virtual environment Python not found at: {venv_python}")
+ sys.exit(1)
+
+ python_version_result = subprocess.run(
+ [str(venv_python), "--version"],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ if python_version_result.returncode != 0:
+ print(f"ERROR: Virtual environment Python is not executable: {venv_python}")
+ sys.exit(1)
+ print(f" Python version: {python_version_result.stdout.strip()}")
+
+ # Instead of using poetry env use (which creates new venvs), we'll use a different approach:
+ # 1. Create a symlink from poetry's expected venv location to our venv
+ # 2. Or, directly use poetry install with VIRTUAL_ENV set (poetry should detect it)
+ #
+ # The issue is that poetry env use creates venvs with hash-based names in its cache.
+ # We need to work around this by either:
+ # - Creating a symlink from poetry's expected location to our venv
+ # - Or bypassing poetry env use entirely and using poetry install directly
+
+ # Strategy: Create a symlink from poetry's expected venv location to our venv
+ # Poetry creates venvs with names like: --py
+ # We need to find out what poetry would name our venv, then create a symlink
+
+ print(f"Configuring poetry to use virtual environment at: {venv_dir}")
+
+ # Get poetry's expected venv name by checking what it would create
+ # First, let's try poetry env use, but catch if it tries to create a new venv
+ result = subprocess.run(
+ [str(poetry_exe), "env", "use", str(venv_python)],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=False,
+ capture_output=True,
+ text=True,
+ )
+
+ output_text = (result.stdout or "") + (result.stderr or "")
+
+ # If poetry is creating a new venv, we need to stop it and use a different approach
+ if (
+ "Creating virtualenv" in output_text
+ or "Creating virtual environment" in output_text
+ or "Using virtualenv:" in output_text
+ ):
+ print("Poetry is attempting to create/use a new virtual environment.")
+ print(
+ "Stopping this and using alternative approach: creating symlink to our venv..."
+ )
+
+ # Extract the venv path poetry is trying to create/use
+ # Look for patterns like "Using virtualenv: /path/to/venv" or "Creating virtualenv name in /path"
+ import re
+
+ poetry_venv_path = None
+
+ # Try to extract from "Using virtualenv: /path/to/venv"
+ using_match = re.search(r"Using virtualenv:\s*([^\s\n]+)", output_text)
+ if using_match:
+ poetry_venv_path = Path(using_match.group(1))
+
+ # If not found, try to extract from "Creating virtualenv name in /path"
+ if not poetry_venv_path:
+ creating_match = re.search(
+ r"Creating virtualenv[^\n]*in\s+([^\s\n]+)", output_text
+ )
+ if creating_match:
+ venv_dir_path = Path(creating_match.group(1))
+ # Extract venv name from the output
+ name_match = re.search(r"Creating virtualenv\s+([^\s]+)", output_text)
+ if name_match:
+ venv_name = name_match.group(1)
+ poetry_venv_path = venv_dir_path / venv_name
+
+ # If still not found, try to find any path in pypoetry/virtualenvs
+ if not poetry_venv_path:
+ pypoetry_match = re.search(
+ r"([^\s]+pypoetry[^\s]*virtualenvs[^\s]+)", output_text
+ )
+ if pypoetry_match:
+ poetry_venv_path = Path(pypoetry_match.group(1))
+
+ if poetry_venv_path:
+ print(f"Poetry wants to create/use venv at: {poetry_venv_path}")
+
+ # Remove the venv poetry just created (if it exists)
+ if poetry_venv_path.exists() and poetry_venv_path.is_dir():
+ print(f"Removing poetry's newly created venv: {poetry_venv_path}")
+ shutil.rmtree(poetry_venv_path, ignore_errors=True)
+
+ # Create a symlink from poetry's expected location to our venv
+ print(f"Creating symlink from {poetry_venv_path} to {venv_dir}")
+ try:
+ if poetry_venv_path.exists() or poetry_venv_path.is_symlink():
+ if poetry_venv_path.is_symlink():
+ poetry_venv_path.unlink()
+ elif poetry_venv_path.is_dir():
+ shutil.rmtree(poetry_venv_path, ignore_errors=True)
+ poetry_venv_path.parent.mkdir(parents=True, exist_ok=True)
+ poetry_venv_path.symlink_to(venv_dir)
+ print(f"✓ Symlink created successfully")
+ except Exception as e:
+ print(f"WARNING: Failed to create symlink: {e}")
+ print("Will try to use poetry install directly with VIRTUAL_ENV set")
+ else:
+ print("Could not determine poetry's venv path from output")
+ print(f"Output was: {output_text}")
+ else:
+ if result.stdout:
+ print(result.stdout.strip())
+ if result.stderr:
+ stderr = result.stderr.strip()
+ if stderr:
+ print(f"Poetry output: {stderr}")
+
+ # Verify poetry is using the correct virtual environment BEFORE running lock/install
+ # This is critical - if poetry uses the wrong venv, dependencies won't be installed correctly
+ print("Verifying poetry virtual environment...")
+
+ # Wait a moment for symlink to be recognized (if we created one)
+ import time
+
+ time.sleep(0.5)
+
+ verify_result = subprocess.run(
+ [str(poetry_exe), "env", "info", "--path"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=False, # Don't fail if poetry hasn't activated a venv yet
+ capture_output=True,
+ text=True,
+ )
+
+ expected_venv_path_resolved = str(Path(venv_dir.absolute()).resolve())
+
+ # If poetry env info fails, it might mean poetry hasn't activated the venv yet
+ if verify_result.returncode != 0:
+ print(
+ "Warning: poetry env info failed, poetry may not have activated the virtual environment yet"
+ )
+ print(
+ "This may be okay if we created a symlink - poetry should use it when running commands"
+ )
+ poetry_venv_path_resolved = None
+ else:
+ poetry_venv_path = verify_result.stdout.strip()
+
+ # Normalize paths for comparison (resolve symlinks, etc.)
+ poetry_venv_path_resolved = str(Path(poetry_venv_path).resolve())
+
+ # Only verify path if we successfully got poetry's venv path
+ if poetry_venv_path_resolved is not None:
+ if poetry_venv_path_resolved != expected_venv_path_resolved:
+ print(
+ f"ERROR: Poetry is using {poetry_venv_path}, but expected {expected_venv_path_resolved}"
+ )
+ print(
+ "Poetry must use the virtual environment we created for the build to work correctly."
+ )
+ print("The symlink approach may not have worked. Please check the symlink.")
+ sys.exit(1)
+ else:
+ print(
+ f"✓ Poetry is correctly using virtual environment: {poetry_venv_path}"
+ )
+ else:
+ print("Warning: Could not verify poetry virtual environment path")
+ print(
+ "Continuing anyway - poetry should use the venv via symlink or VIRTUAL_ENV"
+ )
+
+ # Update lock file and install dependencies
+ # Re-verify environment before each command to ensure poetry doesn't switch venvs
+ def verify_poetry_env():
+ verify_result = subprocess.run(
+ [str(poetry_exe), "env", "info", "--path"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=False, # Don't fail if poetry env info is not available
+ capture_output=True,
+ text=True,
+ )
+ if verify_result.returncode == 0:
+ current_path = str(Path(verify_result.stdout.strip()).resolve())
+ expected_path = str(Path(venv_dir.absolute()).resolve())
+ if current_path != expected_path:
+ print(
+ f"ERROR: Poetry switched to different virtual environment: {current_path}"
+ )
+ print(f"Expected: {expected_path}")
+ sys.exit(1)
+ # If poetry env info fails, we can't verify, but continue anyway
+ # Poetry should still use the Python we specified via env use
+ return True
+
+ print("Running poetry lock...")
+ verify_poetry_env() # Verify before lock
+ result = subprocess.run(
+ [str(poetry_exe), "lock"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ if result.stdout:
+ print(result.stdout)
+ if result.stderr:
+ print(result.stderr)
+ verify_poetry_env() # Verify after lock
+
+ print("Running poetry install...")
+ verify_poetry_env() # Verify before install
+ result = subprocess.run(
+ [str(poetry_exe), "install"],
+ cwd=str(script_dir),
+ env=venv_env,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ if result.stdout:
+ print(result.stdout)
+ if result.stderr:
+ print(result.stderr)
+ verify_poetry_env() # Verify after install
+
+ # Verify installation by checking if key packages are installed
+ # This is critical - if packages aren't installed, PyInstaller won't find them
+ print("Verifying package installation...")
+ test_packages = ["torch", "transformers", "tokenizers"]
+ missing_packages = []
+ for package in test_packages:
+ test_result = subprocess.run(
+ [str(venv_python), "-c", f"import {package}; print({package}.__version__)"],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ if test_result.returncode == 0:
+ version = test_result.stdout.strip()
+ print(f" ✓ {package} {version} installed")
+ else:
+ error_msg = (
+ test_result.stderr.strip() if test_result.stderr else "Unknown error"
+ )
+ print(f" ✗ {package} NOT found in virtual environment: {error_msg}")
+ missing_packages.append(package)
+
+ if missing_packages:
+ print(
+ f"\nERROR: Required packages are missing from virtual environment: {', '.join(missing_packages)}"
+ )
+ print("This indicates that poetry did not install dependencies correctly.")
+ print("Please check the poetry install output above for errors.")
+ sys.exit(1)
+
+ print("Dependencies installed successfully")
+
+
+def check_pyinstaller(venv_python):
+ """
+ Check if PyInstaller is installed.
+
+ PyInstaller should be installed via poetry install from pyproject.toml.
+ If it's missing, it means poetry install failed or didn't complete.
+ """
+ try:
+ result = subprocess.run(
+ [
+ str(venv_python),
+ "-c",
+ "import PyInstaller; print(PyInstaller.__version__)",
+ ],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ version = result.stdout.strip()
+ print(f"PyInstaller version: {version}")
+ return True
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ print("ERROR: PyInstaller is not installed in the virtual environment")
+ print("PyInstaller should be installed via poetry install from pyproject.toml")
+ print(
+ "This indicates that poetry install may have failed or didn't complete correctly."
+ )
+ return False
+
+
+def build():
+ """
+ Execute the complete build process.
+
+ Steps:
+ 1. Setup virtual environment (outside project directory)
+ 2. Update pip and install poetry 2.2.1
+ 3. Install project dependencies (including PyInstaller from pyproject.toml)
+ 4. Build executable using PyInstaller
+ """
+ script_dir = Path(__file__).parent
+
+ venv_dir = setup_venv()
+ venv_python = get_venv_python(venv_dir)
+
+ update_pip(venv_python)
+ install_poetry(venv_python)
+ install_dependencies(venv_python, venv_dir, script_dir)
+
+ if not check_pyinstaller(venv_python):
+ sys.exit(1)
+
+ print("=" * 50)
+ print("IoTDB AINode PyInstaller Build Script")
+ print("=" * 50)
+ print()
+
+ print("Starting build...")
+ print()
+
+ spec_file = script_dir / "ainode.spec"
+ if not spec_file.exists():
+ print(f"Error: Spec file not found: {spec_file}")
+ sys.exit(1)
+
+ # Set up environment for PyInstaller
+ # When using venv_python, PyInstaller should automatically detect the virtual environment
+ # and use its site-packages. We should NOT manually add site-packages to pathex.
+ pyinstaller_env = get_venv_env(venv_dir)
+
+ # Verify we're using the correct Python
+ python_prefix_result = subprocess.run(
+ [str(venv_python), "-c", "import sys; print(sys.prefix)"],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ python_prefix = python_prefix_result.stdout.strip()
+ print(f"Using Python from: {python_prefix}")
+
+ # Ensure PyInstaller runs from the virtual environment
+ # The venv_python should automatically set up the correct environment
+ cmd = [
+ str(venv_python),
+ "-m",
+ "PyInstaller",
+ "--noconfirm",
+ str(spec_file),
+ ]
+
+ try:
+ subprocess.run(cmd, check=True, env=pyinstaller_env)
+ except subprocess.CalledProcessError as e:
+ print(f"\nError: Build failed: {e}")
+ sys.exit(1)
+
+ print()
+ print("=" * 50)
+ print("Build completed!")
+ print("=" * 50)
+ print()
+ print("Executable location: dist/ainode/ainode")
+ print()
+ print("Usage:")
+ print(" ./dist/ainode/ainode start # Start AINode")
+ print()
+
+
+if __name__ == "__main__":
+ build()
diff --git a/iotdb-core/ainode/iotdb/ainode/core/config.py b/iotdb-core/ainode/iotdb/ainode/core/config.py
index 328f4a5faa610..afcf0683d7d04 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/config.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/config.py
@@ -16,6 +16,7 @@
# under the License.
#
import os
+import re
from iotdb.ainode.core.constant import (
AINODE_BUILD_INFO,
@@ -37,8 +38,6 @@
AINODE_INFERENCE_MODEL_MEM_USAGE_MAP,
AINODE_LOG_DIR,
AINODE_MODELS_DIR,
- AINODE_ROOT_CONF_DIRECTORY_NAME,
- AINODE_ROOT_DIR,
AINODE_RPC_ADDRESS,
AINODE_RPC_PORT,
AINODE_SYSTEM_DIR,
@@ -315,9 +314,7 @@ def _load_config_from_file(self) -> None:
if "ainode_id" in system_configs:
self._config.set_ainode_id(int(system_configs["ainode_id"]))
- git_file = os.path.join(
- AINODE_ROOT_DIR, AINODE_ROOT_CONF_DIRECTORY_NAME, AINODE_CONF_GIT_FILE_NAME
- )
+ git_file = os.path.join(AINODE_CONF_DIRECTORY_NAME, AINODE_CONF_GIT_FILE_NAME)
if os.path.exists(git_file):
git_configs = load_properties(git_file)
if "git.commit.id.abbrev" in git_configs:
@@ -327,9 +324,7 @@ def _load_config_from_file(self) -> None:
build_info += "-dev"
self._config.set_build_info(build_info)
- pom_file = os.path.join(
- AINODE_ROOT_DIR, AINODE_ROOT_CONF_DIRECTORY_NAME, AINODE_CONF_POM_FILE_NAME
- )
+ pom_file = os.path.join(AINODE_CONF_DIRECTORY_NAME, AINODE_CONF_POM_FILE_NAME)
if os.path.exists(pom_file):
pom_configs = load_properties(pom_file)
if "version" in pom_configs:
@@ -453,18 +448,29 @@ def get_config(self) -> AINodeConfig:
return self._config
+def unescape_java_properties(value: str) -> str:
+ """Undo Java Properties escaping rules"""
+ value = value.replace("\\t", "\t")
+ value = value.replace("\\n", "\n")
+ value = value.replace("\\r", "\r")
+ value = value.replace("\\\\", "\\")
+ value = re.sub(r"\\([:=\s])", r"\1", value)
+ return value
+
+
def load_properties(filepath, sep="=", comment_char="#"):
"""
Read the file passed as parameter as a properties file.
"""
props = {}
- with open(filepath, "rt") as f:
+ with open(filepath, "rt", encoding="utf-8") as f:
for line in f:
l = line.strip()
if l and not l.startswith(comment_char):
- key_value = l.split(sep)
+ key_value = l.split(sep, 1)
key = key_value[0].strip()
- value = sep.join(key_value[1:]).strip().strip('"')
+ value = key_value[1].strip().strip('"')
+ value = unescape_java_properties(value)
props[key] = value
return props
diff --git a/iotdb-core/ainode/iotdb/ainode/core/constant.py b/iotdb-core/ainode/iotdb/ainode/core/constant.py
index f4547e99803e0..b9923d3e3ee7e 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/constant.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/constant.py
@@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
#
-import inspect
import logging
import os
from enum import Enum
@@ -24,10 +23,10 @@
from iotdb.ainode.core.model.model_enums import BuiltInModelType
from iotdb.thrift.common.ttypes import TEndPoint
+IOTDB_AINODE_HOME = os.getenv("IOTDB_AINODE_HOME", "")
AINODE_VERSION_INFO = "UNKNOWN"
AINODE_BUILD_INFO = "UNKNOWN"
-AINODE_CONF_DIRECTORY_NAME = "conf"
-AINODE_ROOT_CONF_DIRECTORY_NAME = "conf"
+AINODE_CONF_DIRECTORY_NAME = os.path.join(IOTDB_AINODE_HOME, "conf")
AINODE_CONF_FILE_NAME = "iotdb-ainode.properties"
AINODE_CONF_GIT_FILE_NAME = "git.properties"
AINODE_CONF_POM_FILE_NAME = "pom.properties"
@@ -62,13 +61,12 @@
)
# AINode folder structure
-AINODE_ROOT_DIR = os.path.dirname(
- os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
-)
-AINODE_MODELS_DIR = "data/ainode/models"
-AINODE_BUILTIN_MODELS_DIR = "data/ainode/models/weights" # For built-in models, we only need to store their weights and config.
-AINODE_SYSTEM_DIR = "data/ainode/system"
-AINODE_LOG_DIR = "logs"
+AINODE_MODELS_DIR = os.path.join(IOTDB_AINODE_HOME, "data/ainode/models")
+AINODE_BUILTIN_MODELS_DIR = os.path.join(
+ IOTDB_AINODE_HOME, "data/ainode/models/weights"
+) # For built-in models, we only need to store their weights and config.
+AINODE_SYSTEM_DIR = os.path.join(IOTDB_AINODE_HOME, "data/ainode/system")
+AINODE_LOG_DIR = os.path.join(IOTDB_AINODE_HOME, "logs")
# AINode log
LOG_FILE_TYPE = ["all", "info", "warn", "error"]
diff --git a/iotdb-core/ainode/iotdb/ainode/core/inference/pool_controller.py b/iotdb-core/ainode/iotdb/ainode/core/inference/pool_controller.py
index 069a6b9ced6d3..00bb3b5568b0f 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/inference/pool_controller.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/inference/pool_controller.py
@@ -50,7 +50,6 @@
from iotdb.ainode.core.util.thread_name import ThreadName
logger = Logger()
-MODEL_MANAGER = ModelManager()
class PoolController:
@@ -59,6 +58,7 @@ class PoolController:
"""
def __init__(self, result_queue: mp.Queue):
+ self._model_manager = ModelManager()
# structure: {model_id: {device_id: PoolGroup}}
self._request_pool_map: Dict[str, Dict[str, PoolGroup]] = {}
self._new_pool_id = AtomicInt()
@@ -82,24 +82,25 @@ def first_req_init(self, model_id: str):
"""
Initialize the pools when the first request for the given model_id arrives.
"""
- if not self.has_request_pools(model_id, device.index):
- # TODO: choose a device based on some strategy
- device = self.DEFAULT_DEVICE
- actions = self._pool_scheduler.schedule(model_id, device)
- for action in actions:
- if action.action == ScaleActionType.SCALE_UP:
- # initialize the first pool
- self._first_pool_init(action.model_id, str(device))
- # start a background thread to expand pools
- expand_thread = threading.Thread(
- target=self._expand_pools_on_device,
- args=(action.model_id, str(device), action.amount - 1),
- daemon=True,
- )
- expand_thread.start()
- elif action.action == ScaleActionType.SCALE_DOWN:
- # TODO: implement scale down logic
- pass
+ pass
+ # if not self.has_request_pools(model_id, device.index):
+ # # TODO: choose a device based on some strategy
+ # device = self.DEFAULT_DEVICE
+ # actions = self._pool_scheduler.schedule(model_id, device)
+ # for action in actions:
+ # if action.action == ScaleActionType.SCALE_UP:
+ # # initialize the first pool
+ # self._first_pool_init(action.model_id, str(device))
+ # # start a background thread to expand pools
+ # expand_thread = threading.Thread(
+ # target=self._expand_pools_on_device,
+ # args=(action.model_id, str(device), action.amount - 1),
+ # daemon=True,
+ # )
+ # expand_thread.start()
+ # elif action.action == ScaleActionType.SCALE_DOWN:
+ # # TODO: implement scale down logic
+ # pass
def _first_pool_init(self, model_id: str, device_str: str):
"""
@@ -194,7 +195,7 @@ def _load_model_task(self, model_id: str, device_id_list: list[str]):
def _load_model_on_device_task(device_id: str):
if not self.has_request_pools(model_id, device_id):
actions = self._pool_scheduler.schedule_load_model_to_device(
- MODEL_MANAGER.get_model_info(model_id), device_id
+ self._model_manager.get_model_info(model_id), device_id
)
for action in actions:
if action.action == ScaleActionType.SCALE_UP:
@@ -221,7 +222,7 @@ def _unload_model_task(self, model_id: str, device_id_list: list[str]):
def _unload_model_on_device_task(device_id: str):
if self.has_request_pools(model_id, device_id):
actions = self._pool_scheduler.schedule_unload_model_from_device(
- MODEL_MANAGER.get_model_info(model_id), device_id
+ self._model_manager.get_model_info(model_id), device_id
)
for action in actions:
if action.action == ScaleActionType.SCALE_DOWN:
@@ -256,7 +257,7 @@ def _expand_pools_on_device(self, model_id: str, device_id: str, count: int):
def _expand_pool_on_device(*_):
result_queue = mp.Queue()
pool_id = self._new_pool_id.get_and_increment()
- model_info = MODEL_MANAGER.get_model_info(model_id)
+ model_info = self._model_manager.get_model_info(model_id)
model_type = model_info.model_type
if model_type == BuiltInModelType.SUNDIAL.value:
config = SundialConfig()
@@ -277,7 +278,7 @@ def _expand_pool_on_device(*_):
)
pool.start()
self._register_pool(model_id, device_id, pool_id, pool, result_queue)
- if not pool.ready_event.wait(timeout=30):
+ if not pool.ready_event.wait(timeout=300):
logger.error(
f"[Inference][Device-{device_id}][Pool-{pool_id}] Pool failed to be ready in time"
)
diff --git a/iotdb-core/ainode/iotdb/ainode/core/inference/pool_scheduler/basic_pool_scheduler.py b/iotdb-core/ainode/iotdb/ainode/core/inference/pool_scheduler/basic_pool_scheduler.py
index 5ee1b4f0c9a29..6a2bd2b619aa7 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/inference/pool_scheduler/basic_pool_scheduler.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/inference/pool_scheduler/basic_pool_scheduler.py
@@ -41,8 +41,6 @@
logger = Logger()
-MODEL_MANAGER = ModelManager()
-
def _estimate_shared_pool_size_by_total_mem(
device: torch.device,
@@ -106,6 +104,7 @@ class BasicPoolScheduler(AbstractPoolScheduler):
def __init__(self, request_pool_map: Dict[str, Dict[str, PoolGroup]]):
super().__init__(request_pool_map)
+ self._model_manager = ModelManager()
def schedule(self, model_id: str) -> List[ScaleAction]:
"""
@@ -123,7 +122,7 @@ def schedule_load_model_to_device(
self, model_info: ModelInfo, device_id: str
) -> List[ScaleAction]:
existing_model_infos = [
- MODEL_MANAGER.get_model_info(existing_model_id)
+ self._model_manager.get_model_info(existing_model_id)
for existing_model_id, pool_group_map in self._request_pool_map.items()
if existing_model_id != model_info.model_id and device_id in pool_group_map
]
@@ -140,7 +139,7 @@ def schedule_unload_model_from_device(
self, model_info: ModelInfo, device_id: str
) -> List[ScaleAction]:
existing_model_infos = [
- MODEL_MANAGER.get_model_info(existing_model_id)
+ self._model_manager.get_model_info(existing_model_id)
for existing_model_id, pool_group_map in self._request_pool_map.items()
if existing_model_id != model_info.model_id and device_id in pool_group_map
]
diff --git a/iotdb-core/ainode/iotdb/ainode/core/log.py b/iotdb-core/ainode/iotdb/ainode/core/log.py
index a7a05d0ea9298..fd121b26349d7 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/log.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/log.py
@@ -116,7 +116,9 @@ def gzip_rotator(src: str, dst: str):
file_handler.setFormatter(self.logger_format)
self.logger.addHandler(file_handler)
- self.info(f"Logger init successfully.")
+ self.info(
+ f"Logger init successfully, log file prefix name {log_file_name_prefix}."
+ )
# interfaces
def debug(self, *msg):
diff --git a/iotdb-core/ainode/iotdb/ainode/core/script.py b/iotdb-core/ainode/iotdb/ainode/core/script.py
index 82ed32bc5a245..38653d7ceab9f 100644
--- a/iotdb-core/ainode/iotdb/ainode/core/script.py
+++ b/iotdb-core/ainode/iotdb/ainode/core/script.py
@@ -15,68 +15,27 @@
# specific language governing permissions and limitations
# under the License.
#
-import os
-import shutil
+import multiprocessing
import sys
import torch.multiprocessing as mp
from iotdb.ainode.core.ai_node import AINode
-from iotdb.ainode.core.config import AINodeDescriptor
-from iotdb.ainode.core.constant import TSStatusCode
-from iotdb.ainode.core.exception import MissingConfigError
from iotdb.ainode.core.log import Logger
-from iotdb.ainode.core.rpc.client import ClientManager
-from iotdb.thrift.common.ttypes import TAINodeLocation, TEndPoint
logger = Logger()
-def remove_ainode(arguments):
- # Delete the current node
- if len(arguments) == 2:
- target_ainode_id = AINodeDescriptor().get_config().get_ainode_id()
- target_rpc_address = AINodeDescriptor().get_config().get_ain_rpc_address()
- target_rpc_port = AINodeDescriptor().get_config().get_ain_rpc_port()
-
- # Delete the node with a given id
- elif len(arguments) == 3:
- target_ainode_id = int(arguments[2])
- ainode_configuration_map = (
- ClientManager()
- .borrow_config_node_client()
- .get_ainode_configuration(target_ainode_id)
- )
-
- end_point = ainode_configuration_map[target_ainode_id].location.internalEndPoint
- target_rpc_address = end_point.ip
- target_rpc_port = end_point.port
-
- if not end_point:
- raise MissingConfigError(
- "NodeId: {} not found in cluster ".format(target_ainode_id)
- )
-
- logger.info("Got target AINode id: {}".format(target_ainode_id))
-
- else:
- raise MissingConfigError("Invalid command")
-
- location = TAINodeLocation(
- target_ainode_id, TEndPoint(target_rpc_address, target_rpc_port)
- )
- status = ClientManager().borrow_config_node_client().node_remove(location)
-
- if status.code == TSStatusCode.SUCCESS_STATUS.get_status_code():
- logger.info("IoTDB-AINode has successfully removed.")
- if os.path.exists(AINodeDescriptor().get_config().get_ain_models_dir()):
- shutil.rmtree(AINodeDescriptor().get_config().get_ain_models_dir())
-
-
def main():
+ # Handle PyInstaller: filter out Python arguments that might be passed to subprocesses
+ # These arguments are not needed in frozen executables and cause warnings
+ # Note: This filtering should happen AFTER freeze_support() has handled child processes
+ if getattr(sys, "frozen", False):
+ python_args_to_filter = ["-I", "-B", "-S", "-E", "-O", "-OO"]
+ sys.argv = [arg for arg in sys.argv if arg not in python_args_to_filter]
+
+ logger.info(f"Starting IoTDB-AINode process with sys argv {sys.argv}.")
arguments = sys.argv
- # load config
- AINodeDescriptor()
if len(arguments) == 1:
logger.info("Command line argument must be specified.")
return
@@ -89,19 +48,22 @@ def main():
ai_node = AINode()
ai_node.start()
except Exception as e:
- logger.error("Start AINode failed, because of: {}".format(e))
- sys.exit(1)
- # TODO: remove the following function, and add a destroy script
- elif command == "remove":
- try:
- logger.info("Removing AINode...")
- remove_ainode(arguments)
- except Exception as e:
- logger.error("Remove AINode failed, because of: {}".format(e))
+ logger.warning("Start AINode failed, because of: {}".format(e))
sys.exit(1)
else:
logger.warning("Unknown argument: {}.".format(command))
if __name__ == "__main__":
+ # PyInstaller multiprocessing support
+ # freeze_support() is essential for PyInstaller frozen executables on all platforms
+ # It detects if the current process is a multiprocessing child process
+ # If it is, it executes the child process target function and exits
+ # If it's not, it returns immediately and continues with main() execution
+ # This prevents child processes from executing the main application logic
+ if getattr(sys, "frozen", False):
+ # Call freeze_support() for both standard multiprocessing and torch.multiprocessing
+ multiprocessing.freeze_support()
+ mp.freeze_support()
+
main()
diff --git a/iotdb-core/ainode/poetry.lock b/iotdb-core/ainode/poetry.lock
index 7a22506d06fe5..a2d8ce581f16e 100644
--- a/iotdb-core/ainode/poetry.lock
+++ b/iotdb-core/ainode/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]]
name = "alembic"
@@ -20,6 +20,18 @@ typing-extensions = ">=4.12"
[package.extras]
tz = ["tzdata"]
+[[package]]
+name = "altgraph"
+version = "0.17.4"
+description = "Python graph (network) package"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"},
+ {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"},
+]
+
[[package]]
name = "black"
version = "25.1.0"
@@ -510,6 +522,22 @@ cli = ["jsonargparse[signatures] (>=4.38.0)", "tomlkit"]
docs = ["requests (>=2.0.0)"]
typing = ["mypy (>=1.0.0)", "types-setuptools"]
+[[package]]
+name = "macholib"
+version = "1.16.3"
+description = "Mach-O header analysis and editing"
+optional = false
+python-versions = "*"
+groups = ["main"]
+markers = "sys_platform == \"darwin\""
+files = [
+ {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"},
+ {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"},
+]
+
+[package.dependencies]
+altgraph = ">=0.17"
+
[[package]]
name = "mako"
version = "1.3.10"
@@ -1141,6 +1169,19 @@ numpy = ">=1.4"
[package.extras]
test = ["pytest", "pytest-cov", "scipy"]
+[[package]]
+name = "pefile"
+version = "2023.2.7"
+description = "Python PE parsing module"
+optional = false
+python-versions = ">=3.6.0"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"},
+ {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"},
+]
+
[[package]]
name = "platformdirs"
version = "4.4.0"
@@ -1181,6 +1222,57 @@ files = [
dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pyreadline ; os_name == \"nt\"", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""]
test = ["pytest", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "setuptools", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""]
+[[package]]
+name = "pyinstaller"
+version = "6.16.0"
+description = "PyInstaller bundles a Python application and all its dependencies into a single package."
+optional = false
+python-versions = "<3.15,>=3.8"
+groups = ["main"]
+files = [
+ {file = "pyinstaller-6.16.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:7fd1c785219a87ca747c21fa92f561b0d2926a7edc06d0a0fe37f3736e00bd7a"},
+ {file = "pyinstaller-6.16.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b756ddb9007b8141c5476b553351f9d97559b8af5d07f9460869bfae02be26b0"},
+ {file = "pyinstaller-6.16.0-py3-none-manylinux2014_i686.whl", hash = "sha256:0a48f55b85ff60f83169e10050f2759019cf1d06773ad1c4da3a411cd8751058"},
+ {file = "pyinstaller-6.16.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:73ba72e04fcece92e32518bbb1e1fb5ac2892677943dfdff38e01a06e8742851"},
+ {file = "pyinstaller-6.16.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:b1752488248f7899281b17ca3238eefb5410521291371a686a4f5830f29f52b3"},
+ {file = "pyinstaller-6.16.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ba618a61627ee674d6d68e5de084ba17c707b59a4f2a856084b3999bdffbd3f0"},
+ {file = "pyinstaller-6.16.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:c8b7ef536711617e12fef4673806198872033fa06fa92326ad7fd1d84a9fa454"},
+ {file = "pyinstaller-6.16.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:d1ebf84d02c51fed19b82a8abb4df536923abd55bb684d694e1356e4ae2a0ce5"},
+ {file = "pyinstaller-6.16.0-py3-none-win32.whl", hash = "sha256:6d5f8617f3650ff9ef893e2ab4ddbf3c0d23d0c602ef74b5df8fbef4607840c8"},
+ {file = "pyinstaller-6.16.0-py3-none-win_amd64.whl", hash = "sha256:bc10eb1a787f99fea613509f55b902fbd2d8b73ff5f51ff245ea29a481d97d41"},
+ {file = "pyinstaller-6.16.0-py3-none-win_arm64.whl", hash = "sha256:d0af8a401de792c233c32c44b16d065ca9ab8262ee0c906835c12bdebc992a64"},
+ {file = "pyinstaller-6.16.0.tar.gz", hash = "sha256:53559fe1e041a234f2b4dcc3288ea8bdd57f7cad8a6644e422c27bb407f3edef"},
+]
+
+[package.dependencies]
+altgraph = "*"
+macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""}
+packaging = ">=22.0"
+pefile = {version = ">=2022.5.30,<2024.8.26 || >2024.8.26", markers = "sys_platform == \"win32\""}
+pyinstaller-hooks-contrib = ">=2025.8"
+pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""}
+setuptools = ">=42.0.0"
+
+[package.extras]
+completion = ["argcomplete"]
+hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"]
+
+[[package]]
+name = "pyinstaller-hooks-contrib"
+version = "2025.9"
+description = "Community maintained hooks for PyInstaller"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "pyinstaller_hooks_contrib-2025.9-py3-none-any.whl", hash = "sha256:ccbfaa49399ef6b18486a165810155e5a8d4c59b41f20dc5da81af7482aaf038"},
+ {file = "pyinstaller_hooks_contrib-2025.9.tar.gz", hash = "sha256:56e972bdaad4e9af767ed47d132362d162112260cbe488c9da7fee01f228a5a6"},
+]
+
+[package.dependencies]
+packaging = ">=22.0"
+setuptools = ">=42.0.0"
+
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
@@ -1208,6 +1300,19 @@ files = [
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
]
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.3"
+description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"},
+ {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"},
+]
+
[[package]]
name = "pyyaml"
version = "6.0.3"
@@ -2222,5 +2327,5 @@ zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.1"
-python-versions = ">=3.11,<=3.13.5"
-content-hash = "08746b5f45ee9836e2d4368f8e394d55d27f556789fb1254e7584b9eedd49d5b"
+python-versions = ">=3.11.0,<3.14.0"
+content-hash = "37441b66c5cf440280823bb97248fa10b0ca20d7fef4d20aec616e3860a31736"
diff --git a/iotdb-core/ainode/pom.xml b/iotdb-core/ainode/pom.xml
index 63c8f2e3f554c..1a3be76b63369 100644
--- a/iotdb-core/ainode/pom.xml
+++ b/iotdb-core/ainode/pom.xml
@@ -87,9 +87,6 @@
target
-
- venv
-
@@ -142,6 +139,9 @@
${project.build.sourceEncoding}
+
copy-python-dependencies
generate-sources
@@ -212,55 +212,6 @@
org.codehaus.mojo
exec-maven-plugin
-
-
- python-venv
- initialize
-
- exec
-
-
- ${python.exe.bin}
-
- -m
- venv
- ./venv
-
-
-
-
-
- python-upgrade-pip
- initialize
-
- exec
-
-
- ${python.venv.bin}${python.exe.bin}
-
- -m
- pip
- install
- --upgrade
- pip
-
-
-
-
-
- python-install-poetry
- initialize
-
- exec
-
-
- ${python.venv.bin}pip3
-
- install
- poetry
-
-
-
python-compile
compile
@@ -268,44 +219,13 @@
exec
- ${python.venv.bin}poetry
+ ${python.exe.bin}
+ ${project.basedir}
- build
+ build_binary.py
-
-
diff --git a/iotdb-core/ainode/pyproject.toml b/iotdb-core/ainode/pyproject.toml
index 634f3e09a7445..331cb8ab32a34 100644
--- a/iotdb-core/ainode/pyproject.toml
+++ b/iotdb-core/ainode/pyproject.toml
@@ -76,24 +76,7 @@ exclude = [
]
[tool.poetry.dependencies]
-python = ">=3.11,<=3.13.5"
-
-# Core scientific stack
-numpy = [
- { version = "^2.3.2", python = ">=3.10" },
- { version = "^1.26.4", python = ">=3.9,<3.10" }
-]
-scipy = [
- { version = "^1.12.0", python = ">=3.10" },
- { version = "^1.11.4", python = ">=3.9,<3.10" }
-]
-pandas = "^2.3.2"
-scikit-learn = [
- { version = "^1.7.1", python = ">=3.10" },
- { version = "^1.5.2", python = ">=3.9,<3.10" }
-]
-statsmodels = "^0.14.5"
-sktime = "0.38.5"
+python = ">=3.11.0,<3.14.0"
# ---- DL / HF stack ----
torch = ">=2.7.0"
@@ -104,6 +87,14 @@ huggingface_hub = "^0.34.4"
safetensors = "^0.6.2"
einops = "^0.8.1"
+# ---- Core scientific stack ----
+numpy = "^2.3.2"
+scipy = "^1.12.0"
+pandas = "^2.3.2"
+scikit-learn = "^1.7.1"
+statsmodels = "^0.14.5"
+sktime = "0.38.5"
+
# ---- Optimizers / utils ----
optuna = "^4.4.0"
psutil = "^7.0.0"
@@ -116,6 +107,7 @@ tzlocal = "^5.3.1"
hf_xet = ">=1.1.9"
# ---- Tooling ----
+pyinstaller = "6.16.0"
black = "25.1.0"
isort = "6.0.1"
setuptools = ">=75.3.0"
@@ -127,3 +119,4 @@ ainode = "iotdb.ainode.core.script:main"
[tool.isort]
profile = "black"
+skip = ["build", "dist", "target"]
\ No newline at end of file
diff --git a/iotdb-core/ainode/resources/syncPythonVersion.groovy b/iotdb-core/ainode/resources/syncPythonVersion.groovy
index 0061930bfca2f..ecd0f2bdf21a2 100644
--- a/iotdb-core/ainode/resources/syncPythonVersion.groovy
+++ b/iotdb-core/ainode/resources/syncPythonVersion.groovy
@@ -120,41 +120,8 @@ def checkPython() {
}
}
-
-// On Ubuntu it seems that venv is generally available, but the 'ensurepip' command fails.
-// In this case we need to install the python3-venv package. Unfortunately checking the
-// venv is successful in this case, so we need this slightly odd test.
-def checkPythonVenv() {
- print "Detecting venv: "
- try {
- def python = project.properties['python.exe.bin']
- def cmdArray = [python, "-Im", "ensurepip"]
- def process = cmdArray.execute()
- def stdOut = new StringBuilder()
- def stdErr = new StringBuilder()
- process.waitForProcessOutput(stdOut, stdErr)
- if (stdErr.contains("No module named")) {
- println "missing"
- println "--- output of version `python -Im \"ensurepip\"` command ---"
- println output
- println "------------------------------------------------------------"
- allConditionsMet = false
- } else {
- println " OK"
- }
- } catch (Exception e) {
- println "missing"
- println "--- failed with exception ---"
- println e
- e.printStackTrace()
- println "----------------------------------------------------"
- allConditionsMet = false
- }
-}
-
// Check the python environment is setup correctly.
checkPython()
-checkPythonVenv()
if (!allConditionsMet) {
throw new RuntimeException("Not all conditions met, see log for details.")
diff --git a/scripts/conf/ainode-env.sh b/scripts/conf/ainode-env.sh
deleted file mode 100644
index 1ec434ad2d983..0000000000000
--- a/scripts/conf/ainode-env.sh
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# The defaulte venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
-# ain_interpreter_dir=
-
-# Set ain_force_reinstall to 1 to force reinstall AINode
-ain_force_reinstall=0
-
-# don't install dependencies online
-ain_install_offline=0
-
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
-
-# fetch parameters with names
-while getopts "i:t:rnm:" opt; do
- case $opt in
- i)
- p_ain_interpreter_dir="$OPTARG"
- ;;
- r)
- p_ain_force_reinstall=1
- ;;
- t) ;;
- n)
- p_ain_no_dependencies="--no-dependencies"
- ;;
- m)
- p_pypi_mirror="$OPTARG"
- ;;
- \?)
- echo "Invalid option -$OPTARG" >&2
- exit 1
- ;;
- esac
-done
-
-if [ -z "$p_ain_interpreter_dir" ]; then
- echo "No interpreter_dir is set, use default value."
-else
- ain_interpreter_dir="$p_ain_interpreter_dir"
-fi
-
-if [ -z "$p_ain_force_reinstall" ]; then
- echo "No check_version is set, use default value."
-else
- ain_force_reinstall="$p_ain_force_reinstall"
-fi
-echo Script got inputs: "ain_interpreter_dir: $ain_interpreter_dir", "ain_force_reinstall: $ain_force_reinstall"
-
-if [ -z $ain_interpreter_dir ]; then
- $(dirname "$0")/../venv/bin/python3 -c "import sys; print(sys.executable)" &&
- echo "Activate default venv environment" || (
- echo "Creating default venv environment" && python3 -m venv "$(dirname "$0")/../venv"
- )
- ain_interpreter_dir="$SCRIPT_DIR/../venv/bin/python3"
-fi
-echo "Calling venv to check: $ain_interpreter_dir"
-
-# Change the working directory to the parent directory
-cd "$SCRIPT_DIR/.."
-
-echo "Confirming AINode..."
-$ain_interpreter_dir -m pip config set global.disable-pip-version-check true
-$ain_interpreter_dir -m pip list | grep "apache-iotdb-ainode" >/dev/null
-if [ $? -eq 0 ]; then
- if [ $ain_force_reinstall -eq 0 ]; then
- echo "AINode is already installed"
- exit 0
- fi
-fi
-
-ain_only_ainode=1
-
-# if $ain_install_offline is 1 then do not install dependencies
-if [ $ain_install_offline -eq 1 ]; then
- # if offline and not -n, then install dependencies
- if [ -z "$p_ain_no_dependencies" ]; then
- ain_only_ainode=0
- else
- ain_only_ainode=1
- fi
- p_ain_no_dependencies="--no-dependencies"
- echo "Installing AINode offline----without dependencies..."
-fi
-
-if [ $ain_force_reinstall -eq 1 ]; then
- p_ain_force_reinstall="--force-reinstall"
-else
- p_ain_force_reinstall=""
-fi
-
-echo "Installing AINode..."
-cd "$SCRIPT_DIR/../lib/"
-shopt -s nullglob
-for i in *.whl; do
- if [[ $i =~ "ainode" ]]; then
- echo Installing AINode body: $i
- if [ -z "$p_pypi_mirror" ]; then
- $ain_interpreter_dir -m pip install "$i" $p_ain_force_reinstall --no-warn-script-location $p_ain_no_dependencies --find-links https://download.pytorch.org/whl/cpu/torch_stable.html
- else
- $ain_interpreter_dir -m pip install "$i" $p_ain_force_reinstall -i $p_pypi_mirror --no-warn-script-location $p_ain_no_dependencies --find-links https://download.pytorch.org/whl/cpu/torch_stable.html
- fi
- else
- # if ain_only_ainode is 0 then install dependencies
- if [ $ain_only_ainode -eq 0 ]; then
- echo Installing dependencies $i
- if [ -z "$p_pypi_mirror" ]; then
- $ain_interpreter_dir -m pip install "$i" $p_ain_force_reinstall --no-warn-script-location $p_ain_no_dependencies
- else
- $ain_interpreter_dir -m pip install "$i" $p_ain_force_reinstall -i $p_pypi_mirror --no-warn-script-location $p_ain_no_dependencies
- fi
- fi
- fi
- if [ $? -eq 1 ]; then
- echo "Failed to install AINode"
- exit 1
- fi
-done
-echo "AINode is installed successfully"
-exit 0
diff --git a/scripts/conf/windows/ainode-env.bat b/scripts/conf/windows/ainode-env.bat
deleted file mode 100644
index 2c01d411a2cdf..0000000000000
--- a/scripts/conf/windows/ainode-env.bat
+++ /dev/null
@@ -1,129 +0,0 @@
-@REM
-@REM Licensed to the Apache Software Foundation (ASF) under one
-@REM or more contributor license agreements. See the NOTICE file
-@REM distributed with this work for additional information
-@REM regarding copyright ownership. The ASF licenses this file
-@REM to you under the Apache License, Version 2.0 (the
-@REM "License"); you may not use this file except in compliance
-@REM with the License. You may obtain a copy of the License at
-@REM
-@REM http://www.apache.org/licenses/LICENSE-2.0
-@REM
-@REM Unless required by applicable law or agreed to in writing,
-@REM software distributed under the License is distributed on an
-@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-@REM KIND, either express or implied. See the License for the
-@REM specific language governing permissions and limitations
-@REM under the License.
-@REM
-
-@echo off
-
-@REM The defaulte venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
-@REM set ain_interpreter_dir=
-
-@REM Set ain_force_reinstall to 1 to force reinstall ainode
-set ain_force_reinstall=0
-
-@REM don't install dependencies online
-set ain_install_offline=0
-
-pushd %~dp0..\..
-if NOT DEFINED IOTDB_AINODE_HOME set IOTDB_AINODE_HOME=%cd%
-
-:initial
-if "%1"=="" goto done
-set aux=%1
-if "%aux:~0,2%"=="-r" (
- set ain_force_reinstall=1
- shift
- goto initial
-)
-if "%aux:~0,2%"=="-n" (
- set ain_no_dependencies=--no-dependencies
- shift
- goto initial
-)
-if "%aux:~0,1%"=="-" (
- set nome=%aux:~1,250%
-) else (
- set "%nome%=%1"
- set nome=
-)
-shift
-goto initial
-
-:done
-@REM check if the parameters are set
-if "%i%"=="" (
- echo No interpreter_dir is set, use default value.
-) else (
- set ain_interpreter_dir=%i%
-)
-
-echo Script got inputs: ain_interpreter_dir: %ain_interpreter_dir% , ain_force_reinstall: %ain_force_reinstall%
-if "%ain_interpreter_dir%"=="" (
- %IOTDB_AINODE_HOME%//venv//Scripts//python.exe -c "import sys; print(sys.executable)" && (
- echo Activate default venv environment
- ) || (
- echo Creating default venv environment
- python -m venv "%IOTDB_AINODE_HOME%//venv"
- )
- set ain_interpreter_dir="%IOTDB_AINODE_HOME%//venv//Scripts//python.exe"
-)
-
-@REM Switch the working directory to the directory one level above the script
-cd %IOTDB_AINODE_HOME%
-
-echo Confirming ainode
-%ain_interpreter_dir% -m pip config set global.disable-pip-version-check true
-%ain_interpreter_dir% -m pip list | findstr /C:"apache-iotdb-ainode" >nul
-if %errorlevel% == 0 (
- if %ain_force_reinstall% == 0 (
- echo ainode is already installed
- exit /b 0
- )
-)
-
-set ain_only_ainode=1
-@REM if $ain_install_offline is 1 then do not install dependencies
-if %ain_install_offline% == 1 (
- @REM if offline and not -n, then install dependencies
- if "%ain_no_dependencies%"=="" (
- set ain_only_ainode=0
- ) else (
- set ain_only_ainode=1
- )
- set ain_no_dependencies=--no-dependencies
- echo Installing ainode offline----without dependencies...
-)
-
-if %ain_force_reinstall% == 1 (
- set ain_force_reinstall=--force-reinstall
-) else (
- set ain_force_reinstall=
-)
-
-echo Installing ainode...
-@REM Print current work dir
-cd lib
-for %%i in (*.whl *.tar.gz) do (
- echo %%i | findstr "ainode" >nul && (
- echo Installing ainode body: %%i
- %ain_interpreter_dir% -m pip install %%i %ain_force_reinstall% --no-warn-script-location %ain_no_dependencies% --find-links https://download.pytorch.org/whl/cpu/torch_stable.html
- ) || (
- @REM if ain_only_ainode is 0 then install dependencies
- if %ain_only_ainode% == 0 (
- echo Installing dependencies: %%i
- set ain_force_reinstall=--force-reinstall
- %ain_interpreter_dir% -m pip install %%i %ain_force_reinstall% --no-warn-script-location %ain_no_dependencies% --find-links https://download.pytorch.org/whl/cpu/torch_stable.html
- )
- )
- if %errorlevel% == 1 (
- echo Failed to install ainode
- exit /b 1
- )
-)
-echo ainode is installed successfully
-cd ..
-exit /b 0
diff --git a/scripts/sbin/start-ainode.sh b/scripts/sbin/start-ainode.sh
index 4ab202a209ed3..671de54ba1a86 100644
--- a/scripts/sbin/start-ainode.sh
+++ b/scripts/sbin/start-ainode.sh
@@ -23,24 +23,12 @@ echo Starting IoTDB AINode
echo ---------------------------
IOTDB_AINODE_HOME="$(cd "`dirname "$0"`"/..; pwd)"
-
+export IOTDB_AINODE_HOME
echo "IOTDB_AINODE_HOME: $IOTDB_AINODE_HOME"
-chmod u+x $IOTDB_AINODE_HOME/conf/ainode-env.sh
-ain_interpreter_dir=$(sed -n 's/^ain_interpreter_dir=\(.*\)$/\1/p' $IOTDB_AINODE_HOME/conf/ainode-env.sh)
-bash $IOTDB_AINODE_HOME/conf/ainode-env.sh $*
-if [ $? -eq 1 ]; then
- echo "Environment check failed. Exiting..."
- exit 1
-fi
-
# fetch parameters with names
while getopts "i:rn" opt; do
case $opt in
- i) p_ain_interpreter_dir="$OPTARG"
- ;;
- r) p_ain_force_reinstall="$OPTARG"
- ;;
n)
;;
\?) echo "Invalid option -$OPTARG" >&2
@@ -49,31 +37,10 @@ while getopts "i:rn" opt; do
esac
done
-# If ain_interpreter_dir in parameters is empty:
-if [ -z "$p_ain_interpreter_dir" ]; then
- # If ain_interpreter_dir in ../conf/ainode-env.sh is empty, set default value to ../venv/bin/python3
- if [ -z "$ain_interpreter_dir" ]; then
- ain_interpreter_dir="$IOTDB_AINODE_HOME/venv/bin/python3"
- fi
-else
- # If ain_interpreter_dir in parameters is not empty, set ain_interpreter_dir to the value in parameters
- ain_interpreter_dir="$p_ain_interpreter_dir"
-fi
-
-# check if ain_interpreter_dir is an absolute path
-if [[ "$ain_interpreter_dir" != /* ]]; then
- ain_interpreter_dir="$IOTDB_AINODE_HOME/$ain_interpreter_dir"
-fi
-
-echo Script got parameter: ain_interpreter_dir: $ain_interpreter_dir
-
-# Change the working directory to the parent directory
-cd "$IOTDB_AINODE_HOME"
-
-ain_ainode_dir=$(dirname "$ain_interpreter_dir")/ainode
+ain_ainode_executable="$IOTDB_AINODE_HOME/lib/ainode"
-echo Script got ainode dir: ain_ainode_dir: $ain_ainode_dir
+echo Script got ainode executable: "$ain_ainode_executable"
echo Starting AINode...
-$ain_ainode_dir start
+"$ain_ainode_executable" start
diff --git a/scripts/sbin/windows/start-ainode.bat b/scripts/sbin/windows/start-ainode.bat
index 0a83865fd23c0..1d9a4306bafb7 100644
--- a/scripts/sbin/windows/start-ainode.bat
+++ b/scripts/sbin/windows/start-ainode.bat
@@ -26,54 +26,12 @@ echo ```````````````````````````
pushd %~dp0..\..
if NOT DEFINED IOTDB_AINODE_HOME set IOTDB_AINODE_HOME=%cd%
-call %IOTDB_AINODE_HOME%\\conf\\windows\\ainode-env.bat %*
-if %errorlevel% neq 0 (
- echo Environment check failed. Exiting...
- exit /b 1
-)
+set ain_ainode_executable=%IOTDB_AINODE_HOME%\lib\ainode
-for /f "tokens=2 delims==" %%a in ('findstr /i /c:"^ain_interpreter_dir" "%IOTDB_AINODE_HOME%\\conf\\windows\\ainode-env.bat"') do (
- set _ain_interpreter_dir=%%a
- goto :done
-)
-
-:initial
-if "%1"=="" goto done
-set aux=%1
-if "%aux:~0,1%"=="-" (
- set nome=%aux:~1,250%
-) else (
- set "%nome%=%1"
- set nome=
-)
-shift
-goto initial
-
-:done
-if "%i%"=="" (
- if "%_ain_interpreter_dir%"=="" (
- set _ain_interpreter_dir=%IOTDB_AINODE_HOME%\\venv\\Scripts\\python.exe
- )
-) else (
- set _ain_interpreter_dir=%i%
-)
-
-echo Script got parameter: ain_interpreter_dir: %_ain_interpreter_dir%
-
-cd %IOTDB_AINODE_HOME%
-
-for %%i in ("%_ain_interpreter_dir%") do set "parent=%%~dpi"
-
-set ain_ainode_dir=%parent%\ainode.exe
-
-set ain_ainode_dir_new=%parent%\Scripts\\ainode.exe
+echo Script got ainode executable: %ain_ainode_executable%
echo Starting AINode...
-%ain_ainode_dir% start
-if %errorlevel% neq 0 (
- echo ain_ainode_dir_new is %ain_ainode_dir_new%
- %ain_ainode_dir_new% start
-)
+%ain_ainode_executable% start
pause
\ No newline at end of file