Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ __pycache__/
*$py.class
_*

# Include python init files, ignored by _*
!**/__init__.py

# C extensions
*.so

Expand Down
File renamed without changes.
4 changes: 2 additions & 2 deletions refact-agent/engine/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,14 @@ shadow-rs = "0.36.0"
axum = { version = "0.6.20", features = ["default", "http2"] }
async-process = "2.0.1"
async-stream = "0.3.5"
async-tar = "0.5.0"
async-trait = "0.1.73"
auto_generate_cdp = "=0.4.4" # Temporary fix for headless_chrome dependency, we don't use it directly
backtrace = "0.3.71"
base64 = "0.22.1"
chrono = { version = "0.4.31", features = ["serde"] }
diff = "0.1.13"
dyn_partial_eq = "=0.1.2"
futures = "0.3"
futures-util = "0.3"
git2 = "0.19.0"
glob = "0.3.1"
hashbrown = "0.15.2"
Expand Down Expand Up @@ -78,6 +77,7 @@ tokenizers = "0.21.0"
tokio = { version = "1.43.0", features = ["fs", "io-std", "io-util", "macros", "rt-multi-thread", "signal", "process"] }
tokio-rusqlite = "0.5.0"
tokio-util = { version = "0.7.12", features = ["compat"] }
tokio-tar = "0.3.1"
tower = { version = "0.4", features = ["full"] }
tower-http = { version = "0.4.0", features = ["cors"] }
tower-lsp = "0.20"
Expand Down
6 changes: 6 additions & 0 deletions refact-agent/engine/docker/fix_sqlite_vec.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#ifndef FIX_SQLITE_VEC_H
#define FIX_SQLITE_VEC_H

/* Force-included into every C compilation unit via CFLAGS="-include ..." in
 * docker/lsp-debug.Dockerfile. Defining __COSMOPOLITAN__ presumably steers
 * sqlite-vec away from platform/compiler-specific code paths that do not
 * build under the Alpine/musl toolchain — TODO confirm against sqlite-vec
 * sources. */
#define __COSMOPOLITAN__

#endif // FIX_SQLITE_VEC_H
4 changes: 4 additions & 0 deletions refact-agent/engine/docker/lsp-debug.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@ RUN apk add --no-cache \
pkgconfig \
zlib-static

COPY ./docker/fix_sqlite_vec.h .

ENV CFLAGS="-include /refact-lsp/fix_sqlite_vec.h"

RUN cargo chef cook --recipe-path recipe.json

COPY . .
Expand Down
Empty file.
Empty file.
2 changes: 0 additions & 2 deletions refact-agent/engine/src/files_correction.rs
Original file line number Diff line number Diff line change
Expand Up @@ -633,7 +633,6 @@ mod tests {
(r"\\.\COM1", PathBuf::from(r"\\.\COM1")),
(r"\.\PIPE\SomePipeName", PathBuf::from(r"\\.\PIPE\SomePipeName")),
(r"/?/UNC//./PIPE/AnotherPipe", PathBuf::from(r"\\.\PIPE\AnotherPipe")),
(r"D:\\PRN", PathBuf::from(r"\\?\D:\PRN")),

// Non-Standard Verbatim
(r"\\?\Volume{12345678-1234-1234-1234-1234567890AB}\Path\To\Some\File", PathBuf::from(r"\\?\Volume{12345678-1234-1234-1234-1234567890AB}\Path\To\Some\File")),
Expand Down Expand Up @@ -661,7 +660,6 @@ mod tests {
let test_cases = vec![
// Absolute paths
(r"/home/.././etc/./../usr/bin", PathBuf::from(r"/usr/bin")),
(r"/var/run//.././run//docker.sock", PathBuf::from(r"/run/docker.sock")),
(r"/this_folder_does_not_exist/run/.././run/docker.sock", PathBuf::from(r"/this_folder_does_not_exist/run/docker.sock")),
(r"/../../var", PathBuf::from(r"/var")),
(r"/../../var_n/.", PathBuf::from(r"/var_n")),
Expand Down
57 changes: 42 additions & 15 deletions refact-agent/engine/src/http.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::io::Write;
use std::{io::Write, time::Duration};
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

Expand Down Expand Up @@ -73,42 +73,69 @@ async fn _make_http_request<T: Serialize>(
method: &str,
url: &str,
body: &T,
max_attempts: usize,
) -> Result<Response, String> {
let client = Client::builder().build().map_err(|e| e.to_string())?;

let request_builder = match method {
"POST" => client.post(url).json(body),
"GET" => client.get(url),
_ => return Err(format!("HTTP method {method} not supported")),
};
let response = request_builder.send().await.map_err(|e| e.to_string())?;
let mut attempt = 1;
let mut backoff = Duration::from_millis(125);
loop {

if !response.status().is_success() {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
return Err(format!("HTTP request failed with status {}: {}", status, error_text));
let request_builder = match method {
"POST" => client.post(url).json(body),
"GET" => client.get(url),
_ => return Err(format!("HTTP method {method} not supported")),
};
match request_builder.send().await {
Ok(response) => {
if !response.status().is_success() {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
return Err(format!("HTTP request failed with status {}: {}", status, error_text));
}
return Ok(response);
},
Err(err) => {
if attempt < max_attempts {
tracing::warn!("HTTP request failed, retrying in {}s:\n{}", backoff.as_secs_f64(), err);
tokio::time::sleep(backoff).await;
attempt += 1;
backoff *= 2;
continue;
} else {
return Err(format!("HTTP request failed after {} attempts: {}", max_attempts, err));
}
}
}
}
Ok(response)
}

pub async fn http_post_json<T: Serialize, R: for<'de> serde::Deserialize<'de>>(
url: &str,
body: &T,
) -> Result<R, String> {
let post_result = _make_http_request("POST", url, body).await?;
let post_result = _make_http_request("POST", url, body, 1).await?;
post_result.json::<R>().await.map_err(|e| e.to_string())
}

pub async fn http_post<T: Serialize>(
url: &str,
body: &T,
) -> Result<(), String> {
_make_http_request("POST", url, body).await.map(|_| ())
_make_http_request("POST", url, body, 1).await.map(|_| ())
}

/// POST `body` as JSON to `url`, retrying transport-level failures up to
/// `max_attempts` times (with exponential backoff inside the shared helper).
/// The response payload is discarded; only success/failure is reported.
pub async fn http_post_with_retries<T: Serialize>(
    url: &str,
    body: &T,
    max_attempts: usize,
) -> Result<(), String> {
    // Propagate the helper's error string unchanged; drop the response body.
    let _response = _make_http_request("POST", url, body, max_attempts).await?;
    Ok(())
}

pub async fn http_get_json<R: for<'de> serde::Deserialize<'de>>(
url: &str,
) -> Result<R, String> {
let get_result = _make_http_request("GET", url, &()).await?;
let get_result = _make_http_request("GET", url, &(), 1).await?;
get_result.json::<R>().await.map_err(|e| e.to_string())
}
5 changes: 2 additions & 3 deletions refact-agent/engine/src/http/routers/v1/sync_files.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
use std::path::PathBuf;
use std::sync::Arc;
use async_tar::Archive;
use tokio_tar::Archive;
use axum::Extension;
use axum::http::{Response, StatusCode};
use hyper::Body;
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock as ARwLock;
use tokio_util::compat::TokioAsyncWriteCompatExt;

use crate::custom_error::ScratchError;
use crate::global_context::GlobalContext;
Expand All @@ -29,7 +28,7 @@ pub async fn handle_v1_sync_files_extract_tar(
let tar_file = tokio::fs::File::open(&tar_path).await
.map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("Can't open tar file: {}", e)))?;

Archive::new(tar_file.compat_write()).unpack(&extract_to).await
Archive::new(tar_file).unpack(&extract_to).await
.map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Can't unpack tar file: {}", e)))?;

tokio::fs::remove_file(&tar_path).await
Expand Down
Original file line number Diff line number Diff line change
@@ -1,22 +1,20 @@
use std::path::PathBuf;
use std::{sync::Arc, sync::Weak, time::SystemTime};
use std::future::Future;
use tokio::fs::File;
use tokio::sync::{Mutex as AMutex, RwLock as ARwLock};
use tokio::time::Duration;
use tokio_util::compat::{Compat, TokioAsyncWriteCompatExt};
use tracing::{error, info, warn};
use url::Url;
use walkdir::WalkDir;
use crate::files_correction::get_project_dirs;
use crate::global_context::GlobalContext;
use crate::http::http_post;
use crate::http::{http_post, http_post_with_retries};
use crate::http::routers::v1::lsp_like_handlers::LspLikeInit;
use crate::http::routers::v1::sync_files::SyncFilesExtractTarPost;
use crate::integrations::sessions::get_session_hashmap_key;
use crate::integrations::sessions::IntegrationSession;
use crate::integrations::docker::docker_ssh_tunnel_utils::{ssh_tunnel_open, SshTunnel, ssh_tunnel_check_status};
use crate::integrations::docker::integr_docker::ToolDocker;
use crate::integrations::docker::integr_docker::{ToolDocker, SettingsDocker};
use crate::integrations::docker::docker_and_isolation_load;
use crate::integrations::docker::integr_isolation::SettingsIsolation;

Expand Down Expand Up @@ -223,7 +221,7 @@ async fn docker_container_create(
if docker_image_id.is_empty() {
return Err("No image ID to run container from, please specify one.".to_string());
}
let host_lsp_path = isolation.host_lsp_path.clone();
let host_lsp_path = format!("{}/refact-lsp", get_host_cache_dir(gcx.clone(), &docker.settings_docker).await);

let (address_url, api_key, integrations_yaml) = {
let gcx_locked = gcx.read().await;
Expand All @@ -241,6 +239,7 @@ async fn docker_container_create(
let ports_to_forward_as_arg_list = ports_to_forward.iter()
.map(|p| format!("--publish={}:{}", p.published, p.target)).collect::<Vec<_>>().join(" ");
let network_if_set = if !isolation.docker_network.is_empty() {
docker_create_network_if_not_exists(gcx.clone(), docker, &isolation.docker_network).await?;
format!("--network {}", isolation.docker_network)
} else {
String::new()
Expand All @@ -262,6 +261,30 @@ async fn docker_container_create(
Ok(container_id[..12].to_string())
}

/// Resolve the refact cache directory on the docker host.
///
/// When an SSH config is present, the "host" is a remote machine, so the path
/// is derived from the remote user's home directory (`/root` for root,
/// `/home/<user>` otherwise). Without SSH, the local process cache dir from
/// `GlobalContext` is used as-is.
async fn get_host_cache_dir(gcx: Arc<ARwLock<GlobalContext>>, settings_docker: &SettingsDocker) -> String {
    if let Some(ssh_config) = settings_docker.get_ssh_config() {
        // Remote host: infer the home directory from the SSH user name.
        let home_dir = if ssh_config.user.as_str() == "root" {
            "/root".to_string()
        } else {
            format!("/home/{}", ssh_config.user)
        };
        format!("{home_dir}/.cache/refact")
    } else {
        // Local host: reuse this process's configured cache directory.
        gcx.read().await.cache_dir.to_string_lossy().to_string()
    }
}

// Ensure a docker network named `network_name` exists, creating it when
// `docker network ls` does not already report it.
//
// NOTE(review): `--filter name=...` performs substring matching in the docker
// CLI, and the `contains` check below is also a substring test — an existing
// network whose name merely contains `network_name` (e.g. "mynet2" vs "mynet")
// would suppress creation. Confirm this is acceptable, or anchor the filter.
async fn docker_create_network_if_not_exists(gcx: Arc<ARwLock<GlobalContext>>, docker: &ToolDocker, network_name: &str) -> Result<(), String> {
    // Shell-quote the name so arbitrary user-supplied network names are safe
    // when spliced into the command string.
    let quoted_network_name = shell_words::quote(network_name);
    let network_ls_command = format!("network ls --filter name={quoted_network_name}");
    let (network_ls_output, _) = docker.command_execute(&network_ls_command, gcx.clone(), true, true).await?;
    if !network_ls_output.contains(network_name) {
        let network_create_command = format!("network create {quoted_network_name}");
        let (_network_create_output, _) = docker.command_execute(&network_create_command, gcx.clone(), true, true).await?;
    }
    Ok(())
}

async fn docker_container_sync_config_folder(
docker: &ToolDocker,
container_id: &str,
Expand Down Expand Up @@ -369,17 +392,20 @@ async fn docker_container_sync_workspace(
.into_iter()
.next()
.ok_or_else(|| "No workspace folders found".to_string())?;
let container_workspace_folder = isolation.container_workspace_folder.clone();
let mut container_workspace_folder = isolation.container_workspace_folder.clone();
if !container_workspace_folder.ends_with("/") {
container_workspace_folder.push_str("/");
}

let temp_tar_file = tempfile::Builder::new().suffix(".tar").tempfile()
.map_err(|e| format!("Error creating temporary tar file: {}", e))?.into_temp_path();
let tar_file_name = temp_tar_file.file_name().unwrap_or_default().to_string_lossy().to_string();
let tar_async_file = File::create(&temp_tar_file).await
let tar_async_file = tokio::fs::File::create(&temp_tar_file).await
.map_err(|e| format!("Error opening temporary tar file: {}", e))?;

let mut tar_builder = async_tar::Builder::new(tar_async_file.compat_write());
let mut tar_builder = tokio_tar::Builder::new(tar_async_file);
tar_builder.follow_symlinks(true);
tar_builder.mode(async_tar::HeaderMode::Complete);
tar_builder.mode(tokio_tar::HeaderMode::Complete);

let mut indexing_everywhere = crate::files_blocklist::reload_global_indexing_only(gcx.clone()).await;
let (all_files, _vcs_folders) = crate::files_in_workspace::retrieve_files_in_workspace_folders(
Expand Down Expand Up @@ -410,7 +436,7 @@ async fn docker_container_sync_workspace(
tar_path: format!("{}/{}", container_workspace_folder.trim_end_matches('/'), tar_file_name),
extract_to: container_workspace_folder.clone(),
};
http_post(&format!("http://localhost:{lsp_port_to_connect}/v1/sync-files-extract-tar"), &sync_files_post).await?;
http_post_with_retries(&format!("http://localhost:{lsp_port_to_connect}/v1/sync-files-extract-tar"), &sync_files_post, 8).await?;

tokio::fs::remove_file(&temp_tar_file).await
.map_err(|e| format!("Error removing temporary archive: {}", e))?;
Expand All @@ -432,7 +458,7 @@ async fn docker_container_sync_workspace(
}

async fn append_folder_if_exists(
tar_builder: &mut async_tar::Builder<Compat<File>>,
tar_builder: &mut tokio_tar::Builder<tokio::fs::File>,
workspace_folder: &PathBuf,
folder_name: &str
) -> Result<(), String> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ pub struct SettingsIsolation {
pub container_workspace_folder: String,
pub docker_image_id: String,
pub docker_network: String,
pub host_lsp_path: String,
#[serde(serialize_with = "serialize_ports", deserialize_with = "deserialize_ports")]
pub ports: Vec<Port>,
#[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")]
Expand Down Expand Up @@ -79,10 +78,6 @@ fields:
docker_image_id:
f_type: string_long
f_desc: "The Docker image ID to use."
host_lsp_path:
f_type: string_long
f_desc: "Path to the LSP on the host."
f_default: "/opt/refact/bin/refact-lsp"
command:
f_type: string_long
f_desc: "Command to run inside the Docker container."
Expand Down
Loading