
Commit b0dfb40

fix patch function
1 parent 4a3619d commit b0dfb40

2 files changed: +44 / -12 lines


qbittorrent-nox-static.sh

Lines changed: 22 additions & 6 deletions
@@ -1743,17 +1743,33 @@ _apply_patches() {
             has_content=true
         fi
 
-        # Step 2: If URL file exists, download and append/merge to patch
+        # Step 2: If URL file exists, download and append/merge to patch (only if not already present)
         if [[ -f "${patch_dir}/url" && -s "${patch_dir}/url" ]]; then
             local patch_url tmp_patch="${patch_dir}/url_download.tmp"
             patch_url="$(< "${patch_dir}/url")"
 
-            if _curl "${patch_url}" -o "${tmp_patch}"; then
-                [[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
-                cat "${tmp_patch}" >> "${temp_patch}"
-                has_content=true
+            # Check if this URL was already merged by looking for the comment marker
+            if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+                # URL already processed, skip download
+                [[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+                    cat "${patch_dir}/patch" > "${temp_patch}"
+                    has_content=true
+                }
+            elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Downloaded from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+                # URL already processed, skip download
+                [[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+                    cat "${patch_dir}/patch" > "${temp_patch}"
+                    has_content=true
+                }
             else
-                printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+                # Download and merge URL content
+                if _curl "${patch_url}" -o "${tmp_patch}"; then
+                    [[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
+                    cat "${tmp_patch}" >> "${temp_patch}"
+                    has_content=true
+                else
+                    printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+                fi
             fi
             rm -f "${tmp_patch}"
         fi
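The change above makes the URL merge step idempotent: before downloading, the function greps the stored patch file for the "# Merged from URL:" or "# Downloaded from URL:" marker that a previous run would have written, and reuses the existing content instead of appending the same patch again. The snippet below is a minimal standalone sketch of that marker-check pattern, not the project's code: merge_url_patch is a hypothetical helper, plain curl stands in for the script's _curl wrapper, and the patch_dir/url and patch_dir/patch layout is only assumed to mirror the files used above.

#!/usr/bin/env bash
# Sketch of a marker-based idempotency check for merging a downloaded patch.
# merge_url_patch and the directory layout are illustrative assumptions;
# plain curl stands in for the project's _curl wrapper.

merge_url_patch() {
    local patch_dir="$1"
    local patch_file="${patch_dir}/patch" url_file="${patch_dir}/url"
    local patch_url tmp_patch="${patch_dir}/url_download.tmp"

    # Nothing to do without a non-empty url file.
    [[ -f "${url_file}" && -s "${url_file}" ]] || return 0
    patch_url="$(< "${url_file}")"

    # Skip the download when a previous run already recorded this URL's marker.
    if [[ -f "${patch_file}" ]] && grep -Fq "URL: ${patch_url}" "${patch_file}" 2> /dev/null; then
        return 0
    fi

    # Otherwise download once, write the marker, then append the content.
    if curl -sL "${patch_url}" -o "${tmp_patch}"; then
        printf '\n# Downloaded from URL: %s\n' "${patch_url}" >> "${patch_file}"
        cat "${tmp_patch}" >> "${patch_file}"
    else
        printf 'Failed to download from URL: %s\n' "${patch_url}" >&2
    fi
    rm -f "${tmp_patch}"
}

# Example: running this twice appends the downloaded content only once.
# merge_url_patch "patches/qbittorrent/5.0.0"

Because the marker line doubles as a record of which URL produced the appended content, re-running the build no longer duplicates the patch, which appears to be the behaviour this commit fixes.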

qbt-nox-static.bash

Lines changed: 22 additions & 6 deletions
@@ -1743,17 +1743,33 @@ _apply_patches() {
             has_content=true
         fi
 
-        # Step 2: If URL file exists, download and append/merge to patch
+        # Step 2: If URL file exists, download and append/merge to patch (only if not already present)
        if [[ -f "${patch_dir}/url" && -s "${patch_dir}/url" ]]; then
             local patch_url tmp_patch="${patch_dir}/url_download.tmp"
             patch_url="$(< "${patch_dir}/url")"
 
-            if _curl "${patch_url}" -o "${tmp_patch}"; then
-                [[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
-                cat "${tmp_patch}" >> "${temp_patch}"
-                has_content=true
+            # Check if this URL was already merged by looking for the comment marker
+            if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+                # URL already processed, skip download
+                [[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+                    cat "${patch_dir}/patch" > "${temp_patch}"
+                    has_content=true
+                }
+            elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Downloaded from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+                # URL already processed, skip download
+                [[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+                    cat "${patch_dir}/patch" > "${temp_patch}"
+                    has_content=true
+                }
             else
-                printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+                # Download and merge URL content
+                if _curl "${patch_url}" -o "${tmp_patch}"; then
+                    [[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
+                    cat "${tmp_patch}" >> "${temp_patch}"
+                    has_content=true
+                else
+                    printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+                fi
             fi
             rm -f "${tmp_patch}"
         fi
