
Commit 272e8b4

fix patch function
1 parent: e478908

File tree: 2 files changed, 98 additions and 22 deletions


qbittorrent-nox-static.sh

Lines changed: 49 additions & 11 deletions
@@ -1743,17 +1743,33 @@ _apply_patches() {
 			has_content=true
 		fi
 
-		# Step 2: If URL file exists, download and append/merge to patch
+		# Step 2: If URL file exists, download and append/merge to patch (only if not already present)
 		if [[ -f "${patch_dir}/url" && -s "${patch_dir}/url" ]]; then
 			local patch_url tmp_patch="${patch_dir}/url_download.tmp"
 			patch_url="$(< "${patch_dir}/url")"
 
-			if _curl "${patch_url}" -o "${tmp_patch}"; then
-				[[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
-				cat "${tmp_patch}" >> "${temp_patch}"
-				has_content=true
+			# Check if this URL was already merged by looking for the comment marker
+			if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+				# URL already processed, skip download
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					cat "${patch_dir}/patch" > "${temp_patch}"
+					has_content=true
+				}
+			elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Downloaded from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+				# URL already processed, skip download
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					cat "${patch_dir}/patch" > "${temp_patch}"
+					has_content=true
+				}
 			else
-				printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+				# Download and merge URL content
+				if _curl "${patch_url}" -o "${tmp_patch}"; then
+					[[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
+					cat "${tmp_patch}" >> "${temp_patch}"
+					has_content=true
+				else
+					printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+				fi
 			fi
 			rm -f "${tmp_patch}"
 		fi
@@ -1765,13 +1781,35 @@ _apply_patches() {
 		done
 
 		for patch_src in "${additional_patches[@]}"; do
-			if [[ ${has_content} == true ]]; then
-				printf '\n\n# Merged from: %s\n' "${patch_src##*/}" >> "${temp_patch}"
+			local patch_filename="${patch_src##*/}"
+
+			# Check if this patch file was already merged by looking for the comment marker
+			if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from: ${patch_filename}" "${patch_dir}/patch" 2> /dev/null; then
+				# Patch already processed, skip merge
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					[[ ${has_content} == false ]] && {
+						cat "${patch_dir}/patch" > "${temp_patch}"
+						has_content=true
+					}
+				}
+			elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# From: ${patch_filename}" "${patch_dir}/patch" 2> /dev/null; then
+				# Patch already processed, skip merge
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					[[ ${has_content} == false ]] && {
+						cat "${patch_dir}/patch" > "${temp_patch}"
+						has_content=true
+					}
+				}
 			else
-				printf '# From: %s\n' "${patch_src##*/}" > "${temp_patch}"
-				has_content=true
+				# Merge the patch file
+				if [[ ${has_content} == true ]]; then
+					printf '\n\n# Merged from: %s\n' "${patch_filename}" >> "${temp_patch}"
+				else
+					printf '# From: %s\n' "${patch_filename}" > "${temp_patch}"
+					has_content=true
+				fi
+				cat "${patch_src}" >> "${temp_patch}"
 			fi
-			cat "${patch_src}" >> "${temp_patch}"
 		done
 
 		# Final validation and atomic move
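
Both hunks apply the same guard: before merging, the function greps the existing ${patch_dir}/patch for the comment marker a previous run would have written ("# Merged from URL: ..." or "# Downloaded from URL: ..."), and if it is found, it reuses that file instead of downloading and appending the same content again. Below is a minimal standalone sketch of that idempotency pattern; the merge_url_once name, the plain curl call standing in for the script's _curl helper, and the temp-file paths are illustrative assumptions, not part of the commit.

#!/usr/bin/env bash
# Minimal sketch of the marker-based idempotency guard (names and paths are illustrative).
merge_url_once() {
	local patch_dir="$1" patch_url="$2"
	local temp_patch="${patch_dir}/patch.tmp" tmp_download="${patch_dir}/url_download.tmp"

	# If a previous run already wrote the marker, reuse the merged patch as-is.
	if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Downloaded from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
		cat "${patch_dir}/patch" > "${temp_patch}"
	else
		# First run: write the marker line, then append the downloaded content.
		if curl -sfL "${patch_url}" -o "${tmp_download}"; then
			printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
			cat "${tmp_download}" >> "${temp_patch}"
		fi
	fi
	rm -f "${tmp_download}"
}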

qbt-nox-static.bash

Lines changed: 49 additions & 11 deletions
@@ -1743,17 +1743,33 @@ _apply_patches() {
 			has_content=true
 		fi
 
-		# Step 2: If URL file exists, download and append/merge to patch
+		# Step 2: If URL file exists, download and append/merge to patch (only if not already present)
 		if [[ -f "${patch_dir}/url" && -s "${patch_dir}/url" ]]; then
 			local patch_url tmp_patch="${patch_dir}/url_download.tmp"
 			patch_url="$(< "${patch_dir}/url")"
 
-			if _curl "${patch_url}" -o "${tmp_patch}"; then
-				[[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
-				cat "${tmp_patch}" >> "${temp_patch}"
-				has_content=true
+			# Check if this URL was already merged by looking for the comment marker
+			if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+				# URL already processed, skip download
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					cat "${patch_dir}/patch" > "${temp_patch}"
+					has_content=true
+				}
+			elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Downloaded from URL: ${patch_url}" "${patch_dir}/patch" 2> /dev/null; then
+				# URL already processed, skip download
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					cat "${patch_dir}/patch" > "${temp_patch}"
+					has_content=true
+				}
 			else
-				printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+				# Download and merge URL content
+				if _curl "${patch_url}" -o "${tmp_patch}"; then
+					[[ ${has_content} == true ]] && printf '\n\n# Merged from URL: %s\n' "${patch_url}" >> "${temp_patch}" || printf '# Downloaded from URL: %s\n' "${patch_url}" > "${temp_patch}"
+					cat "${tmp_patch}" >> "${temp_patch}"
+					has_content=true
+				else
+					printf '%b\n' " ${unicode_yellow_circle} Failed to download from URL: ${patch_url}"
+				fi
 			fi
 			rm -f "${tmp_patch}"
 		fi
@@ -1765,13 +1781,35 @@ _apply_patches() {
 		done
 
 		for patch_src in "${additional_patches[@]}"; do
-			if [[ ${has_content} == true ]]; then
-				printf '\n\n# Merged from: %s\n' "${patch_src##*/}" >> "${temp_patch}"
+			local patch_filename="${patch_src##*/}"
+
+			# Check if this patch file was already merged by looking for the comment marker
+			if [[ -f "${patch_dir}/patch" ]] && grep -Fq "# Merged from: ${patch_filename}" "${patch_dir}/patch" 2> /dev/null; then
+				# Patch already processed, skip merge
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					[[ ${has_content} == false ]] && {
+						cat "${patch_dir}/patch" > "${temp_patch}"
+						has_content=true
+					}
+				}
+			elif [[ -f "${patch_dir}/patch" ]] && grep -Fq "# From: ${patch_filename}" "${patch_dir}/patch" 2> /dev/null; then
+				# Patch already processed, skip merge
+				[[ -f "${patch_dir}/patch" && -s "${patch_dir}/patch" ]] && {
+					[[ ${has_content} == false ]] && {
+						cat "${patch_dir}/patch" > "${temp_patch}"
+						has_content=true
+					}
+				}
 			else
-				printf '# From: %s\n' "${patch_src##*/}" > "${temp_patch}"
-				has_content=true
+				# Merge the patch file
+				if [[ ${has_content} == true ]]; then
+					printf '\n\n# Merged from: %s\n' "${patch_filename}" >> "${temp_patch}"
+				else
+					printf '# From: %s\n' "${patch_filename}" > "${temp_patch}"
+					has_content=true
+				fi
+				cat "${patch_src}" >> "${temp_patch}"
 			fi
-			cat "${patch_src}" >> "${temp_patch}"
 		done
 
 		# Final validation and atomic move
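
The additional_patches loop gets the equivalent guard, keyed on the "# Merged from: <file>" and "# From: <file>" markers, so a patch file is only appended once no matter how many times the function runs. A simplified sketch of that dedup follows; merge_local_patches is a hypothetical helper name, and the example paths are made up for illustration.

#!/usr/bin/env bash
# Simplified sketch of the additional-patches guard (merge_local_patches and the paths below are illustrative).
merge_local_patches() {
	local patch_dir="$1" temp_patch="$2"
	shift 2
	local patch_src patch_filename has_content=false
	for patch_src in "$@"; do
		patch_filename="${patch_src##*/}"
		# Skip a file whose marker already exists in the previously merged patch.
		if [[ -f "${patch_dir}/patch" ]] && grep -Fq -e "# Merged from: ${patch_filename}" -e "# From: ${patch_filename}" "${patch_dir}/patch" 2> /dev/null; then
			continue
		fi
		# First occurrence: write the marker line, then append the patch content.
		if [[ ${has_content} == true ]]; then
			printf '\n\n# Merged from: %s\n' "${patch_filename}" >> "${temp_patch}"
		else
			printf '# From: %s\n' "${patch_filename}" > "${temp_patch}"
			has_content=true
		fi
		cat "${patch_src}" >> "${temp_patch}"
	done
}

# Example: once the temp file has been moved to "${patch_dir}/patch" (the script's final atomic move),
# a second invocation finds every marker and appends nothing new.
# merge_local_patches "patches/qbittorrent/5.0.0" "patches/qbittorrent/5.0.0/patch.tmp" fix-a.patch fix-b.patch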
