Commit 8028be1

Merge branch 'master' into get_or_fetch
2 parents c966f50 + 183c405 commit 8028be1

10 files changed (+203, -20 lines)

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-* @Pycord-Development/maintainers
+* @Pycord-Development/library-maintainers @Pycord-Development/library-contributors
 
 /discord/ @Pycord-Development/maintain-discord-api
 
.github/SECURITY.md

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 | Version | Supported |
 | ------- | ------------------ |
 | 2.x | :white_check_mark: |
-| <2.0.0 | :x: |
+| <2.6.1 | :x: |
 
 ## Reporting a Vulnerability
 

Lines changed: 51 additions & 0 deletions

@@ -0,0 +1,51 @@
+name: Docs JSON Export
+
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  export-docs-json:
+    name: Export docs.json
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        id: checkout
+        uses: actions/checkout@v5
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        id: setup-python
+        with:
+          python-version: "3.13"
+          cache: "pip"
+          cache-dependency-path: "requirements/docs.txt"
+          check-latest: true
+      - name: Install dependencies
+        id: install-deps
+        run: |
+          python -m pip install -U pip
+          pip install ".[docs]"
+          pip install beautifulsoup4
+      - name: Build Sphinx HTML docs
+        id: build-sphinx
+        run: sphinx-build -b html docs docs/_build/html
+      - name: Export docs.json
+        id: generate-json
+        run: python scripts/docs_json_exporter.py
+      - name: Upload docs.json as artifact
+        uses: actions/[email protected]
+        id: artifact-upload
+        with:
+          name: Pycord Docs JSON
+          path: docs.json
+          retention-days: 1
+      - name: Show docs.json summary
+        run: |
+          head -n 40 docs.json || tail -n 40 docs.json
+      - name: Output artifact ID
+        run: |
+          echo "artifact-id=${{ steps.artifact-upload.outputs.artifact-id }}" >> $GITHUB_OUTPUT
+          echo "artifact-url=${{ steps.artifact-upload.outputs.artifact-url }}" >> $GITHUB_OUTPUT
+          echo "::notice::Artifact uploaded: ${{ steps.artifact-upload.outputs.artifact-url }}"

CHANGELOG.md

Lines changed: 3 additions & 0 deletions

@@ -16,6 +16,9 @@ These changes are available on the `master` branch, but have not yet been releas
 
 ### Fixed
 
+- Manage silence for new SSRC with existing user_id.
+  ([#2808](https://github.com/Pycord-Development/pycord/pull/2808))
+
 ### Removed
 
 ## [2.7.0rc1] - 2025-08-30

README.rst

Lines changed: 3 additions & 2 deletions

@@ -17,11 +17,12 @@ Pycord is a modern, easy to use, feature-rich, and async ready API wrapper for D
 .. image:: https://img.shields.io/github/v/release/Pycord-Development/pycord?include_prereleases&label=Latest%20Release&logo=github&sort=semver&style=for-the-badge&logoColor=white
    :target: https://github.com/Pycord-Development/pycord/releases
    :alt: Latest release
-
 .. image:: https://img.shields.io/discord/881207955029110855?label=discord&style=for-the-badge&logo=discord&color=5865F2&logoColor=white
    :target: https://pycord.dev/discord
    :alt: Discord server invite
-
+.. image:: https://img.shields.io/github/sponsors/Pycord-Development?style=for-the-badge
+   :target: https://github.com/sponsors/Pycord-Development
+   :alt: GitHub Sponsors
 .. image:: https://badges.crowdin.net/badge/dark/crowdin-on-light.png
    :target: https://translations.pycord.dev/documentation/?utm_source=badge&utm_medium=referral&utm_campaign=badge-add-on
    :alt: Crowdin | Agile localization for tech companies

discord/commands/core.py

Lines changed: 1 addition & 0 deletions

@@ -806,6 +806,7 @@ def _parse_options(self, params, *, check_params: bool = True) -> list[Option]:
             if option == inspect.Parameter.empty:
                 option = str
 
+            option = Option._strip_none_type(option)
             if self._is_typing_literal(option):
                 literal_values = get_args(option)
                 if not all(isinstance(v, (str, int, float)) for v in literal_values):
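As a hedged illustration of what the new Option._strip_none_type call means at the command-definition level (the slash command surface shown here, discord.Bot, @bot.slash_command, and ctx.respond, is Pycord's public API; the Optional handling and the command itself are illustrative, inferred from this diff):

from typing import Optional

import discord

bot = discord.Bot()


@bot.slash_command()
async def greet(ctx: discord.ApplicationContext, name: Optional[str] = None):
    # The Optional[str] annotation is run through Option._strip_none_type before
    # the Literal check, so it is handled as a plain str option.
    await ctx.respond(f"Hello, {name or 'stranger'}!")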

discord/commands/options.py

Lines changed: 3 additions & 3 deletions

@@ -386,9 +386,9 @@ def _strip_none_type(input_type):
                 raise TypeError("Option type cannot be only NoneType")
             if len(filtered) == 1:
                 return filtered[0]
-            if all(getattr(t, "__origin__", None) is Literal for t in filtered):
-                return Union[filtered]
-            return Union[filtered]
+
+            return filtered
+
         return input_type
 
     def to_dict(self) -> dict:
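The new behaviour of _strip_none_type can be restated as a minimal standalone sketch (illustrative only, not Pycord's method; it relies on the standard typing helpers): NoneType is dropped from a Union/Optional annotation, a single surviving type is returned bare, and several surviving types now come back as a plain list instead of being re-wrapped in Union.

from typing import Optional, Union, get_args, get_origin


def strip_none_type(annotation):
    # Drop NoneType from Optional/Union annotations; pass anything else through.
    if get_origin(annotation) is Union:
        filtered = [a for a in get_args(annotation) if a is not type(None)]
        if not filtered:
            raise TypeError("Option type cannot be only NoneType")
        if len(filtered) == 1:
            return filtered[0]  # Optional[str] -> str
        return filtered  # Union[str, int, None] -> [str, int], matching this diff
    return annotation


assert strip_none_type(Optional[str]) is str
assert strip_none_type(Union[str, int, None]) == [str, int]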

discord/voice_client.py

Lines changed: 51 additions & 13 deletions

@@ -265,6 +265,7 @@ def __init__(self, client: Client, channel: abc.Connectable):
         self.sink = None
         self.starting_time = None
         self.stopping_time = None
+        self.temp_queued_data: dict[int, list] = {}
 
         warn_nacl = not has_nacl
         supported_modes: tuple[SupportedModes, ...] = (

@@ -890,7 +891,7 @@ def recv_audio(self, sink, callback, *args):
         # it by user, handles pcm files and
         # silence that should be added.
 
-        self.user_timestamps: dict[int, tuple[int, float]] = {}
+        self.user_timestamps: dict[int, tuple[int, int, float]] = {}
         self.starting_time = time.perf_counter()
         self.first_packet_timestamp: float
         while self.recording:

@@ -918,7 +919,30 @@ def recv_audio(self, sink, callback, *args):
 
     def recv_decoded_audio(self, data: RawData):
         # Add silence when they were not being recorded.
-        if data.ssrc not in self.user_timestamps:  # First packet from user
+        data.user_id = self.ws.ssrc_map.get(data.ssrc, {}).get("user_id")
+
+        if data.user_id is None:
+            _log.debug(
+                f"DEBUG: received packet with SSRC {data.ssrc} not linked to a user_id."
+                f"Queueing for later processing."
+            )
+            self.temp_queued_data.setdefault(data.ssrc, []).append(data)
+            return
+        elif data.ssrc in self.temp_queued_data:
+            _log.debug(
+                "DEBUG: We got %d packet(s) in queue for SSRC %d",
+                len(self.temp_queued_data[data.ssrc]),
+                data.ssrc,
+            )
+            queued_packets = self.temp_queued_data.pop(data.ssrc)
+            for q_packet in queued_packets:
+                q_packet.user_id = data.user_id
+                self._process_audio_packet(q_packet)
+
+        self._process_audio_packet(data)
+
+    def _process_audio_packet(self, data: RawData):
+        if data.user_id not in self.user_timestamps:  # First packet from user
             if (
                 not self.user_timestamps or not self.sync_start
             ):  # First packet from anyone

@@ -931,19 +955,33 @@ def recv_decoded_audio(self, data: RawData):
             ) - 960
 
         else:  # Previously received a packet from user
-            dRT = (
-                data.receive_time - self.user_timestamps[data.ssrc][1]
-            ) * 48000  # delta receive time
-            dT = data.timestamp - self.user_timestamps[data.ssrc][0]  # delta timestamp
-            diff = abs(100 - dT * 100 / dRT)
-            if (
-                diff > 60 and dT != 960
-            ):  # If the difference in change is more than 60% threshold
-                silence = dRT - 960
+            prev_ssrc = self.user_timestamps[data.user_id][0]
+            prev_timestamp = self.user_timestamps[data.user_id][1]
+            prev_receive_time = self.user_timestamps[data.user_id][2]
+
+            if data.ssrc != prev_ssrc:
+                _log.info(
+                    f"Received audio data from USER_ID {data.user_id} with a previous SSRC {prev_ssrc} and new "
+                    f"SSRC {data.ssrc}."
+                )
+                dRT = (data.receive_time - prev_receive_time) * 1000
+                silence = max(0, int(dRT / (1000 / 48000))) - 960
             else:
-                silence = dT - 960
+                dRT = (
+                    data.receive_time - prev_receive_time
+                ) * 48000  # delta receive time
+                dT = data.timestamp - prev_timestamp  # delta timestamp
+                diff = abs(100 - dT * 100 / dRT)
+                if (
+                    diff > 60 and dT != 960
+                ):  # If the difference in change is more than 60% threshold
+                    silence = dRT - 960
+                else:
+                    silence = dT - 960
 
-        self.user_timestamps.update({data.ssrc: (data.timestamp, data.receive_time)})
+        self.user_timestamps.update(
+            {data.user_id: (data.ssrc, data.timestamp, data.receive_time)}
+        )
 
         data.decoded_data = (
             struct.pack("<h", 0) * max(0, int(silence)) * opus._OpusStruct.CHANNELS
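The buffering added in recv_decoded_audio can be illustrated with a self-contained sketch (names below are illustrative stand-ins, not the library's objects): packets whose SSRC the gateway has not yet mapped to a user_id are parked, then flushed in order once a later packet arrives after the mapping exists.

ssrc_map: dict[int, dict] = {}          # stand-in for self.ws.ssrc_map
temp_queued_data: dict[int, list] = {}  # same role as VoiceClient.temp_queued_data
processed: list[tuple[int, str]] = []


def handle_packet(ssrc: int, payload: str) -> None:
    user_id = ssrc_map.get(ssrc, {}).get("user_id")
    if user_id is None:
        temp_queued_data.setdefault(ssrc, []).append(payload)  # park until mapped
        return
    for queued in temp_queued_data.pop(ssrc, []):  # flush any backlog first
        processed.append((user_id, queued))
    processed.append((user_id, payload))


handle_packet(111, "frame-1")    # SSRC 111 not mapped yet -> buffered
ssrc_map[111] = {"user_id": 42}  # gateway later announces the SSRC -> user_id mapping
handle_packet(111, "frame-2")    # flushes frame-1, then handles frame-2
assert processed == [(42, "frame-1"), (42, "frame-2")]

The silence arithmetic in _process_audio_packet then reduces to two cases; a hedged sketch using only the constants visible in the diff (a 48 kHz sample clock and 960 samples per 20 ms Opus frame; the helper names are illustrative):

SAMPLE_RATE = 48_000
FRAME = 960  # samples in one 20 ms Opus frame at 48 kHz


def silence_same_ssrc(delta_timestamp: int) -> int:
    # Same SSRC: the RTP timestamp delta already counts samples, so anything
    # beyond one frame is audio that was never sent.
    return delta_timestamp - FRAME


def silence_new_ssrc(delta_receive_seconds: float) -> int:
    # New SSRC for a known user: the RTP timestamp restarts, so fall back to
    # wall-clock receive time; dRT is in milliseconds, 1000/48000 ms per sample.
    dRT = delta_receive_seconds * 1000
    return max(0, int(dRT / (1000 / SAMPLE_RATE))) - FRAME


assert silence_same_ssrc(960) == 0  # back-to-back frames: nothing to pad
print(silence_new_ssrc(1.02))       # ~1 s gap -> 48000 samples of padding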

scripts/count_sourcelines.py

Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+import os
+
+cur_path = os.getcwd()
+ignore_set = {"__init__.py", "count_sourcelines.py", "docs-json-exporter.py"}
+
+loc_list = []
+
+for py_dir, _, py_files in os.walk(cur_path):
+    for py_file in py_files:
+        if py_file.endswith(".py") and py_file not in ignore_set:
+            total_path = os.path.join(py_dir, py_file)
+            try:
+                with open(total_path, encoding="utf-8") as file:
+                    loc_list.append(
+                        (len(file.read().splitlines()), total_path.split(cur_path)[1])
+                    )
+            except UnicodeDecodeError as e:
+                print(f"Skipping file {total_path} due to encoding error: {e}")
+
+for line_number_count, filename in loc_list:
+    print("%05d lines in %s" % (line_number_count, filename))
+
+print("\nTotal: {} lines ({})".format(sum([x[0] for x in loc_list]), cur_path))

scripts/docs_json_exporter.py

Lines changed: 66 additions & 0 deletions

@@ -0,0 +1,66 @@
+import json
+import os
+
+from bs4 import BeautifulSoup
+
+
+def log(msg):
+    print(f"::notice::{msg}")
+
+
+log("Starting docs JSON export...")
+folders = [
+    "docs/_build/html/api",
+    "docs/_build/html/ext",
+]
+result = {}
+try:
+    for folder in folders:
+        if not os.path.isdir(folder):
+            log(f"Skipping missing folder: {folder}")
+            continue
+        base_html = os.path.normpath("docs/_build/html")
+        for root, _, files in os.walk(folder):
+            rel_dir = os.path.relpath(root, base_html).replace("\\", "/") + "/"
+            if rel_dir not in result:
+                result[rel_dir] = {}
+            for html_file in files:
+                if not html_file.endswith(".html"):
+                    continue
+                file_path = os.path.join(root, html_file)
+                with open(file_path, encoding="utf-8") as f:
+                    soup = BeautifulSoup(f, "html.parser")
+                page_index = {}
+                for class_dl in soup.find_all("dl", class_="class"):
+                    dt = class_dl.find("dt")
+                    class_name = dt.get("id") if dt else None
+                    if not class_name:
+                        class_name = dt.text.split(":")[-1].strip() if dt else None
+                    members = []
+                    for member_dl in class_dl.find_all(
+                        "dl", class_=["attribute", "method"]
+                    ):
+                        for member_dt in member_dl.find_all("dt"):
+                            member_id = member_dt.get("id")
+                            member_name = (
+                                member_id.split(".")[-1]
+                                if member_id
+                                else member_dt.text.split(":")[-1].strip()
+                            )
+                            if member_name:
+                                members.append(member_name)
+                    page_index[class_name] = members
+                for func_dl in soup.find_all("dl", class_="function"):
+                    dt = func_dl.find("dt")
+                    func_name = dt.get("id") if dt else None
+                    if not func_name:
+                        func_name = dt.text.split(":")[-1].strip() if dt else None
+                    page_index[func_name] = []
+                result[rel_dir][html_file] = page_index
+    cleaned_result = {k: v for k, v in result.items() if v}
+    with open("docs.json", "w", encoding="utf-8") as out:
+        json.dump(cleaned_result, out, indent=2, ensure_ascii=False)
+    log("Exported docs to docs.json")
+    log("To upload as artifact: docs.json")
+except Exception as e:
+    print(f"::error::Docs JSON export failed: {e}")
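For reference, the shape of the docs.json that the script above writes, derived from how result is filled; the identifiers below are illustrative examples, not the actual contents of a Pycord docs build:

example_docs_json = {
    "api/": {
        "clients.html": {
            # a <dl class="class"> entry: its dt id maps to attribute/method names
            "discord.Client": ["user", "guilds", "connect", "close"],
            # a <dl class="function"> entry maps to an empty list
            "discord.utils.get": [],
        },
    },
    "ext/commands/": {
        "api.html": {
            "discord.ext.commands.Bot": ["add_cog", "run"],
        },
    },
}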
