Commit f60c41d

added back github get_connected_pulls()

1 parent da026b2

2 files changed (+118, -59 lines)

issues_parser.py

Lines changed: 116 additions & 57 deletions
@@ -4,7 +4,6 @@
 from time import sleep
 from typing import Generator
 import os
-from typing import Optional
 import pytz
 import requests

@@ -45,42 +44,33 @@ class IssueDataWithComment(IssueData):


 def get_connected_pulls(
+    token: str,
     issue_number: int,
     repo_owner: str,
     repo_name: str,
-    forgejo_token: Optional[str] = None
+    base_url: str | None = None
 ) -> str:

-    base_url = os.getenv('FORGEJO_BASE_URL')
-    if not base_url:
-        raise ValueError("FORGEJO_BASE_URL environment variable must be set")
+    if base_url:  # Forgejo
+        headers = {
+            "Authorization": f"token {token}",
+            "Accept": "application/json"
+        }

-    token = forgejo_token or os.getenv('FORGEJO_TOKEN')
-    if not token:
-        raise ValueError(
-            "Forgejo API token is required. "
-            "Set FORGEJO_TOKEN environment variable or pass forgejo_token parameter"
-        )
-
-    headers = {
-        "Authorization": f"token {token}",
-        "Accept": "application/json"
-    }
+        connected_prs = set()
+        api_base = f"{base_url}/api/v1/repos/{repo_owner}/{repo_name}"

-    connected_prs = set()
-    api_base = f"{base_url}/api/v1/repos/{repo_owner}/{repo_name}"
-
-    try:
-        comments_response = requests.get(
-            f"{api_base}/issues/{issue_number}/comments",
-            headers=headers
-        )
-        comments_response.raise_for_status()
+        try:
+            comments_response = requests.get(
+                f"{api_base}/issues/{issue_number}/comments",
+                headers=headers
+            )
+            comments_response.raise_for_status()

-        for comment in comments_response.json():
-            body = comment.get("body", "")
-            if not body:
-                continue
+            for comment in comments_response.json():
+                body = comment.get("body", "")
+                if not body:
+                    continue

                 for word in body.split():
                     clean_word = word.strip(".,:;!?()[]{}")
@@ -92,23 +82,96 @@ def get_connected_pulls(
                         pr_num = clean_word[1:]
                         connected_prs.add(f"{base_url}/{repo_owner}/{repo_name}/pulls/{pr_num}")

-        prs_response = requests.get(
-            f"{api_base}/pulls?state=all",
-            headers=headers
-        )
-        prs_response.raise_for_status()
+            prs_response = requests.get(
+                f"{api_base}/pulls?state=all",
+                headers=headers
+            )
+            prs_response.raise_for_status()

-        for pr in prs_response.json():
-            if f"#{issue_number}" in pr.get("body", ""):
-                connected_prs.add(pr.get("html_url"))
+            for pr in prs_response.json():
+                if f"#{issue_number}" in pr.get("body", ""):
+                    connected_prs.add(pr.get("html_url"))

-    except requests.exceptions.RequestException as e:
-        print(f"[Warning] Failed to fetch connected PRs: {str(e)}")
-        return 'Empty field'
+        except requests.exceptions.RequestException as e:
+            print(f"[Warning] Failed to fetch connected PRs: {str(e)}")
+            return 'Empty field'

-    return ';'.join(sorted(connected_prs)) if connected_prs else 'Empty field'
+        return ';'.join(sorted(connected_prs)) if connected_prs else 'Empty field'
+
+    else:  # PyGithub
+        repo_owner = repo_owner.login
+        # Build the GraphQL query
+        query = """
+        {
+            repository(owner: "%s", name: "%s") {
+                issue(number: %d) {
+                    timelineItems(first: 50, itemTypes:[CONNECTED_EVENT,CROSS_REFERENCED_EVENT]) {
+                        filteredCount
+                        nodes {
+                            ... on ConnectedEvent {
+                                ConnectedEvent: subject {
+                                    ... on PullRequest {
+                                        number
+                                        title
+                                        url
+                                    }
+                                }
+                            }
+                            ... on CrossReferencedEvent {
+                                CrossReferencedEvent: source {
+                                    ... on PullRequest {
+                                        number
+                                        title
+                                        url
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }""" % (
+            repo_owner,
+            repo_name,
+            issue_number,
+        )

+        # Build the request headers
+        headers = {
+            "Authorization": f"Bearer {token}",
+            "Content-Type": "application/json",
+        }

+        # Send the GraphQL request
+        response = requests.post(
+            "https://api.github.com/graphql",
+            headers=headers,
+            data=json.dumps({"query": query}),
+        )
+        response_data = response.json()
+        # Process the returned data
+        pull_request_data = response_data["data"]["repository"]["issue"]
+        list_url = []
+        if pull_request_data is not None:
+            issues_data = pull_request_data["timelineItems"]["nodes"]
+            for pulls in issues_data:
+                if (
+                    pulls.get("CrossReferencedEvent") is not None
+                    and pulls.get("CrossReferencedEvent").get("url") is not None
+                    and pulls.get("CrossReferencedEvent").get("url") not in list_url
+                ):
+                    list_url.append(pulls.get("CrossReferencedEvent").get("url"))
+                if (
+                    pulls.get("ConnectedEvent") is not None
+                    and pulls.get("ConnectedEvent").get("url") is not None
+                    and pulls.get("ConnectedEvent").get("url") not in list_url
+                ):
+                    list_url.append(pulls.get("ConnectedEvent").get("url"))
+            if list_url == []:
+                return 'Empty field'
+            else:
+                return ';'.join(list_url)
+        return 'Empty field'

 def log_repository_issues(
     client: IRepositoryAPI, repository: Repository, csv_name, token, start, finish
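For orientation only (not part of the commit): the parsing loop in the PyGithub branch above assumes a GraphQL response shaped roughly like the sketch below. The pull request numbers, titles, and URLs are invented examples, and real responses may contain empty nodes when a timeline item's subject or source is not a pull request.

# Illustrative, assumed shape of response_data for the query above -- all values are made up.
response_data = {
    "data": {
        "repository": {
            "issue": {
                "timelineItems": {
                    "filteredCount": 2,
                    "nodes": [
                        # a ConnectedEvent whose subject is a pull request
                        {"ConnectedEvent": {"number": 7, "title": "Fix parser",
                                            "url": "https://github.com/example-owner/example-repo/pull/7"}},
                        # a CrossReferencedEvent whose source is a pull request
                        {"CrossReferencedEvent": {"number": 9, "title": "Refactor logging",
                                                  "url": "https://github.com/example-owner/example-repo/pull/9"}},
                    ],
                }
            }
        }
    }
}
# Each node carries at most one of the two aliased keys, which is why the loop
# checks both, skips entries without a url, and de-duplicates before joining with ';'.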
@@ -144,7 +207,7 @@ def get_info(obj, attr):
             closer_email=issue.closed_by.email if issue.closed_by else None,
             assignee_story=get_assignee_story(issue),
             connected_pull_requests=(
-                get_connected_pulls(issue._id, repository.owner, repository.name, token)
+                get_connected_pulls(token, issue._id, repository.owner, repository.name)
                 if issue._id is not None
                 else EMPTY_FIELD
             ),
@@ -189,18 +252,14 @@ def log_issues(
     logger.log_to_csv(csv_name, list(info.keys()))

     for client, repo, token in binded_repos:
-        try:
-            logger.log_title(repo.name)
-            log_repository_issues(client, repo, csv_name, token, start, finish)
-            if fork_flag:
-                forked_repos = client.get_forks(repo)
-                for forked_repo in forked_repos:
-                    logger.log_title(f"FORKED: {forked_repo.name}")
-                    log_repository_issues(
-                        client, forked_repo, csv_name, token, start, finish
-                    )
-                    sleep(TIMEDELTA)
-            sleep(TIMEDELTA)
-        except Exception as e:
-            print("log_issues exception:", e)
-            exit(1)
+        logger.log_title(repo.name)
+        log_repository_issues(client, repo, csv_name, token, start, finish)
+        if fork_flag:
+            forked_repos = client.get_forks(repo)
+            for forked_repo in forked_repos:
+                logger.log_title(f"FORKED: {forked_repo.name}")
+                log_repository_issues(
+                    client, forked_repo, csv_name, token, start, finish
+                )
+                sleep(TIMEDELTA)
+        sleep(TIMEDELTA)
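A minimal usage sketch of the updated signature, not part of the commit: it only illustrates how the token-first argument order and the optional base_url select the Forgejo or GitHub branch. The tokens, issue number, owner, host, and repository names below are hypothetical placeholders.

# Hypothetical values throughout -- a sketch, not the project's actual call sites.
from types import SimpleNamespace

from issues_parser import get_connected_pulls

# Forgejo branch: passing base_url routes the lookup through the Forgejo REST API.
forgejo_prs = get_connected_pulls(
    "FORGEJO_TOKEN_PLACEHOLDER",
    42,
    "example-owner",
    "example-repo",
    base_url="https://forgejo.example.org",
)

# GitHub branch: omitting base_url routes the lookup through the GraphQL API.
# The owner argument is expected to expose a .login attribute (the code reads
# repo_owner.login), as a PyGithub NamedUser does; SimpleNamespace stands in here.
github_prs = get_connected_pulls(
    "GITHUB_TOKEN_PLACEHOLDER",
    42,
    SimpleNamespace(login="example-owner"),
    "example-repo",
)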

utils.py

Lines changed: 2 additions & 2 deletions
@@ -21,11 +21,11 @@ def log_title(title: str, title_len: int = TITLE_LEN):
     @staticmethod
     def log_to_csv(csv_name: str, field_names: tuple[str], row: dict | None = None):
         if isinstance(row, dict):
-            with open(csv_name, 'a', newline='') as file:
+            with open(csv_name, 'a', encoding='utf-8', newline='') as file:
                 writer = csv.DictWriter(file, fieldnames=field_names)
                 writer.writerow(row)
         elif row is None:
-            with open(csv_name, 'w', newline='') as file:
+            with open(csv_name, 'w', encoding='utf-8', newline='') as file:
                 writer = csv.writer(file)
                 writer.writerow(field_names)
         else:
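A minimal sketch of why the added encoding argument matters, assuming exported issue fields can contain non-ASCII text; the file name and row below are hypothetical.

import csv

# Without encoding='utf-8', open() falls back to the locale's preferred encoding,
# which on some systems (e.g. a cp1252 Windows locale) cannot represent every
# character in issue titles and raises UnicodeEncodeError mid-export. Forcing
# UTF-8 keeps the CSV output consistent across platforms.
with open('issues_example.csv', 'w', encoding='utf-8', newline='') as file:
    writer = csv.writer(file)
    writer.writerow(['issue title'])
    writer.writerow(['Добавить поддержку Forgejo'])  # hypothetical non-ASCII row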
