Commit aba07f0

utils-logger implemented
1 parent 395df6b commit aba07f0

File tree

7 files changed: +108 -102 lines changed

commits_parser.py

Lines changed: 7 additions & 19 deletions
@@ -1,4 +1,4 @@
-import csv
+from utils import logger
 import pytz
 from time import sleep
 from github import Github, Repository, GithubException, PullRequest
@@ -17,17 +17,6 @@
     'branch',
 )

-
-def log_commit_to_csv(info, csv_name):
-    with open(csv_name, 'a', newline='') as file:
-        writer = csv.DictWriter(file, fieldnames=FIELDNAMES)
-        writer.writerow(info)
-
-
-def log_commit_to_stdout(info):
-    print(info)
-
-
 def log_repository_commits(repository: Repository, csv_name, start, finish, branch):
     branches = []
     match branch:
@@ -63,25 +52,24 @@ def log_repository_commits(repository: Repository, csv_name, start, finish, bran
         ]
         info = dict(zip(FIELDNAMES, commit_data))

-        log_commit_to_csv(info, csv_name)
-        log_commit_to_stdout(info)
+        logger.log_to_csv(csv_name, FIELDNAMES, info)
+        logger.log_to_stdout(info)
+
         sleep(TIMEDELTA)


 def log_commits(
     client: Github, working_repos, csv_name, start, finish, branch, fork_flag
 ):
-    with open(csv_name, 'w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(FIELDNAMES)
+    logger.log_to_csv(csv_name, FIELDNAMES)

     for repo in working_repos:
         try:
-            print('=' * 20, repo.full_name, '=' * 20)
+            logger.log_title(repo.full_name)
             log_repository_commits(repo, csv_name, start, finish, branch)
             if fork_flag:
                 for forked_repo in repo.get_forks():
-                    print('=' * 20, "FORKED:", forked_repo.full_name, '=' * 20)
+                    logger.log_title("FORKED:", forked_repo.full_name)
                     log_repository_commits(forked_repo, csv_name, start, finish, branch)
                     sleep(TIMEDELTA)
             sleep(TIMEDELTA)

contributors_parser.py

Lines changed: 6 additions & 19 deletions
@@ -1,4 +1,4 @@
-import csv
+from utils import logger
 from time import sleep
 from typing import Generator
 from github import Github, Repository, GithubException
@@ -21,17 +21,6 @@
     'site_admin',
 )

-
-def log_contributors_to_csv(info: dict, csv_name: str):
-    with open(csv_name, 'a', newline='') as file:
-        writer = csv.DictWriter(file, fieldnames=FIELDNAMES)
-        writer.writerow(info)
-
-
-def log_contributors_to_stdout(info: dict):
-    print(info)
-
-
 def log_repository_contributors(repository: Repository, csv_name: str):
     contributors_stats = get_contributors_stats(repository)

@@ -56,8 +45,8 @@ def log_repository_contributors(repository: Repository, csv_name: str):
             'site_admin': contributor.site_admin,
         }

-        log_contributors_to_csv(info_tmp, csv_name)
-        log_contributors_to_stdout(info_tmp)
+        logger.log_to_csv(csv_name, FIELDNAMES, info_tmp)
+        logger.log_to_stdout(info_tmp)

         sleep(TIMEDELTA)

@@ -85,18 +74,16 @@ def get_contributors_stats(repository: Repository) -> dict:
 def log_contributors(
     client: Github, working_repos: Generator, csv_name: str, fork_flag: bool
 ):
-    with open(csv_name, 'w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(FIELDNAMES)
+    logger.log_to_csv(csv_name, FIELDNAMES)

     for repo in working_repos:
         try:
-            print('=' * 20, repo.full_name, '=' * 20)
+            logger.log_title(repo.full_name)
             log_repository_contributors(repo, csv_name)

             if fork_flag:
                 for forked_repo in repo.get_forks():
-                    print('=' * 20, "FORKED:", forked_repo.full_name, '=' * 20)
+                    logger.log_title("FORKED:", forked_repo.full_name)
                     log_repository_contributors(forked_repo, csv_name)
                     sleep(TIMEDELTA)

invites_parser.py

Lines changed: 24 additions & 19 deletions
@@ -1,18 +1,14 @@
-import csv
-import pytz
-import requests
-import json
+from utils import logger
 from time import sleep
 from github import Github, Repository, GithubException, PullRequest
-import csv

 FIELDNAMES = (
     'repository name',
     'invited login',
     'invite creation date',
     'invitation url',
 )
-
+TIMEDELTA = 0.05

 def log_inviter(repo, invite, writer):
     invite_info = [
@@ -24,17 +20,26 @@ def log_inviter(repo, invite, writer):
     writer.writerow(invite_info)
     print(invite_info)

+def log_repository_invitations(repository: Repository, csv_name):
+    invitations = repository.get_pending_invitations()
+    for invite in invitations:
+        invite_info = {
+            'repository name': repository.full_name,
+            'invited login': invite.invitee.login,
+            'invite creation date': invite.created_at.strftime("%d/%m/%Y, %H:%M:%S"),
+            'invitation url': invite.html_url,
+        }
+        logger.log_to_csv(csv_name, FIELDNAMES, invite_info)
+        logger.log_to_stdout(invite_info)
+        sleep(TIMEDELTA)
+
+
+def log_invitations(client: Github, working_repos, csv_name):
+    logger.log_to_csv(csv_name, FIELDNAMES)

-def log_invitations(client: Github, working_repos, csv_name, timedelta=1):
-    with open(csv_name, 'w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(FIELDNAMES)
-        for repo in working_repos:
-            print('=' * 20, repo.full_name, '=' * 20)
-            invitations = repo.get_pending_invitations()
-            for invite in invitations:
-                try:
-                    log_inviter(repo, invite, writer)
-                    sleep(timedelta)
-                except Exception as e:
-                    print(e)
+    for repo in working_repos:
+        logger.log_title(repo.full_name)
+        try:
+            log_repository_invitations(repo, csv_name)
+        except Exception as e:
+            print(e)

issues_parser.py

Lines changed: 10 additions & 20 deletions
@@ -1,4 +1,4 @@
-import csv
+from utils import logger
 import pytz
 import requests
 import json
@@ -35,16 +35,6 @@
 )


-def log_issue_to_csv(info, csv_name):
-    with open(csv_name, 'a', newline='') as file:
-        writer = csv.DictWriter(file, fieldnames=FIELDNAMES)
-        writer.writerow(info)
-
-
-def log_issue_to_stdout(info):
-    print(info)
-
-
 def get_connected_pulls(issue_number, repo_owner, repo_name, token):
     access_token = token
     repo_owner = repo_owner.login
@@ -172,26 +162,26 @@ def log_repository_issues(repository: Repository, csv_name, token, start, finish
                 info['comment author name'] = comment.user.name
                 info['comment author login'] = comment.user.login
                 info['comment author email'] = comment.user.email
-                log_issue_to_csv(info, csv_name)
-                log_issue_to_stdout(info)
+
+                logger.log_to_csv(csv_name, FIELDNAMES, info)
+                logger.log_to_stdout(info)
         else:
-            log_issue_to_csv(info_tmp, csv_name)
-            log_issue_to_stdout(info_tmp)
+            logger.log_to_csv(csv_name, FIELDNAMES, info_tmp)
+            logger.log_to_stdout(info_tmp)
+
         sleep(TIMEDELTA)


 def log_issues(client: Github, working_repo, csv_name, token, start, finish, fork_flag):
-    with open(csv_name, 'w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(FIELDNAMES)
+    logger.log_to_csv(csv_name, FIELDNAMES)

     for repo in working_repo:
         try:
-            print('=' * 20, repo.full_name, '=' * 20)
+            logger.log_title(repo.full_name)
             log_repository_issues(repo, csv_name, token, start, finish)
             if fork_flag:
                 for forked_repo in repo.get_forks():
-                    print('=' * 20, "FORKED:", forked_repo.full_name, '=' * 20)
+                    logger.log_title("FORKED:", forked_repo.full_name)
                     log_repository_issues(forked_repo, csv_name, token, start, finish)
                     sleep(TIMEDELTA)
             sleep(TIMEDELTA)

main.py

Lines changed: 3 additions & 5 deletions
@@ -12,7 +12,6 @@
 import contributors_parser


-
 def parse_args():
     parser = argparse.ArgumentParser()
     parser.add_argument("--invites", help="print pending invites", action="store_true")
@@ -140,10 +139,9 @@ def main():
             print(e)
         else:
             working_repos = git_logger.get_next_repo(client, repositories)
-            if args.start:
-                start = parse_time(args.start.split('-'))
-            if args.finish:
-                finish = parse_time(args.finish.split('-'))
+            start = parse_time(args.start.split('-'))
+            finish = parse_time(args.finish.split('-'))
+
             if args.commits:
                 commits_parser.log_commits(
                     client, working_repos, csv_name, start, finish, args.branch, fork_flag

pull_requests_parser.py

Lines changed: 9 additions & 20 deletions
@@ -1,4 +1,4 @@
-import csv
+from utils import logger
 import pytz
 import requests
 import json
@@ -38,16 +38,6 @@
 )


-def log_pr_to_stdout(info):
-    print(info)
-
-
-def log_pr_to_csv(info, csv_name):
-    with open(csv_name, 'a', newline='') as file:
-        writer = csv.DictWriter(file, fieldnames=FIELDNAMES)
-        writer.writerow(info)
-
-
 def get_related_issues(pull_request_number, repo_owner, repo_name, token):
     access_token = token
     repo_owner = repo_owner.login
@@ -161,11 +151,12 @@ def log_repositories_pr(
                 info['comment author name'] = comment.user.name
                 info['comment author login'] = comment.user.login
                 info['comment author email'] = nvl(comment.user.email)
-                log_pr_to_csv(info, csv_name)
-                log_pr_to_stdout(info)
+
+                logger.log_to_csv(csv_name, FIELDNAMES, info)
+                logger.log_to_stdout(info)
         else:
-            log_pr_to_csv(info_tmp, csv_name)
-            log_pr_to_stdout(info_tmp)
+            logger.log_to_csv(csv_name, FIELDNAMES, info_tmp)
+            logger.log_to_stdout(info_tmp)
         sleep(TIMEDELTA)


@@ -179,17 +170,15 @@ def log_pull_requests(
     fork_flag,
     log_comments=False,
 ):
-    with open(csv_name, 'w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(FIELDNAMES)
+    logger.log_to_csv(csv_name, FIELDNAMES)

     for repo in working_repos:
         try:
-            print('=' * 20, repo.full_name, '=' * 20)
+            logger.log_title(repo.full_name)
             log_repositories_pr(repo, csv_name, token, start, finish)
             if fork_flag:
                 for forked_repo in repo.get_forks():
-                    print('=' * 20, "FORKED:", forked_repo.full_name, '=' * 20)
+                    logger.log_title("FORKED:", forked_repo.full_name)
                     log_repositories_pr(
                         forked_repo, csv_name, token, start, finish, log_comments
                     )

utils.py

Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
+import csv
+
+TITLE_LEN = 80
+MIN_SIDE_PADDING = 4
+SIDE_WHITE_SPACES = 1
+
+class logger:
+    @staticmethod
+    def log_title(title: str, title_len: int = TITLE_LEN):
+        if len(title) + MIN_SIDE_PADDING * 2 + SIDE_WHITE_SPACES * 2 > title_len:
+            need_sz = title_len - MIN_SIDE_PADDING * 2 - SIDE_WHITE_SPACES * 2
+
+            title = title[:need_sz]
+
+        padding = title_len - (len(title) + SIDE_WHITE_SPACES * 2)
+
+        right_padding = (padding + 1) // 2
+        left_padding = padding // 2
+
+        print(f"{left_padding * '='}{SIDE_WHITE_SPACES * ' '}{title}{SIDE_WHITE_SPACES * ' '}{right_padding * '='}")
+
+    @staticmethod
+    def log_to_csv(csv_name: str, field_names: tuple[str], row: dict | None = None):
+        if isinstance(row, dict):
+            file = open(csv_name, 'a', newline='')
+            writer = csv.DictWriter(file, fieldnames=field_names)
+        elif row == None:
+            file = open(csv_name, 'w', newline='')
+            writer = csv.writer(file)
+            row = field_names
+        else:
+            raise TypeError(f"row has type {type(row)} but must be [ dict | None ]")
+
+        writer.writerow(row)
+
+        file.close()
+
+    @staticmethod
+    def log_to_stdout(info: dict):
+        print(info)
+
+    @staticmethod
+    def log_error(error: str):
+        # or use logging, as in interface_wrapper
+        pass
+
+    @staticmethod
+    def log_warning(warning: str):
+        pass
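
For reference, a minimal usage sketch of the new utils.logger helper as the parser modules call it above. This is not part of the commit; the CSV file name, field names, and repository name are illustrative placeholders.

    from utils import logger

    # Each parser module defines its own FIELDNAMES tuple; this one is illustrative.
    FIELDNAMES = ('repository name', 'author login')

    # Without a row, log_to_csv opens the file in 'w' mode and writes the header.
    logger.log_to_csv('commits.csv', FIELDNAMES)

    # With a dict, it reopens the file in 'a' mode and appends one row in FIELDNAMES order.
    info = {'repository name': 'octocat/Hello-World', 'author login': 'octocat'}
    logger.log_to_csv('commits.csv', FIELDNAMES, info)
    logger.log_to_stdout(info)

    # log_title prints the name centered in an 80-character '=' banner.
    logger.log_title('octocat/Hello-World')

Note that log_to_csv opens and closes the file on every call, which mirrors the per-row open-and-append behaviour of the helpers it replaces.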

0 commit comments
