1010from src .log .logger import upload_log
1111import time
1212import fcntl
13+ from concurrent .futures import ThreadPoolExecutor , as_completed
14+ from db .conn import get_single_upload_queue , delete_upload_queue , update_upload_queue_lock
15+ import threading
16+
17+ read_lock = threading .Lock ()
1318
1419def upload_video (upload_path , yaml_file_path ):
1520 try :
@@ -32,12 +37,16 @@ def upload_video(upload_path, yaml_file_path):
3237 if result .returncode == 0 :
3338 upload_log .info ("Upload successfully, then delete the video" )
3439 os .remove (upload_path )
40+ os .remove (yaml_file_path )
41+ delete_upload_queue (upload_path )
3542 else :
3643 upload_log .error ("Fail to upload, the files will be reserved." )
44+ update_upload_queue_lock (upload_path , 0 )
3745 return False
3846
3947 except subprocess .CalledProcessError as e :
4048 upload_log .error (f"The upload_video called failed, the files will be reserved. error: { e } " )
49+ update_upload_queue_lock (upload_path , 0 )
4150 return False
4251
4352def find_bv_number (target_str , my_list ):
@@ -48,57 +57,6 @@ def find_bv_number(target_str, my_list):
4857 return parts [1 ].strip ()
4958 return None
5059
51- def read_append_and_delete_lines (file_path ):
52- try :
53- while True :
54- if os .path .getsize (file_path ) == 0 :
55- upload_log .info ("Empty queue, wait 2 minutes and check again." )
56- time .sleep (120 )
57- return
58-
59- with open (file_path , "r+" ) as file :
60- fcntl .flock (file , fcntl .LOCK_EX )
61- lines = file .readlines ()
62- upload_video_path = lines .pop (0 ).strip ()
63- file .seek (0 )
64- file .writelines (lines )
65- # Truncate the file to the current position
66- file .truncate ()
67- # Release the lock
68- fcntl .flock (file , fcntl .LOCK_UN )
69-
70- upload_log .info (f"deal with { upload_video_path } " )
71- # check if the live is already uploaded
72- if upload_video_path .endswith ('.flv' ):
73- # upload slice video
74- yaml_template = generate_slice_yaml_template (upload_video_path )
75- yaml_file_path = SRC_DIR + "/upload/upload.yaml"
76- with open (yaml_file_path , 'w' , encoding = 'utf-8' ) as file :
77- file .write (yaml_template )
78- upload_video (upload_video_path , yaml_file_path )
79- return
80- else :
81- query = generate_title (upload_video_path )
82- result = subprocess .check_output ("bilitool" + " list" , shell = True )
83- # print(result.decode("utf-8"), flush=True)
84- upload_list = result .decode ("utf-8" ).splitlines ()
85- bv_result = find_bv_number (query , upload_list )
86- if bv_result :
87- upload_log .info (f"The series of videos has already been uploaded, the BV number is: { bv_result } " )
88- append_upload (upload_video_path , bv_result )
89- else :
90- upload_log .info ("First upload this live" )
91- # generate the yaml template
92- yaml_template = generate_yaml_template (upload_video_path )
93- yaml_file_path = SRC_DIR + "/upload/upload.yaml"
94- with open (yaml_file_path , 'w' , encoding = 'utf-8' ) as file :
95- file .write (yaml_template )
96- upload_video (upload_video_path , yaml_file_path )
97- return
98-
99- except subprocess .CalledProcessError as e :
100- upload_log .error (f"The read_append_and_delete_lines called failed, the files will be reserved. error: { e } " )
101- return False
10260
10361def append_upload (upload_path , bv_result ):
10462 try :
@@ -120,18 +78,65 @@ def append_upload(upload_path, bv_result):
12078 if result .returncode == 0 :
12179 upload_log .info ("Upload successfully, then delete the video" )
12280 os .remove (upload_path )
81+ delete_upload_queue (upload_path )
12382 else :
12483 upload_log .error ("Fail to append, the files will be reserved." )
84+ update_upload_queue_lock (upload_path , 0 )
12585 return False
12686
12787 except subprocess .CalledProcessError as e :
12888 upload_log .error (f"The append_upload called failed, the files will be reserved. error: { e } " )
89+ update_upload_queue_lock (upload_path , 0 )
12990 return False
91+
92+
def read_append_and_delete_lines():
    """Worker loop: claim one task from the upload queue, process it, repeat.

    Intended to run forever inside a ThreadPoolExecutor worker.  A task is
    claimed under ``read_lock`` by flipping its locked flag to 1 so that no
    two workers pick up the same queue row.

    Fixes over the previous revision:
    - ``return`` no longer ends the worker after one upload; the loop
      continues so the thread keeps draining the queue.
    - the empty-queue sleep happens *outside* ``read_lock`` so other
      workers are not blocked for 2 minutes.
    """
    while True:
        queue_empty = False
        # Read one row and lock it atomically w.r.t. the other workers.
        with read_lock:
            task = get_single_upload_queue()
            if not task:
                queue_empty = True
            else:
                # NOTE(review): assumes the dict yields (video_path,
                # config_path) in that order — confirm against db.conn.
                video_path, config_path = task.values()
                # Claim the task by setting locked = 1.
                if not update_upload_queue_lock(video_path, 1):
                    upload_log.error(f"Failed to lock task for {video_path}, possibly already locked by another thread or database error.")
                    task = None

        if queue_empty:
            upload_log.info("Empty queue, wait 2 minutes and check again.")
            time.sleep(120)
            continue
        if task is None:
            # Lock failed; let another worker (or the next pass) retry.
            continue

        upload_log.info(f"deal with {video_path}")
        if video_path.endswith('.flv'):
            # Slice video: upload directly with its pre-generated config.
            upload_video(video_path, config_path)
        else:
            # Check whether this live series was already uploaded.
            query = generate_title(video_path)
            result = subprocess.check_output("bilitool" + " list", shell=True)
            upload_list = result.decode("utf-8").splitlines()
            bv_result = find_bv_number(query, upload_list)
            if bv_result:
                upload_log.info(f"The series of videos has already been uploaded, the BV number is: {bv_result}")
                append_upload(video_path, bv_result)
            else:
                upload_log.info("First upload this live")
                upload_video(video_path, config_path)
        # Pace the loop between tasks on every path, not just append.
        time.sleep(20)
136+
130137
if __name__ == "__main__":
    # MAX_WORKERS arrives from the environment as a *string* when set;
    # coerce to int so range()/ThreadPoolExecutor accept it.
    max_workers = int(os.getenv("MAX_WORKERS", 5))
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(read_append_and_delete_lines) for _ in range(max_workers)]
        # Workers loop forever; if one dies with an exception, surface it
        # instead of silently losing the traceback.
        for future in as_completed(futures):
            try:
                future.result()
            except Exception:
                upload_log.exception("Upload worker thread crashed")
142+
0 commit comments