@@ -92,9 +92,13 @@ def get_channel_data_from_file(channels, file, use_old):
                 if name not in category_dict:
                     category_dict[name] = []
                 if use_old and url:
-                    info = (url, None, None, None)
-                    if info[0] and info not in category_dict[name]:
-                        category_dict[name].append(info)
+                    info = url.partition("$")[2]
+                    origin = None
+                    if info and info.startswith("!"):
+                        origin = "important"
+                    data = (url, None, None, origin)
+                    if data not in category_dict[name]:
+                        category_dict[name].append(data)
     return channels
 
 
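This hunk appears to establish the URL suffix convention used throughout the change: anything after `$` is treated as metadata, and a suffix beginning with `!` marks the entry as an "important" origin. A minimal sketch of that parsing (the sample URL is invented for illustration):

```python
# Sketch of the "$" suffix convention from the hunk above; the URL is made up.
url = "http://example.com/live.m3u8$!backup"

info = url.partition("$")[2]                      # -> "!backup"
origin = "important" if info.startswith("!") else None
data = (url, None, None, origin)                  # (url, date, resolution, origin)
print(data)                                       # origin is "important" here
```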
@@ -119,10 +123,17 @@ def get_channel_items():
             for cate, data in channels.items():
                 if cate in old_result:
                     for name, info_list in data.items():
+                        urls = [
+                            item[0].partition("$")[0]
+                            for item in info_list
+                            if item[0]
+                        ]
                         if name in old_result[cate]:
                             for info in old_result[cate][name]:
-                                if info not in info_list:
-                                    channels[cate][name].append(info)
+                                if info:
+                                    pure_url = info[0].partition("$")[0]
+                                    if pure_url not in urls:
+                                        channels[cate][name].append(info)
     return channels
 
 
@@ -473,7 +484,9 @@ def init_info_data(data, cate, name):
         data[cate][name] = []
 
 
-def append_data_to_info_data(info_data, cate, name, data, origin=None, check=True):
+def append_data_to_info_data(
+    info_data, cate, name, data, origin=None, check=True, insert=False
+):
     """
     Append channel data to total info data
     """
@@ -482,13 +495,25 @@ def append_data_to_info_data(info_data, cate, name, data, origin=None, check=Tru
     for item in data:
         try:
             url, date, resolution, *rest = item
-            origin = origin or (rest[0] if rest else None)
+            url_origin = origin or (rest[0] if rest else None)
             if url:
                 pure_url = url.partition("$")[0]
-                if pure_url not in urls and (
-                    not check or (check and check_url_by_patterns(pure_url))
+                if pure_url in urls:
+                    continue
+                if (
+                    url_origin == "important"
+                    or (not check)
+                    or (check and check_url_by_patterns(pure_url))
                 ):
-                    info_data[cate][name].append((url, date, resolution, origin))
+                    if insert:
+                        info_data[cate][name].insert(
+                            0, (url, date, resolution, url_origin)
+                        )
+                    else:
+                        info_data[cate][name].append(
+                            (url, date, resolution, url_origin)
+                        )
+                    urls.append(pure_url)
         except:
             continue
 
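With the reworked `append_data_to_info_data`, duplicates are rejected by the URL portion before `$`, entries whose origin is `important` bypass the pattern check, and `insert=True` places an entry at the head of the list. A usage sketch (channel name and URLs are made up, and `check=False` keeps the example independent of the project's URL patterns):

```python
info_data = {}
append_data_to_info_data(
    info_data, "央视", "CCTV-1",
    [("http://a.example/1.m3u8", None, None, None)],
    check=False,
)
append_data_to_info_data(
    info_data, "央视", "CCTV-1",
    [("http://b.example/1.m3u8$!", None, None, "important")],
    check=False, insert=True,
)
# The "important" URL now sits at index 0; adding either URL again is a no-op,
# since deduplication keys on the part of the URL before "$".
```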
@@ -504,19 +529,15 @@ def get_origin_method_name(method):
 
 def append_old_data_to_info_data(info_data, cate, name, data):
     """
-    Append old channel data to total info data
+    Append history channel data to total info data
     """
     append_data_to_info_data(
         info_data,
         cate,
         name,
         data,
     )
-    print(name, "old:", len(data), end=", ")
-    print(
-        "total:",
-        len(info_data.get(cate, {}).get(name, [])),
-    )
+    print("History:", len(data), end=", ")
 
 
 def append_total_data(
@@ -542,6 +563,8 @@ def append_total_data(
     for cate, channel_obj in items:
         for name, old_info_list in channel_obj.items():
             print(f"{name}:", end=" ")
+            if constants.open_use_old_result and old_info_list:
+                append_old_data_to_info_data(data, cate, name, old_info_list)
             for method, result in total_result:
                 if constants.open_method[method]:
                     origin_method = get_origin_method_name(method)
@@ -552,8 +575,10 @@ def append_total_data(
                         data, cate, name, name_results, origin=origin_method
                     )
                     print(f"{method.capitalize()}:", len(name_results), end=", ")
-            if constants.open_use_old_result:
-                append_old_data_to_info_data(data, cate, name, old_info_list)
+            print(
+                "total:",
+                len(data.get(cate, {}).get(name, [])),
+            )
     if constants.open_keep_all:
         extra_cate = "📥其它频道"
         for method, result in total_result:
@@ -565,15 +590,20 @@ def append_total_data(
                     if name in names:
                         continue
                     print(f"{name}:", end=" ")
+                    if constants.open_use_old_result:
+                        old_info_list = channel_obj.get(name, [])
+                        if old_info_list:
+                            append_old_data_to_info_data(
+                                data, extra_cate, name, old_info_list
+                            )
                     append_data_to_info_data(
                         data, extra_cate, name, urls, origin=origin_method
                     )
                     print(name, f"{method.capitalize()}:", len(urls), end=", ")
-                    if constants.open_use_old_result:
-                        old_info_list = channel_obj.get(name, [])
-                        append_old_data_to_info_data(
-                            data, extra_cate, name, old_info_list
-                        )
+                    print(
+                        "total:",
+                        len(data.get(cate, {}).get(name, [])),
+                    )
 
 
 async def sort_channel_list(
@@ -629,7 +659,7 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
     is_ffmpeg = constants.open_ffmpeg and ffmpeg_installed
     semaphore = asyncio.Semaphore(5)
     need_sort_data = copy.deepcopy(data)
-    process_nested_dict(need_sort_data, seen=set(), flag=r"cache:(.*)")
+    process_nested_dict(need_sort_data, seen=set(), flag=r"cache:(.*)", force_str="!")
     tasks = [
         asyncio.create_task(
             sort_channel_list(
@@ -663,7 +693,18 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
             }
             for url, date, resolution, origin in info_list:
                 if "$" in url:
-                    matcher = re.search(r"cache:(.*)", url)
+                    info = url.partition("$")[2]
+                    if info and info.startswith("!"):
+                        append_data_to_info_data(
+                            sort_data,
+                            cate,
+                            name,
+                            [(url, date, resolution, origin)],
+                            check=False,
+                            insert=True,
+                        )
+                        continue
+                    matcher = re.search(r"cache:(.*)", info)
                 if matcher:
                     cache_key = matcher.group(1)
                     if not cache_key:
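In the sort pass, the two `$` suffix flavours now diverge: a `!` suffix bypasses the cache-result lookup and is re-inserted at the front of the sorted list via `insert=True`, while a `cache:` suffix still feeds the cache-key match. A self-contained illustration with invented URLs:

```python
import re

for url in ("http://x.example/a.m3u8$!", "http://x.example/b.m3u8$cache:abc123"):
    info = url.partition("$")[2]
    if info.startswith("!"):
        print(url, "-> kept as important, inserted at the front of the sorted list")
        continue
    matcher = re.search(r"cache:(.*)", info)
    if matcher:
        print(url, "-> looked up by cache key:", matcher.group(1))
```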