Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update #630

Merged
merged 9 commits into from
Dec 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ opencc-python-reimplemented = "*"
fake-useragent = "*"
pillow = "*"
yt-dlp = "*"
m3u8 = "*"

[packages]
requests = "*"
Expand All @@ -39,6 +40,7 @@ fake-useragent = "*"
gunicorn = "*"
pillow = "*"
yt-dlp = "*"
m3u8 = "*"

[requires]
python_version = "3.13"
1,060 changes: 523 additions & 537 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
- [🚀 快速上手](#快速上手)
- [📖 详细教程](./docs/tutorial.md)
- [🗓️ 更新日志](./CHANGELOG.md)
- [💰️ 赞赏](#赞赏)
- [️ 赞赏](#赞赏)
- [👀 关注](#关注)
- [📣 免责声明](#免责声明)
- [⚖️ 许可证](#许可证)
Expand Down
2 changes: 1 addition & 1 deletion README_en.md
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
- [🚀 Quick Start](#quick-start)
- [📖 Detailed Tutorial](./docs/tutorial_en.md)
- [🗓️ Changelog](./CHANGELOG.md)
- [💰️ Appreciate](#appreciate)
- [️ Appreciate](#appreciate)
- [👀 Follow](#follow)
- [📣 Disclaimer](#disclaimer)
- [⚖️ License](#license)
Expand Down
4 changes: 2 additions & 2 deletions config/config.ini
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ ipv_type_prefer = 自动
ipv4_num = 5
ipv6_num = 5
open_m3u_result = True
url_keywords_blacklist = epg.pw,skype.serv00.net,iptv.yjxfz.com,live-hls-web-ajb.getaj.net,live.goodiptv.club,hc73k3dhwo5gfkt.wcetv.com,stream1.freetv.fun,zw9999.cnstream.top
url_keywords_blacklist = epg.pw,skype.serv00.net,iptv.yjxfz.com,live-hls-web-ajb.getaj.net,live.goodiptv.club,hc73k3dhwo5gfkt.wcetv.com,stream1.freetv.fun,zw9999.cnstream.top,zsntlqj.xicp.net
open_subscribe = True
subscribe_urls = https://live.zbds.top/tv/iptv6.txt,https://live.zbds.top/tv/iptv4.txt,https://live.fanmingming.com/tv/m3u/ipv6.m3u,https://ghp.ci/https://raw.githubusercontent.com/joevess/IPTV/main/home.m3u8,https://aktv.top/live.txt,http://175.178.251.183:6689/live.txt,https://ghp.ci/https://raw.githubusercontent.com/kimwang1978/collect-tv-txt/main/merged_output.txt,https://m3u.ibert.me/txt/fmml_dv6.txt,https://m3u.ibert.me/txt/o_cn.txt,https://m3u.ibert.me/txt/j_iptv.txt,https://ghp.ci/https://raw.githubusercontent.com/xzw832/cmys/main/S_CCTV.txt,https://ghp.ci/https://raw.githubusercontent.com/xzw832/cmys/main/S_weishi.txt,http://itv.22m.top/ITVBox/tv/tvonline.txt,https://ghp.ci//https://raw.githubusercontent.com/asdjkl6/tv/tv/.m3u/整套直播源/测试/整套直播源/l.txt,https://ghp.ci//https://raw.githubusercontent.com/asdjkl6/tv/tv/.m3u/整套直播源/测试/整套直播源/kk.txt
open_multicast = True
Expand All @@ -30,7 +30,7 @@ open_multicast_fofa = True
multicast_region_list = 全部
multicast_page_num = 1
open_proxy = False
open_driver = True
open_driver = False
open_hotel = True
open_hotel_foodie = False
open_hotel_fofa = True
Expand Down
31 changes: 17 additions & 14 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,17 @@
import asyncio
from utils.config import config
import copy
import pickle
from time import time

from tqdm import tqdm

import utils.constants as constants
from service.app import run_service
from updates.fofa import get_channels_by_fofa
from updates.hotel import get_channels_by_hotel
from updates.multicast import get_channels_by_multicast
from updates.online_search import get_channels_by_online_search
from updates.subscribe import get_channels_by_subscribe_urls
from utils.channel import (
get_channel_items,
append_total_data,
Expand All @@ -10,6 +20,7 @@
get_channel_data_cache_with_compare,
format_channel_url_info,
)
from utils.config import config
from utils.tools import (
update_file,
get_pbar_remaining,
Expand All @@ -20,20 +31,12 @@
check_ipv6_support,
resource_path,
)
from updates.subscribe import get_channels_by_subscribe_urls
from updates.multicast import get_channels_by_multicast
from updates.hotel import get_channels_by_hotel
from updates.fofa import get_channels_by_fofa
from updates.online_search import get_channels_by_online_search
from tqdm import tqdm
from time import time
import pickle
import copy


class UpdateSource:

def __init__(self):
self.update_progress = None
self.run_ui = False
self.tasks = []
self.channel_items = {}
Expand Down Expand Up @@ -62,7 +65,7 @@ async def visit_page(self, channel_names=None):

for setting, task_func, result_attr in tasks_config:
if (
setting == "hotel_foodie" or setting == "hotel_fofa"
setting == "hotel_foodie" or setting == "hotel_fofa"
) and config.open_hotel == False:
continue
if config.open_method[setting]:
Expand Down Expand Up @@ -160,17 +163,17 @@ async def main(self):
channel_data_cache, self.channel_data
)
with open(
resource_path(constants.cache_path, persistent=True),
"wb",
resource_path(constants.cache_path, persistent=True),
"wb",
) as file:
pickle.dump(channel_data_cache, file)
convert_to_m3u()
total_time = format_interval(time() - main_start_time)
print(
f"🥳 Update completed! Total time spent: {total_time}. Please check the {user_final_file} file!"
)
open_service = config.open_service
if self.run_ui:
open_service = config.open_service
service_tip = ", 可使用以下链接观看直播:" if open_service else ""
tip = (
f"✅ 服务启动成功{service_tip}"
Expand Down
Binary file modified updates/fofa/fofa_hotel_region_result.pkl
Binary file not shown.
74 changes: 38 additions & 36 deletions utils/channel.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,22 @@
from utils.config import config
import asyncio
import base64
import copy
import datetime
import os
import pickle
import re
from collections import defaultdict
from logging import INFO

from bs4 import NavigableString
from opencc import OpenCC

import utils.constants as constants
from utils.config import config
from utils.speed import (
get_speed,
sort_urls_by_speed_and_resolution,
)
from utils.tools import (
check_url_by_patterns,
get_total_urls_from_info_list,
Expand All @@ -10,21 +27,6 @@
write_content_into_txt,
get_logger,
)
from utils.speed import (
get_speed,
sort_urls_by_speed_and_resolution,
)
import os
from collections import defaultdict
import re
from bs4 import NavigableString
from opencc import OpenCC
import base64
import pickle
import copy
import datetime
import asyncio
from logging import INFO


def get_name_url(content, pattern, multiline=False, check_url=True):
Expand Down Expand Up @@ -207,9 +209,9 @@ def get_channel_multicast_region_type_list(result):
for region_type in result.values()
for region, types in region_type.items()
if "all" in region_list
or "ALL" in region_list
or "全部" in region_list
or region in region_list
or "ALL" in region_list
or "全部" in region_list
or region in region_list
for type in types
}
return list(region_type_list)
Expand Down Expand Up @@ -449,9 +451,9 @@ def append_data_to_info_data(info_data, cate, name, data, origin=None, check=Tru
if pure_url in urls:
continue
if (
url_origin == "important"
or (not check)
or (check and check_url_by_patterns(pure_url))
url_origin == "important"
or (not check)
or (check and check_url_by_patterns(pure_url))
):
info_data[cate][name].append((url, date, resolution, url_origin))
urls.append(pure_url)
Expand Down Expand Up @@ -480,14 +482,14 @@ def append_old_data_to_info_data(info_data, cate, name, data):


def append_total_data(
items,
names,
data,
hotel_fofa_result=None,
multicast_result=None,
hotel_foodie_result=None,
subscribe_result=None,
online_search_result=None,
items,
names,
data,
hotel_fofa_result=None,
multicast_result=None,
hotel_foodie_result=None,
subscribe_result=None,
online_search_result=None,
):
"""
Append all method data to total info data
Expand Down Expand Up @@ -547,7 +549,7 @@ def append_total_data(

async def process_sort_channel_list(data, ipv6=False, callback=None):
"""
Processs the sort channel list
Process the sort channel list
"""
ipv6_proxy = None if (not config.open_ipv6 or ipv6) else constants.ipv6_proxy
need_sort_data = copy.deepcopy(data)
Expand Down Expand Up @@ -640,7 +642,7 @@ def get_multicast_fofa_search_org(region, type):
elif type == "电信":
org = "Chinanet"
elif type == "移动":
org == "China Mobile communications corporation"
org = "China Mobile communications corporation"
return org


Expand All @@ -658,14 +660,14 @@ def get_multicast_fofa_search_urls():
(parts[0], parts[1])
for name in rtp_file_names
if (parts := name.partition("_"))[0] in region_list
or "all" in region_list
or "ALL" in region_list
or "全部" in region_list
or "all" in region_list
or "ALL" in region_list
or "全部" in region_list
]
search_urls = []
for region, type in region_type_list:
search_url = "https://fofa.info/result?qbase64="
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{get_multicast_fofa_search_org(region,type)}"'
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{get_multicast_fofa_search_org(region, type)}"'
bytes_string = search_txt.encode("utf-8")
search_txt = base64.b64encode(bytes_string).decode("utf-8")
search_url += search_txt
Expand Down
62 changes: 61 additions & 1 deletion utils/speed.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
import re
import subprocess
from time import time
from urllib.parse import quote

import m3u8
import yt_dlp
from aiohttp import ClientSession, TCPConnector

Expand All @@ -13,6 +15,62 @@
logger = get_logger(constants.log_path)


async def get_speed_with_download(url, timeout=config.sort_timeout):
    """
    Measure the average download speed of a stream URL, in KB/s.

    Streams response chunks until the request completes or *timeout*
    (seconds) elapses, then returns total bytes downloaded divided by
    elapsed wall-clock time.

    :param url: the stream URL to probe
    :param timeout: total time budget in seconds (aiohttp request timeout)
    :return: average speed in KB/s; 0 if nothing was downloaded or on error
    """
    start_time = time()
    total_size = 0
    try:
        async with ClientSession(
            connector=TCPConnector(ssl=False), trust_env=True
        ) as session:
            async with session.get(url, timeout=timeout) as response:
                async for chunk in response.content.iter_any():
                    if chunk:
                        total_size += len(chunk)
    except Exception:
        # Best-effort speed probe: any network/timeout error simply
        # results in a 0 (or partial) speed instead of propagating.
        pass
    total_time = time() - start_time
    return (total_size / total_time if total_time > 0 else 0) / 1024


async def get_speed_m3u8(url, timeout=config.sort_timeout):
    """
    Measure the average download speed of an m3u8 stream, in KB/s.

    Loads the playlist, then downloads segments one by one until the
    total *timeout* (seconds) is exceeded, and returns total bytes
    divided by the accumulated per-segment download time.

    :param url: the m3u8 playlist URL to probe
    :param timeout: total time budget in seconds
    :return: average speed in KB/s; 0 if nothing was downloaded or on error
    """
    start_time = time()
    total_size = 0
    total_time = 0
    try:
        # Percent-encode characters the playlist loader may reject,
        # while keeping URL structure characters intact.
        url = quote(url, safe=':/?$&=@')
        # NOTE(review): m3u8.load performs blocking I/O inside a
        # coroutine and stalls the event loop while fetching the
        # playlist — consider running it in an executor.
        m3u8_obj = m3u8.load(url)
        async with ClientSession(
            connector=TCPConnector(ssl=False), trust_env=True
        ) as session:
            for segment in m3u8_obj.segments:
                if time() - start_time > timeout:
                    break
                ts_url = segment.absolute_uri
                # Fix: time each segment's own download. The old code
                # measured elapsed time since the function started, so
                # every segment re-counted all earlier segments' time
                # and the reported speed was increasingly understated.
                segment_start = time()
                file_size = 0
                async with session.get(ts_url, timeout=timeout) as response:
                    async for chunk in response.content.iter_any():
                        if chunk:
                            file_size += len(chunk)
                total_size += file_size
                total_time += time() - segment_start
    except Exception:
        # Best-effort speed probe: swallow errors and report what we got.
        pass
    return (total_size / total_time if total_time > 0 else 0) / 1024


def get_info_yt_dlp(url, timeout=config.sort_timeout):
"""
Get the url info by yt_dlp
Expand Down Expand Up @@ -54,7 +112,7 @@ async def get_speed_requests(url, timeout=config.sort_timeout, proxy=None):
Get the speed of the url by requests
"""
async with ClientSession(
connector=TCPConnector(verify_ssl=False), trust_env=True
connector=TCPConnector(ssl=False), trust_env=True
) as session:
start = time()
end = None
Expand Down Expand Up @@ -171,6 +229,8 @@ async def get_speed(url, ipv6_proxy=None, callback=None):
return speed_cache[cache_key][0]
if ipv6_proxy and url_is_ipv6:
speed = (0, None)
# elif '.m3u8' in url:
# speed = await get_speed_m3u8(url)
else:
speed = await get_speed_yt_dlp(url)
if cache_key and cache_key not in speed_cache:
Expand Down