Skip to content

Commit

Permalink
Merge pull request #732 from Guovin/master
Browse files Browse the repository at this point in the history
feat update
  • Loading branch information
Guovin authored Dec 24, 2024
2 parents e494555 + 7be54e0 commit 9cb932a
Show file tree
Hide file tree
Showing 5 changed files with 94 additions and 56 deletions.
17 changes: 3 additions & 14 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
timeout-minutes: 120
strategy:
matrix:
operating-system: ['ubuntu-20.04']
operating-system: [ 'ubuntu-20.04' ]
steps:
- name: Set branch name
id: vars
Expand All @@ -39,16 +39,6 @@ jobs:
except:
open_driver = False
print(open_driver)')" >> $GITHUB_ENV
# - name: Check open_ffmpeg config
# id: check_ffmpeg
# run: |
# echo "OPEN_FFMPEG=$(python -c '
# try:
# from utils.config import config
# open_ffmpeg = config.open_ffmpeg
# except:
# open_ffmpeg = False
# print(open_ffmpeg)')" >> $GITHUB_ENV
- name: Set up Chrome
if: env.OPEN_DRIVER == 'True'
uses: browser-actions/setup-chrome@latest
Expand All @@ -57,9 +47,8 @@ jobs:
- name: Download chrome driver
if: env.OPEN_DRIVER == 'True'
uses: nanasess/setup-chromedriver@master
# - name: Install FFmpeg
# if: env.OPEN_FFMPEG == 'True'
# run: sudo apt-get update && sudo apt-get install -y ffmpeg
- name: Install FFmpeg
run: sudo apt-get update && sudo apt-get install -y ffmpeg
- name: Install pipenv
run: pip3 install --user pipenv
- name: Install dependencies
Expand Down
8 changes: 4 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
- ❤️ 推荐关注微信公众号(Govin),订阅更新通知与使用技巧等文章推送,还可进行答疑和交流讨论
- ⚠️ 本次更新涉及配置变更,以最新 `config/config.ini` 为准,工作流用户需复制最新配置至`user_config.ini`
,Docker用户需清除主机挂载的旧配置
- ✨ 新增补偿机制模式(open_supply),用于控制是否开启补偿机制,当满足条件的结果数量不足时,将可能可用的接口补充到结果中
- ✨ 新增支持通过配置修改服务端口(app_port)
- ✨ 新增补偿机制模式(`open_supply`),用于控制是否开启补偿机制,当满足条件的结果数量不足时,将可能可用的接口补充到结果中
- ✨ 新增支持通过配置修改服务端口(`app_port`)
- ✨ 新增ghgo.xyz CDN代理加速
- ✨ config.ini配置文件新增注释说明(#704)
- ✨ 更新酒店源与组播源离线数据
Expand All @@ -27,10 +27,10 @@
- ⚠️ This update involves configuration changes. Refer to the latest `config/config.ini`. Workflow users need to copy
the latest configuration to `user_config.ini`, and Docker users need to clear the old configuration mounted on the
host.
- ✨ Added compensation mechanism mode (open_supply) to control whether to enable the compensation mechanism. When the
- ✨ Added compensation mechanism mode (`open_supply`) to control whether to enable the compensation mechanism. When the
number of results meeting the conditions is insufficient, potentially available interfaces will be supplemented into
the results.
- ✨ Added support for modifying the server port through configuration (app_port).
- ✨ Added support for modifying the server port through configuration (`app_port`).
- ✨ Added ghgo.xyz CDN proxy acceleration.
- ✨ Added comments to the config.ini configuration file (#704).
- ✨ Updated offline data for hotel sources and multicast sources.
Expand Down
18 changes: 15 additions & 3 deletions utils/channel.py
Original file line number Diff line number Diff line change
Expand Up @@ -571,20 +571,25 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
Process the sort channel list
"""
ipv6_proxy = None if (not config.open_ipv6 or ipv6) else constants.ipv6_proxy
open_filter_resolution = config.open_filter_resolution
sort_timeout = config.sort_timeout
need_sort_data = copy.deepcopy(data)
process_nested_dict(need_sort_data, seen=set(), flag=r"cache:(.*)", force_str="!")
result = {}
semaphore = asyncio.Semaphore(5)

async def limited_get_speed(info, ipv6_proxy, callback):
async def limited_get_speed(info, ipv6_proxy, filter_resolution, timeout, callback):
async with semaphore:
return await get_speed(info[0], ipv6_proxy=ipv6_proxy, callback=callback)
return await get_speed(info[0], ipv6_proxy=ipv6_proxy, filter_resolution=filter_resolution, timeout=timeout,
callback=callback)

tasks = [
asyncio.create_task(
limited_get_speed(
info,
ipv6_proxy=ipv6_proxy,
filter_resolution=open_filter_resolution,
timeout=sort_timeout,
callback=callback,
)
)
Expand All @@ -594,9 +599,16 @@ async def limited_get_speed(info, ipv6_proxy, callback):
]
await asyncio.gather(*tasks)
logger = get_logger(constants.sort_log_path, level=INFO, init=True)
open_supply = config.open_supply
open_filter_speed = config.open_filter_speed
open_filter_resolution = config.open_filter_resolution
min_speed = config.min_speed
min_resolution = config.min_resolution
for cate, obj in data.items():
for name, info_list in obj.items():
info_list = sort_urls(name, info_list, logger=logger)
info_list = sort_urls(name, info_list, supply=open_supply, filter_speed=open_filter_speed,
min_speed=min_speed, filter_resolution=open_filter_resolution,
min_resolution=min_resolution, logger=logger)
append_data_to_info_data(
result,
cate,
Expand Down
85 changes: 62 additions & 23 deletions utils/speed.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio
import json
import re
import subprocess
from time import time
Expand All @@ -9,7 +10,7 @@
from multidict import CIMultiDictProxy

from utils.config import config
from utils.tools import is_ipv6, remove_cache_info
from utils.tools import is_ipv6, remove_cache_info, get_resolution_value


async def get_speed_with_download(url: str, session: ClientSession = None, timeout: int = config.sort_timeout) -> dict[
Expand All @@ -29,20 +30,20 @@ async def get_speed_with_download(url: str, session: ClientSession = None, timeo
try:
async with session.get(url, timeout=timeout) as response:
if response.status == 404:
return info
raise Exception("404")
info['delay'] = int(round((time() - start_time) * 1000))
async for chunk in response.content.iter_any():
if chunk:
total_size += len(chunk)
except Exception as e:
pass
finally:
end_time = time()
total_time += end_time - start_time
info['speed'] = (total_size / total_time if total_time > 0 else 0) / 1024 / 1024
if created_session:
await session.close()
end_time = time()
total_time += end_time - start_time
info['speed'] = (total_size / total_time if total_time > 0 else 0) / 1024 / 1024
return info
return info


async def get_m3u8_headers(url: str, session: ClientSession = None, timeout: int = 5) -> CIMultiDictProxy[str] | dict[
Expand All @@ -55,15 +56,16 @@ async def get_m3u8_headers(url: str, session: ClientSession = None, timeout: int
created_session = True
else:
created_session = False
headers = {}
try:
async with session.head(url, timeout=timeout) as response:
return response.headers
headers = response.headers
except:
pass
finally:
if created_session:
await session.close()
return {}
return headers


def check_m3u8_valid(headers: CIMultiDictProxy[str] | dict[any, any]) -> bool:
Expand All @@ -78,19 +80,20 @@ def check_m3u8_valid(headers: CIMultiDictProxy[str] | dict[any, any]) -> bool:
return False


async def get_speed_m3u8(url: str, timeout: int = config.sort_timeout) -> dict[str, float | None]:
async def get_speed_m3u8(url: str, filter_resolution: bool = config.open_filter_resolution,
timeout: int = config.sort_timeout) -> dict[str, float | None]:
"""
Get the speed of the m3u8 url with a total timeout
"""
info = {'speed': None, 'delay': None}
info = {'speed': None, 'delay': None, 'resolution': None}
try:
url = quote(url, safe=':/?$&=@[]').partition('$')[0]
async with ClientSession(connector=TCPConnector(ssl=False), trust_env=True) as session:
headers = await get_m3u8_headers(url, session)
if check_m3u8_valid(headers):
location = headers.get('Location')
if location:
info.update(await get_speed_m3u8(location, timeout))
info.update(await get_speed_m3u8(location, filter_resolution, timeout))
else:
m3u8_obj = m3u8.load(url, timeout=2)
playlists = m3u8_obj.data.get('playlists')
Expand All @@ -102,11 +105,11 @@ async def get_speed_m3u8(url: str, timeout: int = config.sort_timeout) -> dict[s
if not check_m3u8_valid(uri_headers):
if uri_headers.get('Content-Length'):
info.update(await get_speed_with_download(url, session, timeout))
return info
raise Exception("Invalid m3u8")
m3u8_obj = m3u8.load(url, timeout=2)
segments = m3u8_obj.segments
if not segments:
return info
raise Exception("Segments not found")
ts_urls = [segment.absolute_uri for segment in segments]
speed_list = []
start_time = time()
Expand All @@ -118,13 +121,15 @@ async def get_speed_m3u8(url: str, timeout: int = config.sort_timeout) -> dict[s
if info['delay'] is None and download_info['delay'] is not None:
info['delay'] = download_info['delay']
info['speed'] = sum(speed_list) / len(speed_list) if speed_list else 0
url = ts_urls[0]
elif headers.get('Content-Length'):
info.update(await get_speed_with_download(url, session, timeout))
else:
return info
except:
pass
return info
finally:
if filter_resolution:
info['resolution'] = await get_resolution_ffprobe(url, timeout)
return info


async def get_delay_requests(url, timeout=config.sort_timeout, proxy=None):
Expand Down Expand Up @@ -194,6 +199,33 @@ async def ffmpeg_url(url, timeout=config.sort_timeout):
return res


async def get_resolution_ffprobe(url: str, timeout: int = config.sort_timeout) -> str | None:
    """
    Probe the stream at `url` with ffprobe and return its video resolution.

    Args:
        url: Media URL to probe.
        timeout: Maximum seconds to wait for ffprobe to finish.

    Returns:
        A "WIDTHxHEIGHT" string (e.g. "1920x1080"), or None when ffprobe is
        unavailable, fails, times out, or reports no stream with dimensions.
    """
    resolution = None
    proc = None
    try:
        probe_args = ["ffprobe", "-show_format", "-show_streams", "-of", "json", url]
        proc = await asyncio.create_subprocess_exec(
            *probe_args,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        out, _ = await asyncio.wait_for(proc.communicate(), timeout)
        if proc.returncode != 0:
            raise Exception("FFprobe failed")
        streams = json.loads(out.decode("utf-8")).get("streams", [])
        # The first stream is not necessarily the video stream (it may be
        # audio or data); pick the first one that actually has dimensions.
        video_stream = next(
            (s for s in streams if s.get("width") and s.get("height")),
            None,
        )
        if video_stream:
            resolution = f"{int(video_stream['width'])}x{int(video_stream['height'])}"
    except Exception:
        # Best-effort probe: any failure (missing binary, timeout, bad JSON,
        # no video stream) simply yields None.
        pass
    finally:
        if proc:
            if proc.returncode is None:
                # Still running (e.g. wait_for timed out): terminate it so
                # the reap below cannot hang.
                proc.kill()
            # Reap the child to avoid leaving a zombie process.
            await proc.wait()
    return resolution


def get_video_info(video_info):
"""
Get the video info
Expand Down Expand Up @@ -233,7 +265,8 @@ async def check_stream_delay(url_info):
cache = {}


async def get_speed(url, ipv6_proxy=None, callback=None):
async def get_speed(url, ipv6_proxy=None, filter_resolution=config.open_filter_resolution, timeout=config.sort_timeout,
callback=None):
"""
Get the speed (response time and resolution) of the url
"""
Expand All @@ -251,8 +284,9 @@ async def get_speed(url, ipv6_proxy=None, callback=None):
if ipv6_proxy and url_is_ipv6:
data['speed'] = float("inf")
data['delay'] = float("-inf")
data['resolution'] = "1920x1080"
else:
data.update(await get_speed_m3u8(url))
data.update(await get_speed_m3u8(url, filter_resolution, timeout))
if cache_key and cache_key not in cache:
cache[cache_key] = data
return data
Expand All @@ -263,7 +297,9 @@ async def get_speed(url, ipv6_proxy=None, callback=None):
callback()


def sort_urls(name, data, logger=None):
def sort_urls(name, data, supply=config.open_supply, filter_speed=config.open_filter_speed, min_speed=config.min_speed,
filter_resolution=config.open_filter_resolution, min_resolution=config.min_resolution,
logger=None):
"""
Sort the urls with info
"""
Expand Down Expand Up @@ -295,20 +331,23 @@ def sort_urls(name, data, logger=None):
)
except Exception as e:
print(e)
if (not config.open_supply and config.open_filter_speed and speed < config.min_speed) or (
config.open_supply and delay is None):
if (not supply and filter_speed and speed < min_speed) or (
not supply and filter_resolution and get_resolution_value(resolution) < min_resolution) or (
supply and delay is None):
continue
result["delay"] = delay
result["speed"] = speed
result["resolution"] = resolution
filter_data.append(result)

def combined_key(item):
speed, origin = item["speed"], item["origin"]
speed, resolution, origin = item["speed"], item["resolution"], item["origin"]
if origin == "whitelist":
return float("inf")
else:
return speed if speed is not None else float("-inf")
speed = speed if speed is not None else float("-inf")
resolution = get_resolution_value(resolution)
return speed + resolution

filter_data.sort(key=combined_key, reverse=True)
return [
Expand Down
22 changes: 10 additions & 12 deletions utils/tools.py
Original file line number Diff line number Diff line change
def get_resolution_value(resolution_str):
    """
    Parse a resolution string such as "1920x1080" (the separator may be
    "x", "X" or "*") and return width * height as an int.

    Args:
        resolution_str: The resolution text to parse; None or non-string
            values are tolerated.

    Returns:
        width * height when a resolution pattern is found, otherwise 0.
    """
    # Explicit guards instead of a bare `except: pass`: a bare except hides
    # real bugs and also catches KeyboardInterrupt/SystemExit.
    if not resolution_str or not isinstance(resolution_str, str):
        return 0
    match = re.search(r"(\d+)[xX*](\d+)", resolution_str)
    if not match:
        return 0
    width, height = map(int, match.groups())
    return width * height


def get_total_urls(info_list, ipv_type_prefer, origin_type_prefer):
Expand Down Expand Up @@ -169,11 +172,6 @@ def get_total_urls(info_list, ipv_type_prefer, origin_type_prefer):
if origin_prefer_bool and (origin not in origin_type_prefer):
continue

if config.open_filter_resolution and resolution:
resolution_value = get_resolution_value(resolution)
if resolution_value < config.min_resolution_value:
continue

pure_url, _, info = url.partition("$")
if not info:
origin_name = constants.origin_map[origin]
Expand Down

0 comments on commit 9cb932a

Please sign in to comment.