feat: sort_timeout and request_timeout (#388)
Guovin committed Oct 17, 2024
1 parent 827108c commit 4be7f4f
Showing 8 changed files with 85 additions and 76 deletions.
2 changes: 2 additions & 0 deletions config/config.ini
@@ -8,6 +8,7 @@ online_search_page_num = 3
 urls_limit = 30
 open_keep_all = False
 open_sort = True
+sort_timeout = 5
 open_ffmpeg = True
 open_filter_resolution = True
 min_resolution = 1920x1080
@@ -33,3 +34,4 @@ open_hotel_fofa = True
 hotel_region_list = 全部
 hotel_page_num = 3
 open_update_time = True
+request_timeout = 10
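
The two new keys sit alongside the existing options in config/config.ini and are read through the project's config helper with a hard-coded fallback (config.getint(...) or default), as the code diffs below show. A minimal sketch of that pattern using the standard-library configparser, assuming a [Settings] section as referenced in the changed files; the project's own wrapper in utils/config.py may behave differently:

import configparser

config = configparser.ConfigParser()
config.read("config/config.ini", encoding="utf-8")

# fallback=0 keeps getint() from raising when a key is missing, so the
# trailing "or <default>" supplies the same defaults the commit uses.
sort_timeout = config.getint("Settings", "sort_timeout", fallback=0) or 5
request_timeout = config.getint("Settings", "request_timeout", fallback=0) or 10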
72 changes: 37 additions & 35 deletions docs/config.md

Large diffs are not rendered by default.

72 changes: 37 additions & 35 deletions docs/config_en.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion updates/fofa/request.py
@@ -15,7 +15,7 @@
 import pickle
 import threading
 
-timeout = 10
+timeout = config.getint("Settings", "request_timeout") or 10
 
 
 def get_fofa_urls_from_region_list():
6 changes: 4 additions & 2 deletions updates/online_search/request.py
@@ -25,6 +25,8 @@
 from concurrent.futures import ThreadPoolExecutor
 from requests_custom.utils import get_soup_requests, close_session
 
+timeout = config.getint("Settings", "request_timeout") or 10
+
 
 async def use_accessible_url(callback):
     """
@@ -33,8 +35,8 @@ async def use_accessible_url(callback):
     callback(f"正在获取最优的在线检索节点", 0)
     baseUrl1 = "https://www.foodieguide.com/iptvsearch/"
     baseUrl2 = "http://tonkiang.us/"
-    task1 = create_task(get_speed(baseUrl1, timeout=30))
-    task2 = create_task(get_speed(baseUrl2, timeout=30))
+    task1 = create_task(get_speed(baseUrl1, timeout=timeout))
+    task2 = create_task(get_speed(baseUrl2, timeout=timeout))
     task_results = await gather(task1, task2)
     callback(f"获取在线检索节点完成", 100)
     if task_results[0] == float("inf") and task_results[1] == float("inf"):
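
The hard-coded timeout=30 on the two probe tasks is replaced by the configurable request_timeout. An illustrative sketch of the probe-and-pick pattern this function uses, generalized to a list of candidate URLs; get_speed, the float("inf") failure marker, and the two base URLs come from the diff, while pick_fastest and its selection logic are assumptions, not the hidden remainder of use_accessible_url:

from asyncio import create_task, gather

async def pick_fastest(get_speed, base_urls, timeout=10):
    # Probe every candidate concurrently, each bounded by request_timeout.
    tasks = [create_task(get_speed(url, timeout=timeout)) for url in base_urls]
    results = await gather(*tasks)
    # get_speed is assumed to return float("inf") on failure, matching the
    # check on task_results in the diff above.
    speed, url = min(zip(results, base_urls))
    return None if speed == float("inf") else url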
2 changes: 1 addition & 1 deletion updates/subscribe/request.py
@@ -9,7 +9,7 @@
 from concurrent.futures import ThreadPoolExecutor
 from collections import defaultdict
 
-timeout = 10
+timeout = config.getint("Settings", "request_timeout") or 10
 
 
 async def get_channels_by_subscribe_urls(
3 changes: 2 additions & 1 deletion utils/retry.py
@@ -2,9 +2,10 @@
 from selenium.webdriver.support.ui import WebDriverWait
 from selenium.webdriver.support import expected_conditions as EC
 from selenium.common.exceptions import TimeoutException
+from utils.config import config
 
 max_retries = 2
-timeout = 10
+timeout = config.getint("Settings", "request_timeout") or 10
 
 
 def retry_func(func, retries=max_retries, name=""):
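
retry.py now imports the shared config object so its module-level timeout follows request_timeout as well. A hypothetical sketch of a retry wrapper with the signature shown in the diff (retry_func(func, retries=max_retries, name="")); the body below is illustrative only, not the repository's actual implementation:

from selenium.common.exceptions import TimeoutException

max_retries = 2

def retry_func(func, retries=max_retries, name=""):
    # Call func up to retries + 1 times, swallowing timeouts between attempts.
    for attempt in range(retries + 1):
        try:
            return func()
        except TimeoutException:
            if attempt == retries:
                print(f"{name}: failed after {retries} retries")
                raise
            print(f"{name}: timed out, retrying ({attempt + 1}/{retries})")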
2 changes: 1 addition & 1 deletion utils/speed.py
@@ -6,7 +6,7 @@
 from utils.tools import is_ipv6, get_resolution_value
 import subprocess
 
-timeout = 5
+timeout = config.getint("Settings", "sort_timeout") or 5
 
 
 async def get_speed(url, timeout=timeout, proxy=None):
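
get_speed's default timeout now comes from sort_timeout instead of a fixed 5 seconds. A minimal sketch (an assumption, not the project's implementation) of a probe shaped like get_speed(url, timeout=timeout, proxy=None): it returns the elapsed time in milliseconds, or float("inf") when the request fails or exceeds the timeout, which matches how use_accessible_url interprets the result:

import time
import asyncio
import aiohttp

async def get_speed_sketch(url, timeout=5, proxy=None):
    start = time.time()
    try:
        async with aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=timeout)
        ) as session:
            async with session.get(url, proxy=proxy) as response:
                await response.read()
        return (time.time() - start) * 1000
    except Exception:
        return float("inf")

# e.g. asyncio.run(get_speed_sketch("http://tonkiang.us/", timeout=5))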
