From 806a20f774eeca535c758233648110c9152b422e Mon Sep 17 00:00:00 2001 From: hjdhnx Date: Fri, 16 Sep 2022 21:57:41 +0800 Subject: [PATCH] =?UTF-8?q?=E7=89=88=E6=9C=AC=E5=8D=87=E7=BA=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- base/rules.db | Bin 40960 -> 40960 bytes controllers/vod.py | 157 +++++++++++++++++++++++++++++---------------- js/version.txt | 2 +- readme.md | 1 + 4 files changed, 104 insertions(+), 56 deletions(-) diff --git a/base/rules.db b/base/rules.db index da78990916ac72782681b9644563ccab85fcade2..3380ecfd2188f6a8cbef7cb938bfb791ec9409d0 100644 GIT binary patch delta 59 zcmZoTz|?SnX@V3JBf~@)Cm^{op-)&L$=uT1($px`)XY5D(8xGBIn5*~)zru|#XQy6 N+}y-`GoQ$(ZUE+~5WoNc delta 59 zcmZoTz|?SnX@V3J!_SE_PC#;FLZ7gLsd 0: + res.extend(vod['list']) + result = { + 'list': res + } + logger.info(f'drpy聚搜{len(search_sites)}个源耗时{get_interval(t2)}毫秒,含准备共计耗时{get_interval(t1)}毫秒') + except Exception as e: + result = { + 'list': [] + } + logger.info(f'drpy聚搜{len(search_sites)}个源耗时{get_interval(t2)}毫秒,含准备共计耗时:{get_interval(t1)}毫秒,发生错误:{e}') + return jsonify(result) + - def multi_search(wd): - lsg = storage_service() - t1 = time() +def multi_search(wd): + lsg = storage_service() + t1 = time() + try: + timeout = round(int(lsg.getItem('SEARCH_TIMEOUT',5000))/1000,2) + except: + timeout = 5 + rules = getRules('js')['list'] + rule_names = list(map(lambda x:x['name'],rules)) + rules_exclude = ['drpy'] + new_rules = list(filter(lambda x: x.get('searchable', 0) and x.get('name', '') not in rules_exclude, rules)) + search_sites = [new_rule['name'] for new_rule in new_rules] + nosearch_sites = set(rule_names) ^ set(search_sites) + nosearch_sites.remove('drpy') + # print(nosearch_sites) + logger.info(f'开始聚搜{wd},共计{len(search_sites)}个规则,聚搜超时{timeout}秒') + logger.info(f'不支持聚搜的规则,共计{len(nosearch_sites)}个规则:{",".join(nosearch_sites)}') + # print(search_sites) + res = [] + with open('js/模板.js', encoding='utf-8') as f: + before = f.read() + with ThreadPoolExecutor(max_workers=len(search_sites)) as executor: + to_do = [] + for site in search_sites: + future = executor.submit(search_one, site, wd, before) + to_do.append(future) try: - timeout = round(int(lsg.getItem('SEARCH_TIMEOUT',5000))/1000,2) - except: - timeout = 5 - rules = getRules('js')['list'] - rule_names = list(map(lambda x:x['name'],rules)) - rules_exclude = ['drpy'] - new_rules = list(filter(lambda x: x.get('searchable', 0) and x.get('name', '') not in rules_exclude, rules)) - search_sites = [new_rule['name'] for new_rule in new_rules] - nosearch_sites = set(rule_names) ^ set(search_sites) - nosearch_sites.remove('drpy') - # print(nosearch_sites) - logger.info(f'开始聚搜{wd},共计{len(search_sites)}个规则,聚搜超时{timeout}秒') - logger.info(f'不支持聚搜的规则,共计{len(nosearch_sites)}个规则:{",".join(nosearch_sites)}') - # print(search_sites) - res = [] - with open('js/模板.js', encoding='utf-8') as f: - before = f.read() - with ThreadPoolExecutor(max_workers=len(search_sites)) as executor: - to_do = [] - for site in search_sites: - future = executor.submit(search_one, site, wd, before) - to_do.append(future) - try: - for future in as_completed(to_do, timeout=timeout): # 并发执行 - ret = future.result() - # print(ret) - if ret and isinstance(ret,dict) and ret.get('list'): - res.extend(ret['list']) - except Exception as e: - print(f'发生错误:{e}') - import atexit - atexit.unregister(thread._python_exit) - executor.shutdown = lambda wait: None - logger.info(f'drpy聚搜{len(search_sites)}个源共计耗时{get_interval(t1)}毫秒') - return jsonify({ - "list": 
res
-        })
+            for future in as_completed(to_do, timeout=timeout):  # 并发执行
+                ret = future.result()
+                # print(ret)
+                if ret and isinstance(ret,dict) and ret.get('list'):
+                    res.extend(ret['list'])
+        except Exception as e:
+            print(f'发生错误:{e}')
+            import atexit
+            atexit.unregister(thread._python_exit)
+            executor.shutdown = lambda wait: None
+    logger.info(f'drpy聚搜{len(search_sites)}个源共计耗时{get_interval(t1)}毫秒')
+    return jsonify({
+        "list": res
+    })
+@vod.route('/vod')
+def vod_home():
     t0 = time()
     rule = getParmas('rule')
     ac = getParmas('ac')
@@ -187,6 +233,7 @@ def vod_home():
             if rule == 'drpy':
                 # print(f'准备单独处理聚合搜索:{wd}')
                 return multi_search(wd)
+                # return multi_search2(wd)
             else:
                 data = cms.searchContent(wd)
                 # print(data)
diff --git a/js/version.txt b/js/version.txt
index 6cbdc99..e845c63 100644
--- a/js/version.txt
+++ b/js/version.txt
@@ -1 +1 @@
-3.6.9
\ No newline at end of file
+3.6.10
\ No newline at end of file
diff --git a/readme.md b/readme.md
index 18aef8a..d222f0f 100644
--- a/readme.md
+++ b/readme.md
@@ -55,6 +55,7 @@
 - [X] 6.增加豆瓣首页插件以及详情评分功能,可以custom自定义
 - [X] 7.增加嗅探配置,可自定义
 - [X] 8.版本升级 至3.6.9
+- [X] 9.版本升级 至3.7.10,优化drpy搜索必定成功
 ###### 2022/09/15
 - [X] 1.修复生成配置文件中静态文件链接对应的配置文本爬虫地址渲染异常问题
 - [X] 2.删除custom里的xb、xp源
-- 
GitLab
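
What the vod.py hunk above implements is a timeout-bounded aggregate search: the keyword is fanned out to every searchable rule on a ThreadPoolExecutor, the route waits only as long as the configurable `SEARCH_TIMEOUT` (milliseconds, default 5000, converted to seconds), and whatever has arrived by then is merged and returned while slow or failing sites are abandoned — which is what the readme entry "优化drpy搜索必定成功" (make drpy aggregate search always succeed) refers to. The sketch below is a minimal, self-contained reconstruction of that pattern, not the repository's code: `aggregate_search` and its `search_one(site, keyword)` callable are hypothetical stand-ins for the per-rule drpy `searchContent` call.

```python
from concurrent.futures import ThreadPoolExecutor, as_completed


def aggregate_search(sites, keyword, search_one, timeout=5.0):
    """Merge whatever each site returns within `timeout` seconds; never block longer."""
    results = []
    executor = ThreadPoolExecutor(max_workers=max(len(sites), 1))
    futures = [executor.submit(search_one, site, keyword) for site in sites]
    try:
        for future in as_completed(futures, timeout=timeout):
            try:
                ret = future.result()
            except Exception:
                continue  # one failing site must not spoil the aggregate
            if isinstance(ret, dict) and ret.get('list'):
                results.extend(ret['list'])
    except Exception:
        pass  # as_completed timed out: keep the partial results gathered so far
    finally:
        # Return immediately instead of joining the stragglers; the patch goes further
        # and also unregisters concurrent.futures.thread._python_exit so abandoned
        # worker threads cannot delay interpreter shutdown.
        executor.shutdown(wait=False)
    return {'list': results}
```

The design trade-off is partial results over completeness: the /vod search route answers within the timeout even when individual drpy sources hang, and the new `multi_search2` additionally wraps the whole procedure in try/except so the endpoint degrades to `{'list': []}` instead of raising.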