fix: resolve resource loading issue

qwerdvd 2023-12-04 12:31:14 +08:00
parent 544b7a2bb8
commit 43ee2e2d75
No known key found for this signature in database
GPG Key ID: A3AF89C783404769
8 changed files with 33 additions and 528 deletions

View File

@@ -5,9 +5,11 @@ from gsuid_core.bot import Bot
 from gsuid_core.data_store import get_res_path
 from gsuid_core.logger import logger
 from gsuid_core.models import Event
+from gsuid_core.plugins.ArknightsUID.ArknightsUID.utils.resource.download_all_resource import (
+    download_all_resource,
+)
 from gsuid_core.sv import SV
-from ..utils.resource.download_from_cos import check_use
 from .constants import Excel
 from .memoryStore import store
@@ -17,13 +19,13 @@ sv_download_config = SV("下载资源", pm=2)
 @sv_download_config.on_fullmatch(("ark下载全部资源"))  # noqa: UP034
 async def send_download_resource_msg(bot: Bot, ev: Event):
     await bot.send("正在开始下载~可能需要较久的时间!")
-    im = await check_use()
+    im = await download_all_resource()
     await bot.send(im)


 async def startup():
     logger.info("[资源文件下载] 正在检查与下载缺失的资源文件, 可能需要较长时间, 请稍等")
-    await check_use()
+    await download_all_resource()
     logger.info("[资源文件下载] 检查完毕, 正在加载 gamedata")

     TASK = []
@@ -34,3 +36,4 @@ async def startup():
     asyncio.gather(*TASK)
     await Excel.preload_table()
+    logger.info("[资源文件下载] gamedata 加载完毕")

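For context, the startup hunk relies on a load-then-preload pattern: every gamedata JSON is pulled into the in-memory store, then Excel.preload_table() builds the typed tables. A minimal sketch of that flow, assuming the store, Excel, and get_res_path helpers imported at the top of this file; the await on gather is added here for clarity, whereas the committed code fires gather without awaiting it:

    # Sketch of the startup loading pattern, not the verbatim plugin code.
    import asyncio
    from pathlib import Path

    from gsuid_core.data_store import get_res_path


    async def load_gamedata(store, excel) -> None:
        gamedata = Path(get_res_path(["ArknightsUID", "resource", "gamedata"]))
        # Cache each JSON table in the shared memory store.
        tasks = [store.get_file(p) for p in gamedata.glob("*.json")]
        await asyncio.gather(*tasks)  # assumption: awaiting is the intended behavior
        # Materialize typed tables such as Excel.CHARATER_TABLE.
        await excel.preload_table()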
View File

@@ -1,15 +1 @@
-import asyncio
-
-# import threading
-from loguru import logger
-
-from ..arknightsuid_resource import startup
-from ..utils.database.startup import ark_adapter
-
-
-async def all_start():
-    await startup()
-    await ark_adapter()
-
-
-# asyncio.run(all_start())
+from .start import all_start  # noqa: F401

View File

@@ -0,0 +1,11 @@
+import asyncio
+
+from ..arknightsuid_resource import startup
+from ..utils.database.startup import ark_adapter
+
+
+async def all_start():
+    await startup()
+    await ark_adapter()
+
+asyncio.run(all_start())

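Taken together, the two hunks above move startup out of the package __init__: the init file now only re-exports all_start, and importing the new module runs it eagerly. In effect (a sketch of the import chain; file roles inferred from the relative imports shown):

    # __init__.py: the import below has a side effect
    from .start import all_start  # noqa: F401

    # start.py: runs the resource check and DB adapter at import time
    asyncio.run(all_start())

Note that asyncio.run() raises RuntimeError if an event loop is already running, so this wiring assumes the plugin is imported before gsuid_core starts its own loop.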
View File

@@ -40,17 +40,6 @@ async def text2pic(text: str, max_size: int = 800, font_size: int = 20):
 async def send_role_wiki_pic(bot: Bot, ev: Event):
     char_name = ' '.join(re.findall('[\u4e00-\u9fa5]+', ev.text))
-    try:
-        CHARACTER_TABLE = Excel.CHARATER_TABLE
-    except AttributeError:
-        TASK = []
-        for file_path in Path(
-            get_res_path(["ArknightsUID", "resource", "gamedata"])
-        ).glob("*.json"):
-            TASK.append(store.get_file(file_path))
-        asyncio.gather(*TASK)
-        await Excel.preload_table()
-        CHARACTER_TABLE = Excel.CHARATER_TABLE
+    CHARACTER_TABLE = Excel.CHARATER_TABLE
     char_id = None

View File

@@ -112,14 +112,6 @@ def render_template(template_str, data):
 async def get_equip_info(char_id: str):
-    TASK = []
-    for file_path in Path(
-        get_res_path(["ArknightsUID", "resource", "gamedata"])
-    ).glob("*.json"):
-        TASK.append(store.get_file(file_path))
-    asyncio.gather(*TASK)
-    await Excel.preload_table()
     UNIEQUIP_TABLE = Excel.UNIEQUIP_TABLE
     BATTLE_EQUIP_TABLE = Excel.BATTLE_EQUIP_TABLE
@@ -235,14 +227,6 @@ async def get_equip_info(char_id: str):
 async def get_wiki_info(char_id: str):
-    TASK = []
-    for file_path in Path(
-        get_res_path(["ArknightsUID", "resource", "gamedata"])
-    ).glob("*.json"):
-        TASK.append(store.get_file(file_path))
-    asyncio.gather(*TASK)
-    await Excel.preload_table()
     CHARACTER_TABLE = Excel.CHARATER_TABLE
     SKILL_TABLE = Excel.SKILL_TABLE
@@ -402,14 +386,14 @@ async def get_wiki_info(char_id: str):
 async def draw_wiki(char_id: str):
-    TASK = []
-    for file_path in Path(
-        get_res_path(["ArknightsUID", "resource", "gamedata"])
-    ).glob("*.json"):
-        TASK.append(store.get_file(file_path))
-    asyncio.gather(*TASK)
-    await Excel.preload_table()
+    # TASK = []
+    # for file_path in Path(
+    #     get_res_path(["ArknightsUID", "resource", "gamedata"])
+    # ).glob("*.json"):
+    #     TASK.append(store.get_file(file_path))
+    # asyncio.gather(*TASK)
+    # await Excel.preload_table()
     CHARACTER_TABLE = Excel.CHARATER_TABLE
     SKILL_TABLE = Excel.SKILL_TABLE

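These three hunks delete the per-call lazy reload because startup() now guarantees the tables are preloaded before any command runs. If a guard were still wanted for calls that arrive before startup finishes, a hypothetical one-liner (not part of this commit) could fail loudly instead of raising a bare AttributeError; note the attribute really is spelled CHARATER_TABLE in this codebase:

    # Hypothetical guard, not in the commit.
    if not hasattr(Excel, "CHARATER_TABLE"):
        raise RuntimeError("gamedata not loaded; startup() has not completed")
    CHARACTER_TABLE = Excel.CHARATER_TABLE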
View File

@@ -1,226 +1,12 @@
-import asyncio
-from pathlib import Path
-from typing import Dict, List, Tuple, Union
-
-from aiohttp import ClientTimeout, TCPConnector
-from aiohttp.client import ClientSession
-from bs4 import BeautifulSoup
-from gsuid_core.logger import logger
-from gsuid_core.utils.download_resource.download_core import check_url
-from gsuid_core.utils.download_resource.download_file import download
-from msgspec import json as msgjson
-
-from .download_url import download_file
-from .RESOURCE_PATH import GAMEDATA_PATH, RESOURCE_PATH
-
-MAX_DOWNLOAD = 10
-
-with Path.open(
-    Path(__file__).parent / 'resource_map.json', encoding='UTF-8'
-) as f:
-    resource_map = msgjson.decode(
-        f.read(),
-        type=Dict[str, Dict[str, Dict[str, Dict[str, Union[int, str]]]]],
-    )
-
-
-async def find_fastest_url(urls: Dict[str, str]):
-    tasks = []
-    for tag in urls:
-        tasks.append(asyncio.create_task(check_url(tag, urls[tag])))
-
-    results: list[
-        Union[tuple[str, str, float], BaseException]
-    ] = await asyncio.gather(*tasks, return_exceptions=True)
-
-    fastest_tag = ''
-    fastest_url = None
-    fastest_time = float('inf')
-
-    for result in results:
-        if isinstance(result, BaseException):
-            continue
-        tag, url, elapsed_time = result
-        if elapsed_time < fastest_time:
-            fastest_url = url
-            fastest_time = elapsed_time
-            fastest_tag = tag
-
-    return fastest_tag, fastest_url
-
-
-async def check_speed():
-    logger.info('[GsCore资源下载]测速中...')
-
-    URL_LIB = {
-        '[qxqx]': 'https://kr-arm.qxqx.me',
-        '[cos]': 'http://182.43.43.40:8765',
-        '[JPFRP]': 'http://jp-2.lcf.icu:13643',
-    }
-
-    TAG, BASE_URL = await find_fastest_url(URL_LIB)
-    logger.info(f'最快资源站: {TAG} {BASE_URL}')
-    return TAG, BASE_URL
-
-
-async def check_use():
-    tag, _ = await check_speed()
-    logger.info(tag, _)
-    if tag == '[qxqx]':
-        await download_all_file(
-            'https://kr-arm.qxqx.me',
-            '[qxqx]',
-            'ArknightsUID',
-            {'resource/gamedata': GAMEDATA_PATH},
-        )
-    if tag == '[JPFRP]':
-        await download_all_file(
-            'http://jp-2.lcf.icu:13643',
-            '[JPFRP]',
-            'ArknightsUID',
-            {'resource/gamedata': GAMEDATA_PATH},
-        )
-    if tag == '[cos]':
-        await download_all_file_from_cos()
-    return 'ark全部资源下载完成!'
+from gsuid_core.utils.download_resource.download_core import download_all_file
+
+from .RESOURCE_PATH import GAMEDATA_PATH


 async def download_all_file_from_cos():
-    async def _download(tasks: List[asyncio.Task]):
-        failed_list.extend(
-            list(filter(lambda x: x is not None, await asyncio.gather(*tasks)))
-        )
-        tasks.clear()
-        logger.info('[cos]下载完成!')
-
-    failed_list: List[Tuple[str, str, str, str]] = []
-    TASKS = []
-    async with ClientSession() as sess:
-        for res_type in ['resource']:
-            logger.info('[cos]开始下载资源文件...')
-            resource_type_list = [
-                'gamedata',
-            ]
-            for resource_type in resource_type_list:
-                file_dict = resource_map[res_type][resource_type]
-                logger.info(
-                    f'[cos]数据库[{resource_type}]中存在{len(file_dict)}个内容!'
-                )
-                temp_num = 0
-                for file_name, file_info in file_dict.items():
-                    name = file_name
-                    size = file_info['size']
-                    url = file_info['url']
-                    if isinstance(url, int):
-                        continue
-                    path = Path(RESOURCE_PATH / resource_type / name)
-                    if path.exists():
-                        is_diff = size == Path.stat(path).st_size
-                    else:
-                        is_diff = True
-                    if (
-                        not path.exists()
-                        or not Path.stat(path).st_size
-                        or not is_diff
-                    ):
-                        logger.info(f'[cos]开始下载[{resource_type}]_[{name}]...')
-                        temp_num += 1
-                        TASKS.append(
-                            asyncio.wait_for(
-                                download_file(
-                                    url, res_type, resource_type, name, sess
-                                ),
-                                timeout=600,
-                            )
-                        )
-                        # await download_file(url, FILE_TO_PATH[file], name)
-                    if len(TASKS) >= MAX_DOWNLOAD:
-                        await _download(TASKS)
-                await _download(TASKS)
-                if temp_num == 0:
-                    im = f'[cos]数据库[{resource_type}]无需下载!'
-                else:
-                    im = f'[cos]数据库[{resource_type}]已下载{temp_num}个内容!'
-                    temp_num = 0
-                logger.info(im)
-        if failed_list:
-            logger.info(f'[cos]开始重新下载失败的{len(failed_list)}个文件...')
-            for url, res_type, resource_type, name in failed_list:
-                TASKS.append(
-                    asyncio.wait_for(
-                        download_file(url, res_type, resource_type, name, sess),
-                        timeout=600,
-                    )
-                )
-                if len(TASKS) >= MAX_DOWNLOAD:
-                    await _download(TASKS)
-            await _download(TASKS)
-            if count := len(failed_list):
-                logger.error(f'[cos]仍有{count}个文件未下载, 请使用命令 `下载全部资源` 重新下载')
-
-
-async def _get_url(url: str, sess: ClientSession):
-    req = await sess.get(url=url)
-    return await req.read()
-
-
-async def download_all_file(
-    BASE_URL: str, TAG: str, plugin_name: str, EPATH_MAP: Dict[str, Path]
-):
-    PLUGIN_RES = f'{BASE_URL}/{plugin_name}'
-
-    TASKS = []
-    async with ClientSession(
-        connector=TCPConnector(verify_ssl=False),
-        timeout=ClientTimeout(total=None, sock_connect=20, sock_read=200),
-    ) as sess:
-        for endpoint in EPATH_MAP:
-            url = f'{PLUGIN_RES}/{endpoint}/'
-            path = EPATH_MAP[endpoint]
-
-            base_data = await _get_url(url, sess)
-            content_bs = BeautifulSoup(base_data, 'lxml')
-            pre_data = content_bs.find_all('pre')[0]
-            data_list = pre_data.find_all('a')
-            size_list = list(content_bs.strings)
-            logger.info(f'{TAG} 数据库 {endpoint} 中存在 {len(data_list)} 个内容!')
-
-            temp_num = 0
-            for index, data in enumerate(data_list):
-                if data['href'] == '../':
-                    continue
-                file_url = f'{url}{data["href"]}'
-                name: str = data.text
-                size = size_list[index * 2 + 6].split(' ')[-1]
-                size = size.replace('\r\n', '')
-                file_path = path / name
-                if file_path.exists():
-                    is_diff = size == str(Path.stat(file_path).st_size)
-                else:
-                    is_diff = True
-                if (
-                    not file_path.exists()
-                    or not Path.stat(file_path).st_size
-                    or not is_diff
-                ):
-                    logger.info(
-                        f'{TAG} {plugin_name} 开始下载 {endpoint}/{name} ...'
-                    )
-                    temp_num += 1
-                    TASKS.append(
-                        asyncio.wait_for(
-                            download(file_url, path, name, sess, TAG),
-                            timeout=600,
-                        )
-                    )
-                if len(TASKS) >= 10:
-                    await asyncio.gather(*TASKS)
-                    TASKS.clear()
-            await asyncio.gather(*TASKS)
-            TASKS.clear()
-            if temp_num == 0:
-                im = f'{TAG} 数据库 {endpoint} 无需下载!'
-            else:
-                im = f'{TAG}数据库 {endpoint} 已下载{temp_num}个内容!'
-                temp_num = 0
-            logger.info(im)
+    await download_all_file(
+        'ArknightsUID',
+        {
+            'resource/gamedata': GAMEDATA_PATH,
+        },
+    )

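The rewrite above drops the plugin's own mirror speed test and cos downloader in favor of the core helper. Judging only from the new import and call in this diff, download_all_file from gsuid_core takes the plugin name plus an endpoint-to-path map (the old local version also took a base URL and tag). A usage sketch under that inference:

    # Sketch of delegating to the core downloader; the two-argument
    # signature is inferred from this diff, not from gsuid_core docs.
    from gsuid_core.utils.download_resource.download_core import download_all_file

    from .RESOURCE_PATH import GAMEDATA_PATH


    async def refresh_gamedata() -> None:  # hypothetical wrapper name
        await download_all_file(
            'ArknightsUID',                        # plugin namespace on the mirror
            {'resource/gamedata': GAMEDATA_PATH},  # remote endpoint -> local dir
        )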
View File

@@ -1,60 +0,0 @@
-from typing import Union, Tuple
-
-import aiofiles
-from aiohttp.client import ClientSession
-from aiohttp.client_exceptions import ClientConnectorError
-from gsuid_core.logger import logger
-
-from .RESOURCE_PATH import RESOURCE_PATH
-
-PATHDICT = {
-    'resource': RESOURCE_PATH,
-}
-
-
-async def download(
-    url: str,
-    res_type: str,
-    resource_type: str,
-    name: str,
-) -> Union[Tuple[str, str, str], None]:
-    """
-    :说明:
-      下载URL保存入目录
-    :参数:
-      * url: `str`
-            资源下载地址
-      * res_type: `str`
-            资源类型, `resource` / `wiki`
-      * resource_type: `str`
-            资源文件夹名
-      * name: `str`
-            资源保存名称
-    :返回(失败才会有返回值):
-      url: `str`
-      resource_type: `str`
-      name: `str`
-    """
-    async with ClientSession() as sess:
-        return await download_file(url, res_type, resource_type, name, sess)
-
-
-async def download_file(
-    url: str,
-    res_type: str,
-    resource_type: str,
-    name: str,
-    sess: Union[ClientSession, None] = None,
-) -> Union[Tuple[str, str, str], None]:
-    if sess is None:
-        sess = ClientSession()
-    try:
-        async with sess.get(url) as res:
-            content = await res.read()
-    except ClientConnectorError:
-        logger.warning(f'[cos]{name}下载失败')
-        return url, resource_type, name
-    async with aiofiles.open(
-        PATHDICT[res_type] / resource_type / name, 'wb'
-    ) as f:
-        await f.write(content)

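The deleted module encoded its retry contract in the return value: download_file returns None on success and the (url, resource_type, name) triple on a connection error (per its docstring), which is what the cos downloader's failed_list collected. A minimal sketch of driving that contract; the tasks argument is assumed to wrap download_file coroutines:

    # Sketch: gather downloads and keep only the failures for a retry pass.
    import asyncio


    async def run_with_retry(tasks):
        results = await asyncio.gather(*tasks)
        failed = [r for r in results if r is not None]  # None means success
        return failed  # entries are (url, resource_type, name), ready to re-queue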
View File

@ -1,194 +0,0 @@
{
"resource": {
"gamedata": {
"activity_table.json": {
"size": 6073859,
"url": "http://182.43.43.40:8765/UploadPic/a0f1ede97d3319a1bc54aa71f8e0d3b7.json"
},
"audio_data.json": {
"size": 4148762,
"url": "http://182.43.43.40:8765/UploadPic/086fc0b514b092ee4b0848339c0c390b.json"
},
"battle_equip_table.json": {
"size": 3127845,
"url": "http://182.43.43.40:8765/UploadPic/7a116d92fc3edfb118eadc6d445a9634.json"
},
"building_data.json": {
"size": 3751638,
"url": "http://182.43.43.40:8765/UploadPic/004cc0bee9a859b1288ad11d88d6cc92.json"
},
"campaign_table.json": {
"size": 383164,
"url": "http://182.43.43.40:8765/UploadPic/d4ce0f19d3dbbe30e1047193cb93368c.json"
},
"chapter_table.json": {
"size": 971,
"url": "http://182.43.43.40:8765/UploadPic/93372e99a20ee65da8835e7ab14cd0ae.json"
},
"character_table.json": {
"size": 10590926,
"url": "http://182.43.43.40:8765/UploadPic/31c6cfc69ca4b5efe5f2aaeb361dc0a5.json"
},
"charm_table.json": {
"size": 115959,
"url": "http://182.43.43.40:8765/UploadPic/531ca7c8a0e50ce887cb82c53bfc1735.json"
},
"charword_table.json": {
"size": 8185744,
"url": "http://182.43.43.40:8765/UploadPic/a8fd59b7bba9cf3ab8e8ba20ee1d7d9b.json"
},
"char_meta_table.json": {
"size": 68999,
"url": "http://182.43.43.40:8765/UploadPic/e6916f811f3cf9106dc93c952b501605.json"
},
"char_patch_table.json": {
"size": 32184,
"url": "http://182.43.43.40:8765/UploadPic/9e1c18366b2a683e62ce45a046736bfc.json"
},
"checkin_table.json": {
"size": 128629,
"url": "http://182.43.43.40:8765/UploadPic/66c5b9adff70e540de47ba9727634bd1.json"
},
"climb_tower_table.json": {
"size": 311399,
"url": "http://182.43.43.40:8765/UploadPic/266de955492e71c71352e5a7f15b874a.json"
},
"clue_data.json": {
"size": 9157,
"url": "http://182.43.43.40:8765/UploadPic/1a4bc40c48a8ab09f1e17bd52c40694f.json"
},
"crisis_table.json": {
"size": 26075,
"url": "http://182.43.43.40:8765/UploadPic/e0ae91a98b61c08815e7cee46d6db028.json"
},
"display_meta_table.json": {
"size": 47570,
"url": "http://182.43.43.40:8765/UploadPic/e2e1b6bc43b67c1f167d802ba9a737ea.json"
},
"enemy_handbook_table.json": {
"size": 1039317,
"url": "http://182.43.43.40:8765/UploadPic/b96d9eea501ac8e2288b04a2547a7317.json"
},
"favor_table.json": {
"size": 39045,
"url": "http://182.43.43.40:8765/UploadPic/a2aad37d3f67ffae35ce18aaba24898a.json"
},
"gacha_table.json": {
"size": 245310,
"url": "http://182.43.43.40:8765/UploadPic/b6bb888244e34863e01c2f415d4867e5.json"
},
"gamedata_const.json": {
"size": 49035,
"url": "http://182.43.43.40:8765/UploadPic/b4c46b3446b303750618749edd76abe0.json"
},
"handbook_info_table.json": {
"size": 3831976,
"url": "http://182.43.43.40:8765/UploadPic/ad15a21aca6c0d4b75fa4e1bd6707a79.json"
},
"handbook_table.json": {
"size": 17270,
"url": "http://182.43.43.40:8765/UploadPic/5212c2720ab7afde6685cfdc8b580b9a.json"
},
"handbook_team_table.json": {
"size": 11170,
"url": "http://182.43.43.40:8765/UploadPic/eb15c058fb2607fd149146b47b0657a3.json"
},
"item_table.json": {
"size": 1040831,
"url": "http://182.43.43.40:8765/UploadPic/351a4853e13a2ff7c54e7e6002195646.json"
},
"medal_table.json": {
"size": 944151,
"url": "http://182.43.43.40:8765/UploadPic/64ba969ce848d8978396f912c7c13b66.json"
},
"mission_table.json": {
"size": 615067,
"url": "http://182.43.43.40:8765/UploadPic/e462b6ea559050bc90d74f194cd778e4.json"
},
"open_server_table.json": {
"size": 148892,
"url": "http://182.43.43.40:8765/UploadPic/b025b4ed419d97af9a3f355e3ff611f8.json"
},
"player_avatar_table.json": {
"size": 9277,
"url": "http://182.43.43.40:8765/UploadPic/ed23f950d5771278aee506281dc0df5d.json"
},
"range_table.json": {
"size": 58423,
"url": "http://182.43.43.40:8765/UploadPic/1b5e45111d64a38dccc022c7927ff839.json"
},
"replicate_table.json": {
"size": 89591,
"url": "http://182.43.43.40:8765/UploadPic/808ddd375e78fa52c18746a331e5f3af.json"
},
"retro_table.json": {
"size": 2979414,
"url": "http://182.43.43.40:8765/UploadPic/d6fe932d4b7240a842cea2cedff2aaac.json"
},
"roguelike_table.json": {
"size": 493073,
"url": "http://182.43.43.40:8765/UploadPic/f6d59f951312558c0129b7841746574c.json"
},
"roguelike_topic_table.json": {
"size": 5714268,
"url": "http://182.43.43.40:8765/UploadPic/eb9d00b6c99ce7c76896c60a9efd77b4.json"
},
"sandbox_table.json": {
"size": 798748,
"url": "http://182.43.43.40:8765/UploadPic/5bc662e1bd9a3ddfda37d1b3b765098f.json"
},
"shop_client_table.json": {
"size": 775150,
"url": "http://182.43.43.40:8765/UploadPic/39f38721154e730ee421572a9576049a.json"
},
"skill_table.json": {
"size": 7751259,
"url": "http://182.43.43.40:8765/UploadPic/9e45ef441a762f3593f660b15e558a73.json"
},
"skin_table.json": {
"size": 2237674,
"url": "http://182.43.43.40:8765/UploadPic/b3faf166fe4cf20258e5c0c33326ce14.json"
},
"stage_table.json": {
"size": 13500695,
"url": "http://182.43.43.40:8765/UploadPic/49100618d819fa0ed09071b67dee8aec.json"
},
"story_review_meta_table.json": {
"size": 442837,
"url": "http://182.43.43.40:8765/UploadPic/467a1bfb27b9b105827330aa75ad1f25.json"
},
"story_review_table.json": {
"size": 1412606,
"url": "http://182.43.43.40:8765/UploadPic/82def473c33916cd6bcc446dce6cbf93.json"
},
"story_table.json": {
"size": 1084715,
"url": "http://182.43.43.40:8765/UploadPic/6cfdfc4f03b2ded83ed821c5f6a8ec97.json"
},
"tech_buff_table.json": {
"size": 12123,
"url": "http://182.43.43.40:8765/UploadPic/7a4b2c5370ec8afc677f531cb8f0281c.json"
},
"tip_table.json": {
"size": 19659,
"url": "http://182.43.43.40:8765/UploadPic/1bc4eaf9db02b3d2abbd5581033e1f67.json"
},
"token_table.json": {
"size": 15033,
"url": "http://182.43.43.40:8765/UploadPic/99aa92331d9b9fe28be10dbe4338e9e3.json"
},
"uniequip_data.json": {
"size": 11460,
"url": "http://182.43.43.40:8765/UploadPic/1d89fb12977515ccb0887b391a781928.json"
},
"uniequip_table.json": {
"size": 1803714,
"url": "http://182.43.43.40:8765/UploadPic/66690ea4301738ea5f72c3d6f19a88c1.json"
},
"zone_table.json": {
"size": 238807,
"url": "http://182.43.43.40:8765/UploadPic/db9bbeb891e1d72d1a1b2f8e3d4bd72a.json"
}
}
}
}