S4U: 都市朋克2011与爱的重拳 @ Official Chinese Localization | SakuraLLM dataset
v-corpus-zh/U0U/S4U_都市朋克2011与爱的重拳/0.txt.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4c08c934165192aadb764dbf1c99af818a85841ee223635b33c8a0f1b61915d9
size 235306
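The 0.txt.gz above is stored through Git LFS, so only the pointer (oid and size) appears in the diff; the actual file has to be fetched with git lfs pull. It is presumably the gzip-compressed output of the S4U_json.py script shown below, one "speaker:dialogue" line per entry. A minimal reading sketch under that assumption, using the repository path of the added file for illustration:

import gzip

# Read the extracted script; each line should be "speaker:dialogue" as written
# by S4U_json.py below. The path is the repository path of the added file.
path = 'v-corpus-zh/U0U/S4U_都市朋克2011与爱的重拳/0.txt.gz'
with gzip.open(path, 'rt', encoding='utf-8') as f:
    for line in f:
        speaker, _, text = line.rstrip('\n').partition(':')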
v-corpus-zh/U0U/S4U_都市朋克2011与爱的重拳/S4U_json.py
ADDED
@@ -0,0 +1,180 @@
import sys
sys.path.append('..')

from common import clearT, get_all_files_in_directory
# https://huggingface.co/datasets/Limour/archvie/blob/main/v-corpus-zh/common.py

# =================

a = get_all_files_in_directory(r'E:\galgame\s4u\Chs', ext='.json')  # extracted dialogue JSON files
b = r'E:\galgame\s4u\text'  # output directory for the plain-text script

# =================

sc = {}
# speaker table: internal key -> in-game display name
_n = {
    "Miki" : "三木",
    "Nil" : "薯条仔",
    "Kaede" : "每天开心",
    "Someone" : "匿名消息",
    "Ben" : "Ben",
    "Alan" : "艾兰",
    "主食+云府事业部" : "主食+",
    "Duke" : "小巴司机",
    "Holly" : "霍利蓬松猫",
    "Kathy" : "老珍妮",
    "Max" : "大麦",
    "Sella" : "疤头",
    "ANS" : "不喝可乐",
    "May" : "冰箱会吃什么?",
    "Grave" : "飞行孤独员",
    "Nami" : "咸鱼Hitomi",
    "Haruka" : "遥遥还想努力",
    "Daisy" : "繁花与诗",
    "System" : "连接助手",
    "Sake" : "苍蓝之星",
    "Sue" : "Sue",
    "Regules" : "大神秘学家Regules",
    "Nora" : "Nora",
    "Roland" : "开口前先看自己的需求",
    "Mo" : "肉夹馍",
    "Neko" : "我起了一枪秒",
    "U" : "用户82331617U",
    "Norida" : "Nori认真工作版",
    "Frank" : "林富兰克林",
    "Viz" : "艾维辛的誓约",
    "Dom" : "余火",
    "Will" : "五十夜威尔森",
    "Amanda" : "曼困困",
    "Lynn" : "泛音圈",
    "Ray" : "Ray",
    "Pi" : "铍动力攻城狮的飞升",
    "Li" : "锂电子生物男的绝望",
    "Malow" : "大川医科Dr.M",
    "Zoy" : "往左",
    "打印机大电脑啦啦啦" : "打印机大电脑啦啦啦",
    "学长A" : "学长A",
    "Dora" : "反水母",
    "CYDB助手" : "CYDB助手",
    "Eam" : "Eam",
    "Eka" : "TT艾卡",
    "Levi" : "AA美男子小李",
    "神之域超门" : "神之域超门",
    "Levi, Miki, Pi" : "Levi&Miki&Pi",
    "Lith" : "Lith(非本人)",
    "Meeka" : "匆匆过客而已",
    "Aspen" : "不上大师不改名",
    "TycoGroup" : "Tyco的花花幼稚园",
    "ChikenGroup" : "狂暴番茄鸡互骂群",
    "JacyGroup" : "Jacy骑士团",
    "Tyco" : "Tyco睡不着",
    "Jacy" : "一J开天门",
    "MageCC" : "MageCC",
    "Woods" : "重生之我在麻园当魔王",
    "Deans" : "枫叶",
    "Teddy" : "疾风炸弹人",
    "Kris" : "只有学习才是出路",
    "Plunk" : "85年生的Silver",
    "Lucid" : "Lucid",
    "DomGroup" : "【八番设计院】【建设与创意部】【水泥组】",
    "Hans" : "转角街老韩",
    "MastroX" : "MastroX",
    "Pheo" : "幻想狂大飞",
    "Miki2" : "Chiaki",
    "Pi2" : "Pi",
    "Malow2" : "Malow2",
    "829" : "829咖啡",
    "Think": '心声',
    "think": '心声',
    "Monologue": '旁白',
    "Typing": '三木(打字)',
    "": '旁白',
}

# =================
import re


def custom_sort_key(s):
    # sort (key, value) pairs by the numeric segments of the dotted key
    s: str = s[0]
    return [int(x) for x in s.split('.') if x.isdigit()]


import json

# =================

def readJson(_p):
    # a dumped MonoBehaviour asset stores the dialogue table as a JSON string
    # inside its 'm_Script' field
    with open(_p, 'r', encoding='utf-8') as _json_file:
        _data = json.load(_json_file)

    if 'm_Script' not in _data:
        return {}

    _data = json.loads(_data['m_Script'])
    _data = [(k, v) for k, v in _data.items()]
    _data = sorted(_data, key=custom_sort_key)
    return _data

# data = readJson(r'E:\galgame\s4u\Character.json')
# for k, v in data:
#     if 'NickName.' in k:
#         k = k.split('NickName.', maxsplit=1)[-1]
#         _n[k] = clearT(v)
# tmp = json.dumps(_n, ensure_ascii=False, indent=4)

# =================

# keep only the files referenced by the preload table of zh-cn.json,
# matched by their m_PathID
with open(r'E:\galgame\s4u\zh-cn.json', 'r', encoding='utf-8') as json_file:
    data = json.load(json_file)
data = data['m_PreloadTable']['Array']
_a = []
for one in data:
    m_PathID = str(one['m_PathID']) + '.json'
    try:
        one = next(x for x in a if x.endswith(m_PathID))
    except StopIteration:
        continue
    _a.append(one)
a = _a

re_html = re.compile(r'<.+?>')
# =================
for path in a:
    name = path[path.rindex('\\') + 1:]
    name = '0'  # merge every file into a single output, 0.txt
    if name not in sc:
        sc[name] = []
    print(name)
    # =================
    data = readJson(path)
    # =================
    for k, v in data:
        if not v:
            continue
        # the speaker is the first or second segment of the dotted key
        tmp = k.split('.')
        if len(tmp) > 2:
            n: str = tmp[1]
            if n.isdigit() and (n not in _n):
                n = tmp[0]
        else:
            n = tmp[0]
        if n in _n:
            n = _n[n]
        else:
            _n[n] = clearT(n).replace('・', '&')
            print(k, v, n)
        if n.lower() in {'todo', 'ui'}:
            continue
        # =================
        d = clearT(v)
        d = re_html.sub(' ', d)
        if d:
            sc[name].append(n + ':' + d)

# =================
for k, v in sc.items():
    if v:
        with open(b + f'\\{k}.txt', 'w', encoding='utf-8') as f:
            f.write('\n'.join(v))

# =================
import json
tmp = json.dumps(_n, ensure_ascii=False, indent=4)  # name table as JSON, for manual inspection
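The script writes plain .txt files into the text directory; the committed 0.txt.gz is presumably just the gzip-compressed form of that 0.txt. A minimal packaging sketch under that assumption (paths mirror the ones used above; the output name is an assumption):

import gzip
import shutil

# Compress the generated plain-text script into the .gz that is committed.
with open(r'E:\galgame\s4u\text\0.txt', 'rb') as src, \
        gzip.open(r'E:\galgame\s4u\text\0.txt.gz', 'wb') as dst:
    shutil.copyfileobj(src, dst)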