get_code.py

import crawl
import asyncio
import warnings
from parser import parser

warnings.filterwarnings('ignore', category=DeprecationWarning)

# HTML template whose placeholders are filled in by collection_to_page().
html_data = open('download.htm', 'r', encoding='utf-8').read()
def collection_classify(url: str) -> tuple[list, list, list]:
    dps = []    # data pack links
    tps = []    # texture pack links
    ots = []    # everything else
    # Fetch the collection page and gather the links it contains.
    tasks = [asyncio.ensure_future(crawl.expand_collection(url))]
    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait(tasks))
    packs = tasks[0].result()
    if isinstance(packs, str):
        # A plain string means no link list was obtained; return empty results.
        return dps, tps, ots
    # First pass: bucket each link by type.
    for url in packs:
        if parser.datapack(url):
            dps.append(url)
        elif parser.texturepack(url):
            tps.append(url)
        else:
            ots.append(url)
    # Expand data pack links through the known hosts and redirectors
    # (CurseForge, adfoc, MediaFire, mirrors) for up to 5 rounds.
    for _ in range(5):
        dps2 = []
        tasks = []
        for url in dps:
            if parser.datapack(url):
                tasks.append(asyncio.ensure_future(crawl.expand_datapack(url)))
            elif parser.curseforge(url):
                tasks.append(asyncio.ensure_future(crawl.expand_curseforge(url)))
            elif parser.adfoc(url):
                tasks.append(asyncio.ensure_future(crawl.expand_adfoc(url)))
            elif parser.mediafire(url):
                tasks.append(asyncio.ensure_future(crawl.expand_mediafire(url)))
            elif parser.mirror(url):
                tasks.append(asyncio.ensure_future(crawl.expand_mirror(url)))
            elif parser.ignore(url):
                ots.append(url)
            else:
                dps2.append(url)
        if len(tasks) == 0:
            break
        loop = asyncio.get_event_loop()
        loop.run_until_complete(asyncio.wait(tasks))
        for t in tasks:
            data = t.result()
            if isinstance(data, str):
                dps2.append(data)
            elif isinstance(data, tuple):
                # A tuple result carries both data pack and texture pack links.
                dp, tp = data
                dps2 += dp
                tps += tp
            else:
                dps2 += data
        dps = dps2
    # Expand texture pack links the same way.
    for _ in range(5):
        tps2 = []
        tasks = []
        for url in tps:
            if parser.texturepack(url):
                tasks.append(asyncio.ensure_future(crawl.expand_texturepack(url)))
            elif parser.curseforge(url):
                tasks.append(asyncio.ensure_future(crawl.expand_curseforge(url)))
            elif parser.adfoc(url):
                tasks.append(asyncio.ensure_future(crawl.expand_adfoc(url)))
            elif parser.mediafire(url):
                tasks.append(asyncio.ensure_future(crawl.expand_mediafire(url)))
            elif parser.mirror(url):
                tasks.append(asyncio.ensure_future(crawl.expand_mirror(url)))
            elif parser.ignore(url):
                ots.append(url)
            else:
                tps2.append(url)
        if len(tasks) == 0:
            break
        loop = asyncio.get_event_loop()
        loop.run_until_complete(asyncio.wait(tasks))
        for t in tasks:
            data = t.result()
            if isinstance(data, str):
                tps2.append(data)
            else:
                tps2 += data
        tps = tps2
    return dps, tps, ots

def collection_to_page(url):
    # Fill the download.htm template with the classified links.
    dps, tps, ots = collection_classify(url)
    data = html_data
    data = data.replace('collection_name', url, -1)
    data = data.replace("['data_packs']", str(dps))
    data = data.replace("['texture_packs']", str(tps))
    data = data.replace("['others']", str(ots))
    return data

if __name__ == '__main__':
    with open('out.htm', 'w', encoding='utf-8') as f:
        data = collection_to_page('https://www.planetminecraft.com/collection/147813/hello-world/')
        f.write(data)
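
Note: crawl and parser are local modules that are not shown on this page. The stub below is only a hypothetical sketch of the interface this script appears to depend on, inferred from the call sites above; the function names come from the code, but every signature and return shape here is an assumption, not the modules' actual definitions.

# Hypothetical interface sketch (assumptions inferred from get_code.py's call
# sites, not the real crawl/parser code). The script only checks result types:
# a str is kept as a single link/message, a list contributes more links, and a
# tuple carries (data_pack_links, texture_pack_links).

class parser:
    """URL classifiers; each is assumed to return a bool."""
    @staticmethod
    def datapack(url: str) -> bool: ...
    @staticmethod
    def texturepack(url: str) -> bool: ...
    @staticmethod
    def curseforge(url: str) -> bool: ...
    @staticmethod
    def adfoc(url: str) -> bool: ...
    @staticmethod
    def mediafire(url: str) -> bool: ...
    @staticmethod
    def mirror(url: str) -> bool: ...
    @staticmethod
    def ignore(url: str) -> bool: ...

# crawl module: coroutines that take a URL and resolve it one step further.
async def expand_collection(url: str) -> 'list[str] | str': ...
async def expand_datapack(url: str) -> 'list[str] | tuple[list, list] | str': ...
async def expand_texturepack(url: str) -> 'list[str] | str': ...
async def expand_curseforge(url: str) -> 'list[str] | str': ...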