tutorial.py
import datetime
import os

from pixiv_utils.pixiv_crawler import (
    BookmarkCrawler,
    KeywordCrawler,
    RankingCrawler,
    UserCrawler,
    checkDir,
    displayAllConfig,
    download_config,
    network_config,
    ranking_config,
    user_config,
)


def downloadRanking():
    """
    Download artworks from the rankings.

    NOTE: Requires a cookie for R18 images!

    Args:
        capacity (int): flow capacity, default is 1024MB
    """
    user_config.user_id = ""
    user_config.cookie = ""
    download_config.with_tag = False

    ranking_config.start_date = datetime.date(2024, 5, 1)
    ranking_config.range = 2
    ranking_config.mode = "weekly"
    ranking_config.content_mode = "illust"
    ranking_config.num_artwork = 50

    displayAllConfig()
    checkDir(download_config.store_path)

    app = RankingCrawler(capacity=200)
    app.run()


def downloadBookmark():
    """
    Download artworks from your bookmarks.

    NOTE: Requires a cookie!

    Args:
        n_images (int): max download number, default is 200
        capacity (int): flow capacity, default is 1024MB
    """
    download_config.with_tag = False

    user_config.user_id = "[TODO]: Your user_id here"
    user_config.cookie = "[TODO]: Your cookie here"

    displayAllConfig()
    checkDir(download_config.store_path)

    app = BookmarkCrawler(n_images=20, capacity=200)
    app.run()
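
# NOTE (assumption, not part of the original tutorial): user_id is typically the
# numeric id in your profile URL (https://www.pixiv.net/users/<user_id>), and the
# cookie is usually copied from a logged-in browser session (e.g. from the request
# headers in the developer tools). Check the project README for the exact format.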


def downloadUser():
    """
    Download artworks from a single artist.

    NOTE: Requires a cookie for R18 images!

    Args:
        artist_id (str): artist id
        capacity (int): flow capacity, default is 1024MB
    """
    user_config.user_id = ""
    user_config.cookie = ""
    download_config.with_tag = False

    displayAllConfig()
    checkDir(download_config.store_path)

    app = UserCrawler(artist_id="32548944", capacity=200)
    app.run()


def downloadKeyword():
    """
    Download the search results of a keyword (sorted by popularity if order=True).

    Supports advanced search, e.g. "(Lucy OR 边缘行者) AND (5000users OR 10000users)";
    see https://www.pixiv.help/hc/en-us/articles/235646387-I-would-like-to-know-how-to-search-for-content-on-pixiv

    NOTE: Requires a cookie for R18 images!
    NOTE: Requires a premium account for popularity sorting!

    Args:
        keyword (str): search keyword
        order (bool): order by popularity or not, default is False
        mode (str): content mode, default is "safe", supports ["safe", "r18", "all"]
        n_images (int): max download number, default is 200
        capacity (int): flow capacity, default is 1024MB
    """
    user_config.user_id = ""
    user_config.cookie = ""
    download_config.with_tag = False

    displayAllConfig()
    checkDir(download_config.store_path)

    app = KeywordCrawler(
        keyword="(Lucy OR 边缘行者) AND (5000users OR 10000users)",
        order=False,
        mode=["safe", "r18", "all"][-1],
        n_images=20,
        capacity=200,
    )
    app.run()


def loadEnv():
    """
    Load environment variables for the proxy, cookie, and user_id.
    """
    # Use the system proxy settings
    proxy = os.getenv("https_proxy") or os.getenv("HTTPS_PROXY")
    if proxy is not None:
        network_config.proxy["https"] = proxy

    # Use the user_id and cookie from the environment
    cookie = os.getenv("PIXIV_COOKIE")
    uid = os.getenv("PIXIV_UID")

    if cookie is not None:
        user_config.cookie = cookie
    if uid is not None:
        user_config.user_id = uid
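
# Example (assumption, not part of the original file): set these variables in your
# shell before running, then call loadEnv() in the main block below.
#
#   export https_proxy="http://127.0.0.1:7890"
#   export PIXIV_COOKIE="<your cookie string>"
#   export PIXIV_UID="<your numeric user id>"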


if __name__ == "__main__":
    # loadEnv()
    downloadRanking()
    downloadBookmark()
    downloadUser()
    downloadKeyword()
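
# To try this tutorial (assuming pixiv_utils is installed in your environment,
# e.g. via pip), comment out the functions you do not need above and run:
#
#   python tutorial.py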