This commit is contained in:
56
handlers/pixiv/Webdav.py
Normal file
56
handlers/pixiv/Webdav.py
Normal file
@ -0,0 +1,56 @@
|
||||
import os
|
||||
|
||||
from webdav3.client import Client
|
||||
from webdav3.exceptions import LocalResourceNotFound, RemoteResourceNotFound
|
||||
|
||||
class Webdav(object):
    """Thin wrapper around webdav3's ``Client`` for uploading local files.

    ``option`` must provide 'webdav_hostname', 'webdav_login',
    'webdav_password', 'disable_check' and 'default_upload_path'.
    """

    def __init__(self, option):
        webdav_option = {
            'webdav_hostname': option['webdav_hostname'],
            'webdav_login': option['webdav_login'],
            'webdav_password': option['webdav_password'],
            'disable_check': option['disable_check'],
        }
        self.client = self.connect_webdav(webdav_option)
        self.default_upload_path = option['default_upload_path']

    # Connect to the WebDAV server.
    def connect_webdav(self, options):
        return Client(options)

    # Batch-sync local temp files to the WebDAV server.
    def batch_sync(self, file_list, upload_path=None):
        """Upload every file in *file_list* (dicts with 'id' and 'file_name')
        from the local temp directory next to this module.

        Returns ``(success_ids, failed_ids)``.
        """
        success_ids = []
        failed_ids = []
        # Local files are staged in a 'temp' dir alongside this module.
        temp_path = os.path.join(os.path.dirname(__file__), 'temp')

        for file_item in file_list:
            path = os.path.join(temp_path, file_item['file_name'])
            result_flag = self.sync_file(file_item['file_name'], path, upload_path)

            if result_flag:
                success_ids.append(file_item['id'])
            else:
                failed_ids.append(file_item['id'])

        return (success_ids, failed_ids)

    def sync_file(self, file_name, path, upload_path=None):
        """Upload one local file; return True on success, False on failure.

        BUG FIX: only LocalResourceNotFound was caught before, so a
        RemoteResourceNotFound raised by the client (the import was unused)
        escaped and crashed batch_sync; both are now handled.
        """
        result_flag = True
        try:
            if not upload_path:
                upload_path = self.default_upload_path

            # NOTE(review): remote name is plain string concatenation, so
            # upload_path is assumed to end with '/' -- confirm with callers.
            self.client.upload(upload_path + file_name, path)

            print('Info: ' + file_name + ' upload success!!')
        except (LocalResourceNotFound, RemoteResourceNotFound):
            result_flag = False

            print('Error: ' + file_name + ' upload failed!!')

        return result_flag
|
||||
|
0
handlers/pixiv/__init__.py
Normal file
0
handlers/pixiv/__init__.py
Normal file
26
handlers/pixiv/comb.py
Normal file
26
handlers/pixiv/comb.py
Normal file
@ -0,0 +1,26 @@
|
||||
import re
|
||||
|
||||
# 整理数据
|
||||
# Normalize raw feed entries into a compact list of dicts.
def extract_pixiv_info(entries):
    """Normalize feed *entries* into dicts with id/title/link/author/tag.

    BUG FIX: URLs from every content item are now accumulated; the old
    code reassigned ``links`` on each inner iteration, so only the last
    content item's URLs survived (it was initialized to [] and clobbered).
    The regex compiles are also hoisted out of the loops.
    """
    url_pattern = re.compile(
        r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
    # Tag names are embedded in the source feed id, e.g. '.../tags/<tag>/'.
    tag_pattern = re.compile(r'tags/(.*)/')

    pixiv_list = []
    for entry in entries:
        links = []
        for content in entry['content']:
            links.extend(re.findall(url_pattern, content['value']))

        item = {
            'id': entry['id'],
            'title': entry['title'],
            'link': links,
            'author': entry['author'],
            'tag': re.findall(tag_pattern, entry['source']['id']),
        }

        pixiv_list.append(item)
    return pixiv_list
|
||||
|
||||
def get_prefix_name_on_entry(entry):
    """Build the '#tags# @author@ title' file-name prefix for *entry*."""
    joined_tags = ','.join(entry['tag'])
    return f"#{joined_tags}# @{entry['author']}@ {entry['title']}"
|
13
handlers/pixiv/file.py
Normal file
13
handlers/pixiv/file.py
Normal file
@ -0,0 +1,13 @@
|
||||
import os
|
||||
|
||||
def save_file(name, data):
    """Write *data* (bytes) to ``<module dir>/temp/<name>``.

    FIX: the temp directory is now created on demand -- previously an
    absent directory made ``open`` raise FileNotFoundError.
    """
    temp_path = os.path.join(os.path.dirname(__file__), 'temp')
    os.makedirs(temp_path, exist_ok=True)
    with open(os.path.join(temp_path, name), 'wb') as fh:
        fh.write(data)
|
||||
|
||||
def remove_file(path):
    """Best-effort delete of *path*; missing/undeletable files are ignored.

    FIX: narrowed from a bare ``except:`` (which also swallowed
    KeyboardInterrupt and SystemExit) to OSError, keeping the original
    silent best-effort semantics for filesystem errors.
    """
    if len(path) > 0:
        try:
            os.remove(path)
        except OSError:
            pass
|
64
handlers/pixiv/pixiv_handler.py
Normal file
64
handlers/pixiv/pixiv_handler.py
Normal file
@ -0,0 +1,64 @@
|
||||
import re
|
||||
import time
|
||||
import os
|
||||
|
||||
from handlers.pixiv.comb import extract_pixiv_info, get_prefix_name_on_entry
|
||||
from handlers.pixiv.Webdav import Webdav
|
||||
from handlers.pixiv.request import downloadPic
|
||||
from handlers.pixiv.file import save_file, remove_file
|
||||
|
||||
|
||||
# Module-level WebDAV client, built once at import time entirely from
# environment variables.
# NOTE(review): if any env var is unset, os.getenv returns None and the
# client is constructed with None credentials -- confirm the deployment
# always provides webdav_hostname/login/password/default_upload_path.
webdav = Webdav({
    'webdav_hostname': os.getenv('webdav_hostname'),
    'webdav_login': os.getenv('webdav_login'),
    'webdav_password': os.getenv('webdav_password'),
    'disable_check': True,
    'default_upload_path': os.getenv('default_upload_path')
})
|
||||
|
||||
# 处理结果
|
||||
# Process feed entries: download every linked picture and sync it to WebDAV.
def pixiv_result_handler(entries):
    """Download the pictures referenced by *entries* and sync them to WebDAV.

    Returns ``(success_entries, failed_entries)`` -- the original entry
    dicts partitioned by download/upload outcome.

    FIXES: the local ``list`` no longer shadows the builtin, and the
    file-name regex is compiled once instead of per URL.
    """
    pixiv_items = extract_pixiv_info(entries)

    success_entries = []
    failed_entries = []

    # Hoisted: matches the trailing '<name>.jpg' / '<name>.png' of a URL.
    file_name_pattern = re.compile(r'\/(\w*\.(?:jpg|png))$')

    for item in pixiv_items:
        prefix_name = get_prefix_name_on_entry(item)

        result_flag = True
        need_sync_files = []

        for url in item['link']:
            file_name = ','.join(re.findall(file_name_pattern, url))

            if file_name:
                full_name = f'{prefix_name} {file_name}'
                # Replace characters that are illegal in file names.
                full_name = re.sub(r'[\/\\\:\*\?\"\<\>\|]', "_", full_name)
                (status, data) = downloadPic(url)
                if status:
                    save_file(full_name, data)
                    need_sync_files.append({ 'id': item['id'], 'file_name': full_name })
                else:
                    result_flag = False
                    # Back off before attempting the next download.
                    time.sleep(10)

        if result_flag:
            (success_ids, failed_ids) = webdav.batch_sync(need_sync_files)

            for entry in entries:
                if entry['id'] in success_ids:
                    success_entries.append(entry)
                elif entry['id'] in failed_ids:
                    failed_entries.append(entry)
        else:
            # At least one download failed: mark this item's entry failed.
            for entry in entries:
                if entry['id'] == item['id']:
                    failed_entries.append(entry)

    return (success_entries, failed_entries)
|
||||
|
||||
|
21
handlers/pixiv/request.py
Normal file
21
handlers/pixiv/request.py
Normal file
@ -0,0 +1,21 @@
|
||||
import requests
|
||||
import os
|
||||
|
||||
# Suppress urllib3's InsecureRequestWarning: downloadPic below issues
# requests with verify=False, which would otherwise warn on every call.
requests.packages.urllib3.disable_warnings()
|
||||
|
||||
def downloadPic(url):
    """Download the picture at *url* through the configured proxies.

    Returns ``(status, data)``: ``(True, bytes)`` on success,
    ``(False, None)`` on any request failure.

    FIXES: the bare ``except:`` (which also caught KeyboardInterrupt and
    SystemExit) is narrowed to requests.RequestException, and the
    response is now closed in a ``finally`` so it no longer leaks when
    reading ``res.content`` raises.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36 QIHU 360SE",
    }
    data = None
    status = False
    try:
        # Proxy settings come from the standard env vars, if set.
        proxies = {'https': os.getenv('https_proxy'), 'http': os.getenv('http_proxy')}
        res = requests.get(url, headers = headers, verify=False, timeout=(5,5), proxies=proxies)
        try:
            data = res.content
            status = True
        finally:
            res.close()
        print(f'Info: download success {url}')
    except requests.RequestException:
        print(f'Error: download failed {url}')
    return (status, data)
|
Reference in New Issue
Block a user