Catch and retry on ssl errors

kermit 2020-09-16 15:44:15 +01:00
parent 3e93a9f180
commit e0194c8241
3 changed files with 30 additions and 22 deletions

View File

@@ -31,6 +31,9 @@ class Deezer:
         self.session = requests.Session()
         self.logged_in = False
+        self.session.mount('http://', requests.adapters.HTTPAdapter(pool_maxsize=100))
+        self.session.mount('https://', requests.adapters.HTTPAdapter(pool_maxsize=100))

     def get_token(self):
         token_data = self.gw_api_call('deezer.getUserData')
         return token_data["results"]["checkForm"]
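
The two mounted adapters above raise the connection-pool size on the shared Session: requests' default HTTPAdapter keeps at most 10 connections per host, which a large concurrent download queue exhausts quickly. A minimal standalone sketch of the same idea (example.com is just a placeholder URL):

    import requests
    from requests.adapters import HTTPAdapter

    # One shared Session whose keep-alive pool is large enough for many
    # concurrent transfers; the default pool_maxsize is 10.
    session = requests.Session()
    adapter = HTTPAdapter(pool_maxsize=100)
    session.mount('http://', adapter)
    session.mount('https://', adapter)

    # Requests made through this session reuse pooled connections per host.
    response = session.get('https://example.com/', timeout=10)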
@@ -587,7 +590,6 @@ class Deezer:
             eventlet.sleep(2)
             return self.stream_track(track_id, url, stream)
         request.raise_for_status()
-        eventlet.sleep(0)
         blowfish_key = str.encode(self._get_blowfish_key(str(track_id)))
         i = 0
         for chunk in request.iter_content(2048):
@@ -596,7 +598,6 @@ class Deezer:
                 chunk)
             stream.write(chunk)
             i += 1
-            eventlet.sleep(0)

     def _md5(self, data):
         h = MD5.new()
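
For context on the download loop touched by the two hunks above: only every third 2048-byte chunk of a Deezer track stream (the first 2048 bytes of each 6144-byte group) is Blowfish-CBC encrypted, which is what the i % 3 == 0 and len(chunk) == 2048 test selects. A standalone sketch of that decryption step, assuming a pycryptodome-style Blowfish module (matching the Blowfish.new(...) calls in the diff) and a blowfish_key derived elsewhere:

    from Crypto.Cipher import Blowfish  # pycryptodome-style API, assumed here

    BF_IV = b"\x00\x01\x02\x03\x04\x05\x06\x07"

    def decrypt_track_chunks(chunks, blowfish_key):
        # Only the first 2048-byte chunk of every group of three is encrypted;
        # each one is decrypted with a fresh CBC cipher and the fixed IV above.
        for i, chunk in enumerate(chunks):
            if i % 3 == 0 and len(chunk) == 2048:
                chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, BF_IV).decrypt(chunk)
            yield chunk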

View File

@@ -8,6 +8,7 @@ requests = eventlet.import_patched('requests')
 get = requests.get
 request_exception = requests.exceptions
+from ssl import SSLError
 from os import makedirs, remove, system as execute
 from tempfile import gettempdir
@@ -583,38 +584,43 @@ class DownloadJob:
         return error_num # fallback is enabled and loop went through all formats

-    def streamTrack(self, stream, track):
+    def streamTrack(self, stream, track, range=None):
         if self.queueItem.cancel: raise DownloadCancelled
         try:
-            request = get(track.downloadUrl, headers=self.dz.http_headers, stream=True, timeout=30)
+            headers=self.dz.http_headers
+            if range is not None:
+                headers['Range'] = range
+            request = self.dz.session.get(track.downloadUrl, headers=self.dz.http_headers, stream=True, timeout=10)
         except request_exception.ConnectionError:
             eventlet.sleep(2)
             return self.streamTrack(stream, track)
         request.raise_for_status()
-        eventlet.sleep(0)
         blowfish_key = str.encode(self.dz._get_blowfish_key(str(track.id)))
         complete = int(request.headers["Content-Length"])
         if complete == 0:
             raise DownloadEmpty
         chunkLength = 0
         percentage = 0
-        i = 0
-        for chunk in request.iter_content(2048):
-            if self.queueItem.cancel: raise DownloadCancelled
-            if i % 3 == 0 and len(chunk) == 2048:
-                chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk)
-            stream.write(chunk)
-            chunkLength += len(chunk)
-            if isinstance(self.queueItem, QISingle):
-                percentage = (chunkLength / complete) * 100
-                self.downloadPercentage = percentage
-            else:
-                chunkProgres = (len(chunk) / complete) / self.queueItem.size * 100
-                self.downloadPercentage += chunkProgres
-            self.updatePercentage()
-            i += 1
-            eventlet.sleep(0)
+        try:
+            for chunk in request.iter_content(2048 * 3):
+                eventlet.sleep(0)
+                if self.queueItem.cancel: raise DownloadCancelled
+                if len(chunk) >= 2048:
+                    chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk[0:2048]) + chunk[2048:]
+                stream.write(chunk)
+                chunkLength += len(chunk)
+                if isinstance(self.queueItem, QISingle):
+                    percentage = (chunkLength / complete) * 100
+                    self.downloadPercentage = percentage
+                else:
+                    chunkProgres = (len(chunk) / complete) / self.queueItem.size * 100
+                    self.downloadPercentage += chunkProgres
+                self.updatePercentage()
+        except SSLError:
+            range = f'bytes={chunkLength}-'
+            logger.info(f'retrying {track.title} with range {range}')
+            return self.streamTrack(stream, track, range)

     def updatePercentage(self):
         if round(self.downloadPercentage) != self.lastPercentage and round(self.downloadPercentage) % 2 == 0:
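
This hunk is the point of the commit: if the TLS connection dies mid-transfer, the SSLError is caught and streamTrack calls itself with an HTTP Range header built from chunkLength, so the download resumes at the byte already written instead of restarting from zero. Below is a standalone sketch of that retry pattern with hypothetical names (download_with_resume, stream) and a plain requests.Session. It copies the header dict so the Range entry cannot persist between calls (in the diff, headers aliases self.dz.http_headers), and it leaves out the Blowfish decryption that the real method layers on top.

    from ssl import SSLError
    import requests

    def download_with_resume(session, url, stream, base_headers=None, written=0):
        # Hypothetical helper, not deemix's API: stream `url` into the writable
        # file object `stream`, resuming from the current offset after SSL errors.
        headers = dict(base_headers or {})           # copy so 'Range' never leaks
        if written:
            headers['Range'] = f'bytes={written}-'   # resume from this byte
        response = session.get(url, headers=headers, stream=True, timeout=10)
        response.raise_for_status()
        try:
            for chunk in response.iter_content(2048 * 3):
                stream.write(chunk)
                written += len(chunk)
        except SSLError:
            # TLS connection dropped mid-transfer: retry from the bytes written so far.
            return download_with_resume(session, url, stream, base_headers, written)
        return written

A Range retry assumes the server honours partial-content requests (HTTP 206); a server that ignores the header would resend the whole file.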

View File

@@ -8,7 +8,8 @@ import logging
 import os.path as path
 import json
 from os import remove
-from urllib.request import urlopen
+import eventlet
+urlopen = eventlet.import_patched('urllib.request').urlopen

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger('deemix')
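
eventlet.import_patched imports a module with its blocking primitives (sockets, DNS, time.sleep) swapped for eventlet's cooperative green versions, so its network calls yield to other greenlets instead of stalling the whole process; that is why urlopen is re-imported through it here, mirroring how requests is imported in the file above. A minimal sketch of the pattern (the example.com URLs are placeholders):

    import eventlet

    # urllib.request imported with green (non-blocking) sockets: urlopen() now
    # yields to other greenlets while it waits on the network.
    urlopen = eventlet.import_patched('urllib.request').urlopen

    def fetch(url):
        # Hypothetical helper: download one resource cooperatively.
        with urlopen(url, timeout=10) as response:
            return response.read()

    pool = eventlet.GreenPool(10)
    for body in pool.imap(fetch, ['http://example.com/a', 'http://example.com/b']):
        print(len(body))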