Fix download of URLs with UTF-8 chars in path

refs #650
This commit is contained in:
Noam Meltzer 2017-06-23 01:48:24 +03:00
parent 564d361cdf
commit ebb2556176
2 changed files with 9 additions and 6 deletions

View file

@ -1,2 +1,2 @@
-future>=0.15.2
+future>=0.16.0
 certifi

View file

@ -17,9 +17,10 @@
 # You should have received a copy of the GNU Lesser Public License
 # along with this program.  If not, see [http://www.gnu.org/licenses/].
 """This module contains an object that represents a Telegram File."""
 from os.path import basename
+from future.backports.urllib import parse as urllib_parse
 from telegram import TelegramObject
@ -46,8 +47,7 @@ class File(TelegramObject):
         # Optionals
         self.file_size = file_size
-        if file_path:
-            self.file_path = str(file_path)
+        self.file_path = file_path
         self.bot = bot
@ -91,7 +91,10 @@ class File(TelegramObject):
         if custom_path is not None and out is not None:
             raise ValueError('custom_path and out are mutually exclusive')

-        url = self.file_path
+        # Convert any UTF-8 char into a url encoded ASCII string.
+        sres = urllib_parse.urlsplit(self.file_path)
+        url = urllib_parse.urlunsplit(urllib_parse.SplitResult(
+            sres.scheme, sres.netloc, urllib_parse.quote(sres.path), sres.query, sres.fragment))

         if out:
             buf = self.bot.request.retrieve(url)
@ -101,6 +104,6 @@ class File(TelegramObject):
         if custom_path:
             filename = custom_path
         else:
-            filename = basename(url)
+            filename = basename(self.file_path)

         self.bot.request.download(url, filename, timeout=timeout)