avoid reading file to RAM in ox.net.save_url
This commit is contained in:
parent
e72d5bb6c1
commit
aaf30c35a0
1 changed file with 16 additions and 3 deletions
19
ox/net.py
19
ox/net.py
|
@@ -8,6 +8,11 @@ import os
|
|||
import re
|
||||
import struct
|
||||
|
||||
try:
|
||||
import requests
|
||||
USE_REQUESTS = True
|
||||
except:
|
||||
USE_REQUESTS = False
|
||||
from six import BytesIO, PY2
|
||||
from six.moves import urllib
|
||||
from chardet.universaldetector import UniversalDetector
|
||||
|
@@ -117,9 +122,17 @@ def save_url(url, filename, overwrite=False):
|
|||
dirname = os.path.dirname(filename)
|
||||
if dirname and not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
data = read_url(url)
|
||||
with open(filename, 'wb') as f:
|
||||
f.write(data)
|
||||
headers = DEFAULT_HEADERS.copy()
|
||||
if USE_REQUESTS:
|
||||
r = requests.get(url, headers=headers, stream=True)
|
||||
with open(filename, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
f.write(chunk)
|
||||
else:
|
||||
data = read_url(url)
|
||||
with open(filename, 'wb') as f:
|
||||
f.write(data)
|
||||
|
||||
def _get_size(url):
|
||||
req = urllib.request.Request(url, headers=DEFAULT_HEADERS.copy())
|
||||
|
|
Loading…
Reference in a new issue