avoid reading file to ram in ox.net.save_url
This commit is contained in:
parent
e72d5bb6c1
commit
aaf30c35a0
1 changed file with 16 additions and 3 deletions
13
ox/net.py
13
ox/net.py
|
@@ -8,6 +8,11 @@ import os
|
|||
import re
|
||||
import struct
|
||||
|
||||
try:
|
||||
import requests
|
||||
USE_REQUESTS = True
|
||||
except:
|
||||
USE_REQUESTS = False
|
||||
from six import BytesIO, PY2
|
||||
from six.moves import urllib
|
||||
from chardet.universaldetector import UniversalDetector
|
||||
|
@@ -117,6 +122,14 @@ def save_url(url, filename, overwrite=False):
|
|||
dirname = os.path.dirname(filename)
|
||||
if dirname and not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
headers = DEFAULT_HEADERS.copy()
|
||||
if USE_REQUESTS:
|
||||
r = requests.get(url, headers=headers, stream=True)
|
||||
with open(filename, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
f.write(chunk)
|
||||
else:
|
||||
data = read_url(url)
|
||||
with open(filename, 'wb') as f:
|
||||
f.write(data)
|
||||
|
|
Loading…
Reference in a new issue