better performance of ox.js.minify
parent 46278349e3
commit c2de06d9d8
2 changed files with 11 additions and 2 deletions
@@ -294,7 +294,7 @@ def copy_file(source, target, verbose=False):
 def read_file(file, verbose=False):
     if verbose:
         print('reading', file)
-    f = open(file)
+    f = open(file, 'rb')
     data = f.read()
     f.close()
     return data
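With 'rb', read_file now returns bytes rather than text on Python 3. A minimal caller sketch (the file name and the decode step are illustrative, not part of this commit):

    data = read_file('app.js')        # bytes after this change
    text = data.decode('utf-8')       # callers that need text must decode explicitly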
@@ -309,8 +309,10 @@ def read_json(file, verbose=False):
 def write_file(file, data, verbose=False):
     if verbose:
         print('writing', file)
+    if not isinstance(data, bytes):
+        data = data.encode('utf-8')
     write_path(file)
-    f = open(file, 'w')
+    f = open(file, 'wb')
     f.write(data)
     f.close()
     return len(data)
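write_file now accepts either str or bytes: str is encoded to UTF-8 before the binary write, and the return value counts the bytes actually written. A usage sketch (the ox.file import path is an assumption; this page does not show the name of the first changed file):

    from ox.file import write_file  # assumed module path, not shown on this page

    n = write_file('/tmp/example.txt', u'café')   # str is encoded to UTF-8 first
    print(n)                                      # 5 bytes, not 4 characters
    write_file('/tmp/example.bin', b'\x00\xff')   # bytes are written unchanged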
ox/js.py: 7 additions
@@ -1,6 +1,7 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
+import sys
 
 from ox.utils import json
 
@@ -8,6 +9,12 @@ def minify(source, comment=''):
     # see https://github.com/douglascrockford/JSMin/blob/master/README
     def get_next_non_whitespace_token():
         pass
+    # python2 performance with unicode string is terrible
+    if sys.version[0] == '2':
+        if isinstance(source, unicode):
+            source = source.encode('utf-8')
+        if isinstance(comment, unicode):
+            comment = comment.encode('utf-8')
     tokens = tokenize(source)
     length = len(tokens)
     minified = '/*' + comment + '*/' if comment else ''
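The new branch is a fast path: CPython 2 scans byte strings considerably faster than unicode objects in character-at-a-time code like this tokenizer, so minify encodes its inputs to UTF-8 str once up front. Checking sys.version[0], the first character of the version string, keeps the guard valid on both interpreters, where the unicode type only exists on Python 2. An illustrative micro-benchmark, Python 2 only (numbers vary by interpreter build; on Python 3 both literals are the same type and the gap disappears):

    # Python 2 only: compare char-by-char scanning of str vs unicode
    import timeit
    setup = "s = 'a' * 100000; u = u'a' * 100000"
    print(timeit.timeit('for c in s: pass', setup=setup, number=100))  # byte string
    print(timeit.timeit('for c in u: pass', setup=setup, number=100))  # unicode, slower on Python 2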