#!/usr/bin/env python
# vim: et:ts=4:sw=4:sts=4
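"""Build OxJS: read source/, write the build/ tree (JavaScript minified) and a
dev/ tree of symlinks back into source/, regenerate the modern-theme SVGs and
the JSON manifests, and, with -downloads, pack source and build tarballs into
downloads/."""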

import base64
import json
import os
import ox
import re
import shutil
import subprocess
import sys
import tarfile
import time

def build_oxjs(downloads=False, geo=False):
    base_path = os.path.dirname(__file__)
    if base_path:
        os.chdir(base_path)

    root_path = '../../'
    source_path = root_path + 'source/'
    build_path = root_path + 'build/'
    dev_path = root_path + 'dev/'

    version = '0.1.%s' % subprocess.Popen(
        ['bzr', 'revno'],
        stdout=subprocess.PIPE
    ).communicate()[0].strip()
    year = time.strftime('%Y', time.gmtime())
    comment = ' OxJS %s (c) %s 0x2620, dual-licensed GPL/MIT, see https://oxjs.org for details ' % (version, year)

    # SVGs
    path = source_path + 'Ox.UI/themes/classic/svg/'
    for filename in os.listdir(path):
        svg = read_file(path + filename)
        svg = svg.replace('#000000', '#XXXXXX').replace('#404040', '#C0C0C0').replace('#FFFFFF', '#000000').replace('#XXXXXX', '#FFFFFF')
        write_file(path.replace('/classic/', '/modern/') + filename, svg)
    imageURLs = {}
    imageDataURLs = {}
    for theme in ['classic', 'modern']:
        path = source_path + 'Ox.UI/themes/' + theme + '/svg/'
        for filename in os.listdir(path):
            if not filename[0] in '._' and not filename.endswith('~'):
                key = theme + '/' + filename[:-4]
                imageURLs[key] = os.path.join(path, filename).replace(source_path, '')
                data = re.sub('\n\s+', '', read_file(path + filename))
                imageDataURLs[key] = 'data:image/svg+xml;base64,' + base64.b64encode(data)
    write_file(build_path + 'Ox.UI/json/Ox.UI.imageURLs.json', json.dumps(imageURLs, sort_keys=True))
    write_file(build_path + 'Ox.UI/json/Ox.UI.imageDataURLs.json', json.dumps(imageDataURLs, sort_keys=True))
    write_file(dev_path + 'Ox.UI/json/Ox.UI.imageURLs.json', json.dumps(imageURLs, indent=4, sort_keys=True))
    write_file(dev_path + 'Ox.UI/json/Ox.UI.imageDataURLs.json', json.dumps(imageDataURLs, indent=4, sort_keys=True))

    # copy & link
    ui_files = {'build': [], 'dev': []}
    for path, dirnames, filenames in os.walk(source_path):
        for filename in filenames:
            if not '_' in path and not filename[0] in '._' \
                    and not filename.endswith('~') \
                    and (geo or not '/Ox.Geo/' in path):
                # write copies in build path
                source = os.path.join(path, filename)
                is_jquery = re.match('^jquery-[\d\.]+\.js$', filename)
                is_jquery_min = re.match('^jquery-[\d\.]+\.min\.js$', filename)
                is_jquery_plugin = re.match('^jquery\..*?\.js$', filename)
                if is_jquery or is_jquery_min:
                    target = os.path.join(path.replace(source_path, build_path), 'jquery.js')
                else:
                    target = os.path.join(path.replace(source_path, build_path), filename)
                if is_jquery_plugin:
                    ui_files['build'].append(target.replace(build_path, ''))
                    ui_files['dev'].append(target.replace(build_path, ''))
                if not '/Ox/js' in source and not '/Ox.UI/js/' in source and not is_jquery:
                    if re.match('^Ox\..+\.js$', filename):
                        js = read_file(source)
                        write_file(target, ox.js.minify(js, comment))
                    else:
                        copy_file(source, target)
                # write links in dev path
                parts = os.path.join(path.replace(source_path, ''), filename).split('/')
                for i, part in enumerate(parts):
                    if i < len(parts) - 1:
                        parts[i] = '..'
                link_source = '/'.join(parts).replace(filename, os.path.join(path, filename))[3:]
                link_target = target.replace(build_path, dev_path)
                if not is_jquery_min:
                    write_link(link_source, link_target)

    # Ox.js
    # FIXME: Document what exactly the following dependencies are!
    filenames = [
        ['Fallback.js', 'Core.js'],
        # Constants.js needs Ox.toArray to determine Ox.PATH
        ['Array.js', 'Math.js']
    ]
    js = ''
    js_dir = 'Ox/js/'
    ox_files = [[], [], []]
    for filename in filenames[0]:
        ox_files[0].append(js_dir + filename)
    for filename in filenames[1]:
        ox_files[1].append(js_dir + filename)
    filenames = filenames[0] + filenames[1]
    for filename in os.listdir(source_path + js_dir):
        if not filename in filenames \
                and not filename.startswith('.') \
                and not filename.endswith('~'):
            filenames.append(filename)
    for filename in filenames:
        js += read_file(source_path + js_dir + filename) + '\n'
        if not js_dir + filename in ox_files[0] + ox_files[1]:
            ox_files[2].append(js_dir + filename)
    js = re.sub("Ox.VERSION = '([\d\.]+)'", "Ox.VERSION = '%s'" % version, js)
    write_file(build_path + 'Ox.js', ox.js.minify(js, comment))
    write_file(dev_path + '/Ox/json/' + 'Ox.json', json.dumps(ox_files, indent=4))

    # Ox.UI
    js = ''
    root = source_path + 'Ox.UI/'
    for path, dirnames, filenames in os.walk(root):
        for filename in sorted(filenames):
            # jquery gets included by Ox.UI loader
            # theme css files get included by main css
            # svgs are loaded as URLs or dataURLs
            # browser images appear before load
            if path != root and not '_' in path and not filename[0] in '._'\
                    and not filename.endswith('~')\
                    and not 'jquery' in filename\
                    and not ('/themes/' in path and filename.endswith('.css'))\
                    and not filename.endswith('.svg')\
                    and not filename.startswith('browser'):
                ui_files['dev'].append(os.path.join(path.replace(source_path, ''), filename))
                if not '/js/' in path:
                    ui_files['build'].append(os.path.join(path.replace(source_path, ''), filename))
                if filename.endswith('.js'):
                    js += read_file(os.path.join(path, filename)) + '\n'
    filename = build_path + 'Ox.UI/js/Ox.UI.js'
    write_file(filename, ox.js.minify(js, comment))
    ui_files['build'].append(filename.replace(build_path, ''))
    files = json.dumps(sorted(ui_files['build']))
    write_file(build_path + 'Ox.UI/json/Ox.UI.files.json', files)
    files = json.dumps(sorted(ui_files['dev']), indent=4)
    write_file(dev_path + 'Ox.UI/json/Ox.UI.files.json', files)

    # index
    data = {
        # sum(list, []) is flatten
        'documentation': sorted(sum(ox_files, [])) + sorted(filter(
            lambda x: re.search('\.js$', x),
            ui_files['dev']
        ) + map(
            lambda x: 'Ox.%s/Ox.%s.js' % (x, x),
            ['Geo', 'Image', 'Unicode']
        )),
        'examples': filter(
            lambda x: not re.search('^[\._]', x),
            os.listdir(root_path + 'examples/')
        ),
        'readme': map(
            lambda x: {
                'date': time.strftime(
                    '%Y-%m-%d %H:%M:%S',
                    time.gmtime(os.path.getmtime(root_path + 'readme/html/' + x))
                ),
                'id': x.split('.')[0],
                'title': get_title(root_path + 'readme/html/' + x)
            },
            filter(
                lambda x: not re.search('^[\._]', x),
                os.listdir(root_path + 'readme/html/')
            )
        ),
        'version': version
    }
    write_file(root_path + 'index.json', json.dumps(data, indent=4, sort_keys=True))

    # downloads
    if downloads:
        # source
        download_path = root_path + 'downloads/'
        source_file = 'OxJS.%s.source.tar.gz' % version
        source_tar = tarfile.open(download_path + source_file, 'w:gz')
        source_tar.add(root_path, arcname='OxJS', filter=filter_source)
        source_tar.close()
        write_link(source_file, root_path + 'downloads/OxJS.latest.source.tar.gz')
        # build
        build_file = 'OxJS.%s.build.tar.gz' % version
        build_tar = tarfile.open(download_path + build_file, 'w:gz')
        build_tar.add(root_path, arcname='OxJS', filter=filter_build)
        build_tar.close()
        write_link(build_file, root_path + 'downloads/OxJS.latest.build.tar.gz')

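# I/O helpers: every write goes through write_path(), so target directories
# are created on demand.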
def copy_file(source, target):
    print 'copying', source, 'to', target
    write_file(target, read_file(source))

def filter_build(tarinfo):
    if tarinfo.name == 'OxJS':
        return tarinfo
    if re.search('^OxJS/build', tarinfo.name):
        return tarinfo
    return None

def filter_source(tarinfo):
    if re.search('^OxJS/(demos|downloads/|tools/geo/png/icons/)', tarinfo.name):
        return None
    if re.search('/\.', tarinfo.name) or re.search('(\.gz|[~])$', tarinfo.name):
        return None
    return tarinfo

def get_title(file):
    match = re.search('<h1>(.+)</h1>', read_file(file))
    return match.groups()[0] if match else 'Untitled'

def read_file(file):
    print 'reading', file
    f = open(file)
    data = f.read()
    f.close()
    return data

def write_file(file, data):
    print 'writing', file
    write_path(file)
    f = open(file, 'w')
    f.write(data)
    f.close()
    return len(data)

def write_link(source, target):
    print 'linking', source, 'to', target
    write_path(target)
    if os.path.exists(target):
        os.unlink(target)
    os.symlink(source, target)

def write_path(file):
    path = os.path.split(file)[0]
    if path and not os.path.exists(path):
        os.makedirs(path)

if __name__ == '__main__':
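    # command-line flags: -downloads also packs the tarballs, -nogeo skips
    # the Ox.Geo module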
    build_oxjs(downloads='-downloads' in sys.argv, geo=not '-nogeo' in sys.argv)