Less magic in oxd: `files` returns files with path, stats, and oshash; `info` is a dict keyed by oshash with the related info.

This commit is contained in:
j 2010-08-11 00:10:26 +02:00
parent 05a922defb
commit edc69304e0
3 changed files with 67 additions and 81 deletions

View file

@ -285,8 +285,6 @@ class Database(object):
db = [
'''CREATE TABLE IF NOT EXISTS file (
path varchar(1024) unique,
folder varchar(1024),
filename varchar(1024),
oshash varchar(16),
atime FLOAT,
ctime FLOAT,
@ -347,17 +345,15 @@ class Database(object):
def file(self, oshash):
conn, c = self.conn()
f = {}
sql = 'SELECT path, folder, filename, info FROM file WHERE oshash=?'
sql = 'SELECT path, info FROM file WHERE oshash=?'
c.execute(sql, (oshash, ))
for row in c:
f['path'] = row[0]
f['folder'] = row[1]
f['filename'] = row[2]
f['info'] = json.loads(row[3])
f['info'] = json.loads(row[2])
break
return f
def files(self, site, user, volume, since=None):
def files(self, site, user, volume):
conn, c = self.conn()
c.execute('SELECT path from volume where site=? AND user=? AND name=?', (site, user, volume))
prefix = None
@ -365,32 +361,28 @@ class Database(object):
prefix = row[0]
if not prefix:
return {}
#since 2 volumes can have the same file/folder, needs some check for that or other structure
def get_files(files, key, sql, t=()):
t = list(t) + [u"%s%%"%prefix]
c.execute(sql, t)
for row in c:
folder = row[0]
filename = row[1]
info = json.loads(row[2])
if key:
if not key in files: files[key]={}
if not folder in files[key]: files[key][folder]={}
files[key][folder][filename] = info
else:
if not folder in files: files[folder]={}
files[folder][filename] = info
files = {}
sql_prefix = 'SELECT folder, filename, info FROM file WHERE '
sql_postfix = ' deleted < 0 AND path LIKE ? ORDER BY path'
if since:
get_files(files, 'deleted', sql_prefix + 'deleted >= ? ORDER BY path' , (since, ))
get_files(files, 'modified',
sql_prefix + 'created < ? AND modified >= ? AND'+sql_postfix,
(since, since))
get_files(files, 'new', sql_prefix + 'created >= ? AND'+sql_postfix, (since, ))
else:
get_files(files, None, sql_prefix + sql_postfix)
files['info'] = {}
files['files'] = []
sql = 'SELECT path, oshash, info, atime, ctime, mtime FROM file WHERE deleted < 0 AND path LIKE ? ORDER BY path'
t = [u"%s%%"%prefix]
c.execute(sql, t)
for row in c:
path = row[0]
oshash = row[1]
info = json.loads(row[2])
for key in ('atime', 'ctime', 'mtime', 'path'):
if key in info:
del info[key]
files['info'][oshash] = info
files['files'].append({
'oshash': oshash,
'path': path[len(prefix)+1:],
'atime': row[3],
'ctime': row[4],
'mtime': row[5],
})
return files
#derivative
@ -453,7 +445,7 @@ class Database(object):
self.derivative(oshash, name, STATUS_FAILED)
#volumes
def update(self, path, folder, filename):
def update(self, path):
conn, c = self.conn()
update = True
@ -470,13 +462,11 @@ class Database(object):
break
if update:
info = avinfo(path)
for key in ('atime', 'ctime', 'mtime'):
info[key] = getattr(stat, 'st_'+key)
oshash = info['oshash']
deleted = -1
t = (path, folder, filename, oshash, stat.st_atime, stat.st_ctime, stat.st_mtime,
t = (path, oshash, stat.st_atime, stat.st_ctime, stat.st_mtime,
stat.st_size, json.dumps(info), created, modified, deleted)
c.execute(u'INSERT OR REPLACE INTO file values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', t)
c.execute(u'INSERT OR REPLACE INTO file values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', t)
conn.commit()
def spider(self, path):
@ -486,14 +476,13 @@ class Database(object):
if isinstance(dirpath, str):
dirpath = dirpath.decode('utf-8')
if filenames:
prefix = dirpath[len(path)+1:]
for filename in sorted(filenames):
if isinstance(filename, str):
filename = filename.decode('utf-8')
if not filename.startswith('._') and not filename in ('.DS_Store', ):
file_path = os.path.join(dirpath, filename)
files.append(file_path)
self.update(file_path, prefix, filename)
self.update(file_path)
conn, c = self.conn()
c.execute('SELECT path FROM file WHERE path LIKE ? AND deleted < 0', ["%s%%"%path])

View file

@ -192,7 +192,7 @@ OxFF.prototype = {
req.addEventListener("error", function(e) {
_this.startDaemon();
ox.setTimeout(function() { _this.api(action, data, callback); }, 500);
ox.setTimeout(function() { _this.api(action, data, callback); }, 1000);
}, false);
req.addEventListener("load", function(e) {
//links should have base prefixed or whatever proxy is used to access them, i.e. some api call

View file

@ -17,7 +17,7 @@ pandora.request = function(fn, data, callback) {
success: function(response) {
if (typeof callback != "undefined")
callback(response);
console.log(response);
//console.log(response);
},
error: function (xhr, ajaxOptions, thrownError){
var response = {};
@ -112,42 +112,32 @@ function update() {
var volumes = JSON.parse(result);
for(volume in volumes) {
ox.files(volume, function(result) {
var _files = [];
var _info = {};
var data = JSON.parse(result);
$.each(data, function(folder, files) {
$.each(files, function(f, info) {
var f = {
oshash: info.oshash,
name: f,
folder: folder,
ctime: info.ctime, atime: info.atime, mtime: info.mtime
};
_files.push(f);
_info[info.oshash] = info;
});
});
pandora.request('update', {
'volume': volume, 'files': _files
'volume': volume, 'files': data.files
}, function(result) {
var data = {'info': {}};
if (result.data.info.length>0) {
$.each(_info, function(oshash, info) {
if($.inArray(oshash, result.data.info) >= 0) {
data.info[oshash] = info;
}
var post = {'info': {}};
function highlight_resulsts(result) {
$.each(result.data.data, function(i, oshash) {
$('#' + oshash).css('background', 'red');
$('#' + oshash).parent().css('background', 'orange');
});
pandora.request('update', data, function(result) {
console.log(result);
$.each(result.data.file, function(i, oshash) {
$('#' + oshash).css('background', 'blue');
});
}
$.each(result.data.data, function(i, oshash) {
$('#' + oshash).css('background', 'red');
$('#' + oshash).parent().css('background', 'orange');
});
$.each(result.data.file, function(i, oshash) {
$('#' + oshash).css('background', 'blue');
});
if (result.data.info.length>0) {
$.each(data.info, function(oshash, info) {
if($.inArray(oshash, result.data.info) >= 0) {
post.info[oshash] = info;
}
});
pandora.request('update', post, function(result) {
highlight_resulsts(result);
});
} else {
highlight_resulsts(result);
}
});
});
}
@ -168,8 +158,17 @@ function update() {
ox.files(volume, function(result) {
var data = JSON.parse(result);
var _files = [];
$.each(data, function(folder, files) {
if(!folder) folder = "rootfolder";
$.each(data.files, function(i, file) {
var folder = file.path.split('/');
folder.pop();
if(folder.length==0) {
folder.push("rootfolder");
}
//FIXME: this is also done on the backend but might require more sub options
if (folder[folder.length-1] == "Extras" || folder[folder.length-1] == "Versions")
folder.pop();
folder = folder.join('/');
var folderId = 'folder_'+safe_id(folder);
var $folder = $('#'+folderId);
if($folder.length==0) {
@ -177,14 +176,12 @@ function update() {
$folder.find('h3').click(function() { $(this).parent().find('div').toggle();});
$volume.append($folder);
}
for_each_sorted(files, function(f, info) {
var fileId = info.oshash;
var $file = $('#'+fileId);
if($file.length==0) {
$file = $('<div>').attr('id', fileId).html(f).hide();
$folder.append($file);
}
});
var fileId = file.oshash;
var $file = $('#'+fileId);
if($file.length==0) {
$file = $('<div>').attr('id', fileId).html(file.path).hide();
$folder.append($file);
}
});
});
}(volume));