mirror of https://github.com/zlatinb/muwire
fix browsing failures when requests return no files or folders
parent 3fa4eed3cc
commit a3e2bc8d23
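The failure being fixed is easiest to see on the receiving side: the old client always wrapped the connection in a GZIPInputStream, but when a browse matched no files and no folders the sender wrote no compressed payload at all, so there was nothing to decompress. A minimal sketch of that failure mode (plain Groovy, not MuWire code; the empty byte array stands in for the missing body):

import java.util.zip.GZIPInputStream

// An empty byte array stands in for a browse response whose gzipped body was never written.
def empty = new ByteArrayInputStream(new byte[0])
try {
    new GZIPInputStream(empty)          // the constructor eagerly reads the GZIP header
    assert false : "unreachable on an empty stream"
} catch (EOFException expected) {
    println "old client fails before parsing anything: $expected"
}

The guards added below make both sides skip the compressed body entirely when there is nothing to send.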
@@ -123,10 +123,12 @@ class BrowseManager {
             JsonOutput jsonOutput = new JsonOutput()
             def baos = new ByteArrayOutputStream()
             def dos = new DataOutputStream(new GZIPOutputStream(baos))
-            writeFiles(topLevelItems.files.values(), dos, jsonOutput)
-            writeDirs(topLevelItems.dirs, dos, jsonOutput)
-            dos.close()
-            os.write(baos.toByteArray())
+            if (count > 0) {
+                writeFiles(topLevelItems.files.values(), dos, jsonOutput)
+                writeDirs(topLevelItems.dirs, dos, jsonOutput)
+                dos.close()
+                os.write(baos.toByteArray())
+            }
             os.flush()
 
             InputStream is = endpoint.getInputStream()
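The counts the remote side reads are written as headers before this hunk, so the change only suppresses the compressed body; an empty share now answers with headers followed by nothing. A hedged sketch of that pattern, with writeEntries standing in for writeFiles/writeDirs and an illustrative header name:

import java.nio.charset.StandardCharsets
import java.util.zip.GZIPOutputStream

// Sketch of the "headers always, gzip body only if non-empty" pattern used above.
void respond(OutputStream os, Collection entries, Closure writeEntries) {
    os.write("Count:${entries.size()}\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
    if (!entries.isEmpty()) {
        def baos = new ByteArrayOutputStream()
        def dos = new DataOutputStream(new GZIPOutputStream(baos))
        writeEntries(entries, dos)
        dos.close()                     // flushes the GZIP trailer into baos
        os.write(baos.toByteArray())
    }
    os.flush()                          // an empty result is just the headers
}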
@@ -168,19 +170,21 @@ class BrowseManager {
                     def cb = new PathCallback()
                     tempTree.traverse(requestedPath, cb)
                     filesToWrite = cb.files
-                    dirsToWrite = Collections.emptySet()
+                    dirsToWrite = cb.dirs
                 }
                 filesToWrite.each {it.hit(browser, System.currentTimeMillis(), "Browse Host")}
                 os.write("Files:${filesToWrite.size()}\r\n".getBytes(StandardCharsets.US_ASCII))
                 os.write("Dirs:${dirsToWrite.size()}\r\n".getBytes(StandardCharsets.US_ASCII))
                 os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
 
-                baos = new ByteArrayOutputStream()
-                dos = new DataOutputStream(new GZIPOutputStream(baos))
-                writeFiles(filesToWrite, dos, jsonOutput)
-                writeDirs(dirsToWrite, dos, jsonOutput)
-                dos.close()
-                os.write(baos.toByteArray())
+                if (filesToWrite.size() + dirsToWrite.size() > 0) {
+                    baos = new ByteArrayOutputStream()
+                    dos = new DataOutputStream(new GZIPOutputStream(baos))
+                    writeFiles(filesToWrite, dos, jsonOutput)
+                    writeDirs(dirsToWrite, dos, jsonOutput)
+                    dos.close()
+                    os.write(baos.toByteArray())
+                }
                 os.flush()
             }
         }
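Two things change in the per-path branch: sub-folders found by the traversal are now reported (dirsToWrite was previously hard-coded to an empty set, so the Dirs: header was always 0), and the gzipped body is skipped entirely when the requested folder is empty. For an empty folder the complete response is therefore just the three header lines; a hedged sketch of what ends up on the wire:

import java.nio.charset.StandardCharsets

// Hedged sketch: the whole response for a folder with no files and no sub-folders.
def out = new ByteArrayOutputStream()
out.write("Files:0\r\n".getBytes(StandardCharsets.US_ASCII))
out.write("Dirs:0\r\n".getBytes(StandardCharsets.US_ASCII))
out.write("\r\n".getBytes(StandardCharsets.US_ASCII))
assert out.size() == 19   // nothing follows the blank line; there is no GZIP stream to open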
@@ -239,9 +243,14 @@ class BrowseManager {
     private static class PathCallback implements PathTreeCallback<BrowsedFile, BrowsedFolder> {
 
         final Set<SharedFile> files = new HashSet<>()
+        final Set<Path> dirs = new HashSet<>()
 
         @Override
         void onDirectoryEnter(Path path, BrowsedFolder value) {
+            if (!value.sent) {
+                value.sent = true
+                dirs.add(value.path)
+            }
         }
 
         @Override
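PathCallback is the visitor handed to the shared-file tree; with the new dirs set and the sent flag, each folder is reported at most once even if several path requests walk through it. A self-contained illustration of that dedup (Folder is a stand-in for BrowsedFolder, which only needs the two fields used above):

import java.nio.file.Path
import java.nio.file.Paths

// Stand-in for BrowsedFolder: just the two fields the callback relies on.
class Folder {
    Path path
    boolean sent
}

Set<Path> dirs = new HashSet<>()
def onDirectoryEnter = { Folder f ->
    if (!f.sent) {          // report each folder only once
        f.sent = true
        dirs.add(f.path)
    }
}

def docs = new Folder(path: Paths.get("docs"), sent: false)
onDirectoryEnter(docs)
onDirectoryEnter(docs)      // a second traversal through the same folder is a no-op
assert dirs.size() == 1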
@@ -136,48 +136,49 @@ class BrowseSession implements Runnable {
             if (!headers.containsKey("Dirs"))
                 throw new Exception("Dirs header missing")
             int dirs = Integer.parseInt(headers['Dirs'])
-            eventBus.publish(new BrowseStatusEvent(host: event.host, status: BrowseStatus.FETCHING,
-                totalResults: results, currentItems: (files + dirs), uuid: uuid))
-            log.info("starting to fetch ${files} files and ${dirs} dirs with uuid $uuid")
+            if (files + dirs > 0) {
+                eventBus.publish(new BrowseStatusEvent(host: event.host, status: BrowseStatus.FETCHING,
+                    totalResults: results, currentItems: (files + dirs), uuid: uuid))
+                log.info("starting to fetch ${files} files and ${dirs} dirs with uuid $uuid")
 
                 JsonSlurper slurper = new JsonSlurper()
                 DataInputStream dis = new DataInputStream(new GZIPInputStream(is))
                 UIResultEvent[] batch = new UIResultEvent[Math.min(BATCH_SIZE, files)]
                 int j = 0
                 for (int i = 0; i < files; i++) {
                     if (closed)
                         return
                     log.fine("parsing result $i at batch position $j")
 
                     def json = readJson(slurper, dis)
                     UIResultEvent result = ResultsParser.parse(event.host, uuid, json)
                     result.chat = chat
                     result.profileHeader = profileHeader
                     batch[j++] = result
 
 
                     // publish piecemally
                     if (j == batch.length) {
                         eventBus.publish(new UIResultBatchEvent(results: batch, uuid: uuid))
                         j = 0
                         batch = new UIResultEvent[Math.min(files - i - 1, BATCH_SIZE)]
                         log.fine("publishing batch, next batch size ${batch.length}")
+                    }
                 }
-            }
-            for (int i = 0; i < dirs; i++) {
-                if (closed)
-                    return
+                for (int i = 0; i < dirs; i++) {
+                    if (closed)
+                        return
 
                     def json = readJson(slurper, dis)
                     if (!json.directory || json.path == null)
                         throw new Exception("Invalid dir json")
                     List<String> path = json.path.collect { DataUtil.readi18nString(Base64.decode(it)) }
                     def event = new UIBrowseDirEvent(uuid: uuid,
                         path: path.toArray(new String[0]))
                     eventBus.publish(event)
+                }
+                eventBus.publish(new BrowseStatusEvent(host: event.host, status: BrowseStatus.FINISHED, uuid: uuid))
             }
-            eventBus.publish(new BrowseStatusEvent(host: event.host, status : BrowseStatus.FINISHED, uuid : uuid))
 
             while(true) {
                 Request nextPath = fetchQueue.poll(PING_INTERVAL, TimeUnit.MILLISECONDS)
                 if (nextPath == null) {
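The client-side guard has to mirror the server exactly: the GZIP stream may only be opened when the headers announce at least one file or folder, otherwise there are no bytes to decompress (see the sketch after the commit header). A hedged outline of that contract, with parseRecord standing in for the real readJson/ResultsParser calls:

import java.util.zip.GZIPInputStream

// Hedged outline of the read side after this change; files come first, then dirs.
void readBrowseBody(InputStream is, int files, int dirs, Closure parseRecord) {
    if (files + dirs == 0)
        return                                    // nothing was sent, so read nothing
    def dis = new DataInputStream(new GZIPInputStream(is))
    (files + dirs).times { parseRecord(dis) }
}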
@@ -37,7 +37,8 @@ class ResultTreeModel extends DefaultTreeModel {
             node = elementNode
         }
 
-        node.addDescendant(new PlaceholderNode())
+        if (node.getChildCount() == 0)
+            node.addDescendant(new PlaceholderNode())
     }
 
     void addToTree(UIResultEvent event) {
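On the GUI side, the placeholder child (presumably the stub that keeps an unfetched folder expandable) is now only attached to nodes that are still empty, so folders whose children already arrived do not get a bogus extra entry. A hedged analogue with plain Swing nodes, since PlaceholderNode and addDescendant are MuWire classes:

import javax.swing.tree.DefaultMutableTreeNode

def node = new DefaultMutableTreeNode("shared folder")
// mirror of the new guard: only childless nodes get the placeholder stub
if (node.getChildCount() == 0)
    node.add(new DefaultMutableTreeNode("placeholder"))
assert node.childCount == 1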