Skip to content

Commit

Permalink
Merge branch 'master' of https://g.iterate.ch/scm/iterate/cyberduck into feature/CD-1143

Browse files Browse the repository at this point in the history

Former-commit-id: 7e399977a8c75af5a1895e650494fe7805d34958
  • Loading branch information
AliveDevil committed Nov 20, 2016
2 parents 755ed63 + caa72a0 commit c0c3af9
Show file tree
Hide file tree
Showing 14 changed files with 389 additions and 88 deletions.
1 change: 0 additions & 1 deletion core/src/main/java/ch/cyberduck/core/Session.java
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,6 @@ public C getClient() {
* @param key Host identity verification callback
* @param transcript Transcript
* @return Client
* @throws BackgroundException
*/
public C open(final HostKeyCallback key, final TranscriptListener transcript) throws BackgroundException {
if(log.isDebugEnabled()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ abstract class AbstractWatchKey implements WatchKey {
/**
* Possible key states
*/
private static enum State {
private enum State {
READY, SIGNALLED
}

Expand Down
80 changes: 39 additions & 41 deletions core/src/main/java/ch/cyberduck/core/worker/ReadMetadataWorker.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import ch.cyberduck.core.Path;
import ch.cyberduck.core.Session;
import ch.cyberduck.core.exception.BackgroundException;
import ch.cyberduck.core.exception.ConnectionCanceledException;
import ch.cyberduck.core.features.Headers;

import org.apache.log4j.Logger;
Expand All @@ -33,6 +32,8 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Stream;

public class ReadMetadataWorker extends Worker<Map<String, String>> {
private static final Logger log = Logger.getLogger(ReadMetadataWorker.class);
Expand All @@ -46,54 +47,51 @@ public ReadMetadataWorker(final List<Path> files) {
this.files = files;
}

/**
* @return Metadata
*/
@Override
public Map<String, String> run(final Session<?> session) throws BackgroundException {
final Headers feature = session.getFeature(Headers.class);
final Map<String, Integer> count = new HashMap<String, Integer>();
final Map<String, String> updated = new HashMap<String, String>() {
@Override
public String put(String key, String value) {
int n = 0;
if(count.containsKey(key)) {
n = count.get(key);
}
count.put(key, ++n);
return super.put(key, value);
}
};
for(Path next : files) {
if(this.isCanceled()) {
throw new ConnectionCanceledException();
}
// Reading HTTP headers custom metadata
next.attributes().setMetadata(feature.getMetadata(next));
final Map<String, String> metadata = next.attributes().getMetadata();

// Map for File > Metadata Set
Map<Path, Map<String, String>> fullMetadata = new HashMap<>();
// Map for metadata entry key > File & Metadata Values
Map<String, Map<Path, String>> graphMetadata = new HashMap<>();

for(Path file : files) {
// Read online metadata
final Map<String, String> metadata = feature.getMetadata(file);
file.attributes().setMetadata(metadata);
fullMetadata.put(file, new HashMap<>(metadata));
// take every entry of current metadata and store it in metaGraph
for(Map.Entry<String, String> entry : metadata.entrySet()) {
// Prune metadata from entries which are unique to a single file.
// For example md5-hash
if(updated.containsKey(entry.getKey())) {
if(!entry.getValue().equals(updated.get(entry.getKey()))) {
log.info(String.format("Nullify %s from metadata because value is not equal for selected files.", entry));
updated.put(entry.getKey(), null);
continue;
}
if(graphMetadata.containsKey(entry.getKey())) {
// if existing, get map, put value
graphMetadata.get(entry.getKey()).put(file, entry.getValue());
}
else {
// if not existent create hashmap and put it back
Map<Path, String> map = new HashMap<>();
graphMetadata.put(entry.getKey(), map);
map.put(file, entry.getValue());
}
updated.put(entry.getKey(), entry.getValue());
}
}
for(Map.Entry<String, Integer> entry : count.entrySet()) {
if(files.size() == entry.getValue()) {
// Every file has this metadata set.
continue;

// Store result metadata in hashmap
Map<String, String> metadata = new HashMap<>();
for(Map.Entry<String, Map<Path, String>> entry : graphMetadata.entrySet()) {
if(entry.getValue().size() != files.size()) {
metadata.put(entry.getKey(), null);
}
else {
// single use of streams, reason: distinct is easier in Streams than it would be writing it manually
Supplier<Stream<String>> valueSupplier = () -> entry.getValue().entrySet().stream().map(y -> y.getValue()).distinct();
// Check count against 1, if it is use that value, otherwise use null
String value = valueSupplier.get().count() == 1 ? valueSupplier.get().findAny().get() : null;
// store it
metadata.put(entry.getKey(), value);
}
// Not all files selected have this metadata. Remove for editing.
log.info(String.format("Remove %s from metadata not available for all selected files.", entry.getKey()));
updated.remove(entry.getKey());
}
return updated;
return metadata;
}

@Override
Expand Down Expand Up @@ -134,4 +132,4 @@ public String toString() {
sb.append('}');
return sb.toString();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@
import ch.cyberduck.core.exception.ConnectionCanceledException;
import ch.cyberduck.core.features.Headers;

import org.apache.commons.lang3.StringUtils;

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

Expand All @@ -52,7 +52,7 @@ public class WriteMetadataWorker extends Worker<Boolean> {

private final ProgressListener listener;

public WriteMetadataWorker(final List<Path> files, final Map<String, String> metadata,
public WriteMetadataWorker(List<Path> files, final Map<String, String> metadata,
final boolean recursive,
final ProgressListener listener) {
this(files, metadata, new BooleanRecursiveCallback<String>(recursive), listener);
Expand Down Expand Up @@ -83,17 +83,28 @@ protected void write(final Session<?> session, final Headers feature, final Path
if(this.isCanceled()) {
throw new ConnectionCanceledException();
}
if(!metadata.equals(file.attributes().getMetadata())) {
for(Map.Entry<String, String> entry : metadata.entrySet()) {
// Prune metadata from entries which are unique to a single file. For example md5-hash.
if(StringUtils.isBlank(entry.getValue())) {
// Reset with previous value
metadata.put(entry.getKey(), file.attributes().getMetadata().get(entry.getKey()));
}
// Read online metadata (storing non-edited metadata entries)
final Map<String, String> update = new HashMap<>(file.attributes().getMetadata());
// purge removed entries
for(Iterator<Map.Entry<String, String>> iterator = update.entrySet().iterator(); iterator.hasNext(); ) {
Map.Entry<String, String> entry = iterator.next();
if(!metadata.containsKey(entry.getKey())) {
iterator.remove();
}
}
// iterate all metadata entries and
for(Map.Entry<String, String> entry : metadata.entrySet()) {
// check if update is non-null (should not) && entry value is not null
if(update.get(entry.getKey()) != null && entry.getValue() != null) {
// update
update.put(entry.getKey(), entry.getValue());
}
}
// If anything has changed save metadata, otherwise continue and do for everything underneath this directory
if(!update.equals(file.attributes().getMetadata())) {
listener.message(MessageFormat.format(LocaleFactory.localizedString("Writing metadata of {0}", "Status"),
file.getName()));
feature.setMetadata(file, metadata);
feature.setMetadata(file, update);
}
if(file.isDirectory()) {
if(callback.recurse(file, LocaleFactory.localizedString("Metadata", "Info"))) {
Expand All @@ -106,8 +117,7 @@ protected void write(final Session<?> session, final Headers feature, final Path

@Override
public String getActivity() {
return MessageFormat.format(LocaleFactory.localizedString("Writing metadata of {0}", "Status"),
this.toString(files));
return MessageFormat.format(LocaleFactory.localizedString("Writing metadata of {0}", "Status"), this.toString(files));
}

@Override
Expand Down Expand Up @@ -142,4 +152,4 @@ public String toString() {
sb.append('}');
return sb.toString();
}
}
}
3 changes: 0 additions & 3 deletions core/src/main/java/ch/cyberduck/ui/browser/RegexFilter.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,6 @@ public class RegexFilter implements Filter<Path> {

@Override
public boolean accept(final Path file) {
if(null == pattern) {
return true;
}
if(pattern.matcher(file.getName()).matches()) {
return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,10 @@ public void setMetadata(final Path file, final Map<String, String> metadata) thr
return super.getFeature(type);
}
});
assertFalse(map.containsKey("key1"));
assertFalse(map.containsKey("key2"));
assertTrue(map.containsKey("key1"));
assertTrue(map.containsKey("key2"));
assertNull(map.get("key1"));
assertNull(map.get("key2"));
}

@Test
Expand Down Expand Up @@ -167,9 +169,10 @@ public void setMetadata(final Path file, final Map<String, String> metadata) thr
return super.getFeature(type);
}
});
assertFalse(map.containsKey("key1"));
assertTrue(map.containsKey("key1"));
assertTrue(map.containsKey("key2"));
assertNull(map.get("key1"));
assertNull(map.get("key2"));
assertNotNull(map.get("key3"));
}
}
}
Loading

0 comments on commit c0c3af9

Please sign in to comment.