@@ -48,7 +48,6 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -58,6 +57,7 @@
import org.apache.http.client.methods.HttpGet;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.BlobStores;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.CopyOptions;
@@ -274,9 +274,7 @@ public void clearAllStashes(TaskListener listener) throws IOException, InterruptedException {
BlobStore blobStore = getContext().getBlobStore();
int count = 0;
try {
Iterator<StorageMetadata> it = new JCloudsVirtualFile.PageSetIterable(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(stashPrefix).recursive());
while (it.hasNext()) {
StorageMetadata sm = it.next();
for (StorageMetadata sm : BlobStores.listAll(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(stashPrefix).recursive())) {
String path = sm.getName();
assert path.startsWith(stashPrefix);
LOGGER.fine("deleting " + path);
@@ -300,9 +298,7 @@ public void copyAllArtifactsAndStashes(Run<?, ?> to, TaskListener listener) throws IOException, InterruptedException {
BlobStore blobStore = getContext().getBlobStore();
int count = 0;
try {
Iterator<StorageMetadata> it = new JCloudsVirtualFile.PageSetIterable(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(allPrefix).recursive());
while (it.hasNext()) {
StorageMetadata sm = it.next();
for (StorageMetadata sm : BlobStores.listAll(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(allPrefix).recursive())) {
String path = sm.getName();
assert path.startsWith(allPrefix);
String destPath = getBlobPath(dest.key, path.substring(allPrefix.length()));
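The hunks above replace the hand-rolled `JCloudsVirtualFile.PageSetIterable` with jclouds' `BlobStores.listAll`, which returns an `Iterable<StorageMetadata>` that fetches further result pages lazily as it is iterated. A minimal sketch of the new listing pattern (the `deleteUnderPrefix` helper, container, and prefix are illustrative placeholders, not part of this change):

```java
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStores;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;

class ListAllSketch {
    // Iterate every blob under a prefix; listAll advances to the next result
    // page (via the listing's next marker) as the loop consumes it.
    static void deleteUnderPrefix(BlobStore blobStore, String container, String prefix) {
        for (StorageMetadata sm : BlobStores.listAll(
                blobStore, container,
                ListContainerOptions.Builder.prefix(prefix).recursive())) {
            blobStore.removeBlob(container, sm.getName());
        }
    }
}
```

`clearAllStashes` and `copyAllArtifactsAndStashes` above follow this same pattern against `provider.getContainer()`.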
@@ -40,21 +40,17 @@
import java.util.Date;
import java.util.Deque;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.StreamSupport;
import jenkins.util.VirtualFile;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.BlobStores;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.MutableBlobMetadata;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;
import static org.jclouds.blobstore.options.ListContainerOptions.Builder.*;
@@ -154,7 +150,7 @@ public boolean isDirectory() throws IOException {
String keyS = key + "/";
CacheFrame frame = findCacheFrame(keyS);
if (frame != null) {
LOGGER.log(Level.FINE, "cache hit on directory status of {0} / {1}", new Object[] {container, key});
LOGGER.log(Level.FINER, "cache hit on directory status of {0} / {1}", new Object[] {container, key});
String relSlash = keyS.substring(frame.root.length()); // "" or "sub/dir/"
return frame.children.keySet().stream().anyMatch(f -> f.startsWith(relSlash));
}
@@ -168,7 +164,7 @@ public boolean isFile() throws IOException {
if (frame != null) {
String rel = key.substring(frame.root.length());
CachedMetadata metadata = frame.children.get(rel);
LOGGER.log(Level.FINE, "cache hit on file status of {0} / {1}", new Object[] {container, key});
LOGGER.log(Level.FINER, "cache hit on file status of {0} / {1}", new Object[] {container, key});
return metadata != null;
}
LOGGER.log(Level.FINE, "checking file status {0} / {1}", new Object[] {container, key});
@@ -186,20 +182,20 @@ public boolean exists() throws IOException {
* @return some blobs
* @throws RuntimeException either now or when the stream is processed; wrap in {@link IOException} if desired
*/
private Iterator<StorageMetadata> listStorageMetadata(boolean recursive) throws IOException {
private Iterable<StorageMetadata> listStorageMetadata(boolean recursive) throws IOException {
ListContainerOptions options = prefix(key + "/");
if (recursive) {
options.recursive();
}
return new PageSetIterable(getContext().getBlobStore(), getContainer(), options);
return BlobStores.listAll(getContext().getBlobStore(), getContainer(), options);
}

@Override
public VirtualFile[] list() throws IOException {
String keyS = key + "/";
CacheFrame frame = findCacheFrame(keyS);
if (frame != null) {
LOGGER.log(Level.FINE, "cache hit on listing of {0} / {1}", new Object[] {container, key});
LOGGER.log(Level.FINER, "cache hit on listing of {0} / {1}", new Object[] {container, key});
String relSlash = keyS.substring(frame.root.length()); // "" or "sub/dir/"
return frame.children.keySet().stream(). // filenames relative to frame root
filter(f -> f.startsWith(relSlash)). // those inside this dir
@@ -210,7 +206,7 @@ public VirtualFile[] list() throws IOException {
}
VirtualFile[] list;
try {
list = StreamSupport.stream(Spliterators.spliteratorUnknownSize(listStorageMetadata(false), Spliterator.ORDERED), false)
list = StreamSupport.stream(listStorageMetadata(false).spliterator(), false)
.map(meta -> new JCloudsVirtualFile(provider, getContainer(), meta.getName().replaceFirst("/$", "")))
.toArray(VirtualFile[]::new);
} catch (RuntimeException x) {
@@ -232,7 +228,7 @@ public long length() throws IOException {
if (frame != null) {
String rel = key.substring(frame.root.length());
CachedMetadata metadata = frame.children.get(rel);
LOGGER.log(Level.FINE, "cache hit on length of {0} / {1}", new Object[] {container, key});
LOGGER.log(Level.FINER, "cache hit on length of {0} / {1}", new Object[] {container, key});
return metadata != null ? metadata.length : 0;
}
LOGGER.log(Level.FINE, "checking length {0} / {1}", new Object[] {container, key});
@@ -247,7 +243,7 @@ public long lastModified() throws IOException {
if (frame != null) {
String rel = key.substring(frame.root.length());
CachedMetadata metadata = frame.children.get(rel);
LOGGER.log(Level.FINE, "cache hit on lastModified of {0} / {1}", new Object[] {container, key});
LOGGER.log(Level.FINER, "cache hit on lastModified of {0} / {1}", new Object[] {container, key});
return metadata != null ? metadata.lastModified : 0;
}
LOGGER.log(Level.FINE, "checking modification time {0} / {1}", new Object[] {container, key});
@@ -274,65 +270,6 @@ public InputStream open() throws IOException {
return getBlob().getPayload().openStream();
}

/**
* An Iterator for JClouds PageSet
*/
@Restricted(NoExternalUse.class)
static class PageSetIterable implements Iterator<StorageMetadata> {
private final BlobStore blobStore;
private final String container;
private ListContainerOptions options;
private PageSet<? extends StorageMetadata> set;
private Iterator<? extends StorageMetadata> iterator;

/**
* @throws RuntimeException either now or when iterating; wrap in {@link IOException} if desired
*/
PageSetIterable(@NonNull BlobStore blobStore, @NonNull String container,
@NonNull ListContainerOptions options) {
this.blobStore = blobStore;
this.container = container;
advanceList(options);
}

@Override
public boolean hasNext() {
if (iterator.hasNext()) {
return true;
}
String marker = set.getNextMarker();
if (marker == null) {
return false;
}
advanceList(options.afterMarker(marker));
return iterator.hasNext();
}

@Override
public StorageMetadata next() {
if (hasNext()) {
return iterator.next();
} else {
throw new NoSuchElementException();
}
}

/**
* Unsupported operation
*/
@Override
public void remove() {
throw new UnsupportedOperationException();
}

private void advanceList(ListContainerOptions options) {
LOGGER.log(Level.FINE, "listing {0}: {1}", new Object[] {container, options});
this.options = options;
this.set = blobStore.list(container, options);
this.iterator = set.iterator();
}
}

/**
* Cache of metadata collected during {@link #run}.
* Keys are {@link #container}.
@@ -373,9 +310,7 @@ public <V> V run(Callable<V, IOException> callable) throws IOException {
Map<String, CachedMetadata> saved = new HashMap<>();
int prefixLength = key.length() + /* / */1;
try {
Iterator<StorageMetadata> it = listStorageMetadata(true);
while (it.hasNext()) {
StorageMetadata sm = it.next();
for (StorageMetadata sm : listStorageMetadata(true)) {
Long length = sm.getSize();
if (length != null) {
Date lastModified = sm.getLastModified();
Expand Down Expand Up @@ -409,10 +344,8 @@ private Deque<CacheFrame> cacheFrames() {
*/
public static boolean delete(BlobStoreProvider provider, BlobStore blobStore, String prefix) throws IOException, InterruptedException {
try {
Iterator<StorageMetadata> it = new PageSetIterable(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(prefix).recursive());
List<String> paths = new ArrayList<>();
while (it.hasNext()) {
StorageMetadata sm = it.next();
for (StorageMetadata sm : BlobStores.listAll(blobStore, provider.getContainer(), ListContainerOptions.Builder.prefix(prefix).recursive())) {
String path = sm.getName();
assert path.startsWith(prefix);
paths.add(path);
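In `list()` above, the `Spliterators.spliteratorUnknownSize` wrapper is no longer needed because the method now returns an `Iterable`, which already exposes `spliterator()`. A short sketch of that conversion, assuming a blob store and container obtained elsewhere (the helper name and prefix are placeholders):

```java
import java.util.stream.StreamSupport;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStores;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;

class StreamListingSketch {
    // Turn the lazily paging Iterable from BlobStores.listAll into a Stream,
    // as the patched list() does; spliterator() comes straight from Iterable.
    static String[] blobNamesUnder(BlobStore blobStore, String container, String prefix) {
        Iterable<StorageMetadata> blobs = BlobStores.listAll(
                blobStore, container,
                ListContainerOptions.Builder.prefix(prefix).recursive());
        return StreamSupport.stream(blobs.spliterator(), false)
                .map(StorageMetadata::getName)
                .toArray(String[]::new);
    }
}
```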
@@ -186,18 +186,32 @@ public void list() throws Exception {
@Test
@SuppressWarnings("deprecation")
public void listGlob() throws Exception {
httpLogging.record(InvokeHttpMethod.class, Level.FINE);
httpLogging.capture(1000);
assertThat(subdir.list("*", null, true), containsInAnyOrder(weirdCharacters.getName(), vf.getName()));
// TODO more interesting to create a directory with enough files to exceed a single iterator page:
assertEquals("calls GetBucketLocation then ListBucket", 2, httpLogging.getRecords().size());
assertThat(subdir.list("**/**"), arrayContainingInAnyOrder(vf.getName(), weirdCharacters.getName()));
assertArrayEquals(new String[] { vf.getName() }, subdir.list(tmpFile.getName().substring(0, 4) + "*"));
assertArrayEquals(new String[0], subdir.list("**/something**"));
assertArrayEquals(new String[0], vf.list("**/**"));
assertArrayEquals(new String[0], missing.list("**/**"));
}

@Test
public void pagedListing() throws Exception {
for (int i = 0; i < 10; i++) {
String iDir = getPrefix() + "sprawling/i" + i + "/";
for (int j = 0; j < 10; j++) {
for (int k = 0; k < 10; k++) {
blobStore.putBlob(getContainer(), blobStore.blobBuilder(iDir + "j" + j + "/k" + k).payload(new byte[0]).build());
}
}
blobStore.putBlob(getContainer(), blobStore.blobBuilder(iDir + "extra").payload(new byte[0]).build());
LOGGER.log(Level.INFO, "added 101 blobs to {0}", iDir);
}
httpLogging.record(InvokeHttpMethod.class, Level.FINE);
httpLogging.capture(1000);
// Default list page size for S3 is 1000 blobs; we have 1010 plus the two created for all tests, so should hit a second page.
assertThat(subdir.list("sprawling/**/k3", null, true), iterableWithSize(100));
assertEquals("calls GetBucketLocation then ListBucket, advance to …/sprawling/i9/j8/k8, ListBucket again", 3, httpLogging.getRecords().size());
}

@Test
public void open() throws Exception {
try (InputStream is = subdir.open()) {
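The `pagedListing` test above exercises the paging that the deleted `PageSetIterable` used to do by hand: list a page, read its next marker, and list again from that marker until none remains, which `BlobStores.listAll` now handles internally. An illustrative sketch of that marker loop, not the actual jclouds implementation:

```java
import java.util.ArrayList;
import java.util.List;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;

class MarkerPagingSketch {
    // Collect every StorageMetadata by re-listing with each page's next marker;
    // roughly what the removed PageSetIterable did and what listAll now hides.
    static List<StorageMetadata> listAllPages(BlobStore blobStore, String container,
                                              ListContainerOptions options) {
        List<StorageMetadata> all = new ArrayList<>();
        PageSet<? extends StorageMetadata> page = blobStore.list(container, options);
        all.addAll(page);
        String marker = page.getNextMarker();
        while (marker != null) {
            page = blobStore.list(container, options.afterMarker(marker));
            all.addAll(page);
            marker = page.getNextMarker();
        }
        return all;
    }
}
```

In the test, S3 caps each ListBucket response at 1000 keys, so the 1010 generated blobs plus the two standing fixtures force exactly one extra marker round trip, hence the third recorded HTTP call.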