diff --git a/pom.xml b/pom.xml index b9fb5b6d..5582a49e 100644 --- a/pom.xml +++ b/pom.xml @@ -1,120 +1,179 @@ - - 4.0.0 + + + 4.0.0 - com.github.ipfs - java-ipfs-http-client - v1.4.5-SNAPSHOT - jar + com.github.ipfs + java-ipfs-http-client + v1.4.5-SNAPSHOT + jar - java-ipfs-http-client - https://github.com/ipfs/java-ipfs-http-client + java-ipfs-http-client + https://github.com/ipfs/java-ipfs-http-client - - https://github.com/ipfs/java-ipfs-http-client/issues - GitHub Issues - + + + MIT License + https://github.com/ipfs/java-ipfs-http-client/blob/master/LICENSE + repo + + - - scm:git:git://github.com/ipfs/java-ipfs-http-client.git - scm:git:git@github.com:ipfs/java-ipfs-http-client.git - HEAD - https://github.com/ipfs/java-ipfs-http-client - + + scm:git:git://github.com/ipfs/java-ipfs-http-client.git + scm:git:git@github.com:ipfs/java-ipfs-http-client.git + HEAD + https://github.com/ipfs/java-ipfs-http-client + - - - MIT License - https://github.com/ipfs/java-ipfs-http-client/blob/master/LICENSE - repo - - + + GitHub Issues + https://github.com/ipfs/java-ipfs-http-client/issues + - - 2026-01-20T00:00:00Z - UTF-8 - UTF-8 - 11 - 4.13.2 - 2.2 - v1.4.13 - + + 2026-01-20T00:00:00Z + UTF-8 + UTF-8 + 11 + 4.13.2 + 2.2 + v1.4.13 + - - - jitpack.io - https://jitpack.io - - + + + com.github.multiformats + java-multiaddr + ${version.multiaddr} + + + junit + junit + ${version.junit} + test + + + org.hamcrest + hamcrest + ${version.hamcrest} + test + + - - - com.github.multiformats - java-multiaddr - ${version.multiaddr} - - - junit - junit - ${version.junit} - test - - - org.hamcrest - hamcrest - ${version.hamcrest} - test - - + + + jitpack.io + https://jitpack.io + + - - - - org.apache.maven.plugins - maven-enforcer-plugin - 3.6.2 - - - - 3.9.6 - - - 21 - - - - - - enforce - - enforce - - validate - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.14.1 - - - org.apache.maven.plugins - maven-surefire-plugin - 3.5.4 - - - org.apache.maven.plugins - maven-jar-plugin - 3.5.0 - - - - true - - - - - - + + + + org.apache.maven.plugins + maven-enforcer-plugin + 3.6.2 + + + + 3.9.6 + + + 21 + + + + + + enforce + + enforce + + validate + + + + + com.diffplug.spotless + spotless-maven-plugin + 3.1.0 + + + + 1.33.0 + + + google-java-format + + + + + + false + + true + true + + + + true + + + + + + ${spotless.action} + + process-sources + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.14.1 + + + org.apache.maven.plugins + maven-surefire-plugin + 3.5.4 + + + org.apache.maven.plugins + maven-jar-plugin + 3.5.0 + + + + true + + + + + + + + + format-check + + + env.CI + + + + check + + + + format + + + !env.CI + + + + apply + + + diff --git a/src/main/java/io/ipfs/api/AddArgs.java b/src/main/java/io/ipfs/api/AddArgs.java index 747eb57e..3504c9da 100644 --- a/src/main/java/io/ipfs/api/AddArgs.java +++ b/src/main/java/io/ipfs/api/AddArgs.java @@ -1,7 +1,11 @@ package io.ipfs.api; import java.net.URLEncoder; -import java.util.*; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; /* @@ -13,106 +17,122 @@ */ public final class AddArgs { + private final Map args = new HashMap<>(); + + public AddArgs(Builder builder) { + args.putAll(builder.args); + } + + @Override + public String toString() { + List asList = + args.entrySet().stream() + .sorted(Comparator.comparing(Map.Entry::getKey)) + .map(e -> e.getKey() + " = " + e.getValue()) + .collect(Collectors.toList()); + return 
Arrays.toString(asList.toArray());
+  }
+
+  public String toQueryString() {
+    StringBuilder sb = new StringBuilder();
+    for (Map.Entry<String, String> entry : args.entrySet()) {
+      sb.append("&").append(entry.getKey()).append("=").append(URLEncoder.encode(entry.getValue()));
+    }
+    return sb.length() > 0 ? sb.toString().substring(1) : sb.toString();
+  }
+
+  public static class Builder {
+    private static final String TRUE = "true";
     private final Map<String, String> args = new HashMap<>();
-    public AddArgs(Builder builder)
-    {
-        args.putAll(builder.args);
-    }
-    @Override
-    public String toString()
-    {
-        List<String> asList = args.entrySet()
-                .stream()
-                .sorted(Comparator.comparing(Map.Entry::getKey))
-                .map(e -> e.getKey() + " = " + e.getValue()).collect(Collectors.toList());
-        return Arrays.toString(asList.toArray());
-    }
-    public String toQueryString()
-    {
-        StringBuilder sb = new StringBuilder();
-        for (Map.Entry<String, String> entry: args.entrySet()) {
-            sb.append("&").append(entry.getKey())
-                    .append("=")
-                    .append(URLEncoder.encode(entry.getValue()));
-        }
-        return sb.length() > 0 ? sb.toString().substring(1) : sb.toString();
-    }
-    public static class Builder {
-        private static final String TRUE = "true";
-        private final Map<String, String> args = new HashMap<>();
-        private Builder() {}
-        public static Builder newInstance()
-        {
-            return new Builder();
-        }
-        public Builder setQuiet() {
-            args.put("quiet", TRUE);
-            return this;
-        }
-        public Builder setQuieter() {
-            args.put("quieter", TRUE);
-            return this;
-        }
-        public Builder setSilent() {
-            args.put("silent", TRUE);
-            return this;
-        }
-        public Builder setTrickle() {
-            args.put("trickle", TRUE);
-            return this;
-        }
-        public Builder setOnlyHash() {
-            args.put("only-hash", TRUE);
-            return this;
-        }
-        public Builder setWrapWithDirectory() {
-            args.put("wrap-with-directory", TRUE);
-            return this;
-        }
-        public Builder setChunker(String chunker) {
-            args.put("chunker", chunker);
-            return this;
-        }
-        public Builder setRawLeaves() {
-            args.put("raw-leaves", TRUE);
-            return this;
-        }
-        public Builder setNocopy() {
-            args.put("nocopy", TRUE);
-            return this;
-        }
-        public Builder setFscache() {
-            args.put("fscache", TRUE);
-            return this;
-        }
-        public Builder setCidVersion(int version) {
-            args.put("cid-version", String.valueOf(version));
-            return this;
-        }
-        public Builder setHash(String hashFunction) {
-            args.put("hash", hashFunction);
-            return this;
-        }
-        public Builder setInline() {
-            args.put("inline", TRUE);
-            return this;
-        }
-        public Builder setInlineLimit(int maxBlockSize) {
-            args.put("inline-limit", String.valueOf(maxBlockSize));
-            return this;
-        }
-        public Builder setPin() {
-            args.put("pin", TRUE);
-            return this;
-        }
-        public Builder setToFiles(String path) {
-            args.put("to-files", path);
-            return this;
-        }
-        public AddArgs build()
-        {
-            return new AddArgs(this);
-        }
+    private Builder() {}
+
+    public static Builder newInstance() {
+      return new Builder();
+    }
+
+    public Builder setQuiet() {
+      args.put("quiet", TRUE);
+      return this;
+    }
+
+    public Builder setQuieter() {
+      args.put("quieter", TRUE);
+      return this;
+    }
+
+    public Builder setSilent() {
+      args.put("silent", TRUE);
+      return this;
+    }
+
+    public Builder setTrickle() {
+      args.put("trickle", TRUE);
+      return this;
+    }
+
+    public Builder setOnlyHash() {
+      args.put("only-hash", TRUE);
+      return this;
+    }
+
+    public Builder setWrapWithDirectory() {
+      args.put("wrap-with-directory", TRUE);
+      return this;
+    }
+
+    public Builder setChunker(String chunker) {
+      args.put("chunker", chunker);
+      return this;
+    }
+
+    public Builder setRawLeaves() {
args.put("raw-leaves", TRUE); + return this; + } + + public Builder setNocopy() { + args.put("nocopy", TRUE); + return this; + } + + public Builder setFscache() { + args.put("fscache", TRUE); + return this; + } + + public Builder setCidVersion(int version) { + args.put("cid-version", String.valueOf(version)); + return this; + } + + public Builder setHash(String hashFunction) { + args.put("hash", hashFunction); + return this; + } + + public Builder setInline() { + args.put("inline", TRUE); + return this; + } + + public Builder setInlineLimit(int maxBlockSize) { + args.put("inline-limit", String.valueOf(maxBlockSize)); + return this; + } + + public Builder setPin() { + args.put("pin", TRUE); + return this; + } + + public Builder setToFiles(String path) { + args.put("to-files", path); + return this; + } + + public AddArgs build() { + return new AddArgs(this); } + } } diff --git a/src/main/java/io/ipfs/api/IPFS.java b/src/main/java/io/ipfs/api/IPFS.java index 1cbdbea2..76f98161 100644 --- a/src/main/java/io/ipfs/api/IPFS.java +++ b/src/main/java/io/ipfs/api/IPFS.java @@ -1,1265 +1,1552 @@ package io.ipfs.api; -import io.ipfs.cid.*; -import io.ipfs.multibase.*; -import io.ipfs.multihash.Multihash; +import io.ipfs.cid.Cid; import io.ipfs.multiaddr.MultiAddress; - -import java.io.*; -import java.net.*; -import java.nio.file.*; -import java.util.*; -import java.util.concurrent.*; -import java.util.function.*; -import java.util.stream.*; +import io.ipfs.multibase.Multibase; +import io.ipfs.multihash.Multihash; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.ConnectException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.net.URLEncoder; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class IPFS { - public static final Version MIN_VERSION = Version.parse("0.4.11"); - public enum PinType {all, direct, indirect, recursive} - public enum PinStatus {queued, pinning, pinned, failed} - public List ObjectTemplates = Arrays.asList("unixfs-dir"); - public List ObjectPatchTypes = Arrays.asList("add-link", "rm-link", "set-data", "append-data"); - private static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 10_000; - private static final int DEFAULT_READ_TIMEOUT_MILLIS = 60_000; - - public final String host; - public final int port; - public final String protocol; - private final String apiVersion; - private final int connectTimeoutMillis; - private final int readTimeoutMillis; - public final Key key = new Key(); - public final Log log = new Log(); - public final MultibaseAPI multibase = new MultibaseAPI(); - public final Pin pin = new Pin(); - public final Repo repo = new Repo(); - public final IPFSObject object = new IPFSObject(); - public final Swarm swarm = new Swarm(); - public final Bootstrap bootstrap = new Bootstrap(); - public final Bitswap bitswap = new Bitswap(); - public final Block block = new Block(); - public final CidAPI cid = new CidAPI(); - public final Dag dag = new Dag(); - public final Diag diag = new Diag(); - public final Config config = new 
-    public final Refs refs = new Refs();
-    public final Update update = new Update();
-    public final DHT dht = new DHT();
-    public final File file = new File();
-    public final Files files = new Files();
-    public final FileStore fileStore = new FileStore();
-    public final Stats stats = new Stats();
-    public final Name name = new Name();
-    public final Pubsub pubsub = new Pubsub();
-    public final VersionAPI version = new VersionAPI();
-
-    public IPFS(String host, int port) {
-        this(host, port, "/api/v0/", false);
-    }
-
-    public IPFS(String multiaddr) {
-        this(new MultiAddress(multiaddr));
-    }
-
-    public IPFS(MultiAddress addr) {
-        this(addr.getHost(), addr.getPort(), "/api/v0/", detectSSL(addr));
-    }
-
-    public IPFS(String host, int port, String version, boolean ssl) {
-        this(host, port, version, true, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS, ssl);
-    }
-
-    public IPFS(String host, int port, String version, boolean enforceMinVersion, boolean ssl) {
-        this(host, port, version, enforceMinVersion, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS, ssl);
-    }
-
-    public IPFS(String host, int port, String version, int connectTimeoutMillis, int readTimeoutMillis, boolean ssl) {
-        this(host, port, version, true, connectTimeoutMillis, readTimeoutMillis, ssl);
-    }
-
-    public IPFS(String host, int port, String version, boolean enforceMinVersion, int connectTimeoutMillis, int readTimeoutMillis, boolean ssl) {
-        if (connectTimeoutMillis < 0) throw new IllegalArgumentException("connect timeout must be zero or positive");
-        if (readTimeoutMillis < 0) throw new IllegalArgumentException("read timeout must be zero or positive");
-        this.host = host;
-        this.port = port;
-        this.connectTimeoutMillis = connectTimeoutMillis;
-        this.readTimeoutMillis = readTimeoutMillis;
-
-        if (ssl) {
-            this.protocol = "https";
-        } else {
-            this.protocol = "http";
-        }
-
-        this.apiVersion = version;
-        // Check IPFS is sufficiently recent
-        if (enforceMinVersion) {
-            try {
-                Version detected = Version.parse(version());
-                if (detected.isBefore(MIN_VERSION))
-                    throw new IllegalStateException("You need to use a more recent version of IPFS! >= " + MIN_VERSION);
>= " + MIN_VERSION); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - /** - * Configure a HTTP client timeout - * @param timeout (default 0: infinite timeout) - * @return current IPFS object with configured timeout - */ - public IPFS timeout(int timeout) { - return new IPFS(host, port, apiVersion, timeout, timeout, protocol.equals("https")); + public static final Version MIN_VERSION = Version.parse("0.4.11"); + + public enum PinType { + all, + direct, + indirect, + recursive + } + + public enum PinStatus { + queued, + pinning, + pinned, + failed + } + + public List ObjectTemplates = Arrays.asList("unixfs-dir"); + public List ObjectPatchTypes = + Arrays.asList("add-link", "rm-link", "set-data", "append-data"); + private static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 10_000; + private static final int DEFAULT_READ_TIMEOUT_MILLIS = 60_000; + + public final String host; + public final int port; + public final String protocol; + private final String apiVersion; + private final int connectTimeoutMillis; + private final int readTimeoutMillis; + public final Key key = new Key(); + public final Log log = new Log(); + public final MultibaseAPI multibase = new MultibaseAPI(); + public final Pin pin = new Pin(); + public final Repo repo = new Repo(); + public final IPFSObject object = new IPFSObject(); + public final Swarm swarm = new Swarm(); + public final Bootstrap bootstrap = new Bootstrap(); + public final Bitswap bitswap = new Bitswap(); + public final Block block = new Block(); + public final CidAPI cid = new CidAPI(); + public final Dag dag = new Dag(); + public final Diag diag = new Diag(); + public final Config config = new Config(); + public final Refs refs = new Refs(); + public final Update update = new Update(); + public final DHT dht = new DHT(); + public final File file = new File(); + public final Files files = new Files(); + public final FileStore fileStore = new FileStore(); + public final Stats stats = new Stats(); + public final Name name = new Name(); + public final Pubsub pubsub = new Pubsub(); + public final VersionAPI version = new VersionAPI(); + + public IPFS(String host, int port) { + this(host, port, "/api/v0/", false); + } + + public IPFS(String multiaddr) { + this(new MultiAddress(multiaddr)); + } + + public IPFS(MultiAddress addr) { + this(addr.getHost(), addr.getPort(), "/api/v0/", detectSSL(addr)); + } + + public IPFS(String host, int port, String version, boolean ssl) { + this( + host, + port, + version, + true, + DEFAULT_CONNECT_TIMEOUT_MILLIS, + DEFAULT_READ_TIMEOUT_MILLIS, + ssl); + } + + public IPFS(String host, int port, String version, boolean enforceMinVersion, boolean ssl) { + this( + host, + port, + version, + enforceMinVersion, + DEFAULT_CONNECT_TIMEOUT_MILLIS, + DEFAULT_READ_TIMEOUT_MILLIS, + ssl); + } + + public IPFS( + String host, + int port, + String version, + int connectTimeoutMillis, + int readTimeoutMillis, + boolean ssl) { + this(host, port, version, true, connectTimeoutMillis, readTimeoutMillis, ssl); + } + + public IPFS( + String host, + int port, + String version, + boolean enforceMinVersion, + int connectTimeoutMillis, + int readTimeoutMillis, + boolean ssl) { + if (connectTimeoutMillis < 0) + throw new IllegalArgumentException("connect timeout must be zero or positive"); + if (readTimeoutMillis < 0) + throw new IllegalArgumentException("read timeout must be zero or positive"); + this.host = host; + this.port = port; + this.connectTimeoutMillis = connectTimeoutMillis; + this.readTimeoutMillis = readTimeoutMillis; 
+ + if (ssl) { + this.protocol = "https"; + } else { + this.protocol = "http"; + } + + this.apiVersion = version; + // Check IPFS is sufficiently recent + if (enforceMinVersion) { + try { + Version detected = Version.parse(version()); + if (detected.isBefore(MIN_VERSION)) + throw new IllegalStateException( + "You need to use a more recent version of IPFS! >= " + MIN_VERSION); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + /** + * Configure a HTTP client timeout + * + * @param timeout (default 0: infinite timeout) + * @return current IPFS object with configured timeout + */ + public IPFS timeout(int timeout) { + return new IPFS(host, port, apiVersion, timeout, timeout, protocol.equals("https")); + } + + public String shutdown() throws IOException { + return retrieveString("shutdown"); + } + + public List add(NamedStreamable file) throws IOException { + return add(file, false); + } + + public List add(NamedStreamable file, boolean wrap) throws IOException { + return add(file, wrap, false); + } + + public List add(NamedStreamable file, boolean wrap, boolean hashOnly) + throws IOException { + return add(Collections.singletonList(file), wrap, hashOnly); + } + + public List add(List files, boolean wrap, boolean hashOnly) + throws IOException { + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "add?stream-channels=true&w=" + + wrap + + "&n=" + + hashOnly, + "UTF-8"); + for (NamedStreamable file : files) { + if (file.isDirectory()) { + m.addSubtree(Paths.get(""), file); + } else m.addFilePart("file", Paths.get(""), file); + } + String res = m.finish(); + return JSONParser.parseStream(res).stream() + .map(x -> MerkleNode.fromJSON((Map) x)) + .collect(Collectors.toList()); + } + + public List add(NamedStreamable file, AddArgs args) throws IOException { + return add(Collections.singletonList(file), args); + } + + public List add(List files, AddArgs args) throws IOException { + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "add?stream-channels=true&" + + args.toQueryString(), + "UTF-8"); + for (NamedStreamable file : files) { + if (file.isDirectory()) { + m.addSubtree(Paths.get(""), file); + } else m.addFilePart("file", Paths.get(""), file); + } + String res = m.finish(); + return JSONParser.parseStream(res).stream() + .map(x -> MerkleNode.fromJSON((Map) x)) + .collect(Collectors.toList()); + } + + public List ls(Multihash hash) throws IOException { + Map reply = retrieveMap("ls?arg=" + hash); + return ((List) reply.get("Objects")) + .stream() + .flatMap( + x -> ((List) ((Map) x).get("Links")).stream().map(MerkleNode::fromJSON)) + .collect(Collectors.toList()); + } + + public byte[] cat(Multihash hash) throws IOException { + return retrieve("cat?arg=" + hash); + } + + public byte[] cat(Multihash hash, String subPath) throws IOException { + return retrieve("cat?arg=" + hash + URLEncoder.encode(subPath, "UTF-8")); + } + + public byte[] get(Multihash hash) throws IOException { + return retrieve("get?arg=" + hash); + } + + public InputStream catStream(Multihash hash) throws IOException { + return retrieveStream("cat?arg=" + hash); + } + + public List refs(Multihash hash, boolean recursive) throws IOException { + String jsonStream = new String(retrieve("refs?arg=" + hash + "&r=" + recursive)); + return JSONParser.parseStream(jsonStream).stream() + .map(m -> (String) (((Map) m).get("Ref"))) + .map(Cid::decode) + .collect(Collectors.toList()); + } + + public Map resolve(String 
scheme, Multihash hash, boolean recursive) throws IOException { + return retrieveMap("resolve?arg=/" + scheme + "/" + hash + "&r=" + recursive); + } + + public Map mount(java.io.File ipfsRoot, java.io.File ipnsRoot) throws IOException { + if (ipfsRoot != null && !ipfsRoot.exists()) ipfsRoot.mkdirs(); + if (ipnsRoot != null && !ipnsRoot.exists()) ipnsRoot.mkdirs(); + return (Map) + retrieveAndParse( + "mount?arg=" + + (ipfsRoot != null ? ipfsRoot.getPath() : "/ipfs") + + "&arg=" + + (ipnsRoot != null ? ipnsRoot.getPath() : "/ipns")); + } + + // level 2 commands + public class Refs { + public List local() throws IOException { + String jsonStream = new String(retrieve("refs/local")); + return JSONParser.parseStream(jsonStream).stream() + .map(m -> (String) (((Map) m).get("Ref"))) + .map(Cid::decode) + .collect(Collectors.toList()); + } + } + + /* Pinning an object ensures a local copy of it is kept. + */ + public class Pin { + public final Remote remote = new Remote(); + + public class Remote { + public Map add(String service, Multihash hash, Optional name, boolean background) + throws IOException { + String nameArg = name.isPresent() ? "&name=" + name.get() : ""; + return retrieveMap( + "pin/remote/add?arg=" + + hash + + "&service=" + + service + + nameArg + + "&background=" + + background); + } + + public Map ls(String service, Optional name, Optional> statusList) + throws IOException { + String nameArg = name.isPresent() ? "&name=" + name.get() : ""; + String statusArg = + statusList.isPresent() + ? statusList.get().stream().map(p -> "&status=" + p).collect(Collectors.joining()) + : ""; + return retrieveMap("pin/remote/ls?service=" + service + nameArg + statusArg); + } + + public String rm( + String service, + Optional name, + Optional> statusList, + Optional> cidList) + throws IOException { + String nameArg = name.isPresent() ? "&name=" + name.get() : ""; + String statusArg = + statusList.isPresent() + ? statusList.get().stream().map(p -> "&status=" + p).collect(Collectors.joining()) + : ""; + String cidArg = + cidList.isPresent() + ? 
cidList.get().stream() + .map(p -> "&cid=" + p.toBase58()) + .collect(Collectors.joining()) + : ""; + return retrieveString("pin/remote/rm?service=" + service + nameArg + statusArg + cidArg); + } + + public String addService(String service, String endPoint, String key) throws IOException { + return retrieveString( + "pin/remote/service/add?arg=" + service + "&arg=" + endPoint + "&arg=" + key); + } + + public List lsService(boolean stat) throws IOException { + return (List) retrieveMap("pin/remote/service/ls?stat=" + stat).get("RemoteServices"); + } + + public String rmService(String service) throws IOException { + return retrieveString("pin/remote/service/rm?arg=" + service); + } + } + + public List add(Multihash hash) throws IOException { + return ((List) + ((Map) retrieveAndParse("pin/add?stream-channels=true&arg=" + hash)).get("Pins")) + .stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList()); + } + + public Map ls() throws IOException { + return ls(PinType.direct); + } + + public Map ls(PinType type) throws IOException { + return ((Map) + (((Map) retrieveAndParse("pin/ls?stream-channels=true&t=" + type.name())) + .get("Keys"))) + .entrySet().stream() + .collect(Collectors.toMap(x -> Cid.decode(x.getKey()), x -> x.getValue())); + } + + public List rm(Multihash hash) throws IOException { + return rm(hash, true); + } + + public List rm(Multihash hash, boolean recursive) throws IOException { + Map json = retrieveMap("pin/rm?stream-channels=true&r=" + recursive + "&arg=" + hash); + return ((List) json.get("Pins")) + .stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList()); + } + + public List update(Multihash existing, Multihash modified, boolean unpin) + throws IOException { + return ((List) + ((Map) + retrieveAndParse( + "pin/update?stream-channels=true&arg=" + + existing + + "&arg=" + + modified + + "&unpin=" + + unpin)) + .get("Pins")) + .stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList()); + } + + public Map verify(boolean verbose, boolean quiet) throws IOException { + return retrieveMap("pin/verify?verbose=" + verbose + "&quiet=" + quiet); + } + } + + /* 'ipfs key' is a command for dealing with IPNS keys. 
+ */ + public class Key { + public KeyInfo gen(String name, Optional type, Optional size) + throws IOException { + return KeyInfo.fromJson( + retrieveAndParse( + "key/gen?arg=" + + name + + type.map(t -> "&type=" + t).orElse("") + + size.map(s -> "&size=" + s).orElse(""))); + } + + public List list() throws IOException { + return ((List) ((Map) retrieveAndParse("key/list")).get("Keys")) + .stream().map(KeyInfo::fromJson).collect(Collectors.toList()); } - public String shutdown() throws IOException { - return retrieveString("shutdown"); + public Object rename(String name, String newName) throws IOException { + return retrieveAndParse("key/rename?arg=" + name + "&arg=" + newName); } - public List add(NamedStreamable file) throws IOException { - return add(file, false); + public List rm(String name) throws IOException { + return ((List) ((Map) retrieveAndParse("key/rm?arg=" + name)).get("Keys")) + .stream().map(KeyInfo::fromJson).collect(Collectors.toList()); } + } - public List add(NamedStreamable file, boolean wrap) throws IOException { - return add(file, wrap, false); + public class Log { + public Map level(String subsystem, String logLevel) throws IOException { + return retrieveMap("log/level?arg=" + subsystem + "&arg=" + logLevel); } - public List add(NamedStreamable file, boolean wrap, boolean hashOnly) throws IOException { - return add(Collections.singletonList(file), wrap, hashOnly); + public Map ls() throws IOException { + return retrieveMap("log/ls"); } + } - public List add(List files, boolean wrap, boolean hashOnly) throws IOException { - Multipart m = new Multipart(protocol + "://" + host + ":" + port + apiVersion + "add?stream-channels=true&w="+wrap + "&n="+hashOnly, "UTF-8"); - for (NamedStreamable file: files) { - if (file.isDirectory()) { - m.addSubtree(Paths.get(""), file); - } else - m.addFilePart("file", Paths.get(""), file); + public class MultibaseAPI { + public String decode(NamedStreamable encoded_file) { + Multipart m = + new Multipart( + protocol + "://" + host + ":" + port + apiVersion + "multibase/decode", "UTF-8"); + try { + if (encoded_file.isDirectory()) { + throw new IllegalArgumentException("encoded_file must be a file"); + } else { + m.addFilePart("file", Paths.get(""), encoded_file); + return m.finish(); + } + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public String encode(Optional encoding, NamedStreamable file) { + String b = encoding.map(f -> "?b=" + f).orElse("?b=base64url"); + Multipart m = + new Multipart( + protocol + "://" + host + ":" + port + apiVersion + "multibase/encode" + b, "UTF-8"); + try { + if (file.isDirectory()) { + throw new IllegalArgumentException("Input must be a file"); + } else { + m.addFilePart("file", Paths.get(""), file); + return m.finish(); } - String res = m.finish(); - return JSONParser.parseStream(res).stream() - .map(x -> MerkleNode.fromJSON((Map) x)) - .collect(Collectors.toList()); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } } - public List add(NamedStreamable file, AddArgs args) throws IOException { - return add(Collections.singletonList(file), args); + public List list(boolean prefix, boolean numeric) throws IOException { + return (List) retrieveAndParse("multibase/list?prefix=" + prefix + "&numeric=" + numeric); } - public List add(List files, AddArgs args) throws IOException { - Multipart m = new Multipart(protocol + "://" + host + ":" + port + apiVersion + "add?stream-channels=true&"+ args.toQueryString(), "UTF-8"); - for 
(NamedStreamable file: files) { - if (file.isDirectory()) { - m.addSubtree(Paths.get(""), file); - } else - m.addFilePart("file", Paths.get(""), file); + public String transcode(Optional encoding, NamedStreamable file) { + String b = encoding.map(f -> "?b=" + f).orElse("?b=base64url"); + Multipart m = + new Multipart( + protocol + "://" + host + ":" + port + apiVersion + "multibase/transcode" + b, + "UTF-8"); + try { + if (file.isDirectory()) { + throw new IllegalArgumentException("Input must be a file"); + } else { + m.addFilePart("file", Paths.get(""), file); + return m.finish(); } - String res = m.finish(); - return JSONParser.parseStream(res).stream() - .map(x -> MerkleNode.fromJSON((Map) x)) - .collect(Collectors.toList()); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } } + } - public List ls(Multihash hash) throws IOException { - Map reply = retrieveMap("ls?arg=" + hash); - return ((List) reply.get("Objects")) - .stream() - .flatMap(x -> ((List)((Map) x).get("Links")) - .stream() - .map(MerkleNode::fromJSON)) - .collect(Collectors.toList()); + /* 'ipfs repo' is a plumbing command used to manipulate the repo. + */ + public class Repo { + public Map gc() throws IOException { + return retrieveMap("repo/gc"); } - public byte[] cat(Multihash hash) throws IOException { - return retrieve("cat?arg=" + hash); + public Multihash ls() throws IOException { + Map res = retrieveMap("repo/ls"); + return Cid.decode((String) res.get("Ref")); } - public byte[] cat(Multihash hash, String subPath) throws IOException { - return retrieve("cat?arg=" + hash + URLEncoder.encode(subPath, "UTF-8")); + /*public String migrate(boolean allowDowngrade) throws IOException { + return retrieveString("repo/migrate?allow-downgrade=" + allowDowngrade); + }*/ + public RepoStat stat(boolean sizeOnly) throws IOException { + return RepoStat.fromJson(retrieveAndParse("repo/stat?size-only=" + sizeOnly)); } - public byte[] get(Multihash hash) throws IOException { - return retrieve("get?arg=" + hash); + public Map verify() throws IOException { + return retrieveMap("repo/verify"); } - public InputStream catStream(Multihash hash) throws IOException { - return retrieveStream("cat?arg=" + hash); + public Map version() throws IOException { + return retrieveMap("repo/version"); } + } - public List refs(Multihash hash, boolean recursive) throws IOException { - String jsonStream = new String(retrieve("refs?arg=" + hash + "&r=" + recursive)); - return JSONParser.parseStream(jsonStream).stream() - .map(m -> (String) (((Map) m).get("Ref"))) - .map(Cid::decode) - .collect(Collectors.toList()); + public class VersionAPI { + public Map versionDeps() throws IOException { + return retrieveMap("version/deps"); } + } - public Map resolve(String scheme, Multihash hash, boolean recursive) throws IOException { - return retrieveMap("resolve?arg=/" + scheme+"/"+hash +"&r="+recursive); + public class Pubsub { + public Object ls() throws IOException { + return retrieveAndParse("pubsub/ls"); } - public Map mount(java.io.File ipfsRoot, java.io.File ipnsRoot) throws IOException { - if (ipfsRoot != null && !ipfsRoot.exists()) - ipfsRoot.mkdirs(); - if (ipnsRoot != null && !ipnsRoot.exists()) - ipnsRoot.mkdirs(); - return (Map)retrieveAndParse("mount?arg=" + (ipfsRoot != null ? ipfsRoot.getPath() : "/ipfs" ) + "&arg=" + - (ipnsRoot != null ? 
ipnsRoot.getPath() : "/ipns" )); + public Object peers() throws IOException { + return retrieveAndParse("pubsub/peers"); } - // level 2 commands - public class Refs { - public List local() throws IOException { - String jsonStream = new String(retrieve("refs/local")); - return JSONParser.parseStream(jsonStream).stream() - .map(m -> (String) (((Map) m).get("Ref"))) - .map(Cid::decode) - .collect(Collectors.toList()); - } + public Object peers(String topic) throws IOException { + return retrieveAndParse("pubsub/peers?arg=" + topic); } - /* Pinning an object ensures a local copy of it is kept. + /** + * @param topic topic to publish to + * @param data url encoded data to be published */ - public class Pin { - public final Remote remote = new Remote(); - - public class Remote { - public Map add(String service, Multihash hash, Optional name, boolean background) throws IOException { - String nameArg = name.isPresent() ? "&name=" + name.get() : ""; - return retrieveMap("pin/remote/add?arg=" + hash + "&service=" + service + nameArg + "&background=" + background); - } - public Map ls(String service, Optional name, Optional> statusList) throws IOException { - String nameArg = name.isPresent() ? "&name=" + name.get() : ""; - String statusArg = statusList.isPresent() ? statusList.get().stream(). - map(p -> "&status=" + p).collect(Collectors.joining()) : ""; - return retrieveMap("pin/remote/ls?service=" + service + nameArg + statusArg); - } - public String rm(String service, Optional name, Optional> statusList, Optional> cidList) throws IOException { - String nameArg = name.isPresent() ? "&name=" + name.get() : ""; - String statusArg = statusList.isPresent() ? statusList.get().stream(). - map(p -> "&status=" + p).collect(Collectors.joining()) : ""; - String cidArg = cidList.isPresent() ? cidList.get().stream(). 
- map(p -> "&cid=" + p.toBase58()).collect(Collectors.joining()) : ""; - return retrieveString("pin/remote/rm?service=" + service + nameArg + statusArg + cidArg); - } - public String addService(String service, String endPoint, String key) throws IOException { - return retrieveString("pin/remote/service/add?arg=" + service + "&arg=" + endPoint + "&arg=" + key); - } - - public List lsService(boolean stat) throws IOException { - return (List) retrieveMap("pin/remote/service/ls?stat=" + stat).get("RemoteServices"); - } - - public String rmService(String service) throws IOException { - return retrieveString("pin/remote/service/rm?arg=" + service); - } - } - public List add(Multihash hash) throws IOException { - return ((List)((Map)retrieveAndParse("pin/add?stream-channels=true&arg=" + hash)).get("Pins")) - .stream() - .map(x -> Cid.decode((String) x)) - .collect(Collectors.toList()); - } - - public Map ls() throws IOException { - return ls(PinType.direct); - } - - public Map ls(PinType type) throws IOException { - return ((Map)(((Map)retrieveAndParse("pin/ls?stream-channels=true&t="+type.name())).get("Keys"))).entrySet() - .stream() - .collect(Collectors.toMap(x -> Cid.decode(x.getKey()), x-> x.getValue())); - } - - public List rm(Multihash hash) throws IOException { - return rm(hash, true); - } - - public List rm(Multihash hash, boolean recursive) throws IOException { - Map json = retrieveMap("pin/rm?stream-channels=true&r=" + recursive + "&arg=" + hash); - return ((List) json.get("Pins")).stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList()); - } + public void pub(String topic, String data) { + String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); + Multipart m = + new Multipart( + protocol + "://" + host + ":" + port + apiVersion + "pubsub/pub?arg=" + encodedTopic, + "UTF-8"); + try { + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data.getBytes())); + String res = m.finish(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + } - public List update(Multihash existing, Multihash modified, boolean unpin) throws IOException { - return ((List)((Map)retrieveAndParse("pin/update?stream-channels=true&arg=" + existing + "&arg=" + modified + "&unpin=" + unpin)).get("Pins")) - .stream() - .map(x -> Cid.decode((String) x)) - .collect(Collectors.toList()); - } + public Stream> sub(String topic) throws Exception { + return sub(topic, ForkJoinPool.commonPool()); + } - public Map verify(boolean verbose, boolean quiet) throws IOException { - return retrieveMap("pin/verify?verbose=" + verbose + "&quiet=" + quiet); - } + public Stream> sub(String topic, ForkJoinPool threadSupplier) + throws Exception { + String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); + return retrieveAndParseStream("pubsub/sub?arg=" + encodedTopic, threadSupplier) + .map(obj -> (Map) obj); } - /* 'ipfs key' is a command for dealing with IPNS keys. 
+ /** + * A synchronous method to subscribe which consumes the calling thread + * + * @param topic + * @param results + * @throws IOException */ - public class Key { - public KeyInfo gen(String name, Optional type, Optional size) throws IOException { - return KeyInfo.fromJson(retrieveAndParse("key/gen?arg=" + name + type.map(t -> "&type=" + t).orElse("") + size.map(s -> "&size=" + s).orElse(""))); - } - - public List list() throws IOException { - return ((List)((Map)retrieveAndParse("key/list")).get("Keys")) - .stream() - .map(KeyInfo::fromJson) - .collect(Collectors.toList()); - } + public void sub( + String topic, Consumer> results, Consumer error) + throws IOException { + String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); + retrieveAndParseStream( + "pubsub/sub?arg=" + encodedTopic, res -> results.accept((Map) res), error); + } + } - public Object rename(String name, String newName) throws IOException { - return retrieveAndParse("key/rename?arg="+name + "&arg=" + newName); - } + public class CidAPI { + public Map base32(Cid hash) throws IOException { + return (Map) retrieveAndParse("cid/base32?arg=" + hash); + } - public List rm(String name) throws IOException { - return ((List)((Map)retrieveAndParse("key/rm?arg=" + name)).get("Keys")) - .stream() - .map(KeyInfo::fromJson) - .collect(Collectors.toList()); - } + public List bases(boolean prefix, boolean numeric) throws IOException { + return (List) retrieveAndParse("cid/bases?prefix=" + prefix + "&numeric=" + numeric); } - public class Log { - public Map level(String subsystem, String logLevel) throws IOException { - return retrieveMap("log/level?arg=" + subsystem + "&arg=" + logLevel); - } - public Map ls() throws IOException { - return retrieveMap("log/ls"); - } + public List codecs(boolean numeric, boolean supported) throws IOException { + return (List) retrieveAndParse("cid/codecs?numeric=" + numeric + "&supported=" + supported); } - public class MultibaseAPI { - public String decode(NamedStreamable encoded_file) { - Multipart m = new Multipart(protocol + "://" + host + ":" + port + apiVersion + - "multibase/decode", "UTF-8"); - try { - if (encoded_file.isDirectory()) { - throw new IllegalArgumentException("encoded_file must be a file"); - } else { - m.addFilePart("file", Paths.get(""), encoded_file); - return m.finish(); - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - public String encode(Optional encoding, NamedStreamable file) { - String b = encoding.map(f -> "?b=" + f).orElse("?b=base64url"); - Multipart m = new Multipart(protocol + "://" + host + ":" + port + apiVersion + - "multibase/encode" + b, "UTF-8"); - try { - if (file.isDirectory()) { - throw new IllegalArgumentException("Input must be a file"); - } else { - m.addFilePart("file", Paths.get(""), file); - return m.finish(); - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - public List list(boolean prefix, boolean numeric) throws IOException { - return (List)retrieveAndParse("multibase/list?prefix=" + prefix + "&numeric=" + numeric); - } - public String transcode(Optional encoding, NamedStreamable file) { - String b = encoding.map(f -> "?b=" + f).orElse("?b=base64url"); - Multipart m = new Multipart(protocol + "://" + host + ":" + port + apiVersion + - "multibase/transcode" + b, "UTF-8"); - try { - if (file.isDirectory()) { - throw new IllegalArgumentException("Input must be a file"); - } else { - m.addFilePart("file", Paths.get(""), file); - return 
m.finish(); - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } + public Map format( + Cid hash, Optional f, Optional v, Optional mc, Optional b) + throws IOException { + String fArg = f.isPresent() ? "&f=" + URLEncoder.encode(f.get(), "UTF-8") : ""; + String vArg = v.isPresent() ? "&v=" + v.get() : ""; + String mcArg = mc.isPresent() ? "&mc=" + mc.get() : ""; + String bArg = b.isPresent() ? "&b=" + b.get() : ""; + return (Map) retrieveAndParse("cid/format?arg=" + hash + fArg + vArg + mcArg + bArg); } - /* 'ipfs repo' is a plumbing command used to manipulate the repo. - */ - public class Repo { - public Map gc() throws IOException { - return retrieveMap("repo/gc"); - } - public Multihash ls() throws IOException { - Map res = retrieveMap("repo/ls"); - return Cid.decode((String)res.get("Ref")); - } - /*public String migrate(boolean allowDowngrade) throws IOException { - return retrieveString("repo/migrate?allow-downgrade=" + allowDowngrade); - }*/ - public RepoStat stat(boolean sizeOnly) throws IOException { - return RepoStat.fromJson(retrieveAndParse("repo/stat?size-only=" + sizeOnly)); - } - public Map verify() throws IOException { - return retrieveMap("repo/verify"); - } - public Map version() throws IOException { - return retrieveMap("repo/version"); - } + public List hashes(boolean numeric, boolean supported) throws IOException { + return (List) retrieveAndParse("cid/hashes?numeric=" + numeric + "&supported=" + supported); } + } + /* 'ipfs block' is a plumbing command used to manipulate raw ipfs blocks. + */ + public class Block { + public byte[] get(Multihash hash) throws IOException { + return retrieve("block/get?stream-channels=true&arg=" + hash); + } + + public byte[] rm(Multihash hash) throws IOException { + return retrieve("block/rm?stream-channels=true&arg=" + hash); + } + + public List put(List data) throws IOException { + return put(data, Optional.empty()); + } + + public List put(List data, Optional format) throws IOException { + // N.B. Once IPFS implements a bulk put this can become a single multipart call with multiple + // 'files' + List res = new ArrayList<>(); + for (byte[] value : data) { + res.add(put(value, format)); + } + return res; + } + + public MerkleNode put(byte[] data, Optional format) throws IOException { + String fmt = format.map(f -> "&format=" + f).orElse(""); + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "block/put?stream-channels=true" + + fmt, + "UTF-8"); + try { + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data)); + String res = m.finish(); + return JSONParser.parseStream(res).stream() + .map(x -> MerkleNode.fromJSON((Map) x)) + .findFirst() + .get(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public Map stat(Multihash hash) throws IOException { + return retrieveMap("block/stat?stream-channels=true&arg=" + hash); + } + } + + /* 'ipfs object' is a plumbing command used to manipulate DAG objects directly. 
{Object} is a subset of {Block} + */ + public class IPFSObject { + @Deprecated + public List put(List data) throws IOException { + Multipart m = + new Multipart( + protocol + "://" + host + ":" + port + apiVersion + "object/put?stream-channels=true", + "UTF-8"); + for (byte[] f : data) + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(f)); + String res = m.finish(); + return JSONParser.parseStream(res).stream() + .map(x -> MerkleNode.fromJSON((Map) x)) + .collect(Collectors.toList()); + } + + @Deprecated + public List put(String encoding, List data) throws IOException { + if (!"json".equals(encoding) && !"protobuf".equals(encoding)) + throw new IllegalArgumentException("Encoding must be json or protobuf"); + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "object/put?stream-channels=true&encoding=" + + encoding, + "UTF-8"); + for (byte[] f : data) + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(f)); + String res = m.finish(); + return JSONParser.parseStream(res).stream() + .map(x -> MerkleNode.fromJSON((Map) x)) + .collect(Collectors.toList()); + } + + @Deprecated + public MerkleNode get(Multihash hash) throws IOException { + Map json = retrieveMap("object/get?stream-channels=true&arg=" + hash); + json.put("Hash", hash.toBase58()); + return MerkleNode.fromJSON(json); + } + + @Deprecated + public MerkleNode links(Multihash hash) throws IOException { + Map json = retrieveMap("object/links?stream-channels=true&arg=" + hash); + return MerkleNode.fromJSON(json); + } + + @Deprecated + public Map stat(Multihash hash) throws IOException { + return retrieveMap("object/stat?stream-channels=true&arg=" + hash); + } + + @Deprecated + public byte[] data(Multihash hash) throws IOException { + return retrieve("object/data?stream-channels=true&arg=" + hash); + } + + @Deprecated + public MerkleNode _new(Optional template) throws IOException { + if (template.isPresent() && !ObjectTemplates.contains(template.get())) + throw new IllegalStateException("Unrecognised template: " + template.get()); + Map json = + retrieveMap( + "object/new?stream-channels=true" + + (template.isPresent() ? 
"&arg=" + template.get() : "")); + return MerkleNode.fromJSON(json); + } + + @Deprecated + public MerkleNode patch( + Multihash base, + String command, + Optional data, + Optional name, + Optional target) + throws IOException { + if (!ObjectPatchTypes.contains(command)) + throw new IllegalStateException("Illegal Object.patch command type: " + command); + String targetPath = "object/patch/" + command + "?arg=" + base.toBase58(); + if (name.isPresent()) targetPath += "&arg=" + name.get(); + if (target.isPresent()) targetPath += "&arg=" + target.get().toBase58(); + + switch (command) { + case "add-link": + if (!target.isPresent()) + throw new IllegalStateException("add-link requires name and target!"); + case "rm-link": + if (!name.isPresent()) throw new IllegalStateException("link name is required!"); + return MerkleNode.fromJSON(retrieveMap(targetPath)); + case "set-data": + case "append-data": + if (!data.isPresent()) throw new IllegalStateException("set-data requires data!"); + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "object/patch/" + + command + + "?arg=" + + base.toBase58() + + "&stream-channels=true", + "UTF-8"); + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data.get())); + String res = m.finish(); + return MerkleNode.fromJSON(JSONParser.parse(res)); - public class VersionAPI { - public Map versionDeps() throws IOException { - return retrieveMap("version/deps"); - } + default: + throw new IllegalStateException("Unimplemented"); + } } + } - public class Pubsub { - public Object ls() throws IOException { - return retrieveAndParse("pubsub/ls"); - } - - public Object peers() throws IOException { - return retrieveAndParse("pubsub/peers"); - } + public class Name { + public Map publish(Multihash hash) throws IOException { + return publish(hash, Optional.empty()); + } - public Object peers(String topic) throws IOException { - return retrieveAndParse("pubsub/peers?arg="+topic); - } + public Map publish(Multihash hash, Optional id) throws IOException { + return retrieveMap( + "name/publish?arg=/ipfs/" + hash + id.map(name -> "&key=" + name).orElse("")); + } - /** - * - * @param topic topic to publish to - * @param data url encoded data to be published - */ - public void pub(String topic, String data) { - String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"pubsub/pub?arg=" + encodedTopic, "UTF-8"); - try { - m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data.getBytes())); - String res = m.finish(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } + public String resolve(Multihash hash) throws IOException { + Map res = (Map) retrieveAndParse("name/resolve?arg=" + hash); + return (String) res.get("Path"); + } - public Stream> sub(String topic) throws Exception { - return sub(topic, ForkJoinPool.commonPool()); - } + public String resolve(String name) throws IOException { + Map res = (Map) retrieveAndParse("name/resolve?arg=" + name); + return (String) res.get("Path"); + } + } - public Stream> sub(String topic, ForkJoinPool threadSupplier) throws Exception { - String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); - return retrieveAndParseStream("pubsub/sub?arg=" + encodedTopic, threadSupplier).map(obj -> (Map)obj); - } + public class DHT { + @Deprecated + public List> findprovs(Multihash hash) throws IOException 
{ + return getAndParseStream("dht/findprovs?arg=" + hash).stream() + .map(x -> (Map) x) + .collect(Collectors.toList()); + } - /** - * A synchronous method to subscribe which consumes the calling thread - * @param topic - * @param results - * @throws IOException - */ - public void sub(String topic, Consumer> results, Consumer error) throws IOException { - String encodedTopic = Multibase.encode(Multibase.Base.Base64Url, topic.getBytes()); - retrieveAndParseStream("pubsub/sub?arg="+encodedTopic, res -> results.accept((Map)res), error); - } + public Map query(Multihash peerId) throws IOException { + return retrieveMap("dht/query?arg=" + peerId.toString()); } - public class CidAPI { - public Map base32(Cid hash) throws IOException { - return (Map)retrieveAndParse("cid/base32?arg=" + hash); - } + @Deprecated + public Map findpeer(Multihash id) throws IOException { + return retrieveMap("dht/findpeer?arg=" + id.toString()); + } - public List bases(boolean prefix, boolean numeric) throws IOException { - return (List)retrieveAndParse("cid/bases?prefix=" + prefix + "&numeric=" + numeric); - } + @Deprecated + public Map get(Multihash hash) throws IOException { + return retrieveMap("dht/get?arg=" + hash); + } - public List codecs(boolean numeric, boolean supported) throws IOException { - return (List)retrieveAndParse("cid/codecs?numeric=" + numeric + "&supported=" + supported); - } + @Deprecated + public Map put(String key, String value) throws IOException { + return retrieveMap("dht/put?arg=" + key + "&arg=" + value); + } + } - public Map format(Cid hash, Optional f, Optional v, Optional mc, Optional b) throws IOException { - String fArg = f.isPresent() ? "&f=" + URLEncoder.encode(f.get(), "UTF-8") : ""; - String vArg = v.isPresent() ? "&v=" + v.get() : ""; - String mcArg = mc.isPresent() ? "&mc=" + mc.get() : ""; - String bArg = b.isPresent() ? "&b=" + b.get() : ""; - return (Map)retrieveAndParse("cid/format?arg=" + hash + fArg + vArg + mcArg + bArg); - } + public class File { + @Deprecated + public Map ls(Multihash path) throws IOException { + return retrieveMap("file/ls?arg=" + path); + } + } - public List hashes(boolean numeric, boolean supported) throws IOException { - return (List)retrieveAndParse("cid/hashes?numeric=" + numeric + "&supported=" + supported); - } + public class Files { + public String chcid() throws IOException { + return retrieveString("files/chcid"); } - /* 'ipfs block' is a plumbing command used to manipulate raw ipfs blocks. - */ - public class Block { - public byte[] get(Multihash hash) throws IOException { - return retrieve("block/get?stream-channels=true&arg=" + hash); - } - public byte[] rm(Multihash hash) throws IOException { - return retrieve("block/rm?stream-channels=true&arg=" + hash); - } + public String chcid(String path) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieveString("files/chcid?args=" + arg); + } - public List put(List data) throws IOException { - return put(data, Optional.empty()); - } + public String chcid(String path, Optional cidVersion, Optional hash) + throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + String cid = cidVersion.isPresent() ? "&cid-version=" + cidVersion.get() : ""; + String hashFunc = hash.isPresent() ? "&hash=" + hash.get() : ""; + return retrieveString("files/chcid?args=" + arg + cid + hashFunc); + } - public List put(List data, Optional format) throws IOException { - // N.B. 
Once IPFS implements a bulk put this can become a single multipart call with multiple 'files' - List res = new ArrayList<>(); - for (byte[] value : data) { - res.add(put(value, format)); - } - return res; - } + public String cp(String source, String dest, boolean parents) throws IOException { + return retrieveString( + "files/cp?arg=" + + URLEncoder.encode(source, "UTF-8") + + "&arg=" + + URLEncoder.encode(dest, "UTF-8") + + "&parents=" + + parents); + } - public MerkleNode put(byte[] data, Optional format) throws IOException { - String fmt = format.map(f -> "&format=" + f).orElse(""); - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"block/put?stream-channels=true" + fmt, "UTF-8"); - try { - m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data)); - String res = m.finish(); - return JSONParser.parseStream(res).stream().map(x -> MerkleNode.fromJSON((Map) x)).findFirst().get(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } + public Map flush() throws IOException { + return retrieveMap("files/flush"); + } - public Map stat(Multihash hash) throws IOException { - return retrieveMap("block/stat?stream-channels=true&arg=" + hash); - } + public Map flush(String path) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieveMap("files/flush?arg=" + arg); } - /* 'ipfs object' is a plumbing command used to manipulate DAG objects directly. {Object} is a subset of {Block} - */ - public class IPFSObject { - @Deprecated - public List put(List data) throws IOException { - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"object/put?stream-channels=true", "UTF-8"); - for (byte[] f : data) - m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(f)); - String res = m.finish(); - return JSONParser.parseStream(res).stream().map(x -> MerkleNode.fromJSON((Map) x)).collect(Collectors.toList()); - } - @Deprecated - public List put(String encoding, List data) throws IOException { - if (!"json".equals(encoding) && !"protobuf".equals(encoding)) - throw new IllegalArgumentException("Encoding must be json or protobuf"); - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"object/put?stream-channels=true&encoding="+encoding, "UTF-8"); - for (byte[] f : data) - m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(f)); - String res = m.finish(); - return JSONParser.parseStream(res).stream().map(x -> MerkleNode.fromJSON((Map) x)).collect(Collectors.toList()); - } - @Deprecated - public MerkleNode get(Multihash hash) throws IOException { - Map json = retrieveMap("object/get?stream-channels=true&arg=" + hash); - json.put("Hash", hash.toBase58()); - return MerkleNode.fromJSON(json); - } - @Deprecated - public MerkleNode links(Multihash hash) throws IOException { - Map json = retrieveMap("object/links?stream-channels=true&arg=" + hash); - return MerkleNode.fromJSON(json); - } - @Deprecated - public Map stat(Multihash hash) throws IOException { - return retrieveMap("object/stat?stream-channels=true&arg=" + hash); - } - @Deprecated - public byte[] data(Multihash hash) throws IOException { - return retrieve("object/data?stream-channels=true&arg=" + hash); - } - @Deprecated - public MerkleNode _new(Optional template) throws IOException { - if (template.isPresent() && !ObjectTemplates.contains(template.get())) - throw new IllegalStateException("Unrecognised template: "+template.get()); - Map json = 
retrieveMap("object/new?stream-channels=true"+(template.isPresent() ? "&arg=" + template.get() : "")); - return MerkleNode.fromJSON(json); - } - @Deprecated - public MerkleNode patch(Multihash base, String command, Optional data, Optional name, Optional target) throws IOException { - if (!ObjectPatchTypes.contains(command)) - throw new IllegalStateException("Illegal Object.patch command type: "+command); - String targetPath = "object/patch/"+command+"?arg=" + base.toBase58(); - if (name.isPresent()) - targetPath += "&arg=" + name.get(); - if (target.isPresent()) - targetPath += "&arg=" + target.get().toBase58(); - - switch (command) { - case "add-link": - if (!target.isPresent()) - throw new IllegalStateException("add-link requires name and target!"); - case "rm-link": - if (!name.isPresent()) - throw new IllegalStateException("link name is required!"); - return MerkleNode.fromJSON(retrieveMap(targetPath)); - case "set-data": - case "append-data": - if (!data.isPresent()) - throw new IllegalStateException("set-data requires data!"); - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"object/patch/"+command+"?arg="+base.toBase58()+"&stream-channels=true", "UTF-8"); - m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(data.get())); - String res = m.finish(); - return MerkleNode.fromJSON(JSONParser.parse(res)); - - default: - throw new IllegalStateException("Unimplemented"); - } - } + public List ls() throws IOException { + return (List) retrieveMap("files/ls").get("Entries"); } - public class Name { - public Map publish(Multihash hash) throws IOException { - return publish(hash, Optional.empty()); - } + public List ls(String path) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return (List) retrieveMap("files/ls?arg=" + arg).get("Entries"); + } - public Map publish(Multihash hash, Optional id) throws IOException { - return retrieveMap("name/publish?arg=/ipfs/" + hash + id.map(name -> "&key=" + name).orElse("")); - } + public List ls(String path, boolean longListing, boolean u) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return (List) + retrieveMap("files/ls?arg=" + arg + "&long=" + longListing + "&U=" + u).get("Entries"); + } - public String resolve(Multihash hash) throws IOException { - Map res = (Map) retrieveAndParse("name/resolve?arg=" + hash); - return (String)res.get("Path"); - } + public String mkdir(String path, boolean parents) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieveString("files/mkdir?arg=" + arg + "&parents=" + parents); + } - public String resolve(String name) throws IOException { - Map res = (Map) retrieveAndParse("name/resolve?arg=" + name); - return (String)res.get("Path"); - } + public String mkdir( + String path, boolean parents, Optional cidVersion, Optional hash) + throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + String cid = cidVersion.isPresent() ? "&cid-version=" + cidVersion.get() : ""; + String hashFunc = hash.isPresent() ? 
"&hash=" + hash.get() : ""; + return retrieveString("files/mkdir?arg=" + arg + "&parents=" + parents + cid + hashFunc); } - public class DHT { - @Deprecated - public List> findprovs(Multihash hash) throws IOException { - return getAndParseStream("dht/findprovs?arg=" + hash).stream() - .map(x -> (Map) x) - .collect(Collectors.toList()); - } + public String mv(String source, String dest) throws IOException { + return retrieveString( + "files/mv?arg=" + + URLEncoder.encode(source, "UTF-8") + + "&arg=" + + URLEncoder.encode(dest, "UTF-8")); + } - public Map query(Multihash peerId) throws IOException { - return retrieveMap("dht/query?arg=" + peerId.toString()); - } - @Deprecated - public Map findpeer(Multihash id) throws IOException { - return retrieveMap("dht/findpeer?arg=" + id.toString()); - } - @Deprecated - public Map get(Multihash hash) throws IOException { - return retrieveMap("dht/get?arg=" + hash); - } - @Deprecated - public Map put(String key, String value) throws IOException { - return retrieveMap("dht/put?arg=" + key + "&arg="+value); - } + public byte[] read(String path) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieve("files/read?arg=" + arg); } - public class File { - @Deprecated - public Map ls(Multihash path) throws IOException { - return retrieveMap("file/ls?arg=" + path); - } + public byte[] read(String path, int offset, int count) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieve("files/read?arg=" + arg + "&offset=" + offset + "&count=" + count); } - public class Files { + public String rm(String path, boolean recursive, boolean force) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieveString("files/rm?arg=" + arg + "&recursive=" + recursive + "&force=" + force); + } - public String chcid() throws IOException { - return retrieveString("files/chcid"); - } + public Map stat(String path) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + return retrieveMap("files/stat?arg=" + arg); + } - public String chcid(String path) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieveString("files/chcid?args=" + arg); - } + public Map stat(String path, Optional format, boolean withLocal) throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + String formatStr = format.isPresent() ? "&format=" + format.get() : ""; + return retrieveMap("files/stat?arg=" + arg + formatStr + "&with-local=" + withLocal); + } - public String chcid(String path, Optional cidVersion, Optional hash) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - String cid = cidVersion.isPresent() ? "&cid-version=" + cidVersion.get() : ""; - String hashFunc = hash.isPresent() ? 
"&hash=" + hash.get() : ""; - return retrieveString("files/chcid?args=" + arg + cid + hashFunc); - } + public String write(String path, NamedStreamable uploadFile, boolean create, boolean parents) + throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + String rpcParams = "files/write?arg=" + arg + "&create=" + create + "&parents=" + parents; + URL target = new URL(protocol, host, port, apiVersion + rpcParams); + Multipart m = new Multipart(target.toString(), "UTF-8"); + if (uploadFile.isDirectory()) { + throw new IllegalArgumentException("Input must be a file"); + } else { + m.addFilePart("file", Paths.get(""), uploadFile); + } + return m.finish(); + } - public String cp(String source, String dest, boolean parents) throws IOException { - return retrieveString("files/cp?arg=" + URLEncoder.encode(source, "UTF-8") + "&arg=" + - URLEncoder.encode(dest, "UTF-8") + "&parents=" + parents); - } + public String write(String path, NamedStreamable uploadFile, WriteFilesArgs args) + throws IOException { + String arg = URLEncoder.encode(path, "UTF-8"); + String rpcParams = "files/write?arg=" + arg + "&" + args.toQueryString(); + URL target = new URL(protocol, host, port, apiVersion + rpcParams); + Multipart m = new Multipart(target.toString(), "UTF-8"); + if (uploadFile.isDirectory()) { + throw new IllegalArgumentException("Input must be a file"); + } else { + m.addFilePart("file", Paths.get(""), uploadFile); + } + return m.finish(); + } + } - public Map flush() throws IOException { - return retrieveMap("files/flush"); - } + public class FileStore { - public Map flush(String path) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieveMap("files/flush?arg=" + arg); - } + public Map dups() throws IOException { + return retrieveMap("filestore/dups"); + } - public List ls() throws IOException { - return (List)retrieveMap("files/ls").get("Entries"); - } + public Map ls(boolean fileOrder) throws IOException { + return retrieveMap("filestore/ls?file-order=" + fileOrder); + } - public List ls(String path) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return (List)retrieveMap("files/ls?arg=" + arg).get("Entries"); - } + public Map verify(boolean fileOrder) throws IOException { + return retrieveMap("filestore/verify?file-order=" + fileOrder); + } + } - public List ls(String path, boolean longListing, boolean u) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return (List)retrieveMap("files/ls?arg=" + arg + "&long=" + longListing + "&U=" + u).get("Entries"); - } + // Network commands + public List bootstrap() throws IOException { + return ((List) retrieveMap("bootstrap/").get("Peers")) + .stream() + .flatMap( + x -> { + try { + return Stream.of(new MultiAddress(x)); + } catch (Exception e) { + return Stream.empty(); + } + }) + .collect(Collectors.toList()); + } - public String mkdir(String path, boolean parents) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieveString("files/mkdir?arg=" + arg + "&parents=" + parents); - } + public class Bitswap { + public Map ledger(Multihash peerId) throws IOException { + return retrieveMap("bitswap/ledger?arg=" + peerId); + } - public String mkdir(String path, boolean parents, Optional cidVersion, Optional hash) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - String cid = cidVersion.isPresent() ? "&cid-version=" + cidVersion.get() : ""; - String hashFunc = hash.isPresent() ? 
"&hash=" + hash.get() : ""; - return retrieveString("files/mkdir?arg=" + arg + "&parents=" + parents + cid + hashFunc); - } + public String reprovide() throws IOException { + return retrieveString("bitswap/reprovide"); + } - public String mv(String source, String dest) throws IOException { - return retrieveString("files/mv?arg=" + URLEncoder.encode(source, "UTF-8") + "&arg=" + - URLEncoder.encode(dest, "UTF-8")); - } + public Map stat() throws IOException { + return retrieveMap("bitswap/stat"); + } - public byte[] read(String path) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieve("files/read?arg=" + arg); - } + public Map stat(boolean verbose) throws IOException { + return retrieveMap("bitswap/stat?verbose=" + verbose); + } - public byte[] read(String path, int offset, int count) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieve("files/read?arg=" + arg + "&offset=" + offset + "&count=" + count); - } + public Map wantlist(Multihash peerId) throws IOException { + return retrieveMap("bitswap/wantlist?peer=" + peerId); + } + } - public String rm(String path, boolean recursive, boolean force) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieveString("files/rm?arg=" + arg + "&recursive=" + recursive + "&force=" + force); - } + public class Bootstrap { - public Map stat(String path) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - return retrieveMap("files/stat?arg=" + arg); - } - public Map stat(String path, Optional format, boolean withLocal) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - String formatStr = format.isPresent() ? "&format=" + format.get() : ""; - return retrieveMap("files/stat?arg=" + arg + formatStr + "&with-local=" + withLocal); - } - public String write(String path, NamedStreamable uploadFile, boolean create, boolean parents) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - String rpcParams = "files/write?arg=" + arg + "&create=" + create + "&parents=" + parents; - URL target = new URL(protocol,host,port,apiVersion + rpcParams); - Multipart m = new Multipart(target.toString(),"UTF-8"); - if (uploadFile.isDirectory()) { - throw new IllegalArgumentException("Input must be a file"); - } else { - m.addFilePart("file", Paths.get(""), uploadFile); - } - return m.finish(); - } + public List add(MultiAddress addr) throws IOException { + return ((List) retrieveMap("bootstrap/add?arg=" + addr).get("Peers")) + .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); + } - public String write(String path, NamedStreamable uploadFile, WriteFilesArgs args) throws IOException { - String arg = URLEncoder.encode(path, "UTF-8"); - String rpcParams = "files/write?arg=" + arg + "&" + args.toQueryString(); - URL target = new URL(protocol,host,port,apiVersion + rpcParams); - Multipart m = new Multipart(target.toString(),"UTF-8"); - if (uploadFile.isDirectory()) { - throw new IllegalArgumentException("Input must be a file"); - } else { - m.addFilePart("file", Paths.get(""), uploadFile); - } - return m.finish(); - } + public List add() throws IOException { + return ((List) retrieveMap("bootstrap/add/default").get("Peers")) + .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); } - public class FileStore { + public List list() throws IOException { + return ((List) retrieveMap("bootstrap/list?expand-auto=true").get("Peers")) + .stream().map(x -> new 
MultiAddress(x)).collect(Collectors.toList()); + } - public Map dups() throws IOException { - return retrieveMap("filestore/dups"); - } + public List rm(MultiAddress addr) throws IOException { + return rm(addr, false); + } - public Map ls(boolean fileOrder) throws IOException { - return retrieveMap("filestore/ls?file-order=" + fileOrder); - } + public List rm(MultiAddress addr, boolean all) throws IOException { + return ((List) + retrieveMap("bootstrap/rm?" + (all ? "all=true&" : "") + "arg=" + addr).get("Peers")) + .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); + } - public Map verify(boolean fileOrder) throws IOException { - return retrieveMap("filestore/verify?file-order=" + fileOrder); - } + public List rmAll() throws IOException { + return ((List) retrieveMap("bootstrap/rm/all").get("Peers")) + .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); } + } - // Network commands - public List bootstrap() throws IOException { - return ((List)retrieveMap("bootstrap/").get("Peers")) - .stream() - .flatMap(x -> { + /* ipfs swarm is a tool to manipulate the network swarm. The swarm is the + component that opens, listens for, and maintains connections to other + ipfs peers in the internet. + */ + public class Swarm { + public List peers() throws IOException { + Map m = retrieveMap("swarm/peers?stream-channels=true"); + if (m.get("Peers") == null) { + return Collections.emptyList(); + } + return ((List) m.get("Peers")) + .stream() + .flatMap( + json -> { try { - return Stream.of(new MultiAddress(x)); + return Stream.of(Peer.fromJSON(json)); } catch (Exception e) { - return Stream.empty(); + return Stream.empty(); } - }).collect(Collectors.toList()); + }) + .collect(Collectors.toList()); } - public class Bitswap { - public Map ledger(Multihash peerId) throws IOException { - return retrieveMap("bitswap/ledger?arg="+peerId); - } - - public String reprovide() throws IOException { - return retrieveString("bitswap/reprovide"); - } - public Map stat() throws IOException { - return retrieveMap("bitswap/stat"); - } - public Map stat(boolean verbose) throws IOException { - return retrieveMap("bitswap/stat?verbose=" + verbose); - } - public Map wantlist(Multihash peerId) throws IOException { - return retrieveMap("bitswap/wantlist?peer=" + peerId); - } + public Map> addrs() throws IOException { + Map m = retrieveMap("swarm/addrs?stream-channels=true"); + return ((Map) m.get("Addrs")) + .entrySet().stream() + .collect( + Collectors.toMap( + e -> Multihash.fromBase58(e.getKey()), + e -> + ((List) e.getValue()) + .stream().map(MultiAddress::new).collect(Collectors.toList()))); } - public class Bootstrap { - - public List add(MultiAddress addr) throws IOException { - return ((List)retrieveMap("bootstrap/add?arg="+addr).get("Peers")) - .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); - } - - public List add() throws IOException { - return ((List)retrieveMap("bootstrap/add/default").get("Peers")) - .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); - } - public List list() throws IOException { - return ((List)retrieveMap("bootstrap/list?expand-auto=true").get("Peers")) - .stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); - } - - public List rm(MultiAddress addr) throws IOException { - return rm(addr, false); - } - - public List rm(MultiAddress addr, boolean all) throws IOException { - return ((List)retrieveMap("bootstrap/rm?"+(all ? 
"all=true&":"")+"arg="+addr).get("Peers")).stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); - } - - public List rmAll() throws IOException { - return ((List)retrieveMap("bootstrap/rm/all").get("Peers")).stream().map(x -> new MultiAddress(x)).collect(Collectors.toList()); - } + public Map listenAddrs() throws IOException { + return retrieveMap("swarm/addrs/listen"); } - /* ipfs swarm is a tool to manipulate the network swarm. The swarm is the - component that opens, listens for, and maintains connections to other - ipfs peers in the internet. - */ - public class Swarm { - public List peers() throws IOException { - Map m = retrieveMap("swarm/peers?stream-channels=true"); - if (m.get("Peers") == null) { - return Collections.emptyList(); - } - return ((List)m.get("Peers")).stream() - .flatMap(json -> { - try { - return Stream.of(Peer.fromJSON(json)); - } catch (Exception e) { - return Stream.empty(); - } - }).collect(Collectors.toList()); - } - - public Map> addrs() throws IOException { - Map m = retrieveMap("swarm/addrs?stream-channels=true"); - return ((Map)m.get("Addrs")).entrySet() - .stream() - .collect(Collectors.toMap( - e -> Multihash.fromBase58(e.getKey()), - e -> ((List)e.getValue()) - .stream() - .map(MultiAddress::new) - .collect(Collectors.toList()))); - } - public Map listenAddrs() throws IOException { - return retrieveMap("swarm/addrs/listen"); - } - public Map localAddrs(boolean showPeerId) throws IOException { - return retrieveMap("swarm/addrs/local?id=" + showPeerId); - } - public Map connect(MultiAddress multiAddr) throws IOException { - Map m = retrieveMap("swarm/connect?arg="+multiAddr); - return m; - } - - public Map disconnect(MultiAddress multiAddr) throws IOException { - Map m = retrieveMap("swarm/disconnect?arg="+multiAddr); - return m; - } - public Map filters() throws IOException { - return retrieveMap("swarm/filters"); - } - public Map addFilter(String multiAddrFilter) throws IOException { - return retrieveMap("swarm/filters/add?arg="+multiAddrFilter); - } - public Map rmFilter(String multiAddrFilter) throws IOException { - return retrieveMap("swarm/filters/rm?arg="+multiAddrFilter); - } - public Map lsPeering() throws IOException { - return retrieveMap("swarm/peering/ls"); - } - public Map addPeering(MultiAddress multiAddr) throws IOException { - return retrieveMap("swarm/peering/add?arg="+multiAddr); - } - public Map rmPeering(Multihash multiAddr) throws IOException { - return retrieveMap("swarm/peering/rm?arg="+multiAddr); - } + public Map localAddrs(boolean showPeerId) throws IOException { + return retrieveMap("swarm/addrs/local?id=" + showPeerId); } - public class Dag { - public byte[] get(Cid cid) throws IOException { - return retrieve("dag/get?stream-channels=true&arg=" + cid); - } - - public MerkleNode put(byte[] object) throws IOException { - return put("dag-json", object, "dag-cbor"); - } - - public MerkleNode put(String inputFormat, byte[] object) throws IOException { - return put(inputFormat, object, "dag-cbor"); - } - - public MerkleNode put(byte[] object, String outputFormat) throws IOException { - return put("dag-json", object, outputFormat); - } - - public MerkleNode put(String inputFormat, byte[] object, String outputFormat) throws IOException { - String prefix = protocol + "://" + host + ":" + port + apiVersion; - Multipart m = new Multipart(prefix + "dag/put/?stream-channels=true&input-codec=" + inputFormat + "&store-codec=" + outputFormat, "UTF-8"); - m.addFilePart("file", Paths.get(""), new 
NamedStreamable.ByteArrayWrapper(object)); - String res = m.finish(); - return MerkleNode.fromJSON(JSONParser.parse(res)); - } - - public Map resolve(String path) throws IOException { - return retrieveMap("dag/resolve?&arg=" + path); - } - - public Map stat(Cid cid) throws IOException { - return retrieveMap("dag/stat?&arg=" + cid); - } + public Map connect(MultiAddress multiAddr) throws IOException { + Map m = retrieveMap("swarm/connect?arg=" + multiAddr); + return m; } - public class Diag { - public List cmds() throws IOException { - return (List)retrieveAndParse("diag/cmds"); - } + public Map disconnect(MultiAddress multiAddr) throws IOException { + Map m = retrieveMap("swarm/disconnect?arg=" + multiAddr); + return m; + } - public List cmds(boolean verbose) throws IOException { - return (List)retrieveAndParse("diag/cmds?verbose=" + verbose); - } + public Map filters() throws IOException { + return retrieveMap("swarm/filters"); + } - public String clearCmds() throws IOException { - return retrieveString("diag/cmds/clear"); - } + public Map addFilter(String multiAddrFilter) throws IOException { + return retrieveMap("swarm/filters/add?arg=" + multiAddrFilter); + } - public String profile() throws IOException { - return retrieveString("diag/profile"); - } + public Map rmFilter(String multiAddrFilter) throws IOException { + return retrieveMap("swarm/filters/rm?arg=" + multiAddrFilter); + } - public Map sys() throws IOException { - return retrieveMap("diag/sys?stream-channels=true"); - } + public Map lsPeering() throws IOException { + return retrieveMap("swarm/peering/ls"); } - public Map ping(Multihash target) throws IOException { - return retrieveMap("ping/" + target.toBase58()); + public Map addPeering(MultiAddress multiAddr) throws IOException { + return retrieveMap("swarm/peering/add?arg=" + multiAddr); } - public Map id(Multihash target) throws IOException { - return retrieveMap("id/" + target.toBase58()); + public Map rmPeering(Multihash multiAddr) throws IOException { + return retrieveMap("swarm/peering/rm?arg=" + multiAddr); } + } - public Map id() throws IOException { - return retrieveMap("id"); + public class Dag { + public byte[] get(Cid cid) throws IOException { + return retrieve("dag/get?stream-channels=true&arg=" + cid); } - public class Stats { - public Map bitswap(boolean verbose) throws IOException { - return retrieveMap("stats/bitswap?verbose=" + verbose); - } - public Map bw() throws IOException { - return retrieveMap("stats/bw"); - } - public Map dht() throws IOException { - return retrieveMap("stats/dht"); - } - public Map provide() throws IOException { - return retrieveMap("stats/provide"); - } - public RepoStat repo(boolean sizeOnly) throws IOException { - return RepoStat.fromJson(retrieveAndParse("stats/repo?size-only=" + sizeOnly)); - } + public MerkleNode put(byte[] object) throws IOException { + return put("dag-json", object, "dag-cbor"); } - // Tools - public String version() throws IOException { - Map m = (Map)retrieveAndParse("version"); - return (String)m.get("Version"); + public MerkleNode put(String inputFormat, byte[] object) throws IOException { + return put(inputFormat, object, "dag-cbor"); } - public Map commands() throws IOException { - return retrieveMap("commands"); + public MerkleNode put(byte[] object, String outputFormat) throws IOException { + return put("dag-json", object, outputFormat); } - public Map log() throws IOException { - return retrieveMap("log/tail"); + public MerkleNode put(String inputFormat, byte[] object, String outputFormat) + 
throws IOException { + String prefix = protocol + "://" + host + ":" + port + apiVersion; + Multipart m = + new Multipart( + prefix + + "dag/put/?stream-channels=true&input-codec=" + + inputFormat + + "&store-codec=" + + outputFormat, + "UTF-8"); + m.addFilePart("file", Paths.get(""), new NamedStreamable.ByteArrayWrapper(object)); + String res = m.finish(); + return MerkleNode.fromJSON(JSONParser.parse(res)); } - public Map config(String entry, Optional value, Optional setBool) throws IOException { - String valArg = value.isPresent() ? "&arg=" + value.get() : ""; - String setBoolArg = setBool.isPresent() ? "&arg=" + setBool.get() : ""; - return retrieveMap("config?arg=" + entry + valArg + setBoolArg); + public Map resolve(String path) throws IOException { + return retrieveMap("dag/resolve?&arg=" + path); } - public class Config { - public Map show() throws IOException { - return (Map)retrieveAndParse("config/show"); - } + public Map stat(Cid cid) throws IOException { + return retrieveMap("dag/stat?&arg=" + cid); + } + } - public Map profileApply(String profile, boolean dryRun) throws IOException { - return (Map)retrieveAndParse("config/profile/apply?arg="+profile + "&dry-run" + dryRun); - } + public class Diag { + public List cmds() throws IOException { + return (List) retrieveAndParse("diag/cmds"); + } - public void replace(NamedStreamable file) throws IOException { - Multipart m = new Multipart(protocol +"://" + host + ":" + port + apiVersion+"config/replace?stream-channels=true", "UTF-8"); - m.addFilePart("file", Paths.get(""), file); - String res = m.finish(); - } + public List cmds(boolean verbose) throws IOException { + return (List) retrieveAndParse("diag/cmds?verbose=" + verbose); + } - public Object get(String key) throws IOException { - Map m = (Map)retrieveAndParse("config?arg="+key); - return m.get("Value"); - } + public String clearCmds() throws IOException { + return retrieveString("diag/cmds/clear"); + } - public Map set(String key, Object value) throws IOException { - return retrieveMap("config?arg=" + key + "&arg=" + value); - } + public String profile() throws IOException { + return retrieveString("diag/profile"); } - public Object update() throws IOException { - return retrieveAndParse("update"); + public Map sys() throws IOException { + return retrieveMap("diag/sys?stream-channels=true"); } + } - public class Update { - public Object check() throws IOException { - return retrieveAndParse("update/check"); - } + public Map ping(Multihash target) throws IOException { + return retrieveMap("ping/" + target.toBase58()); + } - public Object log() throws IOException { - return retrieveAndParse("update/log"); - } - } + public Map id(Multihash target) throws IOException { + return retrieveMap("id/" + target.toBase58()); + } + + public Map id() throws IOException { + return retrieveMap("id"); + } - private Map retrieveMap(String path) throws IOException { - return (Map)retrieveAndParse(path); + public class Stats { + public Map bitswap(boolean verbose) throws IOException { + return retrieveMap("stats/bitswap?verbose=" + verbose); } - private Object retrieveAndParse(String path) throws IOException { - byte[] res = retrieve(path); - return JSONParser.parse(new String(res)); + public Map bw() throws IOException { + return retrieveMap("stats/bw"); } - private Stream retrieveAndParseStream(String path, ForkJoinPool executor) throws IOException { - BlockingQueue> results = new LinkedBlockingQueue<>(); - InputStream in = retrieveStream(path); - executor.submit(() -> getObjectStream(in, - 
res -> { - results.add(CompletableFuture.completedFuture(res)); - }, - err -> { - CompletableFuture fut = new CompletableFuture<>(); - fut.completeExceptionally(err); - results.add(fut); - }) - ); - return Stream.generate(() -> { - try { - return JSONParser.parse(new String(results.take().get())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + public Map dht() throws IOException { + return retrieveMap("stats/dht"); } - /** - * A synchronous stream retriever that consumes the calling thread - * @param path - * @param results - * @throws IOException - */ - private void retrieveAndParseStream(String path, Consumer results, Consumer err) throws IOException { - getObjectStream(retrieveStream(path), d -> results.accept(JSONParser.parse(new String(d))), err); - } - - private String retrieveString(String path) throws IOException { - URL target = new URL(protocol, host, port, apiVersion + path); - return new String(IPFS.get(target, connectTimeoutMillis, readTimeoutMillis)); - } - - private byte[] retrieve(String path) throws IOException { - URL target = new URL(protocol, host, port, apiVersion + path); - return IPFS.get(target, connectTimeoutMillis, readTimeoutMillis); - } - - private static byte[] get(URL target, int connectTimeoutMillis, int readTimeoutMillis) throws IOException { - HttpURLConnection conn = configureConnection(target, "POST", connectTimeoutMillis, readTimeoutMillis); - conn.setDoOutput(true); - /* See IPFS commit for why this is a POST and not a GET https://github.com/ipfs/go-ipfs/pull/7097 - This commit upgrades go-ipfs-cmds and configures the commands HTTP API Handler - to only allow POST/OPTIONS, disallowing GET and others in the handling of - command requests in the IPFS HTTP API (where before every type of request - method was handled, with GET/POST/PUT/PATCH being equivalent). - - The Read-Only commands that the HTTP API attaches to the gateway endpoint will - additional handled GET as they did before (but stop handling PUT,DELETEs). - - By limiting the request types we address the possibility that a website - accessed by a browser abuses the IPFS API by issuing GET requests to it which - have no Origin or Referrer set, and are thus bypass CORS and CSRF protections. - - This is a breaking change for clients that relay on GET requests against the - HTTP endpoint (usually :5001). Applications integrating on top of the - gateway-read-only API should still work (including cross-domain access). - */ - conn.setRequestMethod("POST"); - conn.setRequestProperty("Content-Type", "application/json"); - - try { - OutputStream out = conn.getOutputStream(); - out.write(new byte[0]); - out.flush(); - out.close(); - InputStream in = conn.getInputStream(); - ByteArrayOutputStream resp = new ByteArrayOutputStream(); - - byte[] buf = new byte[4096]; - int r; - while ((r = in.read(buf)) >= 0) - resp.write(buf, 0, r); - return resp.toByteArray(); - } catch (ConnectException e) { - throw new RuntimeException("Couldn't connect to IPFS daemon at "+target+"\n Is IPFS running?"); - } catch (IOException e) { - throw extractError(e, conn); - } + public Map provide() throws IOException { + return retrieveMap("stats/provide"); } - public static RuntimeException extractError(IOException e, HttpURLConnection conn) { - InputStream errorStream = conn.getErrorStream(); - String err = errorStream == null ? 
e.getMessage() : new String(readFully(errorStream)); - return new RuntimeException("IOException contacting IPFS daemon.\n"+err+"\nTrailer: " + conn.getHeaderFields().get("Trailer"), e); + public RepoStat repo(boolean sizeOnly) throws IOException { + return RepoStat.fromJson(retrieveAndParse("stats/repo?size-only=" + sizeOnly)); } + } - private void getObjectStream(InputStream in, Consumer processor, Consumer error) { - byte LINE_FEED = (byte)10; + // Tools + public String version() throws IOException { + Map m = (Map) retrieveAndParse("version"); + return (String) m.get("Version"); + } - try { - ByteArrayOutputStream resp = new ByteArrayOutputStream(); + public Map commands() throws IOException { + return retrieveMap("commands"); + } - byte[] buf = new byte[4096]; - int r; - while ((r = in.read(buf)) >= 0) { - resp.write(buf, 0, r); - if (buf[r - 1] == LINE_FEED) { - processor.accept(resp.toByteArray()); - resp.reset(); - } - } - } catch (IOException e) { - error.accept(e); - } - } + public Map log() throws IOException { + return retrieveMap("log/tail"); + } - private List getAndParseStream(String path) throws IOException { - InputStream in = retrieveStream(path); - byte LINE_FEED = (byte)10; + public Map config(String entry, Optional value, Optional setBool) + throws IOException { + String valArg = value.isPresent() ? "&arg=" + value.get() : ""; + String setBoolArg = setBool.isPresent() ? "&arg=" + setBool.get() : ""; + return retrieveMap("config?arg=" + entry + valArg + setBoolArg); + } - ByteArrayOutputStream resp = new ByteArrayOutputStream(); + public class Config { + public Map show() throws IOException { + return (Map) retrieveAndParse("config/show"); + } - byte[] buf = new byte[4096]; - int r; - List res = new ArrayList<>(); - while ((r = in.read(buf)) >= 0) { - resp.write(buf, 0, r); - if (buf[r - 1] == LINE_FEED) { - res.add(JSONParser.parse(new String(resp.toByteArray()))); - resp.reset(); - } - } - return res; + public Map profileApply(String profile, boolean dryRun) throws IOException { + return (Map) retrieveAndParse("config/profile/apply?arg=" + profile + "&dry-run" + dryRun); } - private InputStream retrieveStream(String path) throws IOException { - URL target = new URL(protocol, host, port, apiVersion + path); - return IPFS.getStream(target, connectTimeoutMillis, readTimeoutMillis); + public void replace(NamedStreamable file) throws IOException { + Multipart m = + new Multipart( + protocol + + "://" + + host + + ":" + + port + + apiVersion + + "config/replace?stream-channels=true", + "UTF-8"); + m.addFilePart("file", Paths.get(""), file); + String res = m.finish(); } - private static InputStream getStream(URL target, int connectTimeoutMillis, int readTimeoutMillis) throws IOException { - HttpURLConnection conn = configureConnection(target, "POST", connectTimeoutMillis, readTimeoutMillis); - try { - return conn.getInputStream(); - } catch (IOException e) { - throw extractError(e, conn); - } + public Object get(String key) throws IOException { + Map m = (Map) retrieveAndParse("config?arg=" + key); + return m.get("Value"); } - private Map postMap(String path, byte[] body, Map headers) throws IOException { - URL target = new URL(protocol, host, port, apiVersion + path); - return (Map) JSONParser.parse(new String(post(target, body, headers, connectTimeoutMillis, readTimeoutMillis))); + public Map set(String key, Object value) throws IOException { + return retrieveMap("config?arg=" + key + "&arg=" + value); } + } - private static byte[] post(URL target, byte[] body, Map headers, 
int connectTimeoutMillis, int readTimeoutMillis) throws IOException { - HttpURLConnection conn = configureConnection(target, "POST", connectTimeoutMillis, readTimeoutMillis); - for (String key: headers.keySet()) - conn.setRequestProperty(key, headers.get(key)); - conn.setDoOutput(true); - OutputStream out = conn.getOutputStream(); - out.write(body); - out.flush(); - out.close(); + public Object update() throws IOException { + return retrieveAndParse("update"); + } - try { - InputStream in = conn.getInputStream(); - return readFully(in); - } catch (IOException e) { - throw extractError(e, conn); - } + public class Update { + public Object check() throws IOException { + return retrieveAndParse("update/check"); } - private static final byte[] readFully(InputStream in) { - try { - ByteArrayOutputStream resp = new ByteArrayOutputStream(); - byte[] buf = new byte[4096]; - int r; - while ((r=in.read(buf)) >= 0) - resp.write(buf, 0, r); - return resp.toByteArray(); - - } catch(IOException ex) { - throw new RuntimeException("Error reading InputStrean", ex); - } + public Object log() throws IOException { + return retrieveAndParse("update/log"); } + } - private static boolean detectSSL(MultiAddress multiaddress) { - return multiaddress.toString().contains("/https"); - } - - private static HttpURLConnection configureConnection(URL target, String method, int connectTimeoutMillis, int readTimeoutMillis) throws IOException { - HttpURLConnection conn = (HttpURLConnection) target.openConnection(); - conn.setRequestMethod(method); - conn.setRequestProperty("Content-Type", "application/json"); - conn.setConnectTimeout(connectTimeoutMillis); - conn.setReadTimeout(readTimeoutMillis); - return conn; - } + private Map retrieveMap(String path) throws IOException { + return (Map) retrieveAndParse(path); + } + + private Object retrieveAndParse(String path) throws IOException { + byte[] res = retrieve(path); + return JSONParser.parse(new String(res)); + } + + private Stream retrieveAndParseStream(String path, ForkJoinPool executor) + throws IOException { + BlockingQueue> results = new LinkedBlockingQueue<>(); + InputStream in = retrieveStream(path); + executor.submit( + () -> + getObjectStream( + in, + res -> { + results.add(CompletableFuture.completedFuture(res)); + }, + err -> { + CompletableFuture fut = new CompletableFuture<>(); + fut.completeExceptionally(err); + results.add(fut); + })); + return Stream.generate( + () -> { + try { + return JSONParser.parse(new String(results.take().get())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + /** + * A synchronous stream retriever that consumes the calling thread + * + * @param path + * @param results + * @throws IOException + */ + private void retrieveAndParseStream( + String path, Consumer results, Consumer err) throws IOException { + getObjectStream( + retrieveStream(path), d -> results.accept(JSONParser.parse(new String(d))), err); + } + + private String retrieveString(String path) throws IOException { + URL target = new URL(protocol, host, port, apiVersion + path); + return new String(IPFS.get(target, connectTimeoutMillis, readTimeoutMillis)); + } + + private byte[] retrieve(String path) throws IOException { + URL target = new URL(protocol, host, port, apiVersion + path); + return IPFS.get(target, connectTimeoutMillis, readTimeoutMillis); + } + + private static byte[] get(URL target, int connectTimeoutMillis, int readTimeoutMillis) + throws IOException { + HttpURLConnection conn = + configureConnection(target, "POST", 
connectTimeoutMillis, readTimeoutMillis); + conn.setDoOutput(true); + /* See IPFS commit for why this is a POST and not a GET https://github.com/ipfs/go-ipfs/pull/7097 + This commit upgrades go-ipfs-cmds and configures the commands HTTP API Handler + to only allow POST/OPTIONS, disallowing GET and others in the handling of + command requests in the IPFS HTTP API (where before every type of request + method was handled, with GET/POST/PUT/PATCH being equivalent). + + The Read-Only commands that the HTTP API attaches to the gateway endpoint will + additional handled GET as they did before (but stop handling PUT,DELETEs). + + By limiting the request types we address the possibility that a website + accessed by a browser abuses the IPFS API by issuing GET requests to it which + have no Origin or Referrer set, and are thus bypass CORS and CSRF protections. + + This is a breaking change for clients that relay on GET requests against the + HTTP endpoint (usually :5001). Applications integrating on top of the + gateway-read-only API should still work (including cross-domain access). + */ + conn.setRequestMethod("POST"); + conn.setRequestProperty("Content-Type", "application/json"); + + try { + OutputStream out = conn.getOutputStream(); + out.write(new byte[0]); + out.flush(); + out.close(); + InputStream in = conn.getInputStream(); + ByteArrayOutputStream resp = new ByteArrayOutputStream(); + + byte[] buf = new byte[4096]; + int r; + while ((r = in.read(buf)) >= 0) resp.write(buf, 0, r); + return resp.toByteArray(); + } catch (ConnectException e) { + throw new RuntimeException( + "Couldn't connect to IPFS daemon at " + target + "\n Is IPFS running?"); + } catch (IOException e) { + throw extractError(e, conn); + } + } + + public static RuntimeException extractError(IOException e, HttpURLConnection conn) { + InputStream errorStream = conn.getErrorStream(); + String err = errorStream == null ? 
e.getMessage() : new String(readFully(errorStream)); + return new RuntimeException( + "IOException contacting IPFS daemon.\n" + + err + + "\nTrailer: " + + conn.getHeaderFields().get("Trailer"), + e); + } + + private void getObjectStream( + InputStream in, Consumer processor, Consumer error) { + byte LINE_FEED = (byte) 10; + + try { + ByteArrayOutputStream resp = new ByteArrayOutputStream(); + + byte[] buf = new byte[4096]; + int r; + while ((r = in.read(buf)) >= 0) { + resp.write(buf, 0, r); + if (buf[r - 1] == LINE_FEED) { + processor.accept(resp.toByteArray()); + resp.reset(); + } + } + } catch (IOException e) { + error.accept(e); + } + } + + private List getAndParseStream(String path) throws IOException { + InputStream in = retrieveStream(path); + byte LINE_FEED = (byte) 10; + + ByteArrayOutputStream resp = new ByteArrayOutputStream(); + + byte[] buf = new byte[4096]; + int r; + List res = new ArrayList<>(); + while ((r = in.read(buf)) >= 0) { + resp.write(buf, 0, r); + if (buf[r - 1] == LINE_FEED) { + res.add(JSONParser.parse(new String(resp.toByteArray()))); + resp.reset(); + } + } + return res; + } + + private InputStream retrieveStream(String path) throws IOException { + URL target = new URL(protocol, host, port, apiVersion + path); + return IPFS.getStream(target, connectTimeoutMillis, readTimeoutMillis); + } + + private static InputStream getStream(URL target, int connectTimeoutMillis, int readTimeoutMillis) + throws IOException { + HttpURLConnection conn = + configureConnection(target, "POST", connectTimeoutMillis, readTimeoutMillis); + try { + return conn.getInputStream(); + } catch (IOException e) { + throw extractError(e, conn); + } + } + + private Map postMap(String path, byte[] body, Map headers) throws IOException { + URL target = new URL(protocol, host, port, apiVersion + path); + return (Map) + JSONParser.parse( + new String(post(target, body, headers, connectTimeoutMillis, readTimeoutMillis))); + } + + private static byte[] post( + URL target, + byte[] body, + Map headers, + int connectTimeoutMillis, + int readTimeoutMillis) + throws IOException { + HttpURLConnection conn = + configureConnection(target, "POST", connectTimeoutMillis, readTimeoutMillis); + for (String key : headers.keySet()) conn.setRequestProperty(key, headers.get(key)); + conn.setDoOutput(true); + OutputStream out = conn.getOutputStream(); + out.write(body); + out.flush(); + out.close(); + + try { + InputStream in = conn.getInputStream(); + return readFully(in); + } catch (IOException e) { + throw extractError(e, conn); + } + } + + private static final byte[] readFully(InputStream in) { + try { + ByteArrayOutputStream resp = new ByteArrayOutputStream(); + byte[] buf = new byte[4096]; + int r; + while ((r = in.read(buf)) >= 0) resp.write(buf, 0, r); + return resp.toByteArray(); + + } catch (IOException ex) { + throw new RuntimeException("Error reading InputStrean", ex); + } + } + + private static boolean detectSSL(MultiAddress multiaddress) { + return multiaddress.toString().contains("/https"); + } + + private static HttpURLConnection configureConnection( + URL target, String method, int connectTimeoutMillis, int readTimeoutMillis) + throws IOException { + HttpURLConnection conn = (HttpURLConnection) target.openConnection(); + conn.setRequestMethod(method); + conn.setRequestProperty("Content-Type", "application/json"); + conn.setConnectTimeout(connectTimeoutMillis); + conn.setReadTimeout(readTimeoutMillis); + return conn; + } } diff --git a/src/main/java/io/ipfs/api/IpldNode.java 
b/src/main/java/io/ipfs/api/IpldNode.java index 6663767e..ed617d68 100644 --- a/src/main/java/io/ipfs/api/IpldNode.java +++ b/src/main/java/io/ipfs/api/IpldNode.java @@ -1,216 +1,217 @@ package io.ipfs.api; -import io.ipfs.api.cbor.*; -import io.ipfs.cid.*; -import io.ipfs.multihash.*; - -import java.security.*; -import java.util.*; -import java.util.stream.*; +import io.ipfs.api.cbor.CborObject; +import io.ipfs.api.cbor.Cborable; +import io.ipfs.cid.Cid; +import io.ipfs.multihash.Multihash; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; public interface IpldNode extends Cborable { - Pair> resolve(List path); - - /** Lists all paths within the object under 'path', and up to the given depth. - * To list the entire object (similar to `find .`) pass "" and -1 - * @param path - * @param depth - * @return - */ - List tree(String path, int depth); - - /** - * - * @return calculate this objects Cid - */ - default Cid cid() { - byte[] raw = rawData(); - try { - MessageDigest md = MessageDigest.getInstance("SHA-256"); - md.update(raw); - byte[] digest = md.digest(); - return new Cid(1, Cid.Codec.DagCbor, Multihash.Type.sha2_256, digest); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - /** - * - * @return size of this object when encoded - */ - long size(); - - /** - * - * @return this object's serialization - */ - byte[] rawData(); - - /** - * - * @return the merkle links from this object - */ - List getLinks(); - - static IpldNode fromCBOR(CborObject cbor) { - return new CborIpldNode(cbor); - } - - static IpldNode fromJSON(Object json) { - return new JsonIpldNode(json); - } - - class CborIpldNode implements IpldNode { - private final CborObject base; - - public CborIpldNode(CborObject base) { - this.base = base; - } - - @Override - public CborObject toCbor() { - return base; - } - - @Override - public Pair> resolve(List path) { - throw new IllegalStateException("Unimplemented!"); - } - - @Override - public List tree(String path, int depth) { - return tree(base, path, depth); - } - - private List tree(CborObject base, String rawPath, int depth) { - String path = rawPath.startsWith("/") ? rawPath.substring(1) : rawPath; - - if (depth == 0 || (path.equals("") && depth != -1)) - return Collections.singletonList(""); - - if (base instanceof CborObject.CborMap) { - return ((CborObject.CborMap) base).values.entrySet() - .stream() - .flatMap(e -> { - String name = ((CborObject.CborString) e.getKey()).value; - if (path.startsWith(name) || depth == -1) - return tree(e.getValue(), path.length() > 0 ? path.substring(name.length()) : path, - depth == -1 ? 
-1 : depth - 1) - .stream() - .map(p -> "/" + name + p); - return Stream.empty(); - }).collect(Collectors.toList()); - } - if (depth == -1) - return Collections.singletonList(""); - return Collections.emptyList(); - } - - @Override - public long size() { - return rawData().length; - } - - @Override - public byte[] rawData() { - return base.toByteArray(); - } - - @Override - public List getLinks() { - return getLinks(base); - } - - private static List getLinks(CborObject base) { - if (base instanceof CborObject.CborMerkleLink) - return Collections.singletonList(new Link("", 0, ((CborObject.CborMerkleLink) base).target)); - if (base instanceof CborObject.CborMap) { - return ((CborObject.CborMap) base).values.values() - .stream() - .flatMap(cbor -> getLinks(cbor).stream()) - .collect(Collectors.toList()); - } - if (base instanceof CborObject.CborList) { - return ((CborObject.CborList) base).value - .stream() - .flatMap(cbor -> getLinks(cbor).stream()) - .collect(Collectors.toList()); - } - return Collections.emptyList(); - } - } - - class JsonIpldNode implements IpldNode { - private final Object json; - - public JsonIpldNode(Object json) { - this.json = json; - } - - @Override - public CborObject toCbor() { - throw new IllegalStateException("Unimplemented!"); - } - - @Override - public Pair> resolve(List path) { - throw new IllegalStateException("Unimplemented!"); - } - - @Override - public List tree(String path, int depth) { - throw new IllegalStateException("Unimplemented!"); - } - - @Override - public long size() { - return rawData().length; - } - - @Override - public byte[] rawData() { - return JSONParser.toString(json).getBytes(); - } - - @Override - public List getLinks() { - throw new IllegalStateException("Unimplemented!"); - } - } - - class Link { - public final String name; - // Cumulative size of target - public final long size; - public final Multihash target; - - public Link(String name, long size, Multihash target) { - this.name = name; - this.size = size; - this.target = target; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Link link = (Link) o; - - if (size != link.size) return false; - if (name != null ? !name.equals(link.name) : link.name != null) return false; - return target != null ? target.equals(link.target) : link.target == null; - - } - - @Override - public int hashCode() { - int result = name != null ? name.hashCode() : 0; - result = 31 * result + (int) (size ^ (size >>> 32)); - result = 31 * result + (target != null ? target.hashCode() : 0); - return result; - } + Pair> resolve(List path); + + /** + * Lists all paths within the object under 'path', and up to the given depth. 
To list the entire + * object (similar to `find .`) pass "" and -1 + * + * @param path + * @param depth + * @return + */ + List tree(String path, int depth); + + /** + * @return calculate this objects Cid + */ + default Cid cid() { + byte[] raw = rawData(); + try { + MessageDigest md = MessageDigest.getInstance("SHA-256"); + md.update(raw); + byte[] digest = md.digest(); + return new Cid(1, Cid.Codec.DagCbor, Multihash.Type.sha2_256, digest); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * @return size of this object when encoded + */ + long size(); + + /** + * @return this object's serialization + */ + byte[] rawData(); + + /** + * @return the merkle links from this object + */ + List getLinks(); + + static IpldNode fromCBOR(CborObject cbor) { + return new CborIpldNode(cbor); + } + + static IpldNode fromJSON(Object json) { + return new JsonIpldNode(json); + } + + class CborIpldNode implements IpldNode { + private final CborObject base; + + public CborIpldNode(CborObject base) { + this.base = base; + } + + @Override + public CborObject toCbor() { + return base; + } + + @Override + public Pair> resolve(List path) { + throw new IllegalStateException("Unimplemented!"); + } + + @Override + public List tree(String path, int depth) { + return tree(base, path, depth); + } + + private List tree(CborObject base, String rawPath, int depth) { + String path = rawPath.startsWith("/") ? rawPath.substring(1) : rawPath; + + if (depth == 0 || (path.equals("") && depth != -1)) return Collections.singletonList(""); + + if (base instanceof CborObject.CborMap) { + return ((CborObject.CborMap) base) + .values.entrySet().stream() + .flatMap( + e -> { + String name = ((CborObject.CborString) e.getKey()).value; + if (path.startsWith(name) || depth == -1) + return tree( + e.getValue(), + path.length() > 0 ? path.substring(name.length()) : path, + depth == -1 ? 
-1 : depth - 1) + .stream() + .map(p -> "/" + name + p); + return Stream.empty(); + }) + .collect(Collectors.toList()); + } + if (depth == -1) return Collections.singletonList(""); + return Collections.emptyList(); + } + + @Override + public long size() { + return rawData().length; + } + + @Override + public byte[] rawData() { + return base.toByteArray(); + } + + @Override + public List getLinks() { + return getLinks(base); + } + + private static List getLinks(CborObject base) { + if (base instanceof CborObject.CborMerkleLink) + return Collections.singletonList( + new Link("", 0, ((CborObject.CborMerkleLink) base).target)); + if (base instanceof CborObject.CborMap) { + return ((CborObject.CborMap) base) + .values.values().stream() + .flatMap(cbor -> getLinks(cbor).stream()) + .collect(Collectors.toList()); + } + if (base instanceof CborObject.CborList) { + return ((CborObject.CborList) base) + .value.stream().flatMap(cbor -> getLinks(cbor).stream()).collect(Collectors.toList()); + } + return Collections.emptyList(); + } + } + + class JsonIpldNode implements IpldNode { + private final Object json; + + public JsonIpldNode(Object json) { + this.json = json; + } + + @Override + public CborObject toCbor() { + throw new IllegalStateException("Unimplemented!"); + } + + @Override + public Pair> resolve(List path) { + throw new IllegalStateException("Unimplemented!"); + } + + @Override + public List tree(String path, int depth) { + throw new IllegalStateException("Unimplemented!"); + } + + @Override + public long size() { + return rawData().length; + } + + @Override + public byte[] rawData() { + return JSONParser.toString(json).getBytes(); + } + + @Override + public List getLinks() { + throw new IllegalStateException("Unimplemented!"); + } + } + + class Link { + public final String name; + // Cumulative size of target + public final long size; + public final Multihash target; + + public Link(String name, long size, Multihash target) { + this.name = name; + this.size = size; + this.target = target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Link link = (Link) o; + + if (size != link.size) return false; + if (name != null ? !name.equals(link.name) : link.name != null) return false; + return target != null ? target.equals(link.target) : link.target == null; + } + + @Override + public int hashCode() { + int result = name != null ? name.hashCode() : 0; + result = 31 * result + (int) (size ^ (size >>> 32)); + result = 31 * result + (target != null ? 
target.hashCode() : 0); + return result; } + } } diff --git a/src/main/java/io/ipfs/api/JSONParser.java b/src/main/java/io/ipfs/api/JSONParser.java index f3dcbfc3..00766427 100644 --- a/src/main/java/io/ipfs/api/JSONParser.java +++ b/src/main/java/io/ipfs/api/JSONParser.java @@ -1,454 +1,363 @@ package io.ipfs.api; -import java.util.*; - -import java.lang.reflect.*; - -public class JSONParser -{ - private static char skipSpaces(String json, int[] pos) - { - while (true) - { - if (pos[0] >= json.length()) - return 0; - char ch = json.charAt(pos[0]); - if (Character.isWhitespace(ch)) - pos[0]++; - else - return ch; - } +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class JSONParser { + private static char skipSpaces(String json, int[] pos) { + while (true) { + if (pos[0] >= json.length()) return 0; + char ch = json.charAt(pos[0]); + if (Character.isWhitespace(ch)) pos[0]++; + else return ch; } + } - private static Boolean parseBoolean(String json, int[] pos) - { - if (json.regionMatches(pos[0], "true", 0, 4)) - { - pos[0] += 4; - return Boolean.TRUE; - } + private static Boolean parseBoolean(String json, int[] pos) { + if (json.regionMatches(pos[0], "true", 0, 4)) { + pos[0] += 4; + return Boolean.TRUE; + } - if (json.regionMatches(pos[0], "false", 0, 5)) - { - pos[0] += 5; - return Boolean.FALSE; - } + if (json.regionMatches(pos[0], "false", 0, 5)) { + pos[0] += 5; + return Boolean.FALSE; + } - return null; + return null; + } + + private static Number parseNumber(String json, int[] pos) { + int endPos = json.length(); + int startPos = pos[0]; + + boolean foundExp = false; + boolean foundDot = false; + boolean allowPM = true; + for (int i = startPos; i < endPos; i++) { + char ch = json.charAt(i); + if ((ch == 'e') || (ch == 'E')) { + if (foundExp) return null; + allowPM = true; + foundExp = true; + continue; + } + + if ((ch == '+') || (ch == '-')) { + if (allowPM) { + allowPM = false; + ch = skipSpaces(json, pos); + if (ch == 0) return null; + else continue; + } else return null; + } + + allowPM = false; + if (ch == '.') { + if (foundDot) return null; + foundDot = true; + continue; + } + + if (!Character.isDigit(json.charAt(i))) { + pos[0] = endPos = i; + break; + } } - private static Number parseNumber(String json, int[] pos) - { - int endPos = json.length(); - int startPos = pos[0]; - - boolean foundExp = false; - boolean foundDot = false; - boolean allowPM = true; - for (int i=startPos; i parseStream(String json) - { - if (json == null) - return null; - int[] pos = new int[1]; - List res = new ArrayList<>(); - json = json.trim(); - while (pos[0] < json.length()) - res.add(parse(json, pos)); - return res; - } + result = parseBoolean(json, pos); + if (result != null) return result; - private static void escapeString(String s, StringBuffer buf) - { - buf.append('"'); - for (int i=0; i parseStream(String json) { + if (json == null) return null; + int[] pos = new int[1]; + List res = new ArrayList<>(); + json = json.trim(); + while (pos[0] < json.length()) res.add(parse(json, pos)); + return res; + } + + private static void escapeString(String s, StringBuffer buf) { + buf.append('"'); + for (int i = 0; i < s.length(); i++) { + char ch = s.charAt(i); + if ((ch == '"') || (ch == '\\')) buf.append('\\'); + buf.append(ch); + } + buf.append('"'); + } + + private static void toString(Object obj, StringBuffer buf) { + if (obj == null) buf.append("null"); + else if 
((obj instanceof Boolean) || (obj instanceof Number)) buf.append(obj.toString()); + else if (obj instanceof Map) { + Map m = (Map) obj; + boolean first = true; + Iterator itt = m.keySet().iterator(); + + buf.append('{'); + while (itt.hasNext()) { + if (!first) buf.append(','); + + String s = (String) itt.next(); + Object val = m.get(s); + escapeString(s, buf); + buf.append(":"); + toString(val, buf); + first = false; + } + buf.append('}'); + } else if (obj instanceof Object[]) { + Object[] l = (Object[]) obj; + boolean first = true; + + buf.append('['); + for (int i = 0; i < l.length; i++) { + if (!first) buf.append(','); + + toString(l[i], buf); + first = false; + } + buf.append(']'); + } else if (obj instanceof List) { + List l = (List) obj; + boolean first = true; + Iterator itt = l.iterator(); + + buf.append('['); + while (itt.hasNext()) { + if (!first) buf.append(','); + + Object val = itt.next(); + toString(val, buf); + first = false; + } + buf.append(']'); + } else if (obj instanceof String) escapeString(obj.toString(), buf); + else { + try { + Class cls = obj.getClass(); + Method m = cls.getDeclaredMethod("toJSON", new Class[0]); + Object jsonObj = m.invoke(obj, new Object[0]); + buf.append(toString(jsonObj)); + } catch (Exception e) { + escapeString(obj.toString(), buf); + } + } + } + + public static String toString(Object obj) { + StringBuffer buf = new StringBuffer(); + toString(obj, buf); + return buf.toString(); + } + + public static String stripWhitespace(String src) { + boolean inQuote = false, isEscaped = false; + StringBuffer buf = new StringBuffer(); + + for (int i = 0; i < src.length(); i++) { + char ch = src.charAt(i); + + if (!inQuote) { + if (ch == '"') { + inQuote = true; + isEscaped = false; + } else if (Character.isWhitespace(ch)) continue; + } else if (inQuote) { + if (ch == '\\') isEscaped = !isEscaped; + else if ((ch == '"') && !isEscaped) inQuote = false; + } + + buf.append(ch); } - public static String stripWhitespace(String src) - { - boolean inQuote = false, isEscaped = false; - StringBuffer buf = new StringBuffer(); - - for (int i=0; i= 0) - { - if ((json != null) && (json instanceof List)) - json = ((List) json).get(index); - else - return null; - } + if (key.endsWith("]")) { + int b = key.indexOf("["); + try { + index = Integer.parseInt(key.substring(b + 1, key.length() - 1)); + key = key.substring(0, b); + } catch (Exception e) { + throw new IllegalStateException("Path syntax error - invalid index"); } + } - return json; + if ((json != null) && (json instanceof Map)) json = ((Map) json).get(key); + else return null; + + if (index >= 0) { + if ((json != null) && (json instanceof List)) json = ((List) json).get(index); + else return null; + } } + + return json; + } } diff --git a/src/main/java/io/ipfs/api/KeyInfo.java b/src/main/java/io/ipfs/api/KeyInfo.java index 19291752..a991ec79 100644 --- a/src/main/java/io/ipfs/api/KeyInfo.java +++ b/src/main/java/io/ipfs/api/KeyInfo.java @@ -1,44 +1,43 @@ package io.ipfs.api; -import io.ipfs.cid.*; -import io.ipfs.multihash.*; - -import java.util.*; +import io.ipfs.cid.Cid; +import io.ipfs.multihash.Multihash; +import java.util.Map; public class KeyInfo { - public final String name; - public final Multihash id; + public final String name; + public final Multihash id; - public KeyInfo(String name, Multihash id) { - this.name = name; - this.id = id; - } + public KeyInfo(String name, Multihash id) { + this.name = name; + this.id = id; + } - public String toString() { - return name + ": " + id; - } + public String 
toString() { + return name + ": " + id; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - KeyInfo keyInfo = (KeyInfo) o; + KeyInfo keyInfo = (KeyInfo) o; - if (name != null ? !name.equals(keyInfo.name) : keyInfo.name != null) return false; - return id != null ? id.equals(keyInfo.id) : keyInfo.id == null; - } + if (name != null ? !name.equals(keyInfo.name) : keyInfo.name != null) return false; + return id != null ? id.equals(keyInfo.id) : keyInfo.id == null; + } - @Override - public int hashCode() { - int result = name != null ? name.hashCode() : 0; - result = 31 * result + (id != null ? id.hashCode() : 0); - return result; - } + @Override + public int hashCode() { + int result = name != null ? name.hashCode() : 0; + result = 31 * result + (id != null ? id.hashCode() : 0); + return result; + } - public static KeyInfo fromJson(Object json) { - Map m = (Map) json; - return new KeyInfo(m.get("Name"), Cid.decode(m.get("Id"))); - } + public static KeyInfo fromJson(Object json) { + Map m = (Map) json; + return new KeyInfo(m.get("Name"), Cid.decode(m.get("Id"))); + } } diff --git a/src/main/java/io/ipfs/api/MerkleNode.java b/src/main/java/io/ipfs/api/MerkleNode.java index e73f7f3f..2f809878 100644 --- a/src/main/java/io/ipfs/api/MerkleNode.java +++ b/src/main/java/io/ipfs/api/MerkleNode.java @@ -1,103 +1,111 @@ package io.ipfs.api; -import io.ipfs.cid.*; +import io.ipfs.cid.Cid; import io.ipfs.multihash.Multihash; - -import java.util.*; -import java.util.stream.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.TreeMap; +import java.util.stream.Collectors; public class MerkleNode { - public final Multihash hash; - public final Optional name; - public final Optional size; - public final Optional largeSize; - public final Optional type; - public final List links; - public final Optional data; + public final Multihash hash; + public final Optional name; + public final Optional size; + public final Optional largeSize; + public final Optional type; + public final List links; + public final Optional data; - public MerkleNode(String hash, - Optional name, - Optional size, - Optional largeSize, - Optional type, - List links, - Optional data) { - this.name = name; - this.hash = Cid.decode(hash); - this.size = size; - this.largeSize = largeSize; - this.type = type; - this.links = links; - this.data = data; - } + public MerkleNode( + String hash, + Optional name, + Optional size, + Optional largeSize, + Optional type, + List links, + Optional data) { + this.name = name; + this.hash = Cid.decode(hash); + this.size = size; + this.largeSize = largeSize; + this.type = type; + this.links = links; + this.data = data; + } - public MerkleNode(String hash) { - this(hash, Optional.empty()); - } + public MerkleNode(String hash) { + this(hash, Optional.empty()); + } - public MerkleNode(String hash, Optional name) { - this(hash, name, Optional.empty(), Optional.empty(), Optional.empty(), Arrays.asList(), Optional.empty()); - } + public MerkleNode(String hash, Optional name) { + this( + hash, + name, + Optional.empty(), + Optional.empty(), + Optional.empty(), + Arrays.asList(), + Optional.empty()); + } - @Override - public boolean equals(Object b) { - if (!(b instanceof MerkleNode)) - return 
false; - MerkleNode other = (MerkleNode) b; - return hash.equals(other.hash); // ignore name hash says it all - } + @Override + public boolean equals(Object b) { + if (!(b instanceof MerkleNode)) return false; + MerkleNode other = (MerkleNode) b; + return hash.equals(other.hash); // ignore name hash says it all + } - @Override - public int hashCode() { - return hash.hashCode(); - } + @Override + public int hashCode() { + return hash.hashCode(); + } - public static MerkleNode fromJSON(Object rawjson) { - if (rawjson instanceof String) - return new MerkleNode((String)rawjson); - Map json = (Map)rawjson; - if ("error".equals(json.get("Type"))) - throw new IllegalStateException("Remote IPFS error: " + json.get("Message")); - String hash = (String)json.get("Hash"); - if (hash == null) - hash = (String)json.get("Key"); - if (hash == null && json.containsKey("Cid")) - hash = (String) (((Map) json.get("Cid")).get("/")); - Optional name = json.containsKey("Name") ? - Optional.of((String) json.get("Name")) : - Optional.empty(); - Object rawSize = json.get("Size"); - Optional size = rawSize instanceof Integer ? - Optional.of((Integer) rawSize) : - Optional.empty(); - Optional largeSize = rawSize instanceof String ? - Optional.of((String) json.get("Size")) : - Optional.empty(); - Optional type = json.containsKey("Type") ? - Optional.of((Integer) json.get("Type")) : - Optional.empty(); - List linksRaw = (List) json.get("Links"); - List links = linksRaw == null ? - Collections.emptyList() : - linksRaw.stream().map(x -> MerkleNode.fromJSON(x)).collect(Collectors.toList()); - Optional data = json.containsKey("Data") ? Optional.of(((String)json.get("Data")).getBytes()): Optional.empty(); - return new MerkleNode(hash, name, size, largeSize, type, links, data); - } + public static MerkleNode fromJSON(Object rawjson) { + if (rawjson instanceof String) return new MerkleNode((String) rawjson); + Map json = (Map) rawjson; + if ("error".equals(json.get("Type"))) + throw new IllegalStateException("Remote IPFS error: " + json.get("Message")); + String hash = (String) json.get("Hash"); + if (hash == null) hash = (String) json.get("Key"); + if (hash == null && json.containsKey("Cid")) hash = (String) (((Map) json.get("Cid")).get("/")); + Optional name = + json.containsKey("Name") ? Optional.of((String) json.get("Name")) : Optional.empty(); + Object rawSize = json.get("Size"); + Optional size = + rawSize instanceof Integer ? Optional.of((Integer) rawSize) : Optional.empty(); + Optional largeSize = + rawSize instanceof String ? Optional.of((String) json.get("Size")) : Optional.empty(); + Optional type = + json.containsKey("Type") ? Optional.of((Integer) json.get("Type")) : Optional.empty(); + List linksRaw = (List) json.get("Links"); + List links = + linksRaw == null + ? Collections.emptyList() + : linksRaw.stream().map(x -> MerkleNode.fromJSON(x)).collect(Collectors.toList()); + Optional data = + json.containsKey("Data") + ? 
Optional.of(((String) json.get("Data")).getBytes()) + : Optional.empty(); + return new MerkleNode(hash, name, size, largeSize, type, links, data); + } - public Object toJSON() { - Map res = new TreeMap<>(); - res.put("Links", links.stream().map(x -> x.hash).collect(Collectors.toList())); - data.ifPresent(bytes -> res.put("Data", bytes)); - return res; - } + public Object toJSON() { + Map res = new TreeMap<>(); + res.put("Links", links.stream().map(x -> x.hash).collect(Collectors.toList())); + data.ifPresent(bytes -> res.put("Data", bytes)); + return res; + } - public String toJSONString() { - return JSONParser.toString(toJSON()); - } + public String toJSONString() { + return JSONParser.toString(toJSON()); + } - @Override - public String toString() { - return hash + "-" + name.orElse(""); - } + @Override + public String toString() { + return hash + "-" + name.orElse(""); + } } diff --git a/src/main/java/io/ipfs/api/Multipart.java b/src/main/java/io/ipfs/api/Multipart.java index 2f01bbf9..1c9d062c 100644 --- a/src/main/java/io/ipfs/api/Multipart.java +++ b/src/main/java/io/ipfs/api/Multipart.java @@ -1,159 +1,179 @@ package io.ipfs.api; -import java.io.*; -import java.net.*; -import java.nio.file.*; -import java.util.*; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.UnsupportedEncodingException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.net.URLEncoder; +import java.nio.file.Path; +import java.util.Optional; +import java.util.Random; public class Multipart { - private final String boundary; - private static final String LINE_FEED = "\r\n"; - private HttpURLConnection httpConn; - private String charset; - private OutputStream out; - - public Multipart(String requestURL, String charset) { - this.charset = charset; - - boundary = createBoundary(); - - try { - URL url = new URL(requestURL); - httpConn = (HttpURLConnection) url.openConnection(); - httpConn.setUseCaches(false); - httpConn.setDoOutput(true); - httpConn.setDoInput(true); - httpConn.setRequestProperty("Expect", "100-continue"); - httpConn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary); - httpConn.setRequestProperty("User-Agent", "Java IPFS Client"); - httpConn.setChunkedStreamingMode(4096); - out = httpConn.getOutputStream(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } + private final String boundary; + private static final String LINE_FEED = "\r\n"; + private HttpURLConnection httpConn; + private String charset; + private OutputStream out; + + public Multipart(String requestURL, String charset) { + this.charset = charset; + + boundary = createBoundary(); + + try { + URL url = new URL(requestURL); + httpConn = (HttpURLConnection) url.openConnection(); + httpConn.setUseCaches(false); + httpConn.setDoOutput(true); + httpConn.setDoInput(true); + httpConn.setRequestProperty("Expect", "100-continue"); + httpConn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary); + httpConn.setRequestProperty("User-Agent", "Java IPFS Client"); + httpConn.setChunkedStreamingMode(4096); + out = httpConn.getOutputStream(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); } - - public static String createBoundary() { - Random r = new Random(); - String allowed = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; - StringBuilder b = new StringBuilder(); - for (int i=0; i 
< 32; i++) - b.append(allowed.charAt(r.nextInt(allowed.length()))); - return b.toString(); + } + + public static String createBoundary() { + Random r = new Random(); + String allowed = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; + StringBuilder b = new StringBuilder(); + for (int i = 0; i < 32; i++) b.append(allowed.charAt(r.nextInt(allowed.length()))); + return b.toString(); + } + + private Multipart append(String value) throws IOException { + out.write(value.getBytes(charset)); + return this; + } + + public void addFormField(String name, String value) throws IOException { + append("--").append(boundary).append(LINE_FEED); + append("Content-Disposition: form-data; name=\"").append(name).append("\"").append(LINE_FEED); + append("Content-Type: text/plain; charset=").append(charset).append(LINE_FEED); + append(LINE_FEED); + append(value).append(LINE_FEED); + out.flush(); + } + + public void addSubtree(Path parentPath, NamedStreamable dir) throws IOException { + Path dirPath = parentPath.resolve(dir.getName().get()); + addDirectoryPart(dirPath); + for (NamedStreamable f : dir.getChildren()) { + if (f.isDirectory()) addSubtree(dirPath, f); + else addFilePart("file", dirPath, f); } - - private Multipart append(String value) throws IOException { - out.write(value.getBytes(charset)); - return this; + } + + public void addDirectoryPart(Path path) throws IOException { + append("--").append(boundary).append(LINE_FEED); + append("Content-Disposition: file; filename=\"") + .append(encode(path.toString())) + .append("\"") + .append(LINE_FEED); + append("Content-Type: application/x-directory").append(LINE_FEED); + append("Content-Transfer-Encoding: binary").append(LINE_FEED); + append(LINE_FEED); + append(LINE_FEED); + out.flush(); + } + + private static String encode(String in) { + try { + return URLEncoder.encode(in, "UTF-8"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); } - - public void addFormField(String name, String value) throws IOException { - append("--").append(boundary).append(LINE_FEED); - append("Content-Disposition: form-data; name=\"").append(name).append("\"") - .append(LINE_FEED); - append("Content-Type: text/plain; charset=").append(charset).append(LINE_FEED); - append(LINE_FEED); - append(value).append(LINE_FEED); - out.flush(); + } + + public void addFilePart(String fieldName, Path parent, NamedStreamable uploadFile) + throws IOException { + Optional fileName = + uploadFile.getName().map(n -> encode(parent.resolve(n).toString().replace('\\', '/'))); + append("--").append(boundary).append(LINE_FEED); + if (!fileName.isPresent()) + append("Content-Disposition: file; name=\"") + .append(fieldName) + .append("\";") + .append(LINE_FEED); + else + append("Content-Disposition: file; filename=\"") + .append(fileName.get()) + .append("\";") + .append(LINE_FEED); + append("Content-Type: application/octet-stream").append(LINE_FEED); + append("Content-Transfer-Encoding: binary").append(LINE_FEED); + append(LINE_FEED); + out.flush(); + + try { + InputStream inputStream = uploadFile.getInputStream(); + byte[] buffer = new byte[4096]; + int r; + while ((r = inputStream.read(buffer)) != -1) out.write(buffer, 0, r); + out.flush(); + inputStream.close(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); } - public void addSubtree(Path parentPath, NamedStreamable dir) throws IOException { - Path dirPath = parentPath.resolve(dir.getName().get()); - addDirectoryPart(dirPath); - for (NamedStreamable f: dir.getChildren()) { 
- if (f.isDirectory()) - addSubtree(dirPath, f); - else - addFilePart("file", dirPath, f); - } - } - - public void addDirectoryPart(Path path) throws IOException { - append("--").append(boundary).append(LINE_FEED); - append("Content-Disposition: file; filename=\"").append(encode(path.toString())).append("\"").append(LINE_FEED); - append("Content-Type: application/x-directory").append(LINE_FEED); - append("Content-Transfer-Encoding: binary").append(LINE_FEED); - append(LINE_FEED); - append(LINE_FEED); - out.flush(); - } - - private static String encode(String in) { - try { - return URLEncoder.encode(in, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); + append(LINE_FEED); + out.flush(); + } + + public void addHeaderField(String name, String value) throws IOException { + append(name + ": " + value).append(LINE_FEED); + out.flush(); + } + + public String finish() throws IOException { + StringBuilder b = new StringBuilder(); + + append("--" + boundary + "--").append(LINE_FEED); + out.flush(); + out.close(); + + try { + int status = httpConn.getResponseCode(); + if (status == HttpURLConnection.HTTP_OK) { + BufferedReader reader = + new BufferedReader(new InputStreamReader(httpConn.getInputStream())); + String line; + while ((line = reader.readLine()) != null) { + b.append(line); } - } - - public void addFilePart(String fieldName, Path parent, NamedStreamable uploadFile) throws IOException { - Optional fileName = uploadFile.getName().map(n -> encode(parent.resolve(n).toString().replace('\\','/'))); - append("--").append(boundary).append(LINE_FEED); - if (!fileName.isPresent()) - append("Content-Disposition: file; name=\"").append(fieldName).append("\";").append(LINE_FEED); - else - append("Content-Disposition: file; filename=\"").append(fileName.get()).append("\";").append(LINE_FEED); - append("Content-Type: application/octet-stream").append(LINE_FEED); - append("Content-Transfer-Encoding: binary").append(LINE_FEED); - append(LINE_FEED); - out.flush(); - - try { - InputStream inputStream = uploadFile.getInputStream(); - byte[] buffer = new byte[4096]; - int r; - while ((r = inputStream.read(buffer)) != -1) - out.write(buffer, 0, r); - out.flush(); - inputStream.close(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - - append(LINE_FEED); - out.flush(); - } - - public void addHeaderField(String name, String value) throws IOException { - append(name + ": " + value).append(LINE_FEED); - out.flush(); - } - - public String finish() throws IOException { - StringBuilder b = new StringBuilder(); - - append("--" + boundary + "--").append(LINE_FEED); - out.flush(); - out.close(); - + reader.close(); + httpConn.disconnect(); + } else { try { - int status = httpConn.getResponseCode(); - if (status == HttpURLConnection.HTTP_OK) { - BufferedReader reader = new BufferedReader(new InputStreamReader( - httpConn.getInputStream())); - String line; - while ((line = reader.readLine()) != null) { - b.append(line); - } - reader.close(); - httpConn.disconnect(); - } else { - try { - BufferedReader reader = new BufferedReader(new InputStreamReader( - httpConn.getInputStream())); - String line; - while ((line = reader.readLine()) != null) { - b.append(line); - } - reader.close(); - } catch (Throwable t) { - } - throw new IOException("Server returned status: " + status + " with body: " + b.toString() + " and Trailer header: " + httpConn.getHeaderFields().get("Trailer")); - } - - return b.toString(); - } catch (IOException e) { - throw new 
RuntimeException(e.getMessage(), e); + BufferedReader reader = + new BufferedReader(new InputStreamReader(httpConn.getInputStream())); + String line; + while ((line = reader.readLine()) != null) { + b.append(line); + } + reader.close(); + } catch (Throwable t) { } + throw new IOException( + "Server returned status: " + + status + + " with body: " + + b.toString() + + " and Trailer header: " + + httpConn.getHeaderFields().get("Trailer")); + } + + return b.toString(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); } + } } diff --git a/src/main/java/io/ipfs/api/NamedStreamable.java b/src/main/java/io/ipfs/api/NamedStreamable.java index 642d9ba3..12112998 100644 --- a/src/main/java/io/ipfs/api/NamedStreamable.java +++ b/src/main/java/io/ipfs/api/NamedStreamable.java @@ -1,159 +1,163 @@ package io.ipfs.api; -import java.io.*; -import java.net.*; -import java.util.*; -import java.util.stream.*; - -public interface NamedStreamable -{ - InputStream getInputStream() throws IOException; - - Optional getName(); - - List getChildren(); - - boolean isDirectory(); - - default byte[] getContents() throws IOException { - InputStream in = getInputStream(); - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - byte[] tmp = new byte[4096]; - int r; - while ((r=in.read(tmp))>= 0) - bout.write(tmp, 0, r); - return bout.toByteArray(); - } +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public interface NamedStreamable { + InputStream getInputStream() throws IOException; + + Optional getName(); + + List getChildren(); + + boolean isDirectory(); + + default byte[] getContents() throws IOException { + InputStream in = getInputStream(); + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + byte[] tmp = new byte[4096]; + int r; + while ((r = in.read(tmp)) >= 0) bout.write(tmp, 0, r); + return bout.toByteArray(); + } + + class FileWrapper implements NamedStreamable { + private final File source; + + public FileWrapper(File source) { + if (!source.exists()) throw new IllegalStateException("File does not exist: " + source); + this.source = source; + } - class FileWrapper implements NamedStreamable { - private final File source; + public InputStream getInputStream() throws IOException { + return new FileInputStream(source); + } - public FileWrapper(File source) { - if (! source.exists()) - throw new IllegalStateException("File does not exist: " + source); - this.source = source; - } + public boolean isDirectory() { + return source.isDirectory(); + } - public InputStream getInputStream() throws IOException { - return new FileInputStream(source); - } + @Override + public List getChildren() { + return isDirectory() + ? Stream.of(source.listFiles()) + .map(NamedStreamable.FileWrapper::new) + .collect(Collectors.toList()) + : Collections.emptyList(); + } - public boolean isDirectory() { - return source.isDirectory(); - } + public Optional getName() { + return Optional.of(source.getName()); + } + } - @Override - public List getChildren() { - return isDirectory() ? 
- Stream.of(source.listFiles()) - .map(NamedStreamable.FileWrapper::new) - .collect(Collectors.toList()) : - Collections.emptyList(); - } + class InputStreamWrapper implements NamedStreamable { + private final Optional name; + private final InputStream data; - public Optional getName() { - return Optional.of(source.getName()); - } + public InputStreamWrapper(InputStream data) { + this(Optional.empty(), data); } - class InputStreamWrapper implements NamedStreamable { - private final Optional name; - private final InputStream data; + public InputStreamWrapper(String name, InputStream data) { + this(Optional.of(name), data); + } - public InputStreamWrapper(InputStream data) { - this(Optional.empty(), data); - } + public InputStreamWrapper(Optional name, InputStream data) { + this.name = name; + this.data = data; + } - public InputStreamWrapper(String name, InputStream data) { - this(Optional.of(name), data); - } + public boolean isDirectory() { + return false; + } - public InputStreamWrapper(Optional name, InputStream data) { - this.name = name; - this.data = data; - } + public InputStream getInputStream() { + return data; + } - public boolean isDirectory() { - return false; - } + @Override + public List getChildren() { + return Collections.emptyList(); + } - public InputStream getInputStream() { - return data; - } + public Optional getName() { + return name; + } + } - @Override - public List getChildren() { - return Collections.emptyList(); - } + class ByteArrayWrapper implements NamedStreamable { + private final Optional name; + private final byte[] data; - public Optional getName() { - return name; - } + public ByteArrayWrapper(byte[] data) { + this(Optional.empty(), data); } - class ByteArrayWrapper implements NamedStreamable { - private final Optional name; - private final byte[] data; - - public ByteArrayWrapper(byte[] data) { - this(Optional.empty(), data); - } + public ByteArrayWrapper(String name, byte[] data) { + this(Optional.of(name), data); + } - public ByteArrayWrapper(String name, byte[] data) { - this(Optional.of(name), data); - } + public ByteArrayWrapper(Optional name, byte[] data) { + this.name = name; + this.data = data; + } - public ByteArrayWrapper(Optional name, byte[] data) { - this.name = name; - this.data = data; - } + public boolean isDirectory() { + return false; + } - public boolean isDirectory() { - return false; - } + public InputStream getInputStream() throws IOException { + return new ByteArrayInputStream(data); + } - public InputStream getInputStream() throws IOException { - return new ByteArrayInputStream(data); - } - - @Override - public List getChildren() { - return Collections.emptyList(); - } + @Override + public List getChildren() { + return Collections.emptyList(); + } - public Optional getName() { - return name; - } + public Optional getName() { + return name; } + } - class DirWrapper implements NamedStreamable { + class DirWrapper implements NamedStreamable { - private final String name; - private final List children; + private final String name; + private final List children; - public DirWrapper(String name, List children) { - this.name = name; - this.children = children; - } + public DirWrapper(String name, List children) { + this.name = name; + this.children = children; + } - @Override - public InputStream getInputStream() throws IOException { - throw new IllegalStateException("Cannot get an input stream for a directory!"); - } + @Override + public InputStream getInputStream() throws IOException { + throw new IllegalStateException("Cannot get an 
input stream for a directory!"); + } - @Override - public Optional getName() { - return Optional.of(name); - } + @Override + public Optional getName() { + return Optional.of(name); + } - @Override - public List getChildren() { - return children; - } + @Override + public List getChildren() { + return children; + } - @Override - public boolean isDirectory() { - return true; - } + @Override + public boolean isDirectory() { + return true; } + } } diff --git a/src/main/java/io/ipfs/api/Pair.java b/src/main/java/io/ipfs/api/Pair.java index b6d5fe17..d1517721 100644 --- a/src/main/java/io/ipfs/api/Pair.java +++ b/src/main/java/io/ipfs/api/Pair.java @@ -2,46 +2,43 @@ import java.util.function.Function; -public class Pair { - public final L left; - public final R right; - - public Pair(L left, R right) { - this.left = left; - this.right = right; - } - - public Pair swapped() { - return new Pair<>(right, left); - } - - public Pair apply(Function applyLeft, Function applyRight) { - return new Pair<>( - applyLeft.apply(left), - applyRight.apply(right)); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Pair pair = (Pair) o; - - if (left != null ? !left.equals(pair.left) : pair.left != null) return false; - return right != null ? right.equals(pair.right) : pair.right == null; - - } - - @Override - public int hashCode() { - int result = left != null ? left.hashCode() : 0; - result = 31 * result + (right != null ? right.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return String.format("(%s, %s)", left.toString(), right.toString()); - } -} \ No newline at end of file +public class Pair { + public final L left; + public final R right; + + public Pair(L left, R right) { + this.left = left; + this.right = right; + } + + public Pair swapped() { + return new Pair<>(right, left); + } + + public Pair apply(Function applyLeft, Function applyRight) { + return new Pair<>(applyLeft.apply(left), applyRight.apply(right)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Pair pair = (Pair) o; + + if (left != null ? !left.equals(pair.left) : pair.left != null) return false; + return right != null ? right.equals(pair.right) : pair.right == null; + } + + @Override + public int hashCode() { + int result = left != null ? left.hashCode() : 0; + result = 31 * result + (right != null ? 
right.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return String.format("(%s, %s)", left.toString(), right.toString()); + } +} diff --git a/src/main/java/io/ipfs/api/Peer.java b/src/main/java/io/ipfs/api/Peer.java index 9dd42b72..8d3d8b2d 100644 --- a/src/main/java/io/ipfs/api/Peer.java +++ b/src/main/java/io/ipfs/api/Peer.java @@ -1,49 +1,55 @@ package io.ipfs.api; -import io.ipfs.cid.*; -import io.ipfs.multiaddr.*; +import io.ipfs.cid.Cid; +import io.ipfs.multiaddr.MultiAddress; import io.ipfs.multibase.Base58; -import io.ipfs.multihash.*; - -import java.util.*; -import java.util.function.*; +import io.ipfs.multihash.Multihash; +import java.util.Map; +import java.util.function.Function; public class Peer { - public final MultiAddress address; - public final Cid id; - public final long latency; - public final String muxer; - public final Object streams; + public final MultiAddress address; + public final Cid id; + public final long latency; + public final String muxer; + public final Object streams; - public Peer(MultiAddress address, Cid id, long latency, String muxer, Object streams) { - this.address = address; - this.id = id; - this.latency = latency; - this.muxer = muxer; - this.streams = streams; - } + public Peer(MultiAddress address, Cid id, long latency, String muxer, Object streams) { + this.address = address; + this.id = id; + this.latency = latency; + this.muxer = muxer; + this.streams = streams; + } - public static Peer fromJSON(Object json) { - if (! (json instanceof Map)) - throw new IllegalStateException("Incorrect json for Peer: " + JSONParser.toString(json)); - Map m = (Map) json; - Function val = key -> (String) m.get(key); - Cid peer = decodePeerId(val.apply("Peer")); - long latency = m.containsKey("Latency") ? Long.parseLong(val.apply("Latency")) : -1; - return new Peer(new MultiAddress(val.apply("Addr")), peer, latency, val.apply("Muxer"), val.apply("Streams")); - } + public static Peer fromJSON(Object json) { + if (!(json instanceof Map)) + throw new IllegalStateException("Incorrect json for Peer: " + JSONParser.toString(json)); + Map m = (Map) json; + Function val = key -> (String) m.get(key); + Cid peer = decodePeerId(val.apply("Peer")); + long latency = m.containsKey("Latency") ? 
Long.parseLong(val.apply("Latency")) : -1; + return new Peer( + new MultiAddress(val.apply("Addr")), + peer, + latency, + val.apply("Muxer"), + val.apply("Streams")); + } - // See https://github.com/Peergos/Peergos/blob/81064fdb2cdf6b6fe126cf6a20d4d40ecd148938/src/peergos/shared/io/ipfs/Cid.java#L148 - public static Cid decodePeerId(String peerId) { - if (peerId.startsWith("1")) { - // convert base58 encoded identity multihash to cidV1 - Multihash hash = Multihash.deserialize(Base58.decode(peerId)); - return new Cid(1, Cid.Codec.Libp2pKey, hash.getType(), hash.getHash()); - } - return Cid.decode(peerId); - } - @Override - public String toString() { - return id + "@" + address; + // See + // https://github.com/Peergos/Peergos/blob/81064fdb2cdf6b6fe126cf6a20d4d40ecd148938/src/peergos/shared/io/ipfs/Cid.java#L148 + public static Cid decodePeerId(String peerId) { + if (peerId.startsWith("1")) { + // convert base58 encoded identity multihash to cidV1 + Multihash hash = Multihash.deserialize(Base58.decode(peerId)); + return new Cid(1, Cid.Codec.Libp2pKey, hash.getType(), hash.getHash()); } + return Cid.decode(peerId); + } + + @Override + public String toString() { + return id + "@" + address; + } } diff --git a/src/main/java/io/ipfs/api/RepoStat.java b/src/main/java/io/ipfs/api/RepoStat.java index 7dcf3aef..298178cf 100644 --- a/src/main/java/io/ipfs/api/RepoStat.java +++ b/src/main/java/io/ipfs/api/RepoStat.java @@ -4,27 +4,29 @@ public class RepoStat { - public final long RepoSize; - public final long StorageMax; - public final long NumObjects; - public final String RepoPath; - public final String Version; + public final long RepoSize; + public final long StorageMax; + public final long NumObjects; + public final String RepoPath; + public final String Version; - public RepoStat(long repoSize, long storageMax, long numObjects, String repoPath, String version ) { - this.RepoSize = repoSize; - this.StorageMax = storageMax; - this.NumObjects = numObjects; - this.RepoPath = repoPath; - this.Version = version; - } - public static RepoStat fromJson(Object rawjson) { - Map json = (Map)rawjson; - long repoSize = Long.parseLong(json.get("RepoSize").toString()); - long storageMax = Long.parseLong(json.get("StorageMax").toString()); - long numObjects = Long.parseLong(json.get("NumObjects").toString()); - String repoPath = (String)json.get("RepoPath"); - String version = (String)json.get("Version"); + public RepoStat( + long repoSize, long storageMax, long numObjects, String repoPath, String version) { + this.RepoSize = repoSize; + this.StorageMax = storageMax; + this.NumObjects = numObjects; + this.RepoPath = repoPath; + this.Version = version; + } - return new RepoStat(repoSize, storageMax, numObjects, repoPath, version); - } + public static RepoStat fromJson(Object rawjson) { + Map json = (Map) rawjson; + long repoSize = Long.parseLong(json.get("RepoSize").toString()); + long storageMax = Long.parseLong(json.get("StorageMax").toString()); + long numObjects = Long.parseLong(json.get("NumObjects").toString()); + String repoPath = (String) json.get("RepoPath"); + String version = (String) json.get("Version"); + + return new RepoStat(repoSize, storageMax, numObjects, repoPath, version); + } } diff --git a/src/main/java/io/ipfs/api/Version.java b/src/main/java/io/ipfs/api/Version.java index 15aba86b..78d2efa0 100644 --- a/src/main/java/io/ipfs/api/Version.java +++ b/src/main/java/io/ipfs/api/Version.java @@ -2,51 +2,46 @@ public class Version implements Comparable { - public final int major, minor, patch; - 
public final String suffix; - - public Version(int major, int minor, int patch, String suffix) { - this.major = major; - this.minor = minor; - this.patch = patch; - this.suffix = suffix; - } - - public String toString() { - return major + "." + minor + "." + patch + (suffix.length() > 0 ? "-" + suffix : ""); - } - - public boolean isBefore(Version other) { - return this.compareTo(other) < 0; - } - - @Override - public int compareTo(Version other) { - int major = Integer.compare(this.major, other.major); - if (major != 0) - return major; - int minor = Integer.compare(this.minor, other.minor); - if (minor != 0) - return minor; - int patch = Integer.compare(this.patch, other.patch); - if (patch != 0) - return patch; - if (suffix.length() == 0) - return 1; - if (other.suffix.length() == 0) - return -1; - return suffix.compareTo(other.suffix); - } - - public static Version parse(String version) { - int first = version.indexOf("."); - int second = version.indexOf(".", first + 1); - int third = version.contains("-") ? version.indexOf("-") : version.length(); - - int major = Integer.parseInt(version.substring(0, first)); - int minor = Integer.parseInt(version.substring(first + 1, second)); - int patch = Integer.parseInt(version.substring(second + 1, third)); - String suffix = third < version.length() ? version.substring(third + 1) : ""; - return new Version(major, minor, patch, suffix); - } + public final int major, minor, patch; + public final String suffix; + + public Version(int major, int minor, int patch, String suffix) { + this.major = major; + this.minor = minor; + this.patch = patch; + this.suffix = suffix; + } + + public String toString() { + return major + "." + minor + "." + patch + (suffix.length() > 0 ? "-" + suffix : ""); + } + + public boolean isBefore(Version other) { + return this.compareTo(other) < 0; + } + + @Override + public int compareTo(Version other) { + int major = Integer.compare(this.major, other.major); + if (major != 0) return major; + int minor = Integer.compare(this.minor, other.minor); + if (minor != 0) return minor; + int patch = Integer.compare(this.patch, other.patch); + if (patch != 0) return patch; + if (suffix.length() == 0) return 1; + if (other.suffix.length() == 0) return -1; + return suffix.compareTo(other.suffix); + } + + public static Version parse(String version) { + int first = version.indexOf("."); + int second = version.indexOf(".", first + 1); + int third = version.contains("-") ? version.indexOf("-") : version.length(); + + int major = Integer.parseInt(version.substring(0, first)); + int minor = Integer.parseInt(version.substring(first + 1, second)); + int patch = Integer.parseInt(version.substring(second + 1, third)); + String suffix = third < version.length() ? 
version.substring(third + 1) : ""; + return new Version(major, minor, patch, suffix); + } } diff --git a/src/main/java/io/ipfs/api/WriteFilesArgs.java b/src/main/java/io/ipfs/api/WriteFilesArgs.java index 1f134b0e..62f40dec 100644 --- a/src/main/java/io/ipfs/api/WriteFilesArgs.java +++ b/src/main/java/io/ipfs/api/WriteFilesArgs.java @@ -1,7 +1,11 @@ package io.ipfs.api; import java.net.URLEncoder; -import java.util.*; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; /* @@ -11,77 +15,84 @@ .setParents() .build(); */ -final public class WriteFilesArgs { +public final class WriteFilesArgs { + private final Map args = new HashMap<>(); + + public WriteFilesArgs(Builder builder) { + args.putAll(builder.args); + } + + @Override + public String toString() { + List asList = + args.entrySet().stream() + .sorted(Comparator.comparing(Map.Entry::getKey)) + .map(e -> e.getKey() + " = " + e.getValue()) + .collect(Collectors.toList()); + return Arrays.toString(asList.toArray()); + } + + public String toQueryString() { + StringBuilder sb = new StringBuilder(); + for (Map.Entry entry : args.entrySet()) { + sb.append("&").append(entry.getKey()).append("=").append(URLEncoder.encode(entry.getValue())); + } + return sb.length() > 0 ? sb.toString().substring(1) : sb.toString(); + } + + public static class Builder { + private static final String TRUE = "true"; private final Map args = new HashMap<>(); - public WriteFilesArgs(Builder builder) - { - args.putAll(builder.args); + private Builder() {} + + public static Builder newInstance() { + return new Builder(); } - @Override - public String toString() - { - List asList = args.entrySet() - .stream() - .sorted(Comparator.comparing(Map.Entry::getKey)) - .map(e -> e.getKey() + " = " + e.getValue()).collect(Collectors.toList()); - return Arrays.toString(asList.toArray()); + + public Builder setOffset(int offset) { + args.put("offset", String.valueOf(offset)); + return this; } - public String toQueryString() - { - StringBuilder sb = new StringBuilder(); - for (Map.Entry entry: args.entrySet()) { - sb.append("&").append(entry.getKey()) - .append("=") - .append(URLEncoder.encode(entry.getValue())); - } - return sb.length() > 0 ? 
sb.toString().substring(1) : sb.toString(); + + public Builder setCreate() { + args.put("create", TRUE); + return this; } - public static class Builder { - private static final String TRUE = "true"; - private final Map args = new HashMap<>(); - private Builder() {} - public static Builder newInstance() - { - return new Builder(); - } - - public Builder setOffset(int offset) { - args.put("offset", String.valueOf(offset)); - return this; - } - public Builder setCreate() { - args.put("create", TRUE); - return this; - } - public Builder setParents() { - args.put("parents", TRUE); - return this; - } - public Builder setTruncate() { - args.put("truncate", TRUE); - return this; - } - public Builder setCount(int count) { - args.put("count", String.valueOf(count)); - return this; - } - public Builder setRawLeaves() { - args.put("raw-leaves", TRUE); - return this; - } - public Builder setCidVersion(int version) { - args.put("cid-version", String.valueOf(version)); - return this; - } - public Builder setHash(String hashFunction) { - args.put("hash", hashFunction); - return this; - } - public WriteFilesArgs build() - { - return new WriteFilesArgs(this); - } + + public Builder setParents() { + args.put("parents", TRUE); + return this; + } + + public Builder setTruncate() { + args.put("truncate", TRUE); + return this; + } + + public Builder setCount(int count) { + args.put("count", String.valueOf(count)); + return this; + } + + public Builder setRawLeaves() { + args.put("raw-leaves", TRUE); + return this; + } + + public Builder setCidVersion(int version) { + args.put("cid-version", String.valueOf(version)); + return this; + } + + public Builder setHash(String hashFunction) { + args.put("hash", hashFunction); + return this; + } + + public WriteFilesArgs build() { + return new WriteFilesArgs(this); } + } } diff --git a/src/main/java/io/ipfs/api/cbor/CborConstants.java b/src/main/java/io/ipfs/api/cbor/CborConstants.java index 3494dde6..acc66991 100644 --- a/src/main/java/io/ipfs/api/cbor/CborConstants.java +++ b/src/main/java/io/ipfs/api/cbor/CborConstants.java @@ -8,83 +8,121 @@ * Licensed under Apache License v2.0. */ -/** - * Constant values used by the CBOR format. - */ +/** Constant values used by the CBOR format. */ public interface CborConstants { - /** Major type 0: unsigned integers. */ - int TYPE_UNSIGNED_INTEGER = 0x00; - /** Major type 1: negative integers. */ - int TYPE_NEGATIVE_INTEGER = 0x01; - /** Major type 2: byte string. */ - int TYPE_BYTE_STRING = 0x02; - /** Major type 3: text/UTF8 string. */ - int TYPE_TEXT_STRING = 0x03; - /** Major type 4: array of items. */ - int TYPE_ARRAY = 0x04; - /** Major type 5: map of pairs. */ - int TYPE_MAP = 0x05; - /** Major type 6: semantic tags. */ - int TYPE_TAG = 0x06; - /** Major type 7: floating point, simple data types. */ - int TYPE_FLOAT_SIMPLE = 0x07; - - /** Denotes a one-byte value (uint8). */ - int ONE_BYTE = 0x18; - /** Denotes a two-byte value (uint16). */ - int TWO_BYTES = 0x19; - /** Denotes a four-byte value (uint32). */ - int FOUR_BYTES = 0x1a; - /** Denotes a eight-byte value (uint64). */ - int EIGHT_BYTES = 0x1b; - - /** The CBOR-encoded boolean false value (encoded as "simple value": {@link #MT_SIMPLE}). */ - int FALSE = 0x14; - /** The CBOR-encoded boolean true value (encoded as "simple value": {@link #MT_SIMPLE}). */ - int TRUE = 0x15; - /** The CBOR-encoded null value (encoded as "simple value": {@link #MT_SIMPLE}). */ - int NULL = 0x16; - /** The CBOR-encoded "undefined" value (encoded as "simple value": {@link #MT_SIMPLE}). 
*/ - int UNDEFINED = 0x17; - /** Denotes a half-precision float (two-byte IEEE 754, see {@link #MT_FLOAT}). */ - int HALF_PRECISION_FLOAT = 0x19; - /** Denotes a single-precision float (four-byte IEEE 754, see {@link #MT_FLOAT}). */ - int SINGLE_PRECISION_FLOAT = 0x1a; - /** Denotes a double-precision float (eight-byte IEEE 754, see {@link #MT_FLOAT}). */ - int DOUBLE_PRECISION_FLOAT = 0x1b; - /** The CBOR-encoded "break" stop code for unlimited arrays/maps. */ - int BREAK = 0x1f; - - /** Semantic tag value describing date/time values in the standard format (UTF8 string, RFC3339). */ - int TAG_STANDARD_DATE_TIME = 0; - /** Semantic tag value describing date/time values as Epoch timestamp (numeric, RFC3339). */ - int TAG_EPOCH_DATE_TIME = 1; - /** Semantic tag value describing a positive big integer value (byte string). */ - int TAG_POSITIVE_BIGINT = 2; - /** Semantic tag value describing a negative big integer value (byte string). */ - int TAG_NEGATIVE_BIGINT = 3; - /** Semantic tag value describing a decimal fraction value (two-element array, base 10). */ - int TAG_DECIMAL_FRACTION = 4; - /** Semantic tag value describing a big decimal value (two-element array, base 2). */ - int TAG_BIGDECIMAL = 5; - /** Semantic tag value describing an expected conversion to base64url encoding. */ - int TAG_EXPECTED_BASE64_URL_ENCODED = 21; - /** Semantic tag value describing an expected conversion to base64 encoding. */ - int TAG_EXPECTED_BASE64_ENCODED = 22; - /** Semantic tag value describing an expected conversion to base16 encoding. */ - int TAG_EXPECTED_BASE16_ENCODED = 23; - /** Semantic tag value describing an encoded CBOR data item (byte string). */ - int TAG_CBOR_ENCODED = 24; - /** Semantic tag value describing an URL (UTF8 string). */ - int TAG_URI = 32; - /** Semantic tag value describing a base64url encoded string (UTF8 string). */ - int TAG_BASE64_URL_ENCODED = 33; - /** Semantic tag value describing a base64 encoded string (UTF8 string). */ - int TAG_BASE64_ENCODED = 34; - /** Semantic tag value describing a regular expression string (UTF8 string, PCRE). */ - int TAG_REGEXP = 35; - /** Semantic tag value describing a MIME message (UTF8 string, RFC2045). */ - int TAG_MIME_MESSAGE = 36; - /** Semantic tag value describing CBOR content. */ - int TAG_CBOR_MARKER = 55799; + /** Major type 0: unsigned integers. */ + int TYPE_UNSIGNED_INTEGER = 0x00; + + /** Major type 1: negative integers. */ + int TYPE_NEGATIVE_INTEGER = 0x01; + + /** Major type 2: byte string. */ + int TYPE_BYTE_STRING = 0x02; + + /** Major type 3: text/UTF8 string. */ + int TYPE_TEXT_STRING = 0x03; + + /** Major type 4: array of items. */ + int TYPE_ARRAY = 0x04; + + /** Major type 5: map of pairs. */ + int TYPE_MAP = 0x05; + + /** Major type 6: semantic tags. */ + int TYPE_TAG = 0x06; + + /** Major type 7: floating point, simple data types. */ + int TYPE_FLOAT_SIMPLE = 0x07; + + /** Denotes a one-byte value (uint8). */ + int ONE_BYTE = 0x18; + + /** Denotes a two-byte value (uint16). */ + int TWO_BYTES = 0x19; + + /** Denotes a four-byte value (uint32). */ + int FOUR_BYTES = 0x1a; + + /** Denotes a eight-byte value (uint64). */ + int EIGHT_BYTES = 0x1b; + + /** + * The CBOR-encoded boolean false value (encoded as "simple value": {@link + * #MT_SIMPLE}). + */ + int FALSE = 0x14; + + /** + * The CBOR-encoded boolean true value (encoded as "simple value": {@link + * #MT_SIMPLE}). + */ + int TRUE = 0x15; + + /** The CBOR-encoded null value (encoded as "simple value": {@link #MT_SIMPLE}). 
*/ + int NULL = 0x16; + + /** The CBOR-encoded "undefined" value (encoded as "simple value": {@link #MT_SIMPLE}). */ + int UNDEFINED = 0x17; + + /** Denotes a half-precision float (two-byte IEEE 754, see {@link #MT_FLOAT}). */ + int HALF_PRECISION_FLOAT = 0x19; + + /** Denotes a single-precision float (four-byte IEEE 754, see {@link #MT_FLOAT}). */ + int SINGLE_PRECISION_FLOAT = 0x1a; + + /** Denotes a double-precision float (eight-byte IEEE 754, see {@link #MT_FLOAT}). */ + int DOUBLE_PRECISION_FLOAT = 0x1b; + + /** The CBOR-encoded "break" stop code for unlimited arrays/maps. */ + int BREAK = 0x1f; + + /** + * Semantic tag value describing date/time values in the standard format (UTF8 string, RFC3339). + */ + int TAG_STANDARD_DATE_TIME = 0; + + /** Semantic tag value describing date/time values as Epoch timestamp (numeric, RFC3339). */ + int TAG_EPOCH_DATE_TIME = 1; + + /** Semantic tag value describing a positive big integer value (byte string). */ + int TAG_POSITIVE_BIGINT = 2; + + /** Semantic tag value describing a negative big integer value (byte string). */ + int TAG_NEGATIVE_BIGINT = 3; + + /** Semantic tag value describing a decimal fraction value (two-element array, base 10). */ + int TAG_DECIMAL_FRACTION = 4; + + /** Semantic tag value describing a big decimal value (two-element array, base 2). */ + int TAG_BIGDECIMAL = 5; + + /** Semantic tag value describing an expected conversion to base64url encoding. */ + int TAG_EXPECTED_BASE64_URL_ENCODED = 21; + + /** Semantic tag value describing an expected conversion to base64 encoding. */ + int TAG_EXPECTED_BASE64_ENCODED = 22; + + /** Semantic tag value describing an expected conversion to base16 encoding. */ + int TAG_EXPECTED_BASE16_ENCODED = 23; + + /** Semantic tag value describing an encoded CBOR data item (byte string). */ + int TAG_CBOR_ENCODED = 24; + + /** Semantic tag value describing an URL (UTF8 string). */ + int TAG_URI = 32; + + /** Semantic tag value describing a base64url encoded string (UTF8 string). */ + int TAG_BASE64_URL_ENCODED = 33; + + /** Semantic tag value describing a base64 encoded string (UTF8 string). */ + int TAG_BASE64_ENCODED = 34; + + /** Semantic tag value describing a regular expression string (UTF8 string, PCRE). */ + int TAG_REGEXP = 35; + + /** Semantic tag value describing a MIME message (UTF8 string, RFC2045). */ + int TAG_MIME_MESSAGE = 36; + + /** Semantic tag value describing CBOR content. */ + int TAG_CBOR_MARKER = 55799; } diff --git a/src/main/java/io/ipfs/api/cbor/CborDecoder.java b/src/main/java/io/ipfs/api/cbor/CborDecoder.java index 1229ccf6..ca130aca 100644 --- a/src/main/java/io/ipfs/api/cbor/CborDecoder.java +++ b/src/main/java/io/ipfs/api/cbor/CborDecoder.java @@ -6,491 +6,585 @@ * (C) Copyright - 2013 - J.W. Janssen */ -import java.io.*; - -import static io.ipfs.api.cbor.CborConstants.*; -import static io.ipfs.api.cbor.CborType.*; - -/** - * Provides a decoder capable of handling CBOR encoded data from a {@link InputStream}. 
- */ +import static io.ipfs.api.cbor.CborConstants.BREAK; +import static io.ipfs.api.cbor.CborConstants.DOUBLE_PRECISION_FLOAT; +import static io.ipfs.api.cbor.CborConstants.EIGHT_BYTES; +import static io.ipfs.api.cbor.CborConstants.FALSE; +import static io.ipfs.api.cbor.CborConstants.FOUR_BYTES; +import static io.ipfs.api.cbor.CborConstants.HALF_PRECISION_FLOAT; +import static io.ipfs.api.cbor.CborConstants.NULL; +import static io.ipfs.api.cbor.CborConstants.ONE_BYTE; +import static io.ipfs.api.cbor.CborConstants.SINGLE_PRECISION_FLOAT; +import static io.ipfs.api.cbor.CborConstants.TRUE; +import static io.ipfs.api.cbor.CborConstants.TWO_BYTES; +import static io.ipfs.api.cbor.CborConstants.TYPE_ARRAY; +import static io.ipfs.api.cbor.CborConstants.TYPE_BYTE_STRING; +import static io.ipfs.api.cbor.CborConstants.TYPE_FLOAT_SIMPLE; +import static io.ipfs.api.cbor.CborConstants.TYPE_MAP; +import static io.ipfs.api.cbor.CborConstants.TYPE_NEGATIVE_INTEGER; +import static io.ipfs.api.cbor.CborConstants.TYPE_TAG; +import static io.ipfs.api.cbor.CborConstants.TYPE_TEXT_STRING; +import static io.ipfs.api.cbor.CborConstants.TYPE_UNSIGNED_INTEGER; +import static io.ipfs.api.cbor.CborConstants.UNDEFINED; +import static io.ipfs.api.cbor.CborType.getName; +import static io.ipfs.api.cbor.CborType.valueOf; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.PushbackInputStream; + +/** Provides a decoder capable of handling CBOR encoded data from a {@link InputStream}. */ public class CborDecoder { - protected final PushbackInputStream m_is; - - /** - * Creates a new {@link CborDecoder} instance. - * - * @param is the actual input stream to read the CBOR-encoded data from, cannot be null. - */ - public CborDecoder(InputStream is) { - if (is == null) { - throw new IllegalArgumentException("InputStream cannot be null!"); - } - m_is = (is instanceof PushbackInputStream) ? (PushbackInputStream) is : new PushbackInputStream(is); - } - - private static void fail(String msg, Object... args) throws IOException { - throw new IOException(msg + args); - } - - private static String lengthToString(int len) { - return (len < 0) ? "no payload" : (len == ONE_BYTE) ? "one byte" : (len == TWO_BYTES) ? "two bytes" - : (len == FOUR_BYTES) ? "four bytes" : (len == EIGHT_BYTES) ? "eight bytes" : "(unknown)"; - } - - /** - * Peeks in the input stream for the upcoming type. - * - * @return the upcoming type in the stream, or null in case of an end-of-stream. - * @throws IOException in case of I/O problems reading the CBOR-type from the underlying input stream. - */ - public CborType peekType() throws IOException { - int p = m_is.read(); - if (p < 0) { - // EOF, nothing to peek at... - return null; - } - m_is.unread(p); - return valueOf(p); - } - - /** - * Prolog to reading an array value in CBOR format. - * - * @return the number of elements in the array to read, or -1 in case of infinite-length arrays. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readArrayLength() throws IOException { - return readMajorTypeWithSize(TYPE_ARRAY); - } - - /** - * Reads a boolean value in CBOR format. - * - * @return the read boolean. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. 
- */ - public boolean readBoolean() throws IOException { - int b = readMajorType(TYPE_FLOAT_SIMPLE); - if (b != FALSE && b != TRUE) { - fail("Unexpected boolean value: %d!", b); - } - return b == TRUE; - } - - /** - * Reads a "break"/stop value in CBOR format. - * - * @return always null. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public Object readBreak() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, BREAK); - - return null; - } - - /** - * Reads a byte string value in CBOR format. - * - * @return the read byte string, never null. In case the encoded string has a length of 0, an empty string is returned. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public byte[] readByteString() throws IOException { - long len = readMajorTypeWithSize(TYPE_BYTE_STRING); - if (len < 0) { - fail("Infinite-length byte strings not supported!"); - } - if (len > Integer.MAX_VALUE) { - fail("String length too long!"); - } - return readFully(new byte[(int) len]); - } - - /** - * Prolog to reading a byte string value in CBOR format. - * - * @return the number of bytes in the string to read, or -1 in case of infinite-length strings. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readByteStringLength() throws IOException { - return readMajorTypeWithSize(TYPE_BYTE_STRING); - } - - /** - * Reads a double-precision float value in CBOR format. - * - * @return the read double value, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public double readDouble() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, DOUBLE_PRECISION_FLOAT); - - return Double.longBitsToDouble(readUInt64()); - } - - /** - * Reads a single-precision float value in CBOR format. - * - * @return the read float value, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public float readFloat() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, SINGLE_PRECISION_FLOAT); - - return Float.intBitsToFloat((int) readUInt32()); - } - - /** - * Reads a half-precision float value in CBOR format. - * - * @return the read half-precision float value, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public double readHalfPrecisionFloat() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, HALF_PRECISION_FLOAT); - - int half = readUInt16(); - int exp = (half >> 10) & 0x1f; - int mant = half & 0x3ff; - - double val; - if (exp == 0) { - val = mant * Math.pow(2, -24); - } else if (exp != 31) { - val = (mant + 1024) * Math.pow(2, exp - 25); - } else if (mant != 0) { - val = Double.NaN; - } else { - val = Double.POSITIVE_INFINITY; - } - - return ((half & 0x8000) == 0) ? val : -val; - } - - /** - * Reads a signed or unsigned integer value in CBOR format. - * - * @return the read integer value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. 
- * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readInt() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return ui ^ readUInt(ib & 0x1f, false /* breakAllowed */); - } - - /** - * Reads a signed or unsigned 16-bit integer value in CBOR format. - * - * @read the small integer value, values from [-65536..65535] are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying output stream. - */ - public int readInt16() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return (int) (ui ^ readUIntExact(TWO_BYTES, ib & 0x1f)); - } - - /** - * Reads a signed or unsigned 32-bit integer value in CBOR format. - * - * @read the small integer value, values in the range [-4294967296..4294967295] are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying output stream. - */ - public long readInt32() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return ui ^ readUIntExact(FOUR_BYTES, ib & 0x1f); - } - - /** - * Reads a signed or unsigned 64-bit integer value in CBOR format. - * - * @read the small integer value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying output stream. - */ - public long readInt64() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return ui ^ readUIntExact(EIGHT_BYTES, ib & 0x1f); - } - - /** - * Reads a signed or unsigned 8-bit integer value in CBOR format. - * - * @read the small integer value, values in the range [-256..255] are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying output stream. - */ - public int readInt8() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return (int) (ui ^ readUIntExact(ONE_BYTE, ib & 0x1f)); - } - - /** - * Prolog to reading a map of key-value pairs in CBOR format. - * - * @return the number of entries in the map, >= 0. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readMapLength() throws IOException { - return readMajorTypeWithSize(TYPE_MAP); - } - - /** - * Reads a null-value in CBOR format. - * - * @return always null. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public Object readNull() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, NULL); - return null; - } - - /** - * Reads a single byte value in CBOR format. 
- * - * @return the read byte value. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public byte readSimpleValue() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, ONE_BYTE); - return (byte) readUInt8(); - } - - /** - * Reads a signed or unsigned small (<= 23) integer value in CBOR format. - * - * @read the small integer value, values in the range [-24..23] are supported. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying output stream. - */ - public int readSmallInt() throws IOException { - int ib = m_is.read(); - - // in case of negative integers, extends the sign to all bits; otherwise zero... - long ui = expectIntegerType(ib); - // in case of negative integers does a ones complement - return (int) (ui ^ readUIntExact(-1, ib & 0x1f)); - } - - /** - * Reads a semantic tag value in CBOR format. - * - * @return the read tag value. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readTag() throws IOException { - return readUInt(readMajorType(TYPE_TAG), false /* breakAllowed */); - } - - /** - * Reads an UTF-8 encoded string value in CBOR format. - * - * @return the read UTF-8 encoded string, never null. In case the encoded string has a length of 0, an empty string is returned. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public String readTextString() throws IOException { - long len = readMajorTypeWithSize(TYPE_TEXT_STRING); - if (len < 0) { - fail("Infinite-length text strings not supported!"); - } - if (len > Integer.MAX_VALUE) { - fail("String length too long!"); - } - return new String(readFully(new byte[(int) len]), "UTF-8"); - } - - /** - * Prolog to reading an UTF-8 encoded string value in CBOR format. - * - * @return the length of the string to read, or -1 in case of infinite-length strings. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public long readTextStringLength() throws IOException { - return readMajorTypeWithSize(TYPE_TEXT_STRING); - } - - /** - * Reads an undefined value in CBOR format. - * - * @return always null. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - public Object readUndefined() throws IOException { - readMajorTypeExact(TYPE_FLOAT_SIMPLE, UNDEFINED); - return null; - } - - /** - * Reads the next major type from the underlying input stream, and verifies whether it matches the given expectation. - * - * @param majorType the expected major type, cannot be null (unchecked). - * @return either -1 if the major type was an signed integer, or 0 otherwise. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - protected long expectIntegerType(int ib) throws IOException { - int majorType = ((ib & 0xFF) >>> 5); - if ((majorType != TYPE_UNSIGNED_INTEGER) && (majorType != TYPE_NEGATIVE_INTEGER)) { - fail("Unexpected type: %s, expected type %s or %s!", getName(majorType), getName(TYPE_UNSIGNED_INTEGER), - getName(TYPE_NEGATIVE_INTEGER)); - } - return -majorType; - } - - /** - * Reads the next major type from the underlying input stream, and verifies whether it matches the given expectation. - * - * @param majorType the expected major type, cannot be null (unchecked). 
- * @return the read subtype, or payload, of the read major type. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - protected int readMajorType(int majorType) throws IOException { - int ib = m_is.read(); - if (majorType != ((ib >>> 5) & 0x07)) { - fail("Unexpected type: %s, expected: %s!", getName(ib), getName(majorType)); - } - return ib & 0x1F; - } - - /** - * Reads the next major type from the underlying input stream, and verifies whether it matches the given expectations. - * - * @param majorType the expected major type, cannot be null (unchecked); - * @param subtype the expected subtype. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - protected void readMajorTypeExact(int majorType, int subtype) throws IOException { - int st = readMajorType(majorType); - if ((st ^ subtype) != 0) { - fail("Unexpected subtype: %d, expected: %d!", st, subtype); - } - } - - /** - * Reads the next major type from the underlying input stream, verifies whether it matches the given expectation, and decodes the payload into a size. - * - * @param majorType the expected major type, cannot be null (unchecked). - * @return the number of succeeding bytes, >= 0, or -1 if an infinite-length type is read. - * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying input stream. - */ - protected long readMajorTypeWithSize(int majorType) throws IOException { - return readUInt(readMajorType(majorType), true /* breakAllowed */); - } - - /** - * Reads an unsigned integer with a given length-indicator. - * - * @param length the length indicator to use; - * @return the read unsigned integer, as long value. - * @throws IOException in case of I/O problems reading the unsigned integer from the underlying input stream. - */ - protected long readUInt(int length, boolean breakAllowed) throws IOException { - long result = -1; - if (length < ONE_BYTE) { - result = length; - } else if (length == ONE_BYTE) { - result = readUInt8(); - } else if (length == TWO_BYTES) { - result = readUInt16(); - } else if (length == FOUR_BYTES) { - result = readUInt32(); - } else if (length == EIGHT_BYTES) { - result = readUInt64(); - } else if (breakAllowed && length == BREAK) { - return -1; - } - if (result < 0) { - fail("Not well-formed CBOR integer found, invalid length: %d!", result); - } - return result; - } - - /** - * Reads an unsigned 16-bit integer value - * - * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected int readUInt16() throws IOException { - byte[] buf = readFully(new byte[2]); - return (buf[0] & 0xFF) << 8 | (buf[1] & 0xFF); - } - - /** - * Reads an unsigned 32-bit integer value - * - * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected long readUInt32() throws IOException { - byte[] buf = readFully(new byte[4]); - return ((buf[0] & 0xFF) << 24 | (buf[1] & 0xFF) << 16 | (buf[2] & 0xFF) << 8 | (buf[3] & 0xFF)) & 0xffffffffL; - } - - /** - * Reads an unsigned 64-bit integer value - * - * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. 
- * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected long readUInt64() throws IOException { - byte[] buf = readFully(new byte[8]); - return (buf[0] & 0xFFL) << 56 | (buf[1] & 0xFFL) << 48 | (buf[2] & 0xFFL) << 40 | (buf[3] & 0xFFL) << 32 | // - (buf[4] & 0xFFL) << 24 | (buf[5] & 0xFFL) << 16 | (buf[6] & 0xFFL) << 8 | (buf[7] & 0xFFL); - } - - /** - * Reads an unsigned 8-bit integer value - * - * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected int readUInt8() throws IOException { - return m_is.read() & 0xff; - } - - /** - * Reads an unsigned integer with a given length-indicator. - * - * @param length the length indicator to use; - * @return the read unsigned integer, as long value. - * @throws IOException in case of I/O problems reading the unsigned integer from the underlying input stream. - */ - protected long readUIntExact(int expectedLength, int length) throws IOException { - if (((expectedLength == -1) && (length >= ONE_BYTE)) || ((expectedLength >= 0) && (length != expectedLength))) { - fail("Unexpected payload/length! Expected %s, but got %s.", lengthToString(expectedLength), - lengthToString(length)); - } - return readUInt(length, false /* breakAllowed */); - } - - private byte[] readFully(byte[] buf) throws IOException { - int len = buf.length; - int n = 0, off = 0; - while (n < len) { - int count = m_is.read(buf, off + n, len - n); - if (count < 0) { - throw new EOFException(); - } - n += count; - } - return buf; - } -} \ No newline at end of file + protected final PushbackInputStream m_is; + + /** + * Creates a new {@link CborDecoder} instance. + * + * @param is the actual input stream to read the CBOR-encoded data from, cannot be null + * . + */ + public CborDecoder(InputStream is) { + if (is == null) { + throw new IllegalArgumentException("InputStream cannot be null!"); + } + m_is = + (is instanceof PushbackInputStream) + ? (PushbackInputStream) is + : new PushbackInputStream(is); + } + + private static void fail(String msg, Object... args) throws IOException { + throw new IOException(msg + args); + } + + private static String lengthToString(int len) { + return (len < 0) + ? "no payload" + : (len == ONE_BYTE) + ? "one byte" + : (len == TWO_BYTES) + ? "two bytes" + : (len == FOUR_BYTES) + ? "four bytes" + : (len == EIGHT_BYTES) ? "eight bytes" : "(unknown)"; + } + + /** + * Peeks in the input stream for the upcoming type. + * + * @return the upcoming type in the stream, or null in case of an end-of-stream. + * @throws IOException in case of I/O problems reading the CBOR-type from the underlying input + * stream. + */ + public CborType peekType() throws IOException { + int p = m_is.read(); + if (p < 0) { + // EOF, nothing to peek at... + return null; + } + m_is.unread(p); + return valueOf(p); + } + + /** + * Prolog to reading an array value in CBOR format. + * + * @return the number of elements in the array to read, or -1 in case of infinite-length + * arrays. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readArrayLength() throws IOException { + return readMajorTypeWithSize(TYPE_ARRAY); + } + + /** + * Reads a boolean value in CBOR format. + * + * @return the read boolean. 
+ * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public boolean readBoolean() throws IOException { + int b = readMajorType(TYPE_FLOAT_SIMPLE); + if (b != FALSE && b != TRUE) { + fail("Unexpected boolean value: %d!", b); + } + return b == TRUE; + } + + /** + * Reads a "break"/stop value in CBOR format. + * + * @return always null. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public Object readBreak() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, BREAK); + + return null; + } + + /** + * Reads a byte string value in CBOR format. + * + * @return the read byte string, never null. In case the encoded string has a length + * of 0, an empty string is returned. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public byte[] readByteString() throws IOException { + long len = readMajorTypeWithSize(TYPE_BYTE_STRING); + if (len < 0) { + fail("Infinite-length byte strings not supported!"); + } + if (len > Integer.MAX_VALUE) { + fail("String length too long!"); + } + return readFully(new byte[(int) len]); + } + + /** + * Prolog to reading a byte string value in CBOR format. + * + * @return the number of bytes in the string to read, or -1 in case of infinite-length + * strings. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readByteStringLength() throws IOException { + return readMajorTypeWithSize(TYPE_BYTE_STRING); + } + + /** + * Reads a double-precision float value in CBOR format. + * + * @return the read double value, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public double readDouble() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, DOUBLE_PRECISION_FLOAT); + + return Double.longBitsToDouble(readUInt64()); + } + + /** + * Reads a single-precision float value in CBOR format. + * + * @return the read float value, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public float readFloat() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, SINGLE_PRECISION_FLOAT); + + return Float.intBitsToFloat((int) readUInt32()); + } + + /** + * Reads a half-precision float value in CBOR format. + * + * @return the read half-precision float value, values from {@link Float#MIN_VALUE} to {@link + * Float#MAX_VALUE} are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public double readHalfPrecisionFloat() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, HALF_PRECISION_FLOAT); + + int half = readUInt16(); + int exp = (half >> 10) & 0x1f; + int mant = half & 0x3ff; + + double val; + if (exp == 0) { + val = mant * Math.pow(2, -24); + } else if (exp != 31) { + val = (mant + 1024) * Math.pow(2, exp - 25); + } else if (mant != 0) { + val = Double.NaN; + } else { + val = Double.POSITIVE_INFINITY; + } + + return ((half & 0x8000) == 0) ? val : -val; + } + + /** + * Reads a signed or unsigned integer value in CBOR format. 
+ * + * @return the read integer value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readInt() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return ui ^ readUInt(ib & 0x1f, false /* breakAllowed */); + } + + /** + * Reads a signed or unsigned 16-bit integer value in CBOR format. + * + * @read the small integer value, values from [-65536..65535] are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * output stream. + */ + public int readInt16() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return (int) (ui ^ readUIntExact(TWO_BYTES, ib & 0x1f)); + } + + /** + * Reads a signed or unsigned 32-bit integer value in CBOR format. + * + * @read the small integer value, values in the range [-4294967296..4294967295] are + * supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * output stream. + */ + public long readInt32() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return ui ^ readUIntExact(FOUR_BYTES, ib & 0x1f); + } + + /** + * Reads a signed or unsigned 64-bit integer value in CBOR format. + * + * @read the small integer value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are + * supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * output stream. + */ + public long readInt64() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return ui ^ readUIntExact(EIGHT_BYTES, ib & 0x1f); + } + + /** + * Reads a signed or unsigned 8-bit integer value in CBOR format. + * + * @read the small integer value, values in the range [-256..255] are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * output stream. + */ + public int readInt8() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return (int) (ui ^ readUIntExact(ONE_BYTE, ib & 0x1f)); + } + + /** + * Prolog to reading a map of key-value pairs in CBOR format. + * + * @return the number of entries in the map, >= 0. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readMapLength() throws IOException { + return readMajorTypeWithSize(TYPE_MAP); + } + + /** + * Reads a null-value in CBOR format. + * + * @return always null. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. 
+ */ + public Object readNull() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, NULL); + return null; + } + + /** + * Reads a single byte value in CBOR format. + * + * @return the read byte value. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public byte readSimpleValue() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, ONE_BYTE); + return (byte) readUInt8(); + } + + /** + * Reads a signed or unsigned small (<= 23) integer value in CBOR format. + * + * @read the small integer value, values in the range [-24..23] are supported. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * output stream. + */ + public int readSmallInt() throws IOException { + int ib = m_is.read(); + + // in case of negative integers, extends the sign to all bits; otherwise zero... + long ui = expectIntegerType(ib); + // in case of negative integers does a ones complement + return (int) (ui ^ readUIntExact(-1, ib & 0x1f)); + } + + /** + * Reads a semantic tag value in CBOR format. + * + * @return the read tag value. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readTag() throws IOException { + return readUInt(readMajorType(TYPE_TAG), false /* breakAllowed */); + } + + /** + * Reads an UTF-8 encoded string value in CBOR format. + * + * @return the read UTF-8 encoded string, never null. In case the encoded string has + * a length of 0, an empty string is returned. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public String readTextString() throws IOException { + long len = readMajorTypeWithSize(TYPE_TEXT_STRING); + if (len < 0) { + fail("Infinite-length text strings not supported!"); + } + if (len > Integer.MAX_VALUE) { + fail("String length too long!"); + } + return new String(readFully(new byte[(int) len]), "UTF-8"); + } + + /** + * Prolog to reading an UTF-8 encoded string value in CBOR format. + * + * @return the length of the string to read, or -1 in case of infinite-length strings. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public long readTextStringLength() throws IOException { + return readMajorTypeWithSize(TYPE_TEXT_STRING); + } + + /** + * Reads an undefined value in CBOR format. + * + * @return always null. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + public Object readUndefined() throws IOException { + readMajorTypeExact(TYPE_FLOAT_SIMPLE, UNDEFINED); + return null; + } + + /** + * Reads the next major type from the underlying input stream, and verifies whether it matches the + * given expectation. + * + * @param majorType the expected major type, cannot be null (unchecked). + * @return either -1 if the major type was an signed integer, or 0 otherwise. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. 
+ */ + protected long expectIntegerType(int ib) throws IOException { + int majorType = ((ib & 0xFF) >>> 5); + if ((majorType != TYPE_UNSIGNED_INTEGER) && (majorType != TYPE_NEGATIVE_INTEGER)) { + fail( + "Unexpected type: %s, expected type %s or %s!", + getName(majorType), getName(TYPE_UNSIGNED_INTEGER), getName(TYPE_NEGATIVE_INTEGER)); + } + return -majorType; + } + + /** + * Reads the next major type from the underlying input stream, and verifies whether it matches the + * given expectation. + * + * @param majorType the expected major type, cannot be null (unchecked). + * @return the read subtype, or payload, of the read major type. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + protected int readMajorType(int majorType) throws IOException { + int ib = m_is.read(); + if (majorType != ((ib >>> 5) & 0x07)) { + fail("Unexpected type: %s, expected: %s!", getName(ib), getName(majorType)); + } + return ib & 0x1F; + } + + /** + * Reads the next major type from the underlying input stream, and verifies whether it matches the + * given expectations. + * + * @param majorType the expected major type, cannot be null (unchecked); + * @param subtype the expected subtype. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + protected void readMajorTypeExact(int majorType, int subtype) throws IOException { + int st = readMajorType(majorType); + if ((st ^ subtype) != 0) { + fail("Unexpected subtype: %d, expected: %d!", st, subtype); + } + } + + /** + * Reads the next major type from the underlying input stream, verifies whether it matches the + * given expectation, and decodes the payload into a size. + * + * @param majorType the expected major type, cannot be null (unchecked). + * @return the number of succeeding bytes, >= 0, or -1 if an infinite-length type is + * read. + * @throws IOException in case of I/O problems reading the CBOR-encoded value from the underlying + * input stream. + */ + protected long readMajorTypeWithSize(int majorType) throws IOException { + return readUInt(readMajorType(majorType), true /* breakAllowed */); + } + + /** + * Reads an unsigned integer with a given length-indicator. + * + * @param length the length indicator to use; + * @return the read unsigned integer, as long value. + * @throws IOException in case of I/O problems reading the unsigned integer from the underlying + * input stream. + */ + protected long readUInt(int length, boolean breakAllowed) throws IOException { + long result = -1; + if (length < ONE_BYTE) { + result = length; + } else if (length == ONE_BYTE) { + result = readUInt8(); + } else if (length == TWO_BYTES) { + result = readUInt16(); + } else if (length == FOUR_BYTES) { + result = readUInt32(); + } else if (length == EIGHT_BYTES) { + result = readUInt64(); + } else if (breakAllowed && length == BREAK) { + return -1; + } + if (result < 0) { + fail("Not well-formed CBOR integer found, invalid length: %d!", result); + } + return result; + } + + /** + * Reads an unsigned 16-bit integer value + * + * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are + * supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. 
+ */ + protected int readUInt16() throws IOException { + byte[] buf = readFully(new byte[2]); + return (buf[0] & 0xFF) << 8 | (buf[1] & 0xFF); + } + + /** + * Reads an unsigned 32-bit integer value + * + * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are + * supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected long readUInt32() throws IOException { + byte[] buf = readFully(new byte[4]); + return ((buf[0] & 0xFF) << 24 | (buf[1] & 0xFF) << 16 | (buf[2] & 0xFF) << 8 | (buf[3] & 0xFF)) + & 0xffffffffL; + } + + /** + * Reads an unsigned 64-bit integer value + * + * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are + * supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected long readUInt64() throws IOException { + byte[] buf = readFully(new byte[8]); + return (buf[0] & 0xFFL) << 56 + | (buf[1] & 0xFFL) << 48 + | (buf[2] & 0xFFL) << 40 + | (buf[3] & 0xFFL) << 32 + | // + (buf[4] & 0xFFL) << 24 + | (buf[5] & 0xFFL) << 16 + | (buf[6] & 0xFFL) << 8 + | (buf[7] & 0xFFL); + } + + /** + * Reads an unsigned 8-bit integer value + * + * @return value the read value, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are + * supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected int readUInt8() throws IOException { + return m_is.read() & 0xff; + } + + /** + * Reads an unsigned integer with a given length-indicator. + * + * @param length the length indicator to use; + * @return the read unsigned integer, as long value. + * @throws IOException in case of I/O problems reading the unsigned integer from the underlying + * input stream. + */ + protected long readUIntExact(int expectedLength, int length) throws IOException { + if (((expectedLength == -1) && (length >= ONE_BYTE)) + || ((expectedLength >= 0) && (length != expectedLength))) { + fail( + "Unexpected payload/length! Expected %s, but got %s.", + lengthToString(expectedLength), lengthToString(length)); + } + return readUInt(length, false /* breakAllowed */); + } + + private byte[] readFully(byte[] buf) throws IOException { + int len = buf.length; + int n = 0, off = 0; + while (n < len) { + int count = m_is.read(buf, off + n, len - n); + if (count < 0) { + throw new EOFException(); + } + n += count; + } + return buf; + } +} diff --git a/src/main/java/io/ipfs/api/cbor/CborEncoder.java b/src/main/java/io/ipfs/api/cbor/CborEncoder.java index fd57b21f..865541ff 100644 --- a/src/main/java/io/ipfs/api/cbor/CborEncoder.java +++ b/src/main/java/io/ipfs/api/cbor/CborEncoder.java @@ -8,481 +8,553 @@ * Licensed under Apache License v2.0. 
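For orientation, a minimal usage sketch of the reformatted CborDecoder above. This is illustrative only and not part of the diff: the class name is invented, the byte values are ordinary hand-written CBOR encodings, and CborDecoder is assumed to live in the same io.ipfs.api.cbor package as CborEncoder.

// Illustrative sketch, not repository code.
import io.ipfs.api.cbor.CborDecoder;

import java.io.ByteArrayInputStream;

public class CborDecoderSketch {
  public static void main(String[] args) throws Exception {
    // Standard CBOR items: unsigned 500 (0x19 01F4), text string "a" (0x61 0x61), true (0xF5).
    byte[] cbor = {0x19, 0x01, (byte) 0xF4, 0x61, 0x61, (byte) 0xF5};
    CborDecoder dec = new CborDecoder(new ByteArrayInputStream(cbor));
    long n = dec.readInt();          // 500
    String s = dec.readTextString(); // "a"
    boolean b = dec.readBoolean();   // true
    System.out.println(n + " " + s + " " + b);
  }
}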
*/ -import java.io.*; - -import static io.ipfs.api.cbor.CborConstants.*; +import static io.ipfs.api.cbor.CborConstants.BREAK; +import static io.ipfs.api.cbor.CborConstants.EIGHT_BYTES; +import static io.ipfs.api.cbor.CborConstants.FALSE; +import static io.ipfs.api.cbor.CborConstants.FOUR_BYTES; +import static io.ipfs.api.cbor.CborConstants.NULL; +import static io.ipfs.api.cbor.CborConstants.ONE_BYTE; +import static io.ipfs.api.cbor.CborConstants.TRUE; +import static io.ipfs.api.cbor.CborConstants.TWO_BYTES; +import static io.ipfs.api.cbor.CborConstants.TYPE_ARRAY; +import static io.ipfs.api.cbor.CborConstants.TYPE_BYTE_STRING; +import static io.ipfs.api.cbor.CborConstants.TYPE_FLOAT_SIMPLE; +import static io.ipfs.api.cbor.CborConstants.TYPE_MAP; +import static io.ipfs.api.cbor.CborConstants.TYPE_NEGATIVE_INTEGER; +import static io.ipfs.api.cbor.CborConstants.TYPE_TAG; +import static io.ipfs.api.cbor.CborConstants.TYPE_TEXT_STRING; +import static io.ipfs.api.cbor.CborConstants.UNDEFINED; + +import java.io.IOException; +import java.io.OutputStream; /** * Provides an encoder capable of encoding data into CBOR format to a given {@link OutputStream}. */ public class CborEncoder { - private static final int NEG_INT_MASK = TYPE_NEGATIVE_INTEGER << 5; - - private final OutputStream m_os; - - /** - * Creates a new {@link CborEncoder} instance. - * - * @param os the actual output stream to write the CBOR-encoded data to, cannot be null. - */ - public CborEncoder(OutputStream os) { - if (os == null) { - throw new IllegalArgumentException("OutputStream cannot be null!"); - } - m_os = os; - } - - /** - * Interprets a given float-value as a half-precision float value and - * converts it to its raw integer form, as defined in IEEE 754. - *

- * Taken from: this Stack Overflow answer. - *

- * - * @param fval the value to convert. - * @return the raw integer representation of the given float value. - */ - static int halfPrecisionToRawIntBits(float fval) { - int fbits = Float.floatToIntBits(fval); - int sign = (fbits >>> 16) & 0x8000; - int val = (fbits & 0x7fffffff) + 0x1000; - - // might be or become NaN/Inf - if (val >= 0x47800000) { - if ((fbits & 0x7fffffff) >= 0x47800000) { // is or must become NaN/Inf - if (val < 0x7f800000) { - // was value but too large, make it +/-Inf - return sign | 0x7c00; - } - return sign | 0x7c00 | (fbits & 0x007fffff) >>> 13; // keep NaN (and Inf) bits - } - return sign | 0x7bff; // unrounded not quite Inf - } - if (val >= 0x38800000) { - // remains normalized value - return sign | val - 0x38000000 >>> 13; // exp - 127 + 15 + private static final int NEG_INT_MASK = TYPE_NEGATIVE_INTEGER << 5; + + private final OutputStream m_os; + + /** + * Creates a new {@link CborEncoder} instance. + * + * @param os the actual output stream to write the CBOR-encoded data to, cannot be null + * . + */ + public CborEncoder(OutputStream os) { + if (os == null) { + throw new IllegalArgumentException("OutputStream cannot be null!"); + } + m_os = os; + } + + /** + * Interprets a given float-value as a half-precision float value and converts it to its raw + * integer form, as defined in IEEE 754. + * + *

Taken from: this Stack Overflow + * answer. + * + * @param fval the value to convert. + * @return the raw integer representation of the given float value. + */ + static int halfPrecisionToRawIntBits(float fval) { + int fbits = Float.floatToIntBits(fval); + int sign = (fbits >>> 16) & 0x8000; + int val = (fbits & 0x7fffffff) + 0x1000; + + // might be or become NaN/Inf + if (val >= 0x47800000) { + if ((fbits & 0x7fffffff) >= 0x47800000) { // is or must become NaN/Inf + if (val < 0x7f800000) { + // was value but too large, make it +/-Inf + return sign | 0x7c00; } - if (val < 0x33000000) { - // too small for subnormal - return sign; // becomes +/-0 - } - - val = (fbits & 0x7fffffff) >>> 23; - // add subnormal bit, round depending on cut off and div by 2^(1-(exp-127+15)) and >> 13 | exp=0 - return sign | ((fbits & 0x7fffff | 0x800000) + (0x800000 >>> val - 102) >>> 126 - val); - } - - /** - * Writes the start of an indefinite-length array. - *

- * After calling this method, one is expected to write the given number of array elements, which can be of any type. No length checks are performed.
- * After all array elements are written, one should write a single break value to end the array, see {@link #writeBreak()}. - *

- * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeArrayStart() throws IOException { - writeSimpleType(TYPE_ARRAY, BREAK); - } - - /** - * Writes the start of a definite-length array. - *

- * After calling this method, one is expected to write the given number of array elements, which can be of any type. No length checks are performed. - *

- * - * @param length the number of array elements to write, should >= 0. - * @throws IllegalArgumentException in case the given length was negative; - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeArrayStart(int length) throws IOException { - if (length < 0) { - throw new IllegalArgumentException("Invalid array-length!"); - } - writeType(TYPE_ARRAY, length); - } - - /** - * Writes a boolean value in canonical CBOR format. - * - * @param value the boolean to write. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeBoolean(boolean value) throws IOException { - writeSimpleType(TYPE_FLOAT_SIMPLE, value ? TRUE : FALSE); - } - - /** - * Writes a "break" stop-value in canonical CBOR format. - * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeBreak() throws IOException { - writeSimpleType(TYPE_FLOAT_SIMPLE, BREAK); - } - - /** - * Writes a byte string in canonical CBOR-format. - * - * @param bytes the byte string to write, can be null in which case a byte-string of length 0 is written. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeByteString(byte[] bytes) throws IOException { - writeString(TYPE_BYTE_STRING, bytes); - } - - /** - * Writes the start of an indefinite-length byte string. - *

- * After calling this method, one is expected to write the given number of string parts. No length checks are performed.
- * After all string parts are written, one should write a single break value to end the string, see {@link #writeBreak()}. - *

- * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeByteStringStart() throws IOException { - writeSimpleType(TYPE_BYTE_STRING, BREAK); - } - - /** - * Writes a double-precision float value in canonical CBOR format. - * - * @param value the value to write, values from {@link Double#MIN_VALUE} to {@link Double#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeDouble(double value) throws IOException { - writeUInt64(TYPE_FLOAT_SIMPLE << 5, Double.doubleToRawLongBits(value)); - } - - /** - * Writes a single-precision float value in canonical CBOR format. - * - * @param value the value to write, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeFloat(float value) throws IOException { - writeUInt32(TYPE_FLOAT_SIMPLE << 5, Float.floatToRawIntBits(value)); - } - - /** - * Writes a half-precision float value in canonical CBOR format. - * - * @param value the value to write, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeHalfPrecisionFloat(float value) throws IOException { - writeUInt16(TYPE_FLOAT_SIMPLE << 5, halfPrecisionToRawIntBits(value)); - } - - /** - * Writes a signed or unsigned integer value in canonical CBOR format, that is, tries to encode it in a little bytes as possible.. - * - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeInt(long value) throws IOException { - // extends the sign over all bits... - long sign = value >> 63; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - value = (sign ^ value); - - writeUInt(mt, value); - } - - /** - * Writes a signed or unsigned 16-bit integer value in CBOR format. - * - * @param value the value to write, values from [-65536..65535] are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeInt16(int value) throws IOException { - // extends the sign over all bits... - int sign = value >> 31; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - writeUInt16(mt, (sign ^ value) & 0xffff); - } - - /** - * Writes a signed or unsigned 32-bit integer value in CBOR format. - * - * @param value the value to write, values in the range [-4294967296..4294967295] are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeInt32(long value) throws IOException { - // extends the sign over all bits... - long sign = value >> 63; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - writeUInt32(mt, (int) ((sign ^ value) & 0xffffffffL)); - } - - /** - * Writes a signed or unsigned 64-bit integer value in CBOR format. 
- * - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeInt64(long value) throws IOException { - // extends the sign over all bits... - long sign = value >> 63; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - writeUInt64(mt, sign ^ value); - } - - /** - * Writes a signed or unsigned 8-bit integer value in CBOR format. - * - * @param value the value to write, values in the range [-256..255] are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeInt8(int value) throws IOException { - // extends the sign over all bits... - int sign = value >> 31; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - writeUInt8(mt, (sign ^ value) & 0xff); - } - - /** - * Writes the start of an indefinite-length map. - *

- * After calling this method, one is expected to write any number of map entries, as separate key and value. Keys and values can both be of any type. No length checks are performed.
- * After all map entries are written, one should write a single break value to end the map, see {@link #writeBreak()}. - *

- * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeMapStart() throws IOException { - writeSimpleType(TYPE_MAP, BREAK); - } - - /** - * Writes the start of a finite-length map. - *

- * After calling this method, one is expected to write any number of map entries, as separate key and value. Keys and values can both be of any type. No length checks are performed. - *

- * - * @param length the number of map entries to write, should >= 0. - * @throws IllegalArgumentException in case the given length was negative; - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeMapStart(int length) throws IOException { - if (length < 0) { - throw new IllegalArgumentException("Invalid length of map!"); - } - writeType(TYPE_MAP, length); - } - - /** - * Writes a null value in canonical CBOR format. - * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeNull() throws IOException { - writeSimpleType(TYPE_FLOAT_SIMPLE, NULL); - } - - /** - * Writes a simple value, i.e., an "atom" or "constant" value in canonical CBOR format. - * - * @param simpleValue the (unsigned byte) value to write, values from 32 to 255 are supported (though not enforced). - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeSimpleValue(byte simpleValue) throws IOException { - // convert to unsigned value... - int value = (simpleValue & 0xff); - writeType(TYPE_FLOAT_SIMPLE, value); - } - - /** - * Writes a signed or unsigned small (<= 23) integer value in CBOR format. - * - * @param value the value to write, values in the range [-24..23] are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeSmallInt(int value) throws IOException { - // extends the sign over all bits... - int sign = value >> 31; - // in case value is negative, this bit should be set... - int mt = (int) (sign & NEG_INT_MASK); - // complement negative value... - value = Math.min(0x17, (sign ^ value)); - - m_os.write((int) (mt | value)); - } - - /** - * Writes a semantic tag in canonical CBOR format. - * - * @param tag the tag to write, should >= 0. - * @throws IllegalArgumentException in case the given tag was negative; - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeTag(long tag) throws IOException { - if (tag < 0) { - throw new IllegalArgumentException("Invalid tag specification, cannot be negative!"); - } - writeType(TYPE_TAG, tag); - } - - /** - * Writes an UTF-8 string in canonical CBOR-format. - *

- * Note that this method is platform specific, as the given string value will be encoded in a byte array - * using the platform encoding! This means that the encoding must be standardized and known. - *

- * - * @param value the UTF-8 string to write, can be null in which case an UTF-8 string of length 0 is written. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeTextString(String value) throws IOException { - writeString(TYPE_TEXT_STRING, value == null ? null : value.getBytes("UTF-8")); - } - - /** - * Writes the start of an indefinite-length UTF-8 string. - *

- * After calling this method, one is expected to write the given number of string parts. No length checks are performed.
- * After all string parts are written, one should write a single break value to end the string, see {@link #writeBreak()}. - *

- * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeTextStringStart() throws IOException { - writeSimpleType(TYPE_TEXT_STRING, BREAK); - } - - /** - * Writes an "undefined" value in canonical CBOR format. - * - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - public void writeUndefined() throws IOException { - writeSimpleType(TYPE_FLOAT_SIMPLE, UNDEFINED); - } - - /** - * Encodes and writes the major type and value as a simple type. - * - * @param majorType the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from [0..31] are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeSimpleType(int majorType, int value) throws IOException { - m_os.write((majorType << 5) | (value & 0x1f)); - } - - /** - * Writes a byte string in canonical CBOR-format. - * - * @param majorType the major type of the string, should be either 0x40 or 0x60; - * @param bytes the byte string to write, can be null in which case a byte-string of length 0 is written. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeString(int majorType, byte[] bytes) throws IOException { - int len = (bytes == null) ? 0 : bytes.length; - writeType(majorType, len); - for (int i = 0; i < len; i++) { - m_os.write(bytes[i]); - } - } - - /** - * Encodes and writes the major type indicator with a given payload (length). - * - * @param majorType the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeType(int majorType, long value) throws IOException { - writeUInt((majorType << 5), value); - } - - /** - * Encodes and writes an unsigned integer value, that is, tries to encode it in a little bytes as possible. - * - * @param mt the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeUInt(int mt, long value) throws IOException { - if (value < 0x18L) { - m_os.write((int) (mt | value)); - } else if (value < 0x100L) { - writeUInt8(mt, (int) value); - } else if (value < 0x10000L) { - writeUInt16(mt, (int) value); - } else if (value < 0x100000000L) { - writeUInt32(mt, (int) value); - } else { - writeUInt64(mt, value); - } - } - - /** - * Encodes and writes an unsigned 16-bit integer value - * - * @param mt the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. 
- */ - protected void writeUInt16(int mt, int value) throws IOException { - m_os.write(mt | TWO_BYTES); - m_os.write(value >> 8); - m_os.write(value & 0xFF); - } - - /** - * Encodes and writes an unsigned 32-bit integer value - * - * @param mt the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeUInt32(int mt, int value) throws IOException { - m_os.write(mt | FOUR_BYTES); - m_os.write(value >> 24); - m_os.write(value >> 16); - m_os.write(value >> 8); - m_os.write(value & 0xFF); - } - - /** - * Encodes and writes an unsigned 64-bit integer value - * - * @param mt the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeUInt64(int mt, long value) throws IOException { - m_os.write(mt | EIGHT_BYTES); - m_os.write((int) (value >> 56)); - m_os.write((int) (value >> 48)); - m_os.write((int) (value >> 40)); - m_os.write((int) (value >> 32)); - m_os.write((int) (value >> 24)); - m_os.write((int) (value >> 16)); - m_os.write((int) (value >> 8)); - m_os.write((int) (value & 0xFF)); - } - - /** - * Encodes and writes an unsigned 8-bit integer value - * - * @param mt the major type of the value to write, denotes what semantics the written value has; - * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} are supported. - * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying output stream. - */ - protected void writeUInt8(int mt, int value) throws IOException { - m_os.write(mt | ONE_BYTE); - m_os.write(value & 0xFF); - } -} \ No newline at end of file + return sign | 0x7c00 | (fbits & 0x007fffff) >>> 13; // keep NaN (and Inf) bits + } + return sign | 0x7bff; // unrounded not quite Inf + } + if (val >= 0x38800000) { + // remains normalized value + return sign | val - 0x38000000 >>> 13; // exp - 127 + 15 + } + if (val < 0x33000000) { + // too small for subnormal + return sign; // becomes +/-0 + } + + val = (fbits & 0x7fffffff) >>> 23; + // add subnormal bit, round depending on cut off and div by 2^(1-(exp-127+15)) and >> 13 | exp=0 + return sign | ((fbits & 0x7fffff | 0x800000) + (0x800000 >>> val - 102) >>> 126 - val); + } + + /** + * Writes the start of an indefinite-length array. + * + *

+   * After calling this method, one is expected to write any number of array elements,
+   * which can be of any type. No length checks are performed.
+ * After all array elements are written, one should write a single break value to end the array, + * see {@link #writeBreak()}. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeArrayStart() throws IOException { + writeSimpleType(TYPE_ARRAY, BREAK); + } + + /** + * Writes the start of a definite-length array. + * + *

After calling this method, one is expected to write the given number of array elements, + * which can be of any type. No length checks are performed. + * + * @param length the number of array elements to write, should >= 0. + * @throws IllegalArgumentException in case the given length was negative; + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeArrayStart(int length) throws IOException { + if (length < 0) { + throw new IllegalArgumentException("Invalid array-length!"); + } + writeType(TYPE_ARRAY, length); + } + + /** + * Writes a boolean value in canonical CBOR format. + * + * @param value the boolean to write. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeBoolean(boolean value) throws IOException { + writeSimpleType(TYPE_FLOAT_SIMPLE, value ? TRUE : FALSE); + } + + /** + * Writes a "break" stop-value in canonical CBOR format. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeBreak() throws IOException { + writeSimpleType(TYPE_FLOAT_SIMPLE, BREAK); + } + + /** + * Writes a byte string in canonical CBOR-format. + * + * @param bytes the byte string to write, can be null in which case a byte-string of + * length 0 is written. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeByteString(byte[] bytes) throws IOException { + writeString(TYPE_BYTE_STRING, bytes); + } + + /** + * Writes the start of an indefinite-length byte string. + * + *

+   * After calling this method, one is expected to write any number of string parts. No
+   * length checks are performed.
+ * After all string parts are written, one should write a single break value to end the string, + * see {@link #writeBreak()}. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeByteStringStart() throws IOException { + writeSimpleType(TYPE_BYTE_STRING, BREAK); + } + + /** + * Writes a double-precision float value in canonical CBOR format. + * + * @param value the value to write, values from {@link Double#MIN_VALUE} to {@link + * Double#MAX_VALUE} are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeDouble(double value) throws IOException { + writeUInt64(TYPE_FLOAT_SIMPLE << 5, Double.doubleToRawLongBits(value)); + } + + /** + * Writes a single-precision float value in canonical CBOR format. + * + * @param value the value to write, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeFloat(float value) throws IOException { + writeUInt32(TYPE_FLOAT_SIMPLE << 5, Float.floatToRawIntBits(value)); + } + + /** + * Writes a half-precision float value in canonical CBOR format. + * + * @param value the value to write, values from {@link Float#MIN_VALUE} to {@link Float#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeHalfPrecisionFloat(float value) throws IOException { + writeUInt16(TYPE_FLOAT_SIMPLE << 5, halfPrecisionToRawIntBits(value)); + } + + /** + * Writes a signed or unsigned integer value in canonical CBOR format, that is, tries to encode it + * in a little bytes as possible.. + * + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeInt(long value) throws IOException { + // extends the sign over all bits... + long sign = value >> 63; + // in case value is negative, this bit should be set... + int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + value = (sign ^ value); + + writeUInt(mt, value); + } + + /** + * Writes a signed or unsigned 16-bit integer value in CBOR format. + * + * @param value the value to write, values from [-65536..65535] are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeInt16(int value) throws IOException { + // extends the sign over all bits... + int sign = value >> 31; + // in case value is negative, this bit should be set... + int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + writeUInt16(mt, (sign ^ value) & 0xffff); + } + + /** + * Writes a signed or unsigned 32-bit integer value in CBOR format. + * + * @param value the value to write, values in the range [-4294967296..4294967295] are + * supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeInt32(long value) throws IOException { + // extends the sign over all bits... + long sign = value >> 63; + // in case value is negative, this bit should be set... 
+ int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + writeUInt32(mt, (int) ((sign ^ value) & 0xffffffffL)); + } + + /** + * Writes a signed or unsigned 64-bit integer value in CBOR format. + * + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeInt64(long value) throws IOException { + // extends the sign over all bits... + long sign = value >> 63; + // in case value is negative, this bit should be set... + int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + writeUInt64(mt, sign ^ value); + } + + /** + * Writes a signed or unsigned 8-bit integer value in CBOR format. + * + * @param value the value to write, values in the range [-256..255] are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeInt8(int value) throws IOException { + // extends the sign over all bits... + int sign = value >> 31; + // in case value is negative, this bit should be set... + int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + writeUInt8(mt, (sign ^ value) & 0xff); + } + + /** + * Writes the start of an indefinite-length map. + * + *

+   * After calling this method, one is expected to write any number of map entries, as separate
+   * key and value. Keys and values can both be of any type. No length checks are performed.
+ * After all map entries are written, one should write a single break value to end the map, see + * {@link #writeBreak()}. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeMapStart() throws IOException { + writeSimpleType(TYPE_MAP, BREAK); + } + + /** + * Writes the start of a finite-length map. + * + *

After calling this method, one is expected to write any number of map entries, as separate + * key and value. Keys and values can both be of any type. No length checks are performed. + * + * @param length the number of map entries to write, should >= 0. + * @throws IllegalArgumentException in case the given length was negative; + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeMapStart(int length) throws IOException { + if (length < 0) { + throw new IllegalArgumentException("Invalid length of map!"); + } + writeType(TYPE_MAP, length); + } + + /** + * Writes a null value in canonical CBOR format. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeNull() throws IOException { + writeSimpleType(TYPE_FLOAT_SIMPLE, NULL); + } + + /** + * Writes a simple value, i.e., an "atom" or "constant" value in canonical CBOR format. + * + * @param simpleValue the (unsigned byte) value to write, values from 32 to 255 + * are supported (though not enforced). + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeSimpleValue(byte simpleValue) throws IOException { + // convert to unsigned value... + int value = (simpleValue & 0xff); + writeType(TYPE_FLOAT_SIMPLE, value); + } + + /** + * Writes a signed or unsigned small (<= 23) integer value in CBOR format. + * + * @param value the value to write, values in the range [-24..23] are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeSmallInt(int value) throws IOException { + // extends the sign over all bits... + int sign = value >> 31; + // in case value is negative, this bit should be set... + int mt = (int) (sign & NEG_INT_MASK); + // complement negative value... + value = Math.min(0x17, (sign ^ value)); + + m_os.write((int) (mt | value)); + } + + /** + * Writes a semantic tag in canonical CBOR format. + * + * @param tag the tag to write, should >= 0. + * @throws IllegalArgumentException in case the given tag was negative; + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeTag(long tag) throws IOException { + if (tag < 0) { + throw new IllegalArgumentException("Invalid tag specification, cannot be negative!"); + } + writeType(TYPE_TAG, tag); + } + + /** + * Writes an UTF-8 string in canonical CBOR-format. + * + *

Note that the given string value is always encoded as UTF-8, regardless of the platform's + * default charset, since CBOR text strings are defined to be UTF-8. + * + * @param value the UTF-8 string to write, can be null in which case an UTF-8 string + * of length 0 is written. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeTextString(String value) throws IOException { + writeString(TYPE_TEXT_STRING, value == null ? null : value.getBytes("UTF-8")); + } + + /** + * Writes the start of an indefinite-length UTF-8 string. + * + *

After calling this method, one is expected to write any number of UTF-8 string parts. No + * length checks are performed.
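An illustrative sketch of the chunked protocol described here, under the same accessibility assumptions and standard-CBOR head values as the earlier map sketch: each chunk is written as an ordinary definite-length text string, and a single break terminates the sequence.

import io.ipfs.api.cbor.CborEncoder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class ChunkedTextSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CborEncoder enc = new CborEncoder(out);
    enc.writeTextStringStart();      // head 0x7F: text string, indefinite length
    enc.writeTextString("Hello, ");  // first chunk, an ordinary definite-length text string
    enc.writeTextString("CBOR!");    // second chunk
    enc.writeBreak();                // the break (0xFF) ends the chunk sequence
    System.out.println(out.size() + " bytes written");
  }
}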
+ * After all string parts are written, one should write a single break value to end the string, + * see {@link #writeBreak()}. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeTextStringStart() throws IOException { + writeSimpleType(TYPE_TEXT_STRING, BREAK); + } + + /** + * Writes an "undefined" value in canonical CBOR format. + * + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + public void writeUndefined() throws IOException { + writeSimpleType(TYPE_FLOAT_SIMPLE, UNDEFINED); + } + + /** + * Encodes and writes the major type and value as a simple type. + * + * @param majorType the major type of the value to write, denotes what semantics the written value + * has; + * @param value the value to write, values from [0..31] are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeSimpleType(int majorType, int value) throws IOException { + m_os.write((majorType << 5) | (value & 0x1f)); + } + + /** + * Writes a byte string in canonical CBOR-format. + * + * @param majorType the major type of the string, should be either 0x40 or 0x60; + * @param bytes the byte string to write, can be null in which case a byte-string of + * length 0 is written. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeString(int majorType, byte[] bytes) throws IOException { + int len = (bytes == null) ? 0 : bytes.length; + writeType(majorType, len); + for (int i = 0; i < len; i++) { + m_os.write(bytes[i]); + } + } + + /** + * Encodes and writes the major type indicator with a given payload (length). + * + * @param majorType the major type of the value to write, denotes what semantics the written value + * has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeType(int majorType, long value) throws IOException { + writeUInt((majorType << 5), value); + } + + /** + * Encodes and writes an unsigned integer value, that is, tries to encode it in a little bytes as + * possible. + * + * @param mt the major type of the value to write, denotes what semantics the written value has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeUInt(int mt, long value) throws IOException { + if (value < 0x18L) { + m_os.write((int) (mt | value)); + } else if (value < 0x100L) { + writeUInt8(mt, (int) value); + } else if (value < 0x10000L) { + writeUInt16(mt, (int) value); + } else if (value < 0x100000000L) { + writeUInt32(mt, (int) value); + } else { + writeUInt64(mt, value); + } + } + + /** + * Encodes and writes an unsigned 16-bit integer value + * + * @param mt the major type of the value to write, denotes what semantics the written value has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. 
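A worked sketch (illustrative only) of what the dispatch in writeUInt produces, assuming the ONE_BYTE/TWO_BYTES/FOUR_BYTES/EIGHT_BYTES constants hold the standard CBOR additional-info values 24-27; writeTag is used here only because it funnels through writeType and writeUInt. The byte sequences in the comments are the standard CBOR encodings, not output captured from this build.

import io.ipfs.api.cbor.CborEncoder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class HeadSizeSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CborEncoder enc = new CborEncoder(out);

    // writeTag goes through writeType -> writeUInt, so the head grows with the value:
    enc.writeTag(10);      // fits in the initial byte:  0xCA
    enc.writeTag(500);     // two-byte argument:         0xD9 0x01 0xF4
    enc.writeTag(100000);  // four-byte argument:        0xDA 0x00 0x01 0x86 0xA0

    // writeInt8 always emits the fixed 8-bit form; a negative n is written as
    // major type 1 with payload -1 - n, so -100 becomes 0x38 0x63.
    enc.writeInt8(-100);

    System.out.println(out.size() + " bytes written");
  }
}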
+ */ + protected void writeUInt16(int mt, int value) throws IOException { + m_os.write(mt | TWO_BYTES); + m_os.write(value >> 8); + m_os.write(value & 0xFF); + } + + /** + * Encodes and writes an unsigned 32-bit integer value + * + * @param mt the major type of the value to write, denotes what semantics the written value has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeUInt32(int mt, int value) throws IOException { + m_os.write(mt | FOUR_BYTES); + m_os.write(value >> 24); + m_os.write(value >> 16); + m_os.write(value >> 8); + m_os.write(value & 0xFF); + } + + /** + * Encodes and writes an unsigned 64-bit integer value + * + * @param mt the major type of the value to write, denotes what semantics the written value has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. + */ + protected void writeUInt64(int mt, long value) throws IOException { + m_os.write(mt | EIGHT_BYTES); + m_os.write((int) (value >> 56)); + m_os.write((int) (value >> 48)); + m_os.write((int) (value >> 40)); + m_os.write((int) (value >> 32)); + m_os.write((int) (value >> 24)); + m_os.write((int) (value >> 16)); + m_os.write((int) (value >> 8)); + m_os.write((int) (value & 0xFF)); + } + + /** + * Encodes and writes an unsigned 8-bit integer value + * + * @param mt the major type of the value to write, denotes what semantics the written value has; + * @param value the value to write, values from {@link Long#MIN_VALUE} to {@link Long#MAX_VALUE} + * are supported. + * @throws IOException in case of I/O problems writing the CBOR-encoded value to the underlying + * output stream. 
+ */ + protected void writeUInt8(int mt, int value) throws IOException { + m_os.write(mt | ONE_BYTE); + m_os.write(value & 0xFF); + } +} diff --git a/src/main/java/io/ipfs/api/cbor/CborObject.java b/src/main/java/io/ipfs/api/cbor/CborObject.java index e2a34c9a..6bfea97c 100644 --- a/src/main/java/io/ipfs/api/cbor/CborObject.java +++ b/src/main/java/io/ipfs/api/cbor/CborObject.java @@ -1,427 +1,420 @@ package io.ipfs.api.cbor; -import io.ipfs.cid.*; -import io.ipfs.multiaddr.*; -import io.ipfs.multihash.*; - -import java.io.*; -import java.util.*; -import java.util.stream.*; +import io.ipfs.cid.Cid; +import io.ipfs.multihash.Multihash; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.stream.Collectors; public interface CborObject { - void serialize(CborEncoder encoder); - - default byte[] toByteArray() { - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - CborEncoder encoder = new CborEncoder(bout); - serialize(encoder); - return bout.toByteArray(); - } - - int LINK_TAG = 42; - - static CborObject fromByteArray(byte[] cbor) { - return deserialize(new CborDecoder(new ByteArrayInputStream(cbor))); - } - - static CborObject deserialize(CborDecoder decoder) { - try { - CborType type = decoder.peekType(); - switch (type.getMajorType()) { - case CborConstants.TYPE_TEXT_STRING: - return new CborString(decoder.readTextString()); - case CborConstants.TYPE_BYTE_STRING: - return new CborByteArray(decoder.readByteString()); - case CborConstants.TYPE_UNSIGNED_INTEGER: - return new CborLong(decoder.readInt()); - case CborConstants.TYPE_NEGATIVE_INTEGER: - return new CborLong(decoder.readInt()); - case CborConstants.TYPE_FLOAT_SIMPLE: - if (type.getAdditionalInfo() == CborConstants.NULL) { - decoder.readNull(); - return new CborNull(); - } - if (type.getAdditionalInfo() == CborConstants.TRUE) { - decoder.readBoolean(); - return new CborBoolean(true); - } - if (type.getAdditionalInfo() == CborConstants.FALSE) { - decoder.readBoolean(); - return new CborBoolean(false); - } - throw new IllegalStateException("Unimplemented simple type! 
" + type.getAdditionalInfo()); - case CborConstants.TYPE_MAP: { - long nValues = decoder.readMapLength(); - SortedMap result = new TreeMap<>(); - for (long i=0; i < nValues; i++) { - CborObject key = deserialize(decoder); - CborObject value = deserialize(decoder); - result.put(key, value); - } - return new CborMap(result); - } - case CborConstants.TYPE_ARRAY: - long nItems = decoder.readArrayLength(); - List res = new ArrayList<>((int) nItems); - for (long i=0; i < nItems; i++) - res.add(deserialize(decoder)); - return new CborList(res); - case CborConstants.TYPE_TAG: - long tag = decoder.readTag(); - if (tag == LINK_TAG) { - CborObject value = deserialize(decoder); - if (value instanceof CborString) - return new CborMerkleLink(Cid.decode(((CborString) value).value)); - if (value instanceof CborByteArray) { - byte[] bytes = ((CborByteArray) value).value; - if (bytes[0] == 0) // multibase for binary - return new CborMerkleLink(Cid.cast(Arrays.copyOfRange(bytes, 1, bytes.length))); - throw new IllegalStateException("Unknown Multibase decoding Merkle link: " + bytes[0]); - } - throw new IllegalStateException("Invalid type for merkle link: " + value); - } - throw new IllegalStateException("Unknown TAG in CBOR: " + type.getAdditionalInfo()); - default: - throw new IllegalStateException("Unimplemented cbor type: " + type); + void serialize(CborEncoder encoder); + + default byte[] toByteArray() { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + CborEncoder encoder = new CborEncoder(bout); + serialize(encoder); + return bout.toByteArray(); + } + + int LINK_TAG = 42; + + static CborObject fromByteArray(byte[] cbor) { + return deserialize(new CborDecoder(new ByteArrayInputStream(cbor))); + } + + static CborObject deserialize(CborDecoder decoder) { + try { + CborType type = decoder.peekType(); + switch (type.getMajorType()) { + case CborConstants.TYPE_TEXT_STRING: + return new CborString(decoder.readTextString()); + case CborConstants.TYPE_BYTE_STRING: + return new CborByteArray(decoder.readByteString()); + case CborConstants.TYPE_UNSIGNED_INTEGER: + return new CborLong(decoder.readInt()); + case CborConstants.TYPE_NEGATIVE_INTEGER: + return new CborLong(decoder.readInt()); + case CborConstants.TYPE_FLOAT_SIMPLE: + if (type.getAdditionalInfo() == CborConstants.NULL) { + decoder.readNull(); + return new CborNull(); + } + if (type.getAdditionalInfo() == CborConstants.TRUE) { + decoder.readBoolean(); + return new CborBoolean(true); + } + if (type.getAdditionalInfo() == CborConstants.FALSE) { + decoder.readBoolean(); + return new CborBoolean(false); + } + throw new IllegalStateException("Unimplemented simple type! 
" + type.getAdditionalInfo()); + case CborConstants.TYPE_MAP: + { + long nValues = decoder.readMapLength(); + SortedMap result = new TreeMap<>(); + for (long i = 0; i < nValues; i++) { + CborObject key = deserialize(decoder); + CborObject value = deserialize(decoder); + result.put(key, value); } - } catch (IOException e) { - throw new RuntimeException(e); - } + return new CborMap(result); + } + case CborConstants.TYPE_ARRAY: + long nItems = decoder.readArrayLength(); + List res = new ArrayList<>((int) nItems); + for (long i = 0; i < nItems; i++) res.add(deserialize(decoder)); + return new CborList(res); + case CborConstants.TYPE_TAG: + long tag = decoder.readTag(); + if (tag == LINK_TAG) { + CborObject value = deserialize(decoder); + if (value instanceof CborString) + return new CborMerkleLink(Cid.decode(((CborString) value).value)); + if (value instanceof CborByteArray) { + byte[] bytes = ((CborByteArray) value).value; + if (bytes[0] == 0) // multibase for binary + return new CborMerkleLink(Cid.cast(Arrays.copyOfRange(bytes, 1, bytes.length))); + throw new IllegalStateException( + "Unknown Multibase decoding Merkle link: " + bytes[0]); + } + throw new IllegalStateException("Invalid type for merkle link: " + value); + } + throw new IllegalStateException("Unknown TAG in CBOR: " + type.getAdditionalInfo()); + default: + throw new IllegalStateException("Unimplemented cbor type: " + type); + } + } catch (IOException e) { + throw new RuntimeException(e); } + } - final class CborMap implements CborObject { - public final SortedMap values; + final class CborMap implements CborObject { + public final SortedMap values; - public CborMap(SortedMap values) { - this.values = values; - } - - public static CborMap build(Map values) { - SortedMap transformed = values.entrySet() - .stream() - .collect(Collectors.toMap( - e -> new CborString(e.getKey()), - e -> e.getValue(), - (a, b) -> a, TreeMap::new)); - return new CborMap(transformed); - } + public CborMap(SortedMap values) { + this.values = values; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeMapStart(values.size()); - for (Map.Entry entry : values.entrySet()) { - entry.getKey().serialize(encoder); - entry.getValue().serialize(encoder); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } + public static CborMap build(Map values) { + SortedMap transformed = + values.entrySet().stream() + .collect( + Collectors.toMap( + e -> new CborString(e.getKey()), + e -> e.getValue(), + (a, b) -> a, + TreeMap::new)); + return new CborMap(transformed); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeMapStart(values.size()); + for (Map.Entry entry : values.entrySet()) { + entry.getKey().serialize(encoder); + entry.getValue().serialize(encoder); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } - CborMap cborMap = (CborMap) o; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return values != null ? values.equals(cborMap.values) : cborMap.values == null; + CborMap cborMap = (CborMap) o; - } - - @Override - public int hashCode() { - return values != null ? values.hashCode() : 0; - } + return values != null ? 
values.equals(cborMap.values) : cborMap.values == null; } - final class CborMerkleLink implements CborObject { - public final Multihash target; + @Override + public int hashCode() { + return values != null ? values.hashCode() : 0; + } + } - public CborMerkleLink(Multihash target) { - this.target = target; - } + final class CborMerkleLink implements CborObject { + public final Multihash target; - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeTag(LINK_TAG); - byte[] cid = target.toBytes(); - byte[] withMultibaseHeader = new byte[cid.length + 1]; - System.arraycopy(cid, 0, withMultibaseHeader, 1, cid.length); - encoder.writeByteString(withMultibaseHeader); - } catch (IOException e) { - throw new RuntimeException(e); - } - } + public CborMerkleLink(Multihash target) { + this.target = target; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeTag(LINK_TAG); + byte[] cid = target.toBytes(); + byte[] withMultibaseHeader = new byte[cid.length + 1]; + System.arraycopy(cid, 0, withMultibaseHeader, 1, cid.length); + encoder.writeByteString(withMultibaseHeader); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - CborMerkleLink that = (CborMerkleLink) o; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return target != null ? target.equals(that.target) : that.target == null; + CborMerkleLink that = (CborMerkleLink) o; - } + return target != null ? target.equals(that.target) : that.target == null; + } - @Override - public int hashCode() { - return target != null ? target.hashCode() : 0; - } + @Override + public int hashCode() { + return target != null ? target.hashCode() : 0; } + } - final class CborList implements CborObject { - public final List value; + final class CborList implements CborObject { + public final List value; - public CborList(List value) { - this.value = value; - } + public CborList(List value) { + this.value = value; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeArrayStart(value.size()); - for (CborObject object : value) { - object.serialize(encoder); - } - } catch (IOException e) { - throw new RuntimeException(e); - } + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeArrayStart(value.size()); + for (CborObject object : value) { + object.serialize(encoder); } + } catch (IOException e) { + throw new RuntimeException(e); + } + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - CborList cborList = (CborList) o; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return value != null ? value.equals(cborList.value) : cborList.value == null; - } + CborList cborList = (CborList) o; - @Override - public int hashCode() { - return value != null ? value.hashCode() : 0; - } + return value != null ? 
value.equals(cborList.value) : cborList.value == null; } - final class CborBoolean implements CborObject { - public final boolean value; - - public CborBoolean(boolean value) { - this.value = value; - } - - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeBoolean(value); - } catch (IOException e) { - throw new RuntimeException(e); - } - } + @Override + public int hashCode() { + return value != null ? value.hashCode() : 0; + } + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + final class CborBoolean implements CborObject { + public final boolean value; - CborBoolean that = (CborBoolean) o; + public CborBoolean(boolean value) { + this.value = value; + } - return value == that.value; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeBoolean(value); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - @Override - public int hashCode() { - return (value ? 1 : 0); - } + CborBoolean that = (CborBoolean) o; - @Override - public String toString() { - return "CborBoolean{" + - value + - '}'; - } + return value == that.value; } - final class CborByteArray implements CborObject, Comparable { - public final byte[] value; + @Override + public int hashCode() { + return (value ? 1 : 0); + } - public CborByteArray(byte[] value) { - this.value = value; - } + @Override + public String toString() { + return "CborBoolean{" + value + '}'; + } + } - @Override - public int compareTo(CborByteArray other) { - return compare(value, other.value); - } + final class CborByteArray implements CborObject, Comparable { + public final byte[] value; - public static int compare(byte[] a, byte[] b) - { - for (int i=0; i < Math.min(a.length, b.length); i++) - if (a[i] != b[i]) - return a[i] & 0xff - b[i] & 0xff; - return 0; - } + public CborByteArray(byte[] value) { + this.value = value; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeByteString(value); - } catch (IOException e) { - throw new RuntimeException(e); - } - } + @Override + public int compareTo(CborByteArray other) { + return compare(value, other.value); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + public static int compare(byte[] a, byte[] b) { + for (int i = 0; i < Math.min(a.length, b.length); i++) + if (a[i] != b[i]) return a[i] & 0xff - b[i] & 0xff; + return 0; + } - CborByteArray that = (CborByteArray) o; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeByteString(value); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - return Arrays.equals(value, that.value); + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - } + CborByteArray that = (CborByteArray) o; - @Override - public int hashCode() { - return Arrays.hashCode(value); - } + return Arrays.equals(value, that.value); } - final class CborString implements CborObject, Comparable { + @Override + public int hashCode() { + return Arrays.hashCode(value); + } + } - public final String value; + final class CborString implements CborObject, Comparable { - public CborString(String value) { - this.value = value; - } + public 
final String value; - @Override - public int compareTo(CborString cborString) { - int lenDiff = value.length() - cborString.value.length(); - if (lenDiff != 0) - return lenDiff; - return value.compareTo(cborString.value); - } + public CborString(String value) { + this.value = value; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeTextString(value); - } catch (IOException e) { - throw new RuntimeException(e); - } - } + @Override + public int compareTo(CborString cborString) { + int lenDiff = value.length() - cborString.value.length(); + if (lenDiff != 0) return lenDiff; + return value.compareTo(cborString.value); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeTextString(value); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - CborString that = (CborString) o; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return value.equals(that.value); + CborString that = (CborString) o; - } + return value.equals(that.value); + } - @Override - public int hashCode() { - return value.hashCode(); - } + @Override + public int hashCode() { + return value.hashCode(); + } - @Override - public String toString() { - return "CborString{\"" + - value + - "\"}"; - } + @Override + public String toString() { + return "CborString{\"" + value + "\"}"; } + } - final class CborLong implements CborObject, Comparable { - public final long value; + final class CborLong implements CborObject, Comparable { + public final long value; - public CborLong(long value) { - this.value = value; - } - - @Override - public int compareTo(CborLong other) { - return Long.compare(value, other.value); - } + public CborLong(long value) { + this.value = value; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeInt(value); - } catch (IOException e) { - throw new RuntimeException(e); - } - } + @Override + public int compareTo(CborLong other) { + return Long.compare(value, other.value); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeInt(value); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - CborLong cborLong = (CborLong) o; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return value == cborLong.value; + CborLong cborLong = (CborLong) o; - } + return value == cborLong.value; + } - @Override - public int hashCode() { - return (int) (value ^ (value >>> 32)); - } + @Override + public int hashCode() { + return (int) (value ^ (value >>> 32)); + } - @Override - public String toString() { - return "CborLong{" + - value + - '}'; - } + @Override + public String toString() { + return "CborLong{" + value + '}'; } + } - final class CborNull implements CborObject, Comparable { - public CborNull() {} + final class CborNull implements CborObject, Comparable { + public CborNull() {} - @Override - public int compareTo(CborNull cborNull) { - return 0; - } + @Override + public int compareTo(CborNull cborNull) { + return 0; + } - @Override - public void serialize(CborEncoder encoder) { - try { - encoder.writeNull(); - 
} catch (IOException e) { - throw new RuntimeException(e); - } - } + @Override + public void serialize(CborEncoder encoder) { + try { + encoder.writeNull(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - return true; - } + return true; + } - @Override - public int hashCode() { - return 0; - } + @Override + public int hashCode() { + return 0; + } - @Override - public String toString() { - return "CborNull{}"; - } + @Override + public String toString() { + return "CborNull{}"; } + } } diff --git a/src/main/java/io/ipfs/api/cbor/CborType.java b/src/main/java/io/ipfs/api/cbor/CborType.java index 68b325cc..9ff94415 100644 --- a/src/main/java/io/ipfs/api/cbor/CborType.java +++ b/src/main/java/io/ipfs/api/cbor/CborType.java @@ -8,136 +8,149 @@ * Licensed under Apache License v2.0. */ -import static io.ipfs.api.cbor.CborConstants.*; +import static io.ipfs.api.cbor.CborConstants.TYPE_ARRAY; +import static io.ipfs.api.cbor.CborConstants.TYPE_BYTE_STRING; +import static io.ipfs.api.cbor.CborConstants.TYPE_FLOAT_SIMPLE; +import static io.ipfs.api.cbor.CborConstants.TYPE_MAP; +import static io.ipfs.api.cbor.CborConstants.TYPE_NEGATIVE_INTEGER; +import static io.ipfs.api.cbor.CborConstants.TYPE_TAG; +import static io.ipfs.api.cbor.CborConstants.TYPE_TEXT_STRING; +import static io.ipfs.api.cbor.CborConstants.TYPE_UNSIGNED_INTEGER; /** * Represents the various major types in CBOR, along with their . - *

- * The major type is encoded in the upper three bits of each initial byte. The lower 5 bytes represent any additional information. - *

+ * + *

The major type is encoded in the upper three bits of each initial byte. The lower 5 bytes + * represent any additional information. */ public class CborType { - private final int m_major; - private final int m_additional; - - private CborType(int major, int additional) { - m_major = major; - m_additional = additional; - } + private final int m_major; + private final int m_additional; - /** - * Returns a descriptive string for the given major type. - * - * @param mt the major type to return as string, values from [0..7] are supported. - * @return the name of the given major type, as String, never null. - * @throws IllegalArgumentException in case the given major type is not supported. - */ - public static String getName(int mt) { - switch (mt) { - case TYPE_ARRAY: - return "array"; - case TYPE_BYTE_STRING: - return "byte string"; - case TYPE_FLOAT_SIMPLE: - return "float/simple value"; - case TYPE_MAP: - return "map"; - case TYPE_NEGATIVE_INTEGER: - return "negative integer"; - case TYPE_TAG: - return "tag"; - case TYPE_TEXT_STRING: - return "text string"; - case TYPE_UNSIGNED_INTEGER: - return "unsigned integer"; - default: - throw new IllegalArgumentException("Invalid major type: " + mt); - } - } + private CborType(int major, int additional) { + m_major = major; + m_additional = additional; + } - /** - * Decodes a given byte value to a {@link CborType} value. - * - * @param i the input byte (8-bit) to decode into a {@link CborType} instance. - * @return a {@link CborType} instance, never null. - */ - public static CborType valueOf(int i) { - return new CborType((i & 0xff) >>> 5, i & 0x1f); + /** + * Returns a descriptive string for the given major type. + * + * @param mt the major type to return as string, values from [0..7] are supported. + * @return the name of the given major type, as String, never null. + * @throws IllegalArgumentException in case the given major type is not supported. + */ + public static String getName(int mt) { + switch (mt) { + case TYPE_ARRAY: + return "array"; + case TYPE_BYTE_STRING: + return "byte string"; + case TYPE_FLOAT_SIMPLE: + return "float/simple value"; + case TYPE_MAP: + return "map"; + case TYPE_NEGATIVE_INTEGER: + return "negative integer"; + case TYPE_TAG: + return "tag"; + case TYPE_TEXT_STRING: + return "text string"; + case TYPE_UNSIGNED_INTEGER: + return "unsigned integer"; + default: + throw new IllegalArgumentException("Invalid major type: " + mt); } + } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } + /** + * Decodes a given byte value to a {@link CborType} value. + * + * @param i the input byte (8-bit) to decode into a {@link CborType} instance. + * @return a {@link CborType} instance, never null. + */ + public static CborType valueOf(int i) { + return new CborType((i & 0xff) >>> 5, i & 0x1f); + } - CborType other = (CborType) obj; - return (m_major == other.m_major) && (m_additional == other.m_additional); + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; } - - /** - * @return the additional information of this type, as integer value from [0..31]. - */ - public int getAdditionalInfo() { - return m_additional; + if (obj == null || getClass() != obj.getClass()) { + return false; } - /** - * @return the major type, as integer value from [0..7]. 
- */ - public int getMajorType() { - return m_major; - } + CborType other = (CborType) obj; + return (m_major == other.m_major) && (m_additional == other.m_additional); + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + m_additional; - result = prime * result + m_major; - return result; - } + /** + * @return the additional information of this type, as integer value from [0..31]. + */ + public int getAdditionalInfo() { + return m_additional; + } - /** - * @return true if this type allows for an infinite-length payload, - * false if only definite-length payloads are allowed. - */ - public boolean isBreakAllowed() { - return m_major == TYPE_ARRAY || m_major == TYPE_BYTE_STRING || m_major == TYPE_MAP - || m_major == TYPE_TEXT_STRING; - } + /** + * @return the major type, as integer value from [0..7]. + */ + public int getMajorType() { + return m_major; + } - /** - * Determines whether the major type of a given {@link CborType} equals the major type of this {@link CborType}. - * - * @param other the {@link CborType} to compare against, cannot be null. - * @return true if the given {@link CborType} is of the same major type as this {@link CborType}, false otherwise. - * @throws IllegalArgumentException in case the given argument was null. - */ - public boolean isEqualType(CborType other) { - if (other == null) { - throw new IllegalArgumentException("Parameter cannot be null!"); - } - return m_major == other.m_major; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + m_additional; + result = prime * result + m_major; + return result; + } - /** - * Determines whether the major type of a given byte value (representing an encoded {@link CborType}) equals the major type of this {@link CborType}. - * - * @param encoded the encoded CBOR type to compare. - * @return true if the given byte value represents the same major type as this {@link CborType}, false otherwise. - */ - public boolean isEqualType(int encoded) { - return m_major == ((encoded & 0xff) >>> 5); - } + /** + * @return true if this type allows for an infinite-length payload, false + * if only definite-length payloads are allowed. + */ + public boolean isBreakAllowed() { + return m_major == TYPE_ARRAY + || m_major == TYPE_BYTE_STRING + || m_major == TYPE_MAP + || m_major == TYPE_TEXT_STRING; + } - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getName(m_major)).append('(').append(m_additional).append(')'); - return sb.toString(); + /** + * Determines whether the major type of a given {@link CborType} equals the major type of this + * {@link CborType}. + * + * @param other the {@link CborType} to compare against, cannot be null. + * @return true if the given {@link CborType} is of the same major type as this + * {@link CborType}, false otherwise. + * @throws IllegalArgumentException in case the given argument was null. + */ + public boolean isEqualType(CborType other) { + if (other == null) { + throw new IllegalArgumentException("Parameter cannot be null!"); } -} \ No newline at end of file + return m_major == other.m_major; + } + + /** + * Determines whether the major type of a given byte value (representing an encoded {@link + * CborType}) equals the major type of this {@link CborType}. + * + * @param encoded the encoded CBOR type to compare. + * @return true if the given byte value represents the same major type as this {@link + * CborType}, false otherwise. 
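A round-trip sketch (illustrative only) tying this class to the CborObject types above: build a CborMap, serialize it, inspect the initial byte with CborType.valueOf, and parse it back with CborObject.fromByteArray. The field names are made up for illustration, and the CID literal is borrowed from the dagCbor test further down purely as sample data.

import io.ipfs.api.cbor.CborObject;
import io.ipfs.api.cbor.CborType;
import io.ipfs.cid.Cid;
import java.util.LinkedHashMap;
import java.util.Map;

@SuppressWarnings({"rawtypes", "unchecked"})
class CborRoundTripSketch {
  public static void main(String[] args) {
    Map fields = new LinkedHashMap();
    fields.put("name", new CborObject.CborString("java-ipfs-http-client"));
    fields.put("count", new CborObject.CborLong(42));
    // A tag-42 merkle link; the CID string is sample data only.
    fields.put(
        "link",
        new CborObject.CborMerkleLink(
            Cid.decode("zdpuApemz4XMURSCkBr9W5y974MXkSbeDfLeZmiQTPpvkatFF")));

    byte[] cbor = CborObject.CborMap.build(fields).toByteArray();

    // The initial byte carries the major type in its upper three bits.
    CborType head = CborType.valueOf(cbor[0] & 0xff);
    System.out.println(CborType.getName(head.getMajorType())); // expected: "map"

    // deserialize() mirrors serialize(), including the tag-42 merkle-link handling.
    CborObject parsed = CborObject.fromByteArray(cbor);
    System.out.println(parsed instanceof CborObject.CborMap); // expected: true
  }
}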
+ */ + public boolean isEqualType(int encoded) { + return m_major == ((encoded & 0xff) >>> 5); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getName(m_major)).append('(').append(m_additional).append(')'); + return sb.toString(); + } +} diff --git a/src/main/java/io/ipfs/api/cbor/Cborable.java b/src/main/java/io/ipfs/api/cbor/Cborable.java index 2025e727..07af23d3 100644 --- a/src/main/java/io/ipfs/api/cbor/Cborable.java +++ b/src/main/java/io/ipfs/api/cbor/Cborable.java @@ -2,9 +2,9 @@ public interface Cborable { - CborObject toCbor(); + CborObject toCbor(); - default byte[] serialize() { - return toCbor().toByteArray(); - } + default byte[] serialize() { + return toCbor().toByteArray(); + } } diff --git a/src/main/java/io/ipfs/api/demo/UsageMFSFilesAPI.java b/src/main/java/io/ipfs/api/demo/UsageMFSFilesAPI.java index d45e6b4a..1ef6562b 100644 --- a/src/main/java/io/ipfs/api/demo/UsageMFSFilesAPI.java +++ b/src/main/java/io/ipfs/api/demo/UsageMFSFilesAPI.java @@ -4,7 +4,6 @@ import io.ipfs.api.NamedStreamable; import io.ipfs.api.WriteFilesArgs; import io.ipfs.multiaddr.MultiAddress; - import java.io.IOException; import java.util.List; import java.util.Map; @@ -22,79 +21,81 @@ The Mutable File System (MFS) is a virtual file system on top of IPFS that expos */ public class UsageMFSFilesAPI { - public UsageMFSFilesAPI(IPFS ipfsClient) { - try { - run(ipfsClient); - } catch (IOException ioe) { - ioe.printStackTrace(); - } + public UsageMFSFilesAPI(IPFS ipfsClient) { + try { + run(ipfsClient); + } catch (IOException ioe) { + ioe.printStackTrace(); + } + } + + private void run(IPFS ipfs) throws IOException { + + // remove 'my' directory to clean up from a previous run + ipfs.files.rm("/my", true, true); + + // To create a new directory nested under others that don't yet exist, you need to explicitly + // set the value of parents to true + ipfs.files.mkdir("/my/directory/example", true); + + // Check directory status + String directoryPath = "/my/directory/example"; + Map exampleDirectory = ipfs.files.stat(directoryPath); + // {Hash=QmV1a2QoUnB9fPzjZd1GunGR53isuhcWWNCS5Bg3mJyv8N, Size=0, CumulativeSize=57, Blocks=1, + // Type=directory} + + // Add a file + String contents = "hello world!"; + String filename = "hello.txt"; + String filePath = directoryPath + "/" + filename; + NamedStreamable ns = new NamedStreamable.ByteArrayWrapper(filename, contents.getBytes()); + ipfs.files.write(filePath, ns, true, true); + + // Read contents of a file + String fileContents = new String(ipfs.files.read(filePath)); + System.out.println(fileContents); + + // Write a file using builder pattern + String ipfsFilename = "ipfs.txt"; + String fullIpfsPath = directoryPath + "/" + ipfsFilename; + NamedStreamable ipfsFile = + new NamedStreamable.ByteArrayWrapper(ipfsFilename, "ipfs says hello".getBytes()); + WriteFilesArgs args = WriteFilesArgs.Builder.newInstance().setCreate().setParents().build(); + ipfs.files.write(fullIpfsPath, ipfsFile, args); + + // List directory contents + List ls = ipfs.files.ls(directoryPath); + for (Map entry : ls) { + System.out.println(entry.get("Name")); } - private void run(IPFS ipfs) throws IOException { - - // remove 'my' directory to clean up from a previous run - ipfs.files.rm("/my", true, true); - - // To create a new directory nested under others that don't yet exist, you need to explicitly set the value of parents to true - ipfs.files.mkdir("/my/directory/example", true); - - // Check directory status - String directoryPath = 
"/my/directory/example"; - Map exampleDirectory = ipfs.files.stat(directoryPath); - //{Hash=QmV1a2QoUnB9fPzjZd1GunGR53isuhcWWNCS5Bg3mJyv8N, Size=0, CumulativeSize=57, Blocks=1, Type=directory} - - // Add a file - String contents = "hello world!"; - String filename = "hello.txt"; - String filePath = directoryPath + "/" + filename; - NamedStreamable ns = new NamedStreamable.ByteArrayWrapper(filename, contents.getBytes()); - ipfs.files.write(filePath, ns, true, true); - - // Read contents of a file - String fileContents = new String(ipfs.files.read(filePath)); - System.out.println(fileContents); - - // Write a file using builder pattern - String ipfsFilename = "ipfs.txt"; - String fullIpfsPath = directoryPath + "/" + ipfsFilename; - NamedStreamable ipfsFile = new NamedStreamable.ByteArrayWrapper(ipfsFilename, "ipfs says hello".getBytes()); - WriteFilesArgs args = WriteFilesArgs.Builder.newInstance() - .setCreate() - .setParents() - .build(); - ipfs.files.write(fullIpfsPath, ipfsFile, args); - - // List directory contents - List ls = ipfs.files.ls(directoryPath); - for(Map entry : ls) { - System.out.println(entry.get("Name")); - } - - // Copy file to another directory - String copyDirectoryPath = "/my/copy/"; - ipfs.files.cp(filePath, copyDirectoryPath + filename, true); - ls = ipfs.files.ls(copyDirectoryPath); - for(Map entry : ls) { - System.out.println(entry.get("Name")); - } - - // Move file to another directory - String duplicateDirectoryPath = "/my/duplicate/"; - ipfs.files.mkdir(duplicateDirectoryPath, false); - ipfs.files.mv(copyDirectoryPath + filename, duplicateDirectoryPath + filename); - ls = ipfs.files.ls(duplicateDirectoryPath); - for(Map entry : ls) { - System.out.println(entry.get("Name")); - } - - // Remove a directory - ipfs.files.rm(copyDirectoryPath, true, true); - ls = ipfs.files.ls("/my"); - for(Map entry : ls) { - System.out.println(entry.get("Name")); - } + + // Copy file to another directory + String copyDirectoryPath = "/my/copy/"; + ipfs.files.cp(filePath, copyDirectoryPath + filename, true); + ls = ipfs.files.ls(copyDirectoryPath); + for (Map entry : ls) { + System.out.println(entry.get("Name")); } - public static void main(String[] args) { - IPFS ipfsClient = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); - new UsageMFSFilesAPI(ipfsClient); + + // Move file to another directory + String duplicateDirectoryPath = "/my/duplicate/"; + ipfs.files.mkdir(duplicateDirectoryPath, false); + ipfs.files.mv(copyDirectoryPath + filename, duplicateDirectoryPath + filename); + ls = ipfs.files.ls(duplicateDirectoryPath); + for (Map entry : ls) { + System.out.println(entry.get("Name")); } + + // Remove a directory + ipfs.files.rm(copyDirectoryPath, true, true); + ls = ipfs.files.ls("/my"); + for (Map entry : ls) { + System.out.println(entry.get("Name")); + } + } + + public static void main(String[] args) { + IPFS ipfsClient = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); + new UsageMFSFilesAPI(ipfsClient); + } } diff --git a/src/main/java/io/ipfs/api/demo/UsageRemotePinningAPI.java b/src/main/java/io/ipfs/api/demo/UsageRemotePinningAPI.java index 2e15f72d..c3265aa6 100644 --- a/src/main/java/io/ipfs/api/demo/UsageRemotePinningAPI.java +++ b/src/main/java/io/ipfs/api/demo/UsageRemotePinningAPI.java @@ -5,7 +5,6 @@ import io.ipfs.api.NamedStreamable; import io.ipfs.multiaddr.MultiAddress; import io.ipfs.multihash.Multihash; - import java.io.IOException; import java.util.List; import java.util.Map; @@ -28,48 +27,50 @@ */ public class UsageRemotePinningAPI { - public 
UsageRemotePinningAPI(IPFS ipfsClient) { - try { - run(ipfsClient); - } catch (IOException ioe) { - ioe.printStackTrace(); - } + public UsageRemotePinningAPI(IPFS ipfsClient) { + try { + run(ipfsClient); + } catch (IOException ioe) { + ioe.printStackTrace(); } - private void run(IPFS ipfs) throws IOException { - - // Add file to the local node - MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("file.txt", "test data".getBytes())).get(0); - // Retrieve CID - Multihash hash = file.hash; - - //Add the service - String serviceName = "mock"; - ipfs.pin.remote.rmService(serviceName); //clean up if necessary - ipfs.pin.remote.addService(serviceName, "http://127.0.0.1:3000", "secret"); + } - //List services - List services = ipfs.pin.remote.lsService(true); - for(Map service : services) { - System.out.println(service); - } + private void run(IPFS ipfs) throws IOException { - // Pin - Map addHashResult = ipfs.pin.remote.add(serviceName, hash, Optional.empty(), true); - System.out.println(addHashResult); + // Add file to the local node + MerkleNode file = + ipfs.add(new NamedStreamable.ByteArrayWrapper("file.txt", "test data".getBytes())).get(0); + // Retrieve CID + Multihash hash = file.hash; - // List - List statusList = List.of(IPFS.PinStatus.values()); // all statuses - Map ls = ipfs.pin.remote.ls(serviceName, Optional.empty(), Optional.of(statusList)); - System.out.println(ls); - - // Remove pin from remote pinning service - List queued = List.of(IPFS.PinStatus.queued); - ipfs.pin.remote.rm(serviceName, Optional.empty(), Optional.of(queued), Optional.of(List.of(hash))); + // Add the service + String serviceName = "mock"; + ipfs.pin.remote.rmService(serviceName); // clean up if necessary + ipfs.pin.remote.addService(serviceName, "http://127.0.0.1:3000", "secret"); + // List services + List services = ipfs.pin.remote.lsService(true); + for (Map service : services) { + System.out.println(service); } - public static void main(String[] args) { - IPFS ipfsClient = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); - new UsageRemotePinningAPI(ipfsClient); - } + // Pin + Map addHashResult = ipfs.pin.remote.add(serviceName, hash, Optional.empty(), true); + System.out.println(addHashResult); + + // List + List statusList = List.of(IPFS.PinStatus.values()); // all statuses + Map ls = ipfs.pin.remote.ls(serviceName, Optional.empty(), Optional.of(statusList)); + System.out.println(ls); + + // Remove pin from remote pinning service + List queued = List.of(IPFS.PinStatus.queued); + ipfs.pin.remote.rm( + serviceName, Optional.empty(), Optional.of(queued), Optional.of(List.of(hash))); + } + + public static void main(String[] args) { + IPFS ipfsClient = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); + new UsageRemotePinningAPI(ipfsClient); + } } diff --git a/src/test/java/io/ipfs/api/APITest.java b/src/test/java/io/ipfs/api/APITest.java index 4ea3b163..0a30cf4c 100644 --- a/src/test/java/io/ipfs/api/APITest.java +++ b/src/test/java/io/ipfs/api/APITest.java @@ -1,964 +1,1058 @@ package io.ipfs.api; -import io.ipfs.api.cbor.*; -import io.ipfs.cid.*; -import io.ipfs.multihash.Multihash; -import io.ipfs.multiaddr.MultiAddress; -import org.junit.*; - -import java.io.*; -import java.nio.charset.StandardCharsets; -import java.nio.file.*; -import java.util.*; -import java.util.function.*; -import java.util.stream.*; - +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static 
org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertArrayEquals; + +import io.ipfs.api.cbor.CborObject; +import io.ipfs.cid.Cid; +import io.ipfs.multiaddr.MultiAddress; +import io.ipfs.multihash.Multihash; +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Random; +import java.util.TreeMap; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; @SuppressWarnings({"rawtypes", "unused"}) public class APITest { - private final MultiAddress ipfsAddress = new MultiAddress("/ip4/127.0.0.1/tcp/5001"); - private final IPFS ipfs = new IPFS(ipfsAddress.getHost(), ipfsAddress.getPort(), "/api/v0/", true, false); - - private final Random r = new Random(33550336); // perfect - - @Test - public void dag() throws IOException { - String original = "{\"data\":1234}"; - byte[] object = original.getBytes(); - MerkleNode put = ipfs.dag.put("json", object); - - Cid expected = Cid.decode("bafyreidbm2zncsc3j25zn7lofgd4woeh6eygdy73thfosuni2rwr3bhcvu"); - - Multihash result = put.hash; - assertEquals("Correct cid returned", result, expected); - - byte[] get = ipfs.dag.get(expected); - assertEquals("Raw data equal", original, new String(get).trim()); - Map res = ipfs.dag.resolve("bafyreidbm2zncsc3j25zn7lofgd4woeh6eygdy73thfosuni2rwr3bhcvu"); - assertNotNull("not resolved", res); - res = ipfs.dag.stat(expected); - assertNotNull("not found", res); - } - - @Test - public void dagCbor() throws IOException { - Map tmp = new LinkedHashMap<>(); - String value = "G'day mate!"; - tmp.put("data", new CborObject.CborString(value)); - CborObject original = CborObject.CborMap.build(tmp); - byte[] object = original.toByteArray(); - MerkleNode put = ipfs.dag.put("dag-cbor", object); - - Cid cid = (Cid) put.hash; - - byte[] get = ipfs.dag.get(cid); - assertEquals("Raw data equal", ((Map) JSONParser.parse(new String(get))).get("data"), - value); - - Cid expected = Cid.decode("zdpuApemz4XMURSCkBr9W5y974MXkSbeDfLeZmiQTPpvkatFF"); - assertEquals("Correct cid returned", cid, expected); - } - - @Test - public void keys() throws IOException { - List existing = ipfs.key.list(); - String name = "mykey" + System.nanoTime(); - KeyInfo gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); - String newName = "bob" + System.nanoTime(); - Object rename = ipfs.key.rename(name, newName); - List rm = ipfs.key.rm(newName); - List remaining = ipfs.key.list(); - assertEquals("removed key", remaining, existing); - } - - @Test - @Ignore("Not reliable") - public void log() throws IOException { - Map lsResult = ipfs.log.ls(); - Assert.assertFalse("Log ls", lsResult.isEmpty()); - Map levelResult = ipfs.log.level("all", "info"); - Assert.assertTrue("Log level", ((String)levelResult.get("Message")).startsWith("Changed log level")); + private final MultiAddress ipfsAddress = new MultiAddress("/ip4/127.0.0.1/tcp/5001"); + private final IPFS ipfs = + new IPFS(ipfsAddress.getHost(), ipfsAddress.getPort(), 
"/api/v0/", true, false); + + private final Random r = new Random(33550336); // perfect + + @Test + public void dag() throws IOException { + String original = "{\"data\":1234}"; + byte[] object = original.getBytes(); + MerkleNode put = ipfs.dag.put("json", object); + + Cid expected = Cid.decode("bafyreidbm2zncsc3j25zn7lofgd4woeh6eygdy73thfosuni2rwr3bhcvu"); + + Multihash result = put.hash; + assertEquals("Correct cid returned", result, expected); + + byte[] get = ipfs.dag.get(expected); + assertEquals("Raw data equal", original, new String(get).trim()); + Map res = ipfs.dag.resolve("bafyreidbm2zncsc3j25zn7lofgd4woeh6eygdy73thfosuni2rwr3bhcvu"); + assertNotNull("not resolved", res); + res = ipfs.dag.stat(expected); + assertNotNull("not found", res); + } + + @Test + public void dagCbor() throws IOException { + Map tmp = new LinkedHashMap<>(); + String value = "G'day mate!"; + tmp.put("data", new CborObject.CborString(value)); + CborObject original = CborObject.CborMap.build(tmp); + byte[] object = original.toByteArray(); + MerkleNode put = ipfs.dag.put("dag-cbor", object); + + Cid cid = (Cid) put.hash; + + byte[] get = ipfs.dag.get(cid); + assertEquals("Raw data equal", ((Map) JSONParser.parse(new String(get))).get("data"), value); + + Cid expected = Cid.decode("zdpuApemz4XMURSCkBr9W5y974MXkSbeDfLeZmiQTPpvkatFF"); + assertEquals("Correct cid returned", cid, expected); + } + + @Test + public void keys() throws IOException { + List existing = ipfs.key.list(); + String name = "mykey" + System.nanoTime(); + KeyInfo gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); + String newName = "bob" + System.nanoTime(); + Object rename = ipfs.key.rename(name, newName); + List rm = ipfs.key.rm(newName); + List remaining = ipfs.key.list(); + assertEquals("removed key", remaining, existing); + } + + @Test + @Ignore("Not reliable") + public void log() throws IOException { + Map lsResult = ipfs.log.ls(); + Assert.assertFalse("Log ls", lsResult.isEmpty()); + Map levelResult = ipfs.log.level("all", "info"); + Assert.assertTrue( + "Log level", ((String) levelResult.get("Message")).startsWith("Changed log level")); + } + + @Test + public void ipldNode() { + Function>, CborObject.CborMap> map = + s -> CborObject.CborMap.build(s.collect(Collectors.toMap(p -> p.left, p -> p.right))); + CborObject.CborMap a = map.apply(Stream.of(new Pair<>("b", new CborObject.CborLong(1)))); + + CborObject.CborMap cbor = + map.apply(Stream.of(new Pair<>("a", a), new Pair<>("c", new CborObject.CborLong(2)))); + + IpldNode.CborIpldNode node = new IpldNode.CborIpldNode(cbor); + List tree = node.tree("", -1); + assertEquals("Correct tree", tree, Arrays.asList("/a/b", "/c")); + } + + @Test + public void singleFileTest() throws IOException { + NamedStreamable.ByteArrayWrapper file = + new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! IPFS rocks!".getBytes()); + fileTest(file); + } + + @Test + public void wrappedSingleFileTest() throws IOException { + NamedStreamable.ByteArrayWrapper file = + new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! 
IPFS rocks!".getBytes()); + List addParts = ipfs.add(file, true); + MerkleNode filePart = addParts.get(0); + MerkleNode dirPart = addParts.get(1); + byte[] catResult = ipfs.cat(filePart.hash); + byte[] getResult = ipfs.get(filePart.hash); + if (!Arrays.equals(catResult, file.getContents())) + throw new IllegalStateException("Different contents!"); + List pinRm = ipfs.pin.rm(dirPart.hash, true); + if (!pinRm.get(0).equals(dirPart.hash)) throw new IllegalStateException("Didn't remove file!"); + Object gc = ipfs.repo.gc(); + } + + @Test + public void dirTest() throws IOException { + Path test = Files.createTempDirectory("test"); + Files.write(test.resolve("file.txt"), "G'day IPFS!".getBytes()); + NamedStreamable dir = new NamedStreamable.FileWrapper(test.toFile()); + List add = ipfs.add(dir); + MerkleNode addResult = add.get(add.size() - 1); + List ls = ipfs.ls(addResult.hash); + Assert.assertTrue(ls.size() > 0); + } + + @Test + public void directoryTest() throws IOException { + Random rnd = new Random(); + String dirName = "folder" + rnd.nextInt(100); + Path tmpDir = Files.createTempDirectory(dirName); + + String fileName = "afile" + rnd.nextInt(100); + Path file = tmpDir.resolve(fileName); + FileOutputStream fout = new FileOutputStream(file.toFile()); + byte[] fileContents = "IPFS rocks!".getBytes(); + fout.write(fileContents); + fout.flush(); + fout.close(); + + String subdirName = "subdir"; + tmpDir.resolve(subdirName).toFile().mkdir(); + + String subfileName = "subdirfile" + rnd.nextInt(100); + Path subdirfile = tmpDir.resolve(subdirName + "/" + subfileName); + FileOutputStream fout2 = new FileOutputStream(subdirfile.toFile()); + byte[] file2Contents = "IPFS still rocks!".getBytes(); + fout2.write(file2Contents); + fout2.flush(); + fout2.close(); + + List addParts = ipfs.add(new NamedStreamable.FileWrapper(tmpDir.toFile())); + MerkleNode addResult = addParts.get(addParts.size() - 1); + List lsResult = ipfs.ls(addResult.hash); + if (lsResult.size() != 2) throw new IllegalStateException("Incorrect number of objects in ls!"); + if (!lsResult.stream() + .map(x -> x.name.get()) + .collect(Collectors.toSet()) + .equals(new HashSet<>(Arrays.asList(subdirName, fileName)))) + throw new IllegalStateException("Dir not returned in ls!"); + byte[] catResult = ipfs.cat(addResult.hash, "/" + fileName); + if (!Arrays.equals(catResult, fileContents)) + throw new IllegalStateException("Different contents!"); + + byte[] catResult2 = ipfs.cat(addResult.hash, "/" + subdirName + "/" + subfileName); + if (!Arrays.equals(catResult2, file2Contents)) + throw new IllegalStateException("Different contents!"); + } + + @Ignore + @Test + public void largeFileTest() throws IOException { + byte[] largerData = new byte[100 * 1024 * 1024]; + new Random(1).nextBytes(largerData); + NamedStreamable.ByteArrayWrapper largeFile = + new NamedStreamable.ByteArrayWrapper("nontrivial.txt", largerData); + fileTest(largeFile); + } + + @Ignore + @Test + public void hugeFileStreamTest() throws IOException { + byte[] hugeData = new byte[1000 * 1024 * 1024]; + new Random(1).nextBytes(hugeData); + NamedStreamable.ByteArrayWrapper largeFile = + new NamedStreamable.ByteArrayWrapper("massive.txt", hugeData); + MerkleNode addResult = ipfs.add(largeFile).get(0); + InputStream in = ipfs.catStream(addResult.hash); + + byte[] res = new byte[hugeData.length]; + int offset = 0; + byte[] buf = new byte[4096]; + int r; + while ((r = in.read(buf)) >= 0) { + try { + System.arraycopy(buf, 0, res, offset, r); + offset += r; + } catch (Exception e) { + 
e.printStackTrace(); + } } - - @Test - public void ipldNode() { - Function>, CborObject.CborMap> map = - s -> CborObject.CborMap.build(s.collect(Collectors.toMap(p -> p.left, p -> p.right))); - CborObject.CborMap a = map.apply(Stream.of(new Pair<>("b", new CborObject.CborLong(1)))); - - CborObject.CborMap cbor = map.apply(Stream.of(new Pair<>("a", a), new Pair<>("c", new CborObject.CborLong(2)))); - - IpldNode.CborIpldNode node = new IpldNode.CborIpldNode(cbor); - List tree = node.tree("", -1); - assertEquals("Correct tree", tree, Arrays.asList("/a/b", "/c")); - } - - @Test - public void singleFileTest() throws IOException { - NamedStreamable.ByteArrayWrapper file = new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! IPFS rocks!".getBytes()); - fileTest(file); - } - - @Test - public void wrappedSingleFileTest() throws IOException { - NamedStreamable.ByteArrayWrapper file = new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! IPFS rocks!".getBytes()); - List addParts = ipfs.add(file, true); - MerkleNode filePart = addParts.get(0); - MerkleNode dirPart = addParts.get(1); - byte[] catResult = ipfs.cat(filePart.hash); - byte[] getResult = ipfs.get(filePart.hash); - if (!Arrays.equals(catResult, file.getContents())) - throw new IllegalStateException("Different contents!"); - List pinRm = ipfs.pin.rm(dirPart.hash, true); - if (!pinRm.get(0).equals(dirPart.hash)) - throw new IllegalStateException("Didn't remove file!"); - Object gc = ipfs.repo.gc(); - } - - @Test - public void dirTest() throws IOException { - Path test = Files.createTempDirectory("test"); - Files.write(test.resolve("file.txt"), "G'day IPFS!".getBytes()); - NamedStreamable dir = new NamedStreamable.FileWrapper(test.toFile()); - List add = ipfs.add(dir); - MerkleNode addResult = add.get(add.size() - 1); - List ls = ipfs.ls(addResult.hash); - Assert.assertTrue(ls.size() > 0); - } - - @Test - public void directoryTest() throws IOException { - Random rnd = new Random(); - String dirName = "folder" + rnd.nextInt(100); - Path tmpDir = Files.createTempDirectory(dirName); - - String fileName = "afile" + rnd.nextInt(100); - Path file = tmpDir.resolve(fileName); - FileOutputStream fout = new FileOutputStream(file.toFile()); - byte[] fileContents = "IPFS rocks!".getBytes(); - fout.write(fileContents); - fout.flush(); - fout.close(); - - String subdirName = "subdir"; - tmpDir.resolve(subdirName).toFile().mkdir(); - - String subfileName = "subdirfile" + rnd.nextInt(100); - Path subdirfile = tmpDir.resolve(subdirName + "/" + subfileName); - FileOutputStream fout2 = new FileOutputStream(subdirfile.toFile()); - byte[] file2Contents = "IPFS still rocks!".getBytes(); - fout2.write(file2Contents); - fout2.flush(); - fout2.close(); - - List addParts = ipfs.add(new NamedStreamable.FileWrapper(tmpDir.toFile())); - MerkleNode addResult = addParts.get(addParts.size() - 1); - List lsResult = ipfs.ls(addResult.hash); - if (lsResult.size() != 2) - throw new IllegalStateException("Incorrect number of objects in ls!"); - if (! lsResult.stream().map(x -> x.name.get()).collect(Collectors.toSet()).equals(new HashSet<>(Arrays.asList(subdirName, fileName)))) - throw new IllegalStateException("Dir not returned in ls!"); - byte[] catResult = ipfs.cat(addResult.hash, "/" + fileName); - if (! Arrays.equals(catResult, fileContents)) - throw new IllegalStateException("Different contents!"); - - byte[] catResult2 = ipfs.cat(addResult.hash, "/" + subdirName + "/" + subfileName); - if (! 
Arrays.equals(catResult2, file2Contents)) - throw new IllegalStateException("Different contents!"); - } - - @Ignore - @Test - public void largeFileTest() throws IOException { - byte[] largerData = new byte[100*1024*1024]; - new Random(1).nextBytes(largerData); - NamedStreamable.ByteArrayWrapper largeFile = new NamedStreamable.ByteArrayWrapper("nontrivial.txt", largerData); - fileTest(largeFile); - } - - @Ignore - @Test - public void hugeFileStreamTest() throws IOException { - byte[] hugeData = new byte[1000*1024*1024]; - new Random(1).nextBytes(hugeData); - NamedStreamable.ByteArrayWrapper largeFile = new NamedStreamable.ByteArrayWrapper("massive.txt", hugeData); - MerkleNode addResult = ipfs.add(largeFile).get(0); - InputStream in = ipfs.catStream(addResult.hash); - - byte[] res = new byte[hugeData.length]; - int offset = 0; - byte[] buf = new byte[4096]; - int r; - while ((r = in.read(buf)) >= 0) { - try { - System.arraycopy(buf, 0, res, offset, r); - offset += r; - }catch (Exception e){ - e.printStackTrace(); - } - } - if (!Arrays.equals(res, hugeData)) - throw new IllegalStateException("Different contents!"); - } - - @Test - public void hostFileTest() throws IOException { - Path tempFile = Files.createTempFile("IPFS", "tmp"); - BufferedWriter w = new BufferedWriter(new FileWriter(tempFile.toFile())); - w.append("Some data"); - w.flush(); - w.close(); - NamedStreamable hostFile = new NamedStreamable.FileWrapper(tempFile.toFile()); - fileTest(hostFile); - } - - @Test - public void hashOnly() throws IOException { - byte[] data = randomBytes(4096); - NamedStreamable file = new NamedStreamable.ByteArrayWrapper(data); - MerkleNode addResult = ipfs.add(file, false, true).get(0); - List local = ipfs.refs.local(); - if (local.contains(addResult.hash)) - throw new IllegalStateException("Object shouldn't be present!"); - } - - public void fileTest(NamedStreamable file) throws IOException{ - MerkleNode addResult = ipfs.add(file).get(0); - byte[] catResult = ipfs.cat(addResult.hash); - byte[] getResult = ipfs.get(addResult.hash); - if (!Arrays.equals(catResult, file.getContents())) - throw new IllegalStateException("Different contents!"); - List pinRm = ipfs.pin.rm(addResult.hash, true); - if (!pinRm.get(0).equals(addResult.hash)) - throw new IllegalStateException("Didn't remove file!"); - Object gc = ipfs.repo.gc(); - } - @Test - public void filesTest() throws IOException { - - ipfs.files.rm("/filesTest", true, true); - String filename = "hello.txt"; - String folder = "/filesTest/one/two"; - String path = folder + "/" + filename; - String contents = "hello world!"; - NamedStreamable ns = new NamedStreamable.ByteArrayWrapper(filename, contents.getBytes()); - String res = ipfs.files.write(path, ns, true, true); - Map stat = ipfs.files.stat( path); - Map stat2 = ipfs.files.stat( path, Optional.of(""), true); - String readContents = new String(ipfs.files.read(path)); - assertEquals("Should be equals", contents, readContents); - res = ipfs.files.rm(path, false, false); - - String tempFilename = "temp.txt"; - String tempFolder = "/filesTest/a/b/c"; - String tempPath = tempFolder + "/" + tempFilename; - String mkdir = ipfs.files.mkdir(tempFolder, true); - stat = ipfs.files.stat(tempFolder); - NamedStreamable tempFile = new NamedStreamable.ByteArrayWrapper(tempFilename, contents.getBytes()); - res = ipfs.files.write(tempPath, tempFile, true, false); - res = ipfs.files.mv(tempPath, "/" + tempFilename); - stat = ipfs.files.stat("/" + tempFilename); - List lsMap = ipfs.files.ls("/"); - List lsMap2 = 
ipfs.files.ls("/", true, false); - - String flushFolder = "/filesTest/f/l/u/s/h"; - res = ipfs.files.mkdir(flushFolder, true); - Map flushMap = ipfs.files.flush(flushFolder); - - String copyFilename = "copy.txt"; - String copyFromFolder = "/filesTest/fromThere"; - String copyToFolder = "/filesTest/toHere"; - String copyFromPath = copyFromFolder + "/" + copyFilename; - String copyToPath = copyToFolder + "/" + copyFilename; - NamedStreamable copyFile = new NamedStreamable.ByteArrayWrapper(copyFilename, "copy".getBytes()); - WriteFilesArgs args = WriteFilesArgs.Builder.newInstance() - .setCreate() - .setParents() - .build(); - res = ipfs.files.write(copyFromPath, copyFile, args); - res = ipfs.files.cp(copyFromPath, copyToPath, true); - stat = ipfs.files.stat(copyToPath); - String cidRes = ipfs.files.chcid(copyToPath); - stat = ipfs.files.stat(copyToPath); - String cidV0Res = ipfs.files.chcid(copyToPath, Optional.of(0), Optional.empty()); - stat = ipfs.files.stat(copyToPath); - ipfs.files.rm("/filesTest", false, true); - } - - @Test - public void multibaseTest() throws IOException { - List encodings = ipfs.multibase.list(true, false); - Assert.assertFalse("multibase/list works", encodings.isEmpty()); - String encoded = ipfs.multibase.encode(Optional.empty(), new NamedStreamable.ByteArrayWrapper("hello".getBytes())); - assertEquals("multibase/encode works", "uaGVsbG8", encoded); - String decoded = ipfs.multibase.decode(new NamedStreamable.ByteArrayWrapper(encoded.getBytes())); - assertEquals("multibase/decode works", "hello", decoded); - String input = "f68656c6c6f"; - String transcode = ipfs.multibase.transcode(Optional.of("base64url"), new NamedStreamable.ByteArrayWrapper(input.getBytes())); - assertEquals("multibase/transcode works", transcode, encoded); - } - - @Test - @Ignore("Experimental feature not enabled by default") - public void fileStoreTest() throws IOException { - ipfs.fileStore.dups(); - Map res = ipfs.fileStore.ls(true); - ipfs.fileStore.verify(true); - } - - @Test - public void pinTest() throws IOException { - MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); - Multihash hash = file.hash; - Map ls1 = ipfs.pin.ls(IPFS.PinType.all); - boolean pinned = ls1.containsKey(hash); - List rm = ipfs.pin.rm(hash); - // second rm should not throw a http 500, but return an empty list -// List rm2 = ipfs.pin.rm(hash); - List add2 = ipfs.pin.add(hash); - // adding something already pinned should succeed - List add3 = ipfs.pin.add(hash); - Map ls = ipfs.pin.ls(IPFS.PinType.recursive); - ipfs.repo.gc(); - // object should still be present after gc - Map ls2 = ipfs.pin.ls(IPFS.PinType.recursive); - boolean stillPinned = ls2.containsKey(hash); - Assert.assertTrue("Pinning works", pinned && stillPinned); - } - - @Test - @Ignore - public void remotePinTest() throws IOException { - MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("test data".getBytes())).get(0); - Multihash hash = file.hash; - String service = "mock"; - String rmRemoteService = ipfs.pin.remote.rmService(service); - List lsRemoteService = ipfs.pin.remote.lsService(false); - String endpoint = "http://127.0.0.1:3000"; - String key = "SET_VALUE_HERE"; - String added = ipfs.pin.remote.addService(service, endpoint, key); - lsRemoteService = ipfs.pin.remote.lsService(false); - Map addHash = ipfs.pin.remote.add(service, hash, Optional.empty(), true); - Map lsRemote = ipfs.pin.remote.ls(service, Optional.empty(), Optional.of(List.of(IPFS.PinStatus.values()))); - String rmRemote = 
ipfs.pin.remote.rm(service, Optional.empty(), Optional.of(List.of(IPFS.PinStatus.queued)), Optional.of(List.of(hash))); - lsRemote = ipfs.pin.remote.ls(service, Optional.empty(), Optional.of(List.of(IPFS.PinStatus.values()))); - } - - @Test - public void pinUpdate() throws IOException { - MerkleNode child1 = ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); - Multihash hashChild1 = child1.hash; - - CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); - MerkleNode root1Res = ipfs.block.put(Collections.singletonList(root1.toByteArray()), Optional.of("cbor")).get(0); - ipfs.pin.add(root1Res.hash); - - CborObject.CborList root2 = new CborObject.CborList(Arrays.asList(new CborObject.CborMerkleLink(hashChild1), new CborObject.CborLong(System.currentTimeMillis()))); - MerkleNode root2Res = ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); - List update = ipfs.pin.update(root1Res.hash, root2Res.hash, true); - - Map ls = ipfs.pin.ls(IPFS.PinType.all); - boolean childPresent = ls.containsKey(hashChild1); - if (!childPresent) - throw new IllegalStateException("Child not present!"); - - ipfs.repo.gc(); - Map ls2 = ipfs.pin.ls(IPFS.PinType.all); - boolean childPresentAfterGC = ls2.containsKey(hashChild1); - if (!childPresentAfterGC) - throw new IllegalStateException("Child not present!"); - } - - @Test - public void rawLeafNodePinUpdate() throws IOException { - MerkleNode child1 = ipfs.block.put("some data".getBytes(), Optional.of("raw")); - Multihash hashChild1 = child1.hash; - - CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); - MerkleNode root1Res = ipfs.block.put(Collections.singletonList(root1.toByteArray()), Optional.of("cbor")).get(0); - ipfs.pin.add(root1Res.hash); - - MerkleNode child2 = ipfs.block.put("G'day new tree".getBytes(), Optional.of("raw")); - Multihash hashChild2 = child2.hash; - - CborObject.CborList root2 = new CborObject.CborList(Arrays.asList( + if (!Arrays.equals(res, hugeData)) throw new IllegalStateException("Different contents!"); + } + + @Test + public void hostFileTest() throws IOException { + Path tempFile = Files.createTempFile("IPFS", "tmp"); + BufferedWriter w = new BufferedWriter(new FileWriter(tempFile.toFile())); + w.append("Some data"); + w.flush(); + w.close(); + NamedStreamable hostFile = new NamedStreamable.FileWrapper(tempFile.toFile()); + fileTest(hostFile); + } + + @Test + public void hashOnly() throws IOException { + byte[] data = randomBytes(4096); + NamedStreamable file = new NamedStreamable.ByteArrayWrapper(data); + MerkleNode addResult = ipfs.add(file, false, true).get(0); + List local = ipfs.refs.local(); + if (local.contains(addResult.hash)) + throw new IllegalStateException("Object shouldn't be present!"); + } + + public void fileTest(NamedStreamable file) throws IOException { + MerkleNode addResult = ipfs.add(file).get(0); + byte[] catResult = ipfs.cat(addResult.hash); + byte[] getResult = ipfs.get(addResult.hash); + if (!Arrays.equals(catResult, file.getContents())) + throw new IllegalStateException("Different contents!"); + List pinRm = ipfs.pin.rm(addResult.hash, true); + if (!pinRm.get(0).equals(addResult.hash)) + throw new IllegalStateException("Didn't remove file!"); + Object gc = ipfs.repo.gc(); + } + + @Test + public void filesTest() throws IOException { + + ipfs.files.rm("/filesTest", true, true); + String filename = "hello.txt"; + String folder = "/filesTest/one/two"; + String path = folder + "/" + filename; + String 
contents = "hello world!"; + NamedStreamable ns = new NamedStreamable.ByteArrayWrapper(filename, contents.getBytes()); + String res = ipfs.files.write(path, ns, true, true); + Map stat = ipfs.files.stat(path); + Map stat2 = ipfs.files.stat(path, Optional.of(""), true); + String readContents = new String(ipfs.files.read(path)); + assertEquals("Should be equals", contents, readContents); + res = ipfs.files.rm(path, false, false); + + String tempFilename = "temp.txt"; + String tempFolder = "/filesTest/a/b/c"; + String tempPath = tempFolder + "/" + tempFilename; + String mkdir = ipfs.files.mkdir(tempFolder, true); + stat = ipfs.files.stat(tempFolder); + NamedStreamable tempFile = + new NamedStreamable.ByteArrayWrapper(tempFilename, contents.getBytes()); + res = ipfs.files.write(tempPath, tempFile, true, false); + res = ipfs.files.mv(tempPath, "/" + tempFilename); + stat = ipfs.files.stat("/" + tempFilename); + List lsMap = ipfs.files.ls("/"); + List lsMap2 = ipfs.files.ls("/", true, false); + + String flushFolder = "/filesTest/f/l/u/s/h"; + res = ipfs.files.mkdir(flushFolder, true); + Map flushMap = ipfs.files.flush(flushFolder); + + String copyFilename = "copy.txt"; + String copyFromFolder = "/filesTest/fromThere"; + String copyToFolder = "/filesTest/toHere"; + String copyFromPath = copyFromFolder + "/" + copyFilename; + String copyToPath = copyToFolder + "/" + copyFilename; + NamedStreamable copyFile = + new NamedStreamable.ByteArrayWrapper(copyFilename, "copy".getBytes()); + WriteFilesArgs args = WriteFilesArgs.Builder.newInstance().setCreate().setParents().build(); + res = ipfs.files.write(copyFromPath, copyFile, args); + res = ipfs.files.cp(copyFromPath, copyToPath, true); + stat = ipfs.files.stat(copyToPath); + String cidRes = ipfs.files.chcid(copyToPath); + stat = ipfs.files.stat(copyToPath); + String cidV0Res = ipfs.files.chcid(copyToPath, Optional.of(0), Optional.empty()); + stat = ipfs.files.stat(copyToPath); + ipfs.files.rm("/filesTest", false, true); + } + + @Test + public void multibaseTest() throws IOException { + List encodings = ipfs.multibase.list(true, false); + Assert.assertFalse("multibase/list works", encodings.isEmpty()); + String encoded = + ipfs.multibase.encode( + Optional.empty(), new NamedStreamable.ByteArrayWrapper("hello".getBytes())); + assertEquals("multibase/encode works", "uaGVsbG8", encoded); + String decoded = + ipfs.multibase.decode(new NamedStreamable.ByteArrayWrapper(encoded.getBytes())); + assertEquals("multibase/decode works", "hello", decoded); + String input = "f68656c6c6f"; + String transcode = + ipfs.multibase.transcode( + Optional.of("base64url"), new NamedStreamable.ByteArrayWrapper(input.getBytes())); + assertEquals("multibase/transcode works", transcode, encoded); + } + + @Test + @Ignore("Experimental feature not enabled by default") + public void fileStoreTest() throws IOException { + ipfs.fileStore.dups(); + Map res = ipfs.fileStore.ls(true); + ipfs.fileStore.verify(true); + } + + @Test + public void pinTest() throws IOException { + MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); + Multihash hash = file.hash; + Map ls1 = ipfs.pin.ls(IPFS.PinType.all); + boolean pinned = ls1.containsKey(hash); + List rm = ipfs.pin.rm(hash); + // second rm should not throw a http 500, but return an empty list + // List rm2 = ipfs.pin.rm(hash); + List add2 = ipfs.pin.add(hash); + // adding something already pinned should succeed + List add3 = ipfs.pin.add(hash); + Map ls = ipfs.pin.ls(IPFS.PinType.recursive); + 
ipfs.repo.gc(); + // object should still be present after gc + Map ls2 = ipfs.pin.ls(IPFS.PinType.recursive); + boolean stillPinned = ls2.containsKey(hash); + Assert.assertTrue("Pinning works", pinned && stillPinned); + } + + @Test + @Ignore + public void remotePinTest() throws IOException { + MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("test data".getBytes())).get(0); + Multihash hash = file.hash; + String service = "mock"; + String rmRemoteService = ipfs.pin.remote.rmService(service); + List lsRemoteService = ipfs.pin.remote.lsService(false); + String endpoint = "http://127.0.0.1:3000"; + String key = "SET_VALUE_HERE"; + String added = ipfs.pin.remote.addService(service, endpoint, key); + lsRemoteService = ipfs.pin.remote.lsService(false); + Map addHash = ipfs.pin.remote.add(service, hash, Optional.empty(), true); + Map lsRemote = + ipfs.pin.remote.ls( + service, Optional.empty(), Optional.of(List.of(IPFS.PinStatus.values()))); + String rmRemote = + ipfs.pin.remote.rm( + service, + Optional.empty(), + Optional.of(List.of(IPFS.PinStatus.queued)), + Optional.of(List.of(hash))); + lsRemote = + ipfs.pin.remote.ls( + service, Optional.empty(), Optional.of(List.of(IPFS.PinStatus.values()))); + } + + @Test + public void pinUpdate() throws IOException { + MerkleNode child1 = + ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); + Multihash hashChild1 = child1.hash; + + CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); + MerkleNode root1Res = + ipfs.block.put(Collections.singletonList(root1.toByteArray()), Optional.of("cbor")).get(0); + ipfs.pin.add(root1Res.hash); + + CborObject.CborList root2 = + new CborObject.CborList( + Arrays.asList( + new CborObject.CborMerkleLink(hashChild1), + new CborObject.CborLong(System.currentTimeMillis()))); + MerkleNode root2Res = + ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); + List update = ipfs.pin.update(root1Res.hash, root2Res.hash, true); + + Map ls = ipfs.pin.ls(IPFS.PinType.all); + boolean childPresent = ls.containsKey(hashChild1); + if (!childPresent) throw new IllegalStateException("Child not present!"); + + ipfs.repo.gc(); + Map ls2 = ipfs.pin.ls(IPFS.PinType.all); + boolean childPresentAfterGC = ls2.containsKey(hashChild1); + if (!childPresentAfterGC) throw new IllegalStateException("Child not present!"); + } + + @Test + public void rawLeafNodePinUpdate() throws IOException { + MerkleNode child1 = ipfs.block.put("some data".getBytes(), Optional.of("raw")); + Multihash hashChild1 = child1.hash; + + CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); + MerkleNode root1Res = + ipfs.block.put(Collections.singletonList(root1.toByteArray()), Optional.of("cbor")).get(0); + ipfs.pin.add(root1Res.hash); + + MerkleNode child2 = ipfs.block.put("G'day new tree".getBytes(), Optional.of("raw")); + Multihash hashChild2 = child2.hash; + + CborObject.CborList root2 = + new CborObject.CborList( + Arrays.asList( new CborObject.CborMerkleLink(hashChild1), new CborObject.CborMerkleLink(hashChild2), - new CborObject.CborLong(System.currentTimeMillis())) - ); - MerkleNode root2Res = ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); - List update = ipfs.pin.update(root1Res.hash, root2Res.hash, false); - } - - @Test - public void indirectPinTest() throws IOException { - String path = "/test/indirectPinTest-" + UUID.randomUUID(); - ipfs.files.write(path + "/content", new 
NamedStreamable.ByteArrayWrapper("something".getBytes(StandardCharsets.UTF_8)), true, true); - Multihash content = Multihash.decode((String) ipfs.files.stat(path + "/content").get("Hash")); - - // adding one more extra entry to parent1 to keep its hash different from parent2 - ipfs.files.mkdir(path + "/parent1", true); - ipfs.files.write(path + "/parent1/content1", new NamedStreamable.ByteArrayWrapper("somethingelse".getBytes(StandardCharsets.UTF_8)), true, true); - ipfs.files.cp("/ipfs/" + content, path + "/parent1/content2", true); - - ipfs.files.mkdir(path + "/parent2", true); - ipfs.files.cp("/ipfs/" + content, path + "/parent2/content", true); - - Multihash parent1 = Multihash.decode((String) ipfs.files.stat(path + "/parent1").get("Hash")); - Multihash parent2 = Multihash.decode((String) ipfs.files.stat(path + "/parent2").get("Hash")); - - ipfs.pin.add(parent1); - ipfs.pin.add(parent2); - ipfs.pin.rm(parent1, true); - - Map ls = ipfs.pin.ls(IPFS.PinType.all); - boolean childPresent = ls.containsKey(content); - if (!childPresent) - throw new IllegalStateException("Child not present: " + ls); - - ipfs.repo.gc(); - Map ls2 = ipfs.pin.ls(IPFS.PinType.all); - boolean childPresentAfterGC = ls2.containsKey(content); - if (!childPresentAfterGC) - throw new IllegalStateException("Child not present:" + ls); - - ipfs.files.rm(path, true, true); - } - - @Ignore("RPC API removed") - @Test - public void objectPatch() throws IOException { - MerkleNode obj = ipfs.object._new(Optional.empty()); - Multihash base = obj.hash; - // link tests - String linkName = "alink"; - MerkleNode addLink = ipfs.object.patch(base, "add-link", Optional.empty(), Optional.of(linkName), Optional.of(base)); - MerkleNode withLink = ipfs.object.get(addLink.hash); - if (withLink.links.size() != 1 || !withLink.links.get(0).hash.equals(base) || !withLink.links.get(0).name.get().equals(linkName)) - throw new RuntimeException("Added link not correct!"); - MerkleNode rmLink = ipfs.object.patch(addLink.hash, "rm-link", Optional.empty(), Optional.of(linkName), Optional.empty()); - if (!rmLink.hash.equals(base)) - throw new RuntimeException("Adding not inverse of removing link!"); - - // data tests -// byte[] data = "some random textual data".getBytes(); - byte[] data = new byte[1024]; - new Random().nextBytes(data); - MerkleNode patched = ipfs.object.patch(base, "set-data", Optional.of(data), Optional.empty(), Optional.empty()); - byte[] patchedResult = ipfs.object.data(patched.hash); - if (!Arrays.equals(patchedResult, data)) - throw new RuntimeException("object.patch: returned data != stored data!"); - - MerkleNode twicePatched = ipfs.object.patch(patched.hash, "append-data", Optional.of(data), Optional.empty(), Optional.empty()); - byte[] twicePatchedResult = ipfs.object.data(twicePatched.hash); - byte[] twice = new byte[2*data.length]; - for (int i=0; i < 2; i++) - System.arraycopy(data, 0, twice, i*data.length, data.length); - if (!Arrays.equals(twicePatchedResult, twice)) - throw new RuntimeException("object.patch: returned data after append != stored data!"); - - } - - @Test - public void refsTest() throws IOException { - List local = ipfs.refs.local(); - for (Multihash ref: local) { - Object refs = ipfs.refs(ref, false); - } - } - - @Ignore("RPC API removed") - @Test - public void objectTest() throws IOException { - MerkleNode _new = ipfs.object._new(Optional.empty()); - Multihash pointer = Multihash.fromBase58("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); - MerkleNode object = ipfs.object.get(pointer); - List newPointer 
= ipfs.object.put(Collections.singletonList(object.toJSONString().getBytes())); - List newPointer2 = ipfs.object.put("json", Collections.singletonList(object.toJSONString().getBytes())); - MerkleNode links = ipfs.object.links(pointer); - byte[] data = ipfs.object.data(pointer); - Map stat = ipfs.object.stat(pointer); - } - - @Test - public void blockTest() throws IOException { - MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); - Map stat = ipfs.block.stat(pointer.hash); - byte[] object = ipfs.block.get(pointer.hash); - List newPointer = ipfs.block.put(Collections.singletonList("Some random data...".getBytes())); - } - - @Test - public void bulkBlockTest() throws IOException { - CborObject cbor = new CborObject.CborString("G'day IPFS!"); - byte[] raw = cbor.toByteArray(); - List bulkPut = ipfs.block.put(Arrays.asList(raw, raw, raw, raw, raw), Optional.of("cbor")); - List hashes = bulkPut.stream().map(m -> m.hash).collect(Collectors.toList()); - byte[] result = ipfs.block.get(hashes.get(0)); + new CborObject.CborLong(System.currentTimeMillis()))); + MerkleNode root2Res = + ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); + List update = ipfs.pin.update(root1Res.hash, root2Res.hash, false); + } + + @Test + public void indirectPinTest() throws IOException { + String path = "/test/indirectPinTest-" + UUID.randomUUID(); + ipfs.files.write( + path + "/content", + new NamedStreamable.ByteArrayWrapper("something".getBytes(StandardCharsets.UTF_8)), + true, + true); + Multihash content = Multihash.decode((String) ipfs.files.stat(path + "/content").get("Hash")); + + // adding one more extra entry to parent1 to keep its hash different from parent2 + ipfs.files.mkdir(path + "/parent1", true); + ipfs.files.write( + path + "/parent1/content1", + new NamedStreamable.ByteArrayWrapper("somethingelse".getBytes(StandardCharsets.UTF_8)), + true, + true); + ipfs.files.cp("/ipfs/" + content, path + "/parent1/content2", true); + + ipfs.files.mkdir(path + "/parent2", true); + ipfs.files.cp("/ipfs/" + content, path + "/parent2/content", true); + + Multihash parent1 = Multihash.decode((String) ipfs.files.stat(path + "/parent1").get("Hash")); + Multihash parent2 = Multihash.decode((String) ipfs.files.stat(path + "/parent2").get("Hash")); + + ipfs.pin.add(parent1); + ipfs.pin.add(parent2); + ipfs.pin.rm(parent1, true); + + Map ls = ipfs.pin.ls(IPFS.PinType.all); + boolean childPresent = ls.containsKey(content); + if (!childPresent) throw new IllegalStateException("Child not present: " + ls); + + ipfs.repo.gc(); + Map ls2 = ipfs.pin.ls(IPFS.PinType.all); + boolean childPresentAfterGC = ls2.containsKey(content); + if (!childPresentAfterGC) throw new IllegalStateException("Child not present:" + ls); + + ipfs.files.rm(path, true, true); + } + + @Ignore("RPC API removed") + @Test + public void objectPatch() throws IOException { + MerkleNode obj = ipfs.object._new(Optional.empty()); + Multihash base = obj.hash; + // link tests + String linkName = "alink"; + MerkleNode addLink = + ipfs.object.patch( + base, "add-link", Optional.empty(), Optional.of(linkName), Optional.of(base)); + MerkleNode withLink = ipfs.object.get(addLink.hash); + if (withLink.links.size() != 1 + || !withLink.links.get(0).hash.equals(base) + || !withLink.links.get(0).name.get().equals(linkName)) + throw new RuntimeException("Added link not correct!"); + MerkleNode rmLink = + ipfs.object.patch( + addLink.hash, "rm-link", Optional.empty(), Optional.of(linkName), 
Optional.empty()); + if (!rmLink.hash.equals(base)) + throw new RuntimeException("Adding not inverse of removing link!"); + + // data tests + // byte[] data = "some random textual data".getBytes(); + byte[] data = new byte[1024]; + new Random().nextBytes(data); + MerkleNode patched = + ipfs.object.patch(base, "set-data", Optional.of(data), Optional.empty(), Optional.empty()); + byte[] patchedResult = ipfs.object.data(patched.hash); + if (!Arrays.equals(patchedResult, data)) + throw new RuntimeException("object.patch: returned data != stored data!"); + + MerkleNode twicePatched = + ipfs.object.patch( + patched.hash, "append-data", Optional.of(data), Optional.empty(), Optional.empty()); + byte[] twicePatchedResult = ipfs.object.data(twicePatched.hash); + byte[] twice = new byte[2 * data.length]; + for (int i = 0; i < 2; i++) System.arraycopy(data, 0, twice, i * data.length, data.length); + if (!Arrays.equals(twicePatchedResult, twice)) + throw new RuntimeException("object.patch: returned data after append != stored data!"); + } + + @Test + public void refsTest() throws IOException { + List local = ipfs.refs.local(); + for (Multihash ref : local) { + Object refs = ipfs.refs(ref, false); } - - @Test - public void publish() throws Exception { - // JSON document - String json = "{\"name\":\"blogpost\",\"documents\":[]}"; - - // Add a DAG node to IPFS - MerkleNode merkleNode = ipfs.dag.put("json", json.getBytes()); - assertEquals("expected to be bafyreiafmbgul64c4nyybvgivswmkuhifamc24cdfuj4ij5xtnhpsfelky" , "bafyreiafmbgul64c4nyybvgivswmkuhifamc24cdfuj4ij5xtnhpsfelky", merkleNode.hash.toString()); - - // Get a DAG node - byte[] res = ipfs.dag.get((Cid) merkleNode.hash); - assertEquals("Should be equals", JSONParser.parse(json), JSONParser.parse(new String(res))); - - // Publish to IPNS - Map result = ipfs.name.publish(merkleNode.hash); - - // Resolve from IPNS - String resolved = ipfs.name.resolve(Cid.decode((String) result.get("Name"))); - assertEquals("Should be equals", resolved, "/ipfs/" + merkleNode.hash); - } - - @Test - public void resolveName() throws Exception { - // Resolve from DNSLinked domain - String resolved = ipfs.name.resolve("docs.ipfs.tech"); - assertNotNull(resolved); - assertTrue(resolved.startsWith("/ipfs/")); - assertTrue(resolved.length() > 20); // this may change (content and encoding as well) - } - - @Test - public void pubsubSynchronous() { - String topic = "topic" + System.nanoTime(); - List> res = Collections.synchronizedList(new ArrayList<>()); - new Thread(() -> { - try { + } + + @Ignore("RPC API removed") + @Test + public void objectTest() throws IOException { + MerkleNode _new = ipfs.object._new(Optional.empty()); + Multihash pointer = Multihash.fromBase58("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); + MerkleNode object = ipfs.object.get(pointer); + List newPointer = + ipfs.object.put(Collections.singletonList(object.toJSONString().getBytes())); + List newPointer2 = + ipfs.object.put("json", Collections.singletonList(object.toJSONString().getBytes())); + MerkleNode links = ipfs.object.links(pointer); + byte[] data = ipfs.object.data(pointer); + Map stat = ipfs.object.stat(pointer); + } + + @Test + public void blockTest() throws IOException { + MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); + Map stat = ipfs.block.stat(pointer.hash); + byte[] object = ipfs.block.get(pointer.hash); + List newPointer = + ipfs.block.put(Collections.singletonList("Some random data...".getBytes())); + } + + @Test + public void 
bulkBlockTest() throws IOException { + CborObject cbor = new CborObject.CborString("G'day IPFS!"); + byte[] raw = cbor.toByteArray(); + List bulkPut = + ipfs.block.put(Arrays.asList(raw, raw, raw, raw, raw), Optional.of("cbor")); + List hashes = bulkPut.stream().map(m -> m.hash).collect(Collectors.toList()); + byte[] result = ipfs.block.get(hashes.get(0)); + } + + @Test + public void publish() throws Exception { + // JSON document + String json = "{\"name\":\"blogpost\",\"documents\":[]}"; + + // Add a DAG node to IPFS + MerkleNode merkleNode = ipfs.dag.put("json", json.getBytes()); + assertEquals( + "expected to be bafyreiafmbgul64c4nyybvgivswmkuhifamc24cdfuj4ij5xtnhpsfelky", + "bafyreiafmbgul64c4nyybvgivswmkuhifamc24cdfuj4ij5xtnhpsfelky", + merkleNode.hash.toString()); + + // Get a DAG node + byte[] res = ipfs.dag.get((Cid) merkleNode.hash); + assertEquals("Should be equals", JSONParser.parse(json), JSONParser.parse(new String(res))); + + // Publish to IPNS + Map result = ipfs.name.publish(merkleNode.hash); + + // Resolve from IPNS + String resolved = ipfs.name.resolve(Cid.decode((String) result.get("Name"))); + assertEquals("Should be equals", resolved, "/ipfs/" + merkleNode.hash); + } + + @Test + public void resolveName() throws Exception { + // Resolve from DNSLinked domain + String resolved = ipfs.name.resolve("docs.ipfs.tech"); + assertNotNull(resolved); + assertTrue(resolved.startsWith("/ipfs/")); + assertTrue(resolved.length() > 20); // this may change (content and encoding as well) + } + + @Test + public void pubsubSynchronous() { + String topic = "topic" + System.nanoTime(); + List> res = Collections.synchronizedList(new ArrayList<>()); + new Thread( + () -> { + try { ipfs.pubsub.sub(topic, res::add, t -> t.printStackTrace()); - } catch (IOException e) { - throw new RuntimeException(e);} - }).start(); - - int nMessages = 100; - for (int i = 1; i < nMessages; ) { - ipfs.pubsub.pub(topic, "Hello World!"); - if (res.size() >= i) { - i++; - } - } - Assert.assertTrue(res.size() > nMessages - 5); // pubsub is not reliable so it loses messages + } catch (IOException e) { + throw new RuntimeException(e); + } + }) + .start(); + + int nMessages = 100; + for (int i = 1; i < nMessages; ) { + ipfs.pubsub.pub(topic, "Hello World!"); + if (res.size() >= i) { + i++; + } } - - @Test - public void pubsub() throws Exception { - String topic = "topic" + System.nanoTime(); - Stream> sub = ipfs.pubsub.sub(topic); - String data = "Hello World!"; - ipfs.pubsub.pub(topic, data); - ipfs.pubsub.pub(topic, "G'day"); - List results = sub.limit(2).collect(Collectors.toList()); - Assert.assertNotEquals(results.get(0), Collections.emptyMap()); - } - - private static String toEscapedHex(byte[] in) throws IOException { - StringBuilder res = new StringBuilder(); - for (byte b : in) { - res.append("\\x"); - res.append(String.format("%02x", b & 0xFF)); - } - return res.toString(); - } - - /** - * Test that merkle links in values of a cbor map are followed during recursive pins - */ - @Test - public void merkleLinkInMap() throws IOException { - Random r = new Random(); - CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!").getBytes()); - byte[] rawTarget = target.toByteArray(); - MerkleNode targetRes = ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); - - CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); - Map m = new TreeMap<>(); - m.put("alink", link); - m.put("arr", new CborObject.CborList(Collections.emptyList())); - 
CborObject.CborMap source = CborObject.CborMap.build(m); - byte[] rawSource = source.toByteArray(); - MerkleNode sourceRes = ipfs.block.put(Collections.singletonList(rawSource), Optional.of("cbor")).get(0); - - CborObject.fromByteArray(rawSource); - - List add = ipfs.pin.add(sourceRes.hash); - ipfs.repo.gc(); - ipfs.repo.gc(); - - List refs = ipfs.refs(sourceRes.hash, true); - Assert.assertTrue("refs returns links", refs.contains(targetRes.hash)); - - byte[] bytes = ipfs.block.get(targetRes.hash); - assertArrayEquals("same contents after GC", bytes, rawTarget); - // These commands can be used to reproduce this on the command line - String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; - String reproCommand2 = "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; + Assert.assertTrue(res.size() > nMessages - 5); // pubsub is not reliable so it loses messages + } + + @Test + public void pubsub() throws Exception { + String topic = "topic" + System.nanoTime(); + Stream> sub = ipfs.pubsub.sub(topic); + String data = "Hello World!"; + ipfs.pubsub.pub(topic, data); + ipfs.pubsub.pub(topic, "G'day"); + List results = sub.limit(2).collect(Collectors.toList()); + Assert.assertNotEquals(results.get(0), Collections.emptyMap()); + } + + private static String toEscapedHex(byte[] in) throws IOException { + StringBuilder res = new StringBuilder(); + for (byte b : in) { + res.append("\\x"); + res.append(String.format("%02x", b & 0xFF)); } - - @Test - public void recursiveRefs() throws IOException { - CborObject.CborByteArray leaf1 = new CborObject.CborByteArray(("G'day IPFS!").getBytes()); - byte[] rawLeaf1 = leaf1.toByteArray(); - MerkleNode leaf1Res = ipfs.block.put(Collections.singletonList(rawLeaf1), Optional.of("cbor")).get(0); - - CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(leaf1Res.hash); - Map m = new TreeMap<>(); - m.put("link1", link); - CborObject.CborMap source = CborObject.CborMap.build(m); - MerkleNode sourceRes = ipfs.block.put(Collections.singletonList(source.toByteArray()), Optional.of("cbor")).get(0); - - CborObject.CborByteArray leaf2 = new CborObject.CborByteArray(("G'day again, IPFS!").getBytes()); - byte[] rawLeaf2 = leaf2.toByteArray(); - MerkleNode leaf2Res = ipfs.block.put(Collections.singletonList(rawLeaf2), Optional.of("cbor")).get(0); - - Map m2 = new TreeMap<>(); - m2.put("link1", new CborObject.CborMerkleLink(sourceRes.hash)); - m2.put("link2", new CborObject.CborMerkleLink(leaf2Res.hash)); - CborObject.CborMap source2 = CborObject.CborMap.build(m2); - MerkleNode rootRes = ipfs.block.put(Collections.singletonList(source2.toByteArray()), Optional.of("cbor")).get(0); - - List refs = ipfs.refs(rootRes.hash, false); - boolean correct = refs.contains(sourceRes.hash) && refs.contains(leaf2Res.hash) && refs.size() == 2; - Assert.assertTrue("refs returns links", correct); - - List refsRecurse = ipfs.refs(rootRes.hash, true); - boolean correctRecurse = refsRecurse.contains(sourceRes.hash) - && refsRecurse.contains(leaf1Res.hash) - && refsRecurse.contains(leaf2Res.hash) - && refsRecurse.size() == 3; - Assert.assertTrue("refs returns links", correctRecurse); - } - - /** - * Test that merkle links as a root object are followed during recursive pins - */ - @Test - public void rootMerkleLink() throws IOException { - Random r = new Random(); - CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!" 
+ r.nextInt()).getBytes()); - byte[] rawTarget = target.toByteArray(); - MerkleNode block1 = ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); - Multihash block1Hash = block1.hash; - byte[] retrievedObj1 = ipfs.block.get(block1Hash); - assertArrayEquals("get inverse of put", retrievedObj1, rawTarget); - - CborObject.CborMerkleLink cbor2 = new CborObject.CborMerkleLink(block1.hash); - byte[] obj2 = cbor2.toByteArray(); - MerkleNode block2 = ipfs.block.put(Collections.singletonList(obj2), Optional.of("cbor")).get(0); - byte[] retrievedObj2 = ipfs.block.get(block2.hash); - assertArrayEquals("get inverse of put", retrievedObj2, obj2); - - List add = ipfs.pin.add(block2.hash); - ipfs.repo.gc(); - ipfs.repo.gc(); - - byte[] bytes = ipfs.block.get(block1.hash); - assertArrayEquals("same contents after GC", bytes, rawTarget); - // These commands can be used to reproduce this on the command line - String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; - String reproCommand2 = "printf \"" + toEscapedHex(obj2) + "\" | ipfs block put --format=cbor"; - } - - /** - * Test that a cbor null is allowed as an object root - */ - @Test - public void rootNull() throws IOException { - CborObject.CborNull cbor = new CborObject.CborNull(); - byte[] obj = cbor.toByteArray(); - MerkleNode block = ipfs.block.put(Collections.singletonList(obj), Optional.of("cbor")).get(0); - byte[] retrievedObj = ipfs.block.get(block.hash); - assertArrayEquals("get inverse of put", retrievedObj, obj); - - List add = ipfs.pin.add(block.hash); - ipfs.repo.gc(); - ipfs.repo.gc(); - - // These commands can be used to reproduce this on the command line - String reproCommand1 = "printf \"" + toEscapedHex(obj) + "\" | ipfs block put --format=cbor"; - } - - /** - * Test that merkle links in a cbor list are followed during recursive pins - */ - @Test - public void merkleLinkInList() throws IOException { - Random r = new Random(); - CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!" 
+ r.nextInt()).getBytes()); - byte[] rawTarget = target.toByteArray(); - MerkleNode targetRes = ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); - - CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); - CborObject.CborList source = new CborObject.CborList(Collections.singletonList(link)); - byte[] rawSource = source.toByteArray(); - MerkleNode sourceRes = ipfs.block.put(Collections.singletonList(rawSource), Optional.of("cbor")).get(0); - - List add = ipfs.pin.add(sourceRes.hash); - ipfs.repo.gc(); - ipfs.repo.gc(); - - byte[] bytes = ipfs.block.get(targetRes.hash); - assertArrayEquals("same contents after GC", bytes, rawTarget); - // These commands can be used to reproduce this on the command line - String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; - String reproCommand2 = "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; - } - - @Test - public void fileContentsTest() throws IOException { - ipfs.repo.gc(); - List local = ipfs.refs.local(); - for (Multihash hash: local) { - try { - Map ls = ipfs.file.ls(hash); - return; - } catch (Exception e) {} // non unixfs files will throw an exception here - } - } - - @Test - @Ignore - public void repoTest() throws IOException { - ipfs.repo.gc(); - Multihash res = ipfs.repo.ls(); - //String migration = ipfs.repo.migrate(false); - RepoStat stat = ipfs.repo.stat(false); - RepoStat stat2 = ipfs.repo.stat(true); - Map verify = ipfs.repo.verify(); - Map version = ipfs.repo.version(); - } - @Test - @Ignore("name test may hang forever") - public void nameTest() throws IOException { - MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); - Map pub = ipfs.name.publish(pointer.hash); - String name = "key" + System.nanoTime(); - Object gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); - Map mykey = ipfs.name.publish(pointer.hash, Optional.of(name)); - String resolved = ipfs.name.resolve(Cid.decode((String) pub.get("Name"))); - } - - public void mountTest() throws IOException { - Map mount = ipfs.mount(null, null); + return res.toString(); + } + + /** Test that merkle links in values of a cbor map are followed during recursive pins */ + @Test + public void merkleLinkInMap() throws IOException { + Random r = new Random(); + CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!").getBytes()); + byte[] rawTarget = target.toByteArray(); + MerkleNode targetRes = + ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); + + CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); + Map m = new TreeMap<>(); + m.put("alink", link); + m.put("arr", new CborObject.CborList(Collections.emptyList())); + CborObject.CborMap source = CborObject.CborMap.build(m); + byte[] rawSource = source.toByteArray(); + MerkleNode sourceRes = + ipfs.block.put(Collections.singletonList(rawSource), Optional.of("cbor")).get(0); + + CborObject.fromByteArray(rawSource); + + List add = ipfs.pin.add(sourceRes.hash); + ipfs.repo.gc(); + ipfs.repo.gc(); + + List refs = ipfs.refs(sourceRes.hash, true); + Assert.assertTrue("refs returns links", refs.contains(targetRes.hash)); + + byte[] bytes = ipfs.block.get(targetRes.hash); + assertArrayEquals("same contents after GC", bytes, rawTarget); + // These commands can be used to reproduce this on the command line + String reproCommand1 = + "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put 
--format=cbor"; + String reproCommand2 = + "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; + } + + @Test + public void recursiveRefs() throws IOException { + CborObject.CborByteArray leaf1 = new CborObject.CborByteArray(("G'day IPFS!").getBytes()); + byte[] rawLeaf1 = leaf1.toByteArray(); + MerkleNode leaf1Res = + ipfs.block.put(Collections.singletonList(rawLeaf1), Optional.of("cbor")).get(0); + + CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(leaf1Res.hash); + Map m = new TreeMap<>(); + m.put("link1", link); + CborObject.CborMap source = CborObject.CborMap.build(m); + MerkleNode sourceRes = + ipfs.block.put(Collections.singletonList(source.toByteArray()), Optional.of("cbor")).get(0); + + CborObject.CborByteArray leaf2 = + new CborObject.CborByteArray(("G'day again, IPFS!").getBytes()); + byte[] rawLeaf2 = leaf2.toByteArray(); + MerkleNode leaf2Res = + ipfs.block.put(Collections.singletonList(rawLeaf2), Optional.of("cbor")).get(0); + + Map m2 = new TreeMap<>(); + m2.put("link1", new CborObject.CborMerkleLink(sourceRes.hash)); + m2.put("link2", new CborObject.CborMerkleLink(leaf2Res.hash)); + CborObject.CborMap source2 = CborObject.CborMap.build(m2); + MerkleNode rootRes = + ipfs.block + .put(Collections.singletonList(source2.toByteArray()), Optional.of("cbor")) + .get(0); + + List refs = ipfs.refs(rootRes.hash, false); + boolean correct = + refs.contains(sourceRes.hash) && refs.contains(leaf2Res.hash) && refs.size() == 2; + Assert.assertTrue("refs returns links", correct); + + List refsRecurse = ipfs.refs(rootRes.hash, true); + boolean correctRecurse = + refsRecurse.contains(sourceRes.hash) + && refsRecurse.contains(leaf1Res.hash) + && refsRecurse.contains(leaf2Res.hash) + && refsRecurse.size() == 3; + Assert.assertTrue("refs returns links", correctRecurse); + } + + /** Test that merkle links as a root object are followed during recursive pins */ + @Test + public void rootMerkleLink() throws IOException { + Random r = new Random(); + CborObject.CborByteArray target = + new CborObject.CborByteArray(("g'day IPFS!" 
+ r.nextInt()).getBytes()); + byte[] rawTarget = target.toByteArray(); + MerkleNode block1 = + ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); + Multihash block1Hash = block1.hash; + byte[] retrievedObj1 = ipfs.block.get(block1Hash); + assertArrayEquals("get inverse of put", retrievedObj1, rawTarget); + + CborObject.CborMerkleLink cbor2 = new CborObject.CborMerkleLink(block1.hash); + byte[] obj2 = cbor2.toByteArray(); + MerkleNode block2 = ipfs.block.put(Collections.singletonList(obj2), Optional.of("cbor")).get(0); + byte[] retrievedObj2 = ipfs.block.get(block2.hash); + assertArrayEquals("get inverse of put", retrievedObj2, obj2); + + List add = ipfs.pin.add(block2.hash); + ipfs.repo.gc(); + ipfs.repo.gc(); + + byte[] bytes = ipfs.block.get(block1.hash); + assertArrayEquals("same contents after GC", bytes, rawTarget); + // These commands can be used to reproduce this on the command line + String reproCommand1 = + "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; + String reproCommand2 = "printf \"" + toEscapedHex(obj2) + "\" | ipfs block put --format=cbor"; + } + + /** Test that a cbor null is allowed as an object root */ + @Test + public void rootNull() throws IOException { + CborObject.CborNull cbor = new CborObject.CborNull(); + byte[] obj = cbor.toByteArray(); + MerkleNode block = ipfs.block.put(Collections.singletonList(obj), Optional.of("cbor")).get(0); + byte[] retrievedObj = ipfs.block.get(block.hash); + assertArrayEquals("get inverse of put", retrievedObj, obj); + + List add = ipfs.pin.add(block.hash); + ipfs.repo.gc(); + ipfs.repo.gc(); + + // These commands can be used to reproduce this on the command line + String reproCommand1 = "printf \"" + toEscapedHex(obj) + "\" | ipfs block put --format=cbor"; + } + + /** Test that merkle links in a cbor list are followed during recursive pins */ + @Test + public void merkleLinkInList() throws IOException { + Random r = new Random(); + CborObject.CborByteArray target = + new CborObject.CborByteArray(("g'day IPFS!" 
+ r.nextInt()).getBytes()); + byte[] rawTarget = target.toByteArray(); + MerkleNode targetRes = + ipfs.block.put(Collections.singletonList(rawTarget), Optional.of("cbor")).get(0); + + CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); + CborObject.CborList source = new CborObject.CborList(Collections.singletonList(link)); + byte[] rawSource = source.toByteArray(); + MerkleNode sourceRes = + ipfs.block.put(Collections.singletonList(rawSource), Optional.of("cbor")).get(0); + + List add = ipfs.pin.add(sourceRes.hash); + ipfs.repo.gc(); + ipfs.repo.gc(); + + byte[] bytes = ipfs.block.get(targetRes.hash); + assertArrayEquals("same contents after GC", bytes, rawTarget); + // These commands can be used to reproduce this on the command line + String reproCommand1 = + "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; + String reproCommand2 = + "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; + } + + @Test + public void fileContentsTest() throws IOException { + ipfs.repo.gc(); + List local = ipfs.refs.local(); + for (Multihash hash : local) { + try { + Map ls = ipfs.file.ls(hash); + return; + } catch (Exception e) { + } // non unixfs files will throw an exception here } - - @Test - @Ignore("dhtTest may fail with timeout") - public void dhtTest() throws IOException { - MerkleNode raw = ipfs.block.put("Mathematics is wonderful".getBytes(), Optional.of("raw")); -// Map get = ipfs.dht.get(raw.hash); -// Map put = ipfs.dht.put("somekey", "somevalue"); - List> findprovs = ipfs.dht.findprovs(raw.hash); - List peers = ipfs.swarm.peers(); - Map query = ipfs.dht.query(peers.get(0).id); - Map find = ipfs.dht.findpeer(peers.get(0).id); - } - - @Test - public void localId() throws Exception { - Map id = ipfs.id(); - } - - @Test - public void statsTest() throws IOException { - Map stats = ipfs.stats.bw(); - Map bitswap = ipfs.stats.bitswap(true); - Map dht = ipfs.stats.dht(); - //{"Message":"can only return stats if Experimental.AcceleratedDHTClient is enabled","Code":0,"Type":"error"} - //requires Map provide = ipfs.stats.provide(); - RepoStat repo = ipfs.stats.repo(false); - } - - public void resolveTest() throws IOException { - Multihash hash = Multihash.fromBase58("QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy"); - Map res = ipfs.resolve("ipns", hash, false); - } - - @Test - @Ignore - public void swarmTest() throws IOException { - Map> addrs = ipfs.swarm.addrs(); - if (addrs.size() > 0) { - boolean contacted = addrs.entrySet().stream() - .anyMatch(e -> { - Multihash target = e.getKey(); - List nodeAddrs = e.getValue(); - boolean contactable = nodeAddrs.stream() - .anyMatch(addr -> { - try { - MultiAddress peer = new MultiAddress(addr.toString() + "/ipfs/" + target.toBase58()); - Map connect = ipfs.swarm.connect(peer); - Map disconnect = ipfs.swarm.disconnect(peer); - return true; - } catch (Exception ex) { - return false; - } + } + + @Test + @Ignore + public void repoTest() throws IOException { + ipfs.repo.gc(); + Multihash res = ipfs.repo.ls(); + // String migration = ipfs.repo.migrate(false); + RepoStat stat = ipfs.repo.stat(false); + RepoStat stat2 = ipfs.repo.stat(true); + Map verify = ipfs.repo.verify(); + Map version = ipfs.repo.version(); + } + + @Test + @Ignore("name test may hang forever") + public void nameTest() throws IOException { + MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); + Map pub = ipfs.name.publish(pointer.hash); + String name = "key" + System.nanoTime(); + Object 
gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); + Map mykey = ipfs.name.publish(pointer.hash, Optional.of(name)); + String resolved = ipfs.name.resolve(Cid.decode((String) pub.get("Name"))); + } + + public void mountTest() throws IOException { + Map mount = ipfs.mount(null, null); + } + + @Test + @Ignore("dhtTest may fail with timeout") + public void dhtTest() throws IOException { + MerkleNode raw = ipfs.block.put("Mathematics is wonderful".getBytes(), Optional.of("raw")); + // Map get = ipfs.dht.get(raw.hash); + // Map put = ipfs.dht.put("somekey", "somevalue"); + List> findprovs = ipfs.dht.findprovs(raw.hash); + List peers = ipfs.swarm.peers(); + Map query = ipfs.dht.query(peers.get(0).id); + Map find = ipfs.dht.findpeer(peers.get(0).id); + } + + @Test + public void localId() throws Exception { + Map id = ipfs.id(); + } + + @Test + public void statsTest() throws IOException { + Map stats = ipfs.stats.bw(); + Map bitswap = ipfs.stats.bitswap(true); + Map dht = ipfs.stats.dht(); + // {"Message":"can only return stats if Experimental.AcceleratedDHTClient is + // enabled","Code":0,"Type":"error"} + // requires Map provide = ipfs.stats.provide(); + RepoStat repo = ipfs.stats.repo(false); + } + + public void resolveTest() throws IOException { + Multihash hash = Multihash.fromBase58("QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy"); + Map res = ipfs.resolve("ipns", hash, false); + } + + @Test + @Ignore + public void swarmTest() throws IOException { + Map> addrs = ipfs.swarm.addrs(); + if (addrs.size() > 0) { + boolean contacted = + addrs.entrySet().stream() + .anyMatch( + e -> { + Multihash target = e.getKey(); + List nodeAddrs = e.getValue(); + boolean contactable = + nodeAddrs.stream() + .anyMatch( + addr -> { + try { + MultiAddress peer = + new MultiAddress( + addr.toString() + "/ipfs/" + target.toBase58()); + Map connect = ipfs.swarm.connect(peer); + Map disconnect = ipfs.swarm.disconnect(peer); + return true; + } catch (Exception ex) { + return false; + } }); - try { - Map id = ipfs.id(target); - Map ping = ipfs.ping(target); - return contactable; - } catch (Exception ex) { - // not all nodes have to be contactable - return false; - } - }); - if (!contacted) - throw new IllegalStateException("Couldn't contact any node!"); - } - List peers = ipfs.swarm.peers(); - } - - @Test - public void versionTest() throws IOException { - Map listenAddrs = ipfs.version.versionDeps(); - System.currentTimeMillis(); - } - - @Test - public void swarmTestFilters() throws IOException { - // on GH CI we run this in "server" profile that packs a TON of filters - // See https://github.com/ipfs/kubo/blob/c1fd4d70f58e682bfe73fa4b50d17581c823c671/config/profile.go#L27 - Map listenAddrs = ipfs.swarm.listenAddrs(); - Map localAddrs = ipfs.swarm.localAddrs(true); - String multiAddrFilter = "/ip4/192.168.0.0/ipcidr/16"; - Map rm = ipfs.swarm.rmFilter(multiAddrFilter); - Map filters = ipfs.swarm.filters(); - List filtersList = (List)filters.get("Strings"); - Assert.assertTrue("Filters empty", filtersList == null || !filtersList.contains(multiAddrFilter)); - - Map added = ipfs.swarm.addFilter(multiAddrFilter); - filters = ipfs.swarm.filters(); - filtersList = (List)filters.get("Strings"); - Assert.assertFalse("Filters NOT empty", filtersList.isEmpty()); - rm = ipfs.swarm.rmFilter(multiAddrFilter); - } - - @Test - @Ignore - public void swarmTestPeering() throws IOException { - String id = "INSERT_VAL_HERE"; - Multihash hash = Multihash.fromBase58(id); - String peer = "/ip6/::1/tcp/4001/p2p/" + id; - 
MultiAddress ma = new MultiAddress(peer); - Map addPeering = ipfs.swarm.addPeering(ma); - Map lsPeering = ipfs.swarm.lsPeering(); - List peeringList = (List)lsPeering.get("Peers"); - Assert.assertFalse("Filters not empty", peeringList.isEmpty()); - Map rmPeering = ipfs.swarm.rmPeering(hash); - lsPeering = ipfs.swarm.lsPeering(); - peeringList = (List)lsPeering.get("Peers"); - Assert.assertTrue("Filters empty", peeringList.isEmpty()); - } - - @Test - public void bitswapTest() throws IOException { - List peers = ipfs.swarm.peers(); - Map ledger = ipfs.bitswap.ledger(peers.get(0).id); - Map want = ipfs.bitswap.wantlist(peers.get(0).id); - //String reprovide = ipfs.bitswap.reprovide(); - Map stat = ipfs.bitswap.stat(); - Map stat2 = ipfs.bitswap.stat(true); - } - - @Ignore("AutoConf.Enabled=true is default; prevents bootstrap removal") - @Test - public void bootstrapTest() throws IOException { - List bootstrap = ipfs.bootstrap.list(); - List rm = ipfs.bootstrap.rm(bootstrap.get(0), false); - List add = ipfs.bootstrap.add(bootstrap.get(0)); - List defaultPeers = ipfs.bootstrap.add(); - List peers = ipfs.bootstrap.list(); - } - - @Test - public void cidTest() throws IOException { - List bases = ipfs.cid.bases(true, true); - List codecs = ipfs.cid.codecs(true, true); - Map stat = ipfs.files.stat("/"); - String rootFolderHash = (String)stat.get("Hash"); - Map base32 = ipfs.cid.base32(Cid.decode(rootFolderHash)); - Map format = ipfs.cid.format(Cid.decode(rootFolderHash), - Optional.of("%s"), Optional.of("1"), - Optional.empty(), Optional.empty()); - - List hashes = ipfs.cid.hashes(false, false); - - System.currentTimeMillis(); - } - - @Test - public void diagTest() throws IOException { - Map config = ipfs.config.show(); - Object api = ipfs.config.get("Addresses.API"); - Object val = ipfs.config.get("Datastore.GCPeriod"); - Map setResult = ipfs.config.set("Datastore.GCPeriod", val); - ipfs.config.replace(new NamedStreamable.ByteArrayWrapper(JSONParser.toString(config).getBytes())); -// Object log = ipfs.log(); - Map sys = ipfs.diag.sys(); - List cmds = ipfs.diag.cmds(); - String res = ipfs.diag.clearCmds(); - List cmds2 = ipfs.diag.cmds(true); - //res = ipfs.diag.profile(); - //String profile = "default"; - //ipfs.config.profileApply(profile, true); - //Map entry = ipfs.config("Addresses.API", Optional.of("/ip4/127.0.0.1/tcp/5001"), Optional.empty()); - } - - @Test - public void toolsTest() throws IOException { - String version = ipfs.version(); - int major = Integer.parseInt(version.split("\\.")[0]); - int minor = Integer.parseInt(version.split("\\.")[1]); - assertTrue(major >= 0 && minor >= 4); // Requires at least 0.4.0 - Map commands = ipfs.commands(); - } - - @Test(expected = RuntimeException.class) - public void testTimeoutFail() throws IOException { - IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000); - ipfs.cat(Multihash.fromBase58("QmYpbSXyiCTYCbyMpzrQNix72nBYB8WRv6i39JqRc8C1ry")); - } - - @Test - public void testTimeoutOK() throws IOException { - IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000); - ipfs.cat(Multihash.fromBase58("Qmaisz6NMhDB51cCvNWa1GMS7LU1pAxdF4Ld6Ft9kZEP2a")); - } - - @Test - public void addArgsTest() { - AddArgs args = AddArgs.Builder.newInstance() - .setInline() - .setCidVersion(1) - .build(); - String res = args.toString(); - assertEquals("args toString() format", "[cid-version = 1, inline = true]", res); - String queryStr = args.toQueryString(); - assertEquals("args toQueryString() format", 
"inline=true&cid-version=1", queryStr); - } - - // this api is disabled until deployment over IPFS is enabled - public void updateTest() throws IOException { - Object check = ipfs.update.check(); - Object update = ipfs.update(); - } - - private byte[] randomBytes(int len) { - byte[] res = new byte[len]; - r.nextBytes(res); - return res; + try { + Map id = ipfs.id(target); + Map ping = ipfs.ping(target); + return contactable; + } catch (Exception ex) { + // not all nodes have to be contactable + return false; + } + }); + if (!contacted) throw new IllegalStateException("Couldn't contact any node!"); } + List peers = ipfs.swarm.peers(); + } + + @Test + public void versionTest() throws IOException { + Map listenAddrs = ipfs.version.versionDeps(); + System.currentTimeMillis(); + } + + @Test + public void swarmTestFilters() throws IOException { + // on GH CI we run this in "server" profile that packs a TON of filters + // See + // https://github.com/ipfs/kubo/blob/c1fd4d70f58e682bfe73fa4b50d17581c823c671/config/profile.go#L27 + Map listenAddrs = ipfs.swarm.listenAddrs(); + Map localAddrs = ipfs.swarm.localAddrs(true); + String multiAddrFilter = "/ip4/192.168.0.0/ipcidr/16"; + Map rm = ipfs.swarm.rmFilter(multiAddrFilter); + Map filters = ipfs.swarm.filters(); + List filtersList = (List) filters.get("Strings"); + Assert.assertTrue( + "Filters empty", filtersList == null || !filtersList.contains(multiAddrFilter)); + + Map added = ipfs.swarm.addFilter(multiAddrFilter); + filters = ipfs.swarm.filters(); + filtersList = (List) filters.get("Strings"); + Assert.assertFalse("Filters NOT empty", filtersList.isEmpty()); + rm = ipfs.swarm.rmFilter(multiAddrFilter); + } + + @Test + @Ignore + public void swarmTestPeering() throws IOException { + String id = "INSERT_VAL_HERE"; + Multihash hash = Multihash.fromBase58(id); + String peer = "/ip6/::1/tcp/4001/p2p/" + id; + MultiAddress ma = new MultiAddress(peer); + Map addPeering = ipfs.swarm.addPeering(ma); + Map lsPeering = ipfs.swarm.lsPeering(); + List peeringList = (List) lsPeering.get("Peers"); + Assert.assertFalse("Filters not empty", peeringList.isEmpty()); + Map rmPeering = ipfs.swarm.rmPeering(hash); + lsPeering = ipfs.swarm.lsPeering(); + peeringList = (List) lsPeering.get("Peers"); + Assert.assertTrue("Filters empty", peeringList.isEmpty()); + } + + @Test + public void bitswapTest() throws IOException { + List peers = ipfs.swarm.peers(); + Map ledger = ipfs.bitswap.ledger(peers.get(0).id); + Map want = ipfs.bitswap.wantlist(peers.get(0).id); + // String reprovide = ipfs.bitswap.reprovide(); + Map stat = ipfs.bitswap.stat(); + Map stat2 = ipfs.bitswap.stat(true); + } + + @Ignore("AutoConf.Enabled=true is default; prevents bootstrap removal") + @Test + public void bootstrapTest() throws IOException { + List bootstrap = ipfs.bootstrap.list(); + List rm = ipfs.bootstrap.rm(bootstrap.get(0), false); + List add = ipfs.bootstrap.add(bootstrap.get(0)); + List defaultPeers = ipfs.bootstrap.add(); + List peers = ipfs.bootstrap.list(); + } + + @Test + public void cidTest() throws IOException { + List bases = ipfs.cid.bases(true, true); + List codecs = ipfs.cid.codecs(true, true); + Map stat = ipfs.files.stat("/"); + String rootFolderHash = (String) stat.get("Hash"); + Map base32 = ipfs.cid.base32(Cid.decode(rootFolderHash)); + Map format = + ipfs.cid.format( + Cid.decode(rootFolderHash), + Optional.of("%s"), + Optional.of("1"), + Optional.empty(), + Optional.empty()); + + List hashes = ipfs.cid.hashes(false, false); + + System.currentTimeMillis(); + } + + @Test 
+  public void diagTest() throws IOException {
+    Map config = ipfs.config.show();
+    Object api = ipfs.config.get("Addresses.API");
+    Object val = ipfs.config.get("Datastore.GCPeriod");
+    Map setResult = ipfs.config.set("Datastore.GCPeriod", val);
+    ipfs.config.replace(
+        new NamedStreamable.ByteArrayWrapper(JSONParser.toString(config).getBytes()));
+    // Object log = ipfs.log();
+    Map sys = ipfs.diag.sys();
+    List cmds = ipfs.diag.cmds();
+    String res = ipfs.diag.clearCmds();
+    List cmds2 = ipfs.diag.cmds(true);
+    // res = ipfs.diag.profile();
+    // String profile = "default";
+    // ipfs.config.profileApply(profile, true);
+    // Map entry = ipfs.config("Addresses.API", Optional.of("/ip4/127.0.0.1/tcp/5001"),
+    // Optional.empty());
+  }
+
+  @Test
+  public void toolsTest() throws IOException {
+    String version = ipfs.version();
+    int major = Integer.parseInt(version.split("\\.")[0]);
+    int minor = Integer.parseInt(version.split("\\.")[1]);
+    assertTrue(major > 0 || minor >= 4); // Requires at least 0.4.0
+    Map commands = ipfs.commands();
+  }
+
+  @Test(expected = RuntimeException.class)
+  public void testTimeoutFail() throws IOException {
+    IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000);
+    ipfs.cat(Multihash.fromBase58("QmYpbSXyiCTYCbyMpzrQNix72nBYB8WRv6i39JqRc8C1ry"));
+  }
+
+  @Test
+  public void testTimeoutOK() throws IOException {
+    IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000);
+    ipfs.cat(Multihash.fromBase58("Qmaisz6NMhDB51cCvNWa1GMS7LU1pAxdF4Ld6Ft9kZEP2a"));
+  }
+
+  @Test
+  public void addArgsTest() {
+    AddArgs args = AddArgs.Builder.newInstance().setInline().setCidVersion(1).build();
+    String res = args.toString();
+    assertEquals("args toString() format", "[cid-version = 1, inline = true]", res);
+    String queryStr = args.toQueryString();
+    assertEquals("args toQueryString() format", "inline=true&cid-version=1", queryStr);
+  }
+
+  // this api is disabled until deployment over IPFS is enabled
+  public void updateTest() throws IOException {
+    Object check = ipfs.update.check();
+    Object update = ipfs.update();
+  }
+
+  private byte[] randomBytes(int len) {
+    byte[] res = new byte[len];
+    r.nextBytes(res);
+    return res;
+  }
 }
diff --git a/src/test/java/io/ipfs/api/AddTest.java b/src/test/java/io/ipfs/api/AddTest.java
index ae3a9dd5..8e97bf91 100644
--- a/src/test/java/io/ipfs/api/AddTest.java
+++ b/src/test/java/io/ipfs/api/AddTest.java
@@ -1,43 +1,51 @@
 package io.ipfs.api;
-import io.ipfs.multibase.*;
-import org.junit.*;
-
-import java.io.*;
-import java.net.*;
+import io.ipfs.multibase.Base16;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URISyntaxException;
+import java.net.URL;
+import org.junit.Test;
 public class AddTest {
-    @Test
-    public void add() throws IOException, URISyntaxException {
-        String boundary = "At7ncPkda6xyWozoimjCd6aRySM13bEH";
-        byte[] multipartBody =
Base16.decode("2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663686170220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663686170253246636830312e68746d6c223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a3c21444f43545950452068746d6c3e0a3c68746d6c206c616e673d22656e223e0a3c686561643e0a202020203c7469746c653e495046533c2f7469746c653e0a202020203c6d65746120636861727365743d227574662d3822202f3e0a202020203c6c696e6b2072656c3d227374796c6573686565742220687265663d222e2f2e2e2f6373732f64656661756c742e637373223e0a3c2f686561643e0a3c626f64793e0a3c703e3c696d67207372633d222e2e2f696d672f6c6f676f2e706e672220616c743d226c6f676f22202f3e203c6120687265663d222e2e2f524541444d452e68746d6c223e486f6d653c2f613e3c2f703e0a3c68323e436861707465722030313c2f68323e0a3c703e5961646120796164613c2f703e0a3c2f626f64793e0a3c2f68746d6c3e0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696d67220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696d672532466c6f676f2e706e67223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a89504e470d0a1a0a0000000d4948445200000090000000900806000000e746e2b8000000017352474200aece1ce9000000097048597300000b1300000b1301009a9c180000015969545874584d4c3a636f6d2e61646f62652e786d7000000000003c783a786d706d65746120786d6c6e733a783d2261646f62653a6e733a6d6574612f2220783a786d70746b3d22584d5020436f726520352e342e30223e0a2020203c7264663a52444620786d6c6e733a7264663d22687474703a2f2f7777772e77332e6f72672f313939392f30322f32322d7264662d73796e7461782d6e7323223e0a2020202020203c7264663a4465736372697074696f6e207264663a61626f75743d22220a202020202020202020202020786d6c6e733a746966663d22687474703a2f2f6e732e61646f62652e636f6d2f746966662f312e302f223e0a2020202020202020203c746966663a4f7269656e746174696f6e3e313c2f746966663a4f7269656e746174696f6e3e0a2020202020203c2f7264663a4465736372697074696f6e3e0a2020203c2f7264663a5244463e0a3c2f783a786d706d6574613e0a4cc227590000187b494441547801ed5d0b701dd5793ebb7b1f7a5b96e4277e90f018c084ce14525a024190804d93143253b96d42da8921a584694348681a62888c259b8c01134f6b4220b8cc94342052d292740a8c53f3340d246d99620a8618087e5b92653daeeebdfbe8f79dbb475a5fddab7baf2cd952f9ff994fbb77f79cb367bff3ed7ffe73f621a5c484016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006
840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401828c9401058adeded318565c9b4924018883060b53dfaa813f9adc2df22a42829b23e9681a8702edcbc79f6c51b367cb175e3c6469332badf6c93e5cc60604aaffef6f6769b3460e97379c986bb3fa702fff6784ded29eed0e04ecbb26fdb7af34d3f0ad31c9596dbc4a63f03532420c6396b9c6deded2e2968ddb0b1d50afcd54e3cfe8900bf7dd7cddab1589cfbbc6cf6693be174fcfcc61b9fe5ef56c447c8e7619549c5a63903932e2076475d2b575200ea931b377ed87783359eeb7e3e565d6df9e9b4ef7baeb26cc70e3ccf8788949d4cda6e2a8575e71f3ce5b73f7bf3cdbb98375a0e7f8b4d4f06264d406d6d6d4e575717bbaaa0adbd3dd15dd77053e0fadfb413f1068847d52d5ce841344eaaa747f9998cb2130955ddd4ac2cc7f2fa77ef71bce1616eef0b9cf8fafe86eabb7f79dd75598ed4da56aeb451ae16e4f4a4f0835dab631710e29cb6d7ceb2baba725ee7d2bbbffbd920ebae7512896556cc51b5f3e6b9c9d9b31d1c68e45841e02bc43f23cc07b0e1c3bdded0befd31dff3959749ff8f72d4addbbefef59f30515b1bbcdab21d810a63a9918cb272c2191869d409d484f3392371ce251bee395ff9feadca569f8278e05d9adcaa961607718f05cf83e24d48630e19fee6c2b2e0891c08271ba4ba0f79c33d3d312f9d5681653d613baae3e737ddf40bd60fc793f888444c2333ad595195a2f149eba64d8bacb4b71ab1cdaa5832194f3634f83573e628745b361c0b2266f46a104849635adb46524b31261a3a7040a5fbfb6d379dced84eec079613744248bb594ef4f825cb950453ca40192d3b7afc767457edfc89ae84ebff5ed7f015e5f9ab63f17893934cea38275157e72076c168bd4ce18c169f5b435e0b42a2650606bc81bd7b1d7a232f9be9568eb376dbd7beba092a6377a6eb827ae829825c66f97bbc19284f408c73ce429c138eae2eb9f39ee581ef75a07b3acf46d7533d779e5bdd84380786ed3887f28a1dff6429246a310810787b43fbf7c702c4479801f88513b36e85377a8af9b537da21f1d1f85c4eddde522d7d749c73d7a6df0a7c77b5e5fb7fe85455a9aac646af7ace1c1b314f2ece6137544e7755eef984e5313ef2d3197ff0e08120ddd7a7476ccab11fb503bb73ebcd37becae25a73f1919e772ab7784977ec0c14155034ceb862d3a639c369ef6f02cfbd1e5d5575bcae2ea89b37cf479ce35414e74cb4be1452181f655343dee0befd767670d0427c947262ce665ff97760c47688c547eb3dd1c349bef21938eae6a6ce86b997768cb137df70838e2d5a37dc759d9bf51e7162b1cbe2d5d5f1fa458bbcdab97311d7c66c1de730d3647a9d427537e563f80f6f67271b1b2d88d7f3329984f2830b30dff4850f2f5f7e64d7d34ffd6a4757978e8fd4b66d854a2ab68d41172f260328b6a0e5a733e90b2d5900b797634c5749d9f9c72b565f736c966d60f2725ff49866dde4296b79f409f2110b06a8b04befbce7e398365e8791d5c7f81b5d955bd3dcec20c0459c73e2e35606dabeef07a94387bcd4c183318ad8cda49fc729dcf2cc5fdff41ceb0ce3f9952257279cc23fa6614ec464288f4dabb4c1e858c85bc97ca3020ac573d5962d8d7ddd3d7721eb2acee7e02af79acf3cd38ad7d6da9c41d6663c42eed789f9cb6e0dc619edecc080dffdbfaf074e22e9e03e9bc264e40fdcde83373f7fc71dbdfab9a3f0a218a7a2a7625f0b403ed815ee04f22d860de700bc87574a9469a4e906f601262e63d944a1463905dbe7866947db041b4a18d35298af01a9bcb41481112dcbff6de06c60095003f07cb87f08e0f4c88e102c2b6c68ac953016a2adadabcbee428199b4774e5563d3aa4c7f3f7fb8a02ac6db0f0c6429a8d149c14aced31c65929691e0dac52d10d40f579ac51bb32e048fd19a7b8d6727fe1147dbaa6f858c12995f019e04c5b006f85cb8f3612caf0678f5b2b14d9a7aac3f01cc07c62398e571ff11e000c049d01f035b01ee8b36ac29fb366cff53802230c7c5eab8c6b212c05ee0626017c0bcdc4ea338ce03be015c023403a58c62a290fe197808f80d60ea88d5b13622a0653bda0208480531c57b569e0e909309c74d0dab81f7df57981de63c0f465eb3f53ccd89ecc628661e3f75f0901ad8b757719e88961d1cc29c514afb262b1133579fde57e20fbd8a3136422123916c30eeaf2a9420b2ad1aebb380c5c0b9c0f5c04f81eb803d405444f8a9bd1a9749a0d8f1b9bf9851ac34231e2ebf026c0458ef7ce37e22ff58f44c141dc10b8a75a7a88b8a68444048a42df0e0ef031fa3abdcc517abaad69e07b3c3eaf05b6fabaad98daa76c10285094354e118260ccd01cb5de258b909468b33d46
a70ef5e953902ded09d524099817ece1961b066630890cf4bc98318e29930ba9e9f91a41863f7d40b500cd13c249bbc3601e6a13976639f069e052e017865b392269f59322fbbbed701963b9eb12e14eaab405f989065f2c25905dc136e338bed58791a60f7bc1fe055c70b611eb014a058884500ebc172291e96193d6ffc1cb53102e22e5eddc959b354cddcb90a13786ab8af4f2106426ce129ccc3a8341a8eb72bb89f020b02d4d9070753111bd19f70080f50c483b8c531dcdd833ae2e10f805dad9fcd42d8b3555573b31a3ed48d6d10967616a3275ac11ac92b66d17d1d48f47700afdaa8b7a318c82b1be542e0af00c61ec3c029c066e03300d3b13c2e8df1f70bc09580116674bf496796263fd318f12cc47aa74980650f400ff868645bb1558a9e75be1678304c143de770d3e862ac80b0257003857b5a0a733daa0a424a7577abfe3d7b5480064c603be20c354861f5f6f26ebbaa6e6951763c3ec9f1518e5f2b16d302193c78500d017e3603f1042a3b3408410da9784d9daa3f6991f688ac833b30088143f063cf6cf4ac27678d5e854661e45fa18c81e845881f014f001703141a3dd145c073001b3ddf4c834545999fc6fc8e8acbe4bb0a3b19a7b17e6481dde66300f71baf56281f8f47b1fd4b082cb48d5b8fa234d30b7144c32b9f22e1153eb06f9f1a04184c13f40847de7b8f41ac8e8fe8b57833940d7c6c36721b03dea65be17e9862b0cc8b358b63621251e101341c7391ae176fa7e8fac243eaae975499e63db68a8c97db34bef114f969590bf2db0fdc00fc1760f25c867523a0620dc4b4e51069d298e5f9c847e3b1ff1b30e2e1b652acb0ce3c2e2f08531e568b5b5101e9ee885d12e31c340cafee594b972acc05a9230caae17d6235351052527b82de9d3b55555393aa9b3f5f77773a1fe78b2ae9d64c778567857023558b35dd7718b5479c03af970de31c3c2aa26a5ae628dec0a570f4c81055c524964e5bdea91727a5cc3d86602ecd7a342bb7d113b1521c1aff12308dbb04ebb442f9727b72fbc6db6fd271c963182f3827b283c7a545f7e7b614fecbe3151374c11cc505144d6e84048fc458a8f9f4d355aab747e14942ddd0b8b5a1bd551aa2ca2046626cc41889f7cb74e35218512115f9cd07d0dc541a5dd5018509429d97710ee679b48012f50dbadc91001ec21e117ab4bed3739da32f6375e18a6974b37d3296a69b6259669d029a122b4f40e6d01041ee6ab75475730be2a4593a2e6117c311908e8f3022e26f0efb6be08de8b172c36e0a1b3e82232423487a286e85707cd753437bf76bf1d0dbd0b330c6c90e0da958b24a352c5e82c0be01f973dd552ee394f1a28b9fe43f1ca21b4b852b937502f41ca6ebe128ced80558a907d88d72aa825d58b95e0d494b5b65026279a12709c2f8a80e437a765d140d476cec56383bec313e7af75d1dc3d472fe08f11145c1f885a326a6c15c138ab3d01d1ed6c3728a85e60e23ce81d7e1e8ab76de7c5d3e1e1dc9755793f6b8883ed4f1f8c306a32730dd168fc96095564a40dc5f6e839bb25e429e3f01d87d2e06389cbf06c80234a6a3d8682cdb406fa8f44fe5023247304242371283181a4f3e195ea959f5233e4a1f3eac62e8ea6c888c416fef9b6fe25e5a8b16401a62d186fcc94606ddb6f656dc4661713e87c17b7216e69b384d0091d1bb8d3e676478ca15338dffb2a2e4970d77167006c0c6e276cef61633738266592c5d743bcb35dd21477d5f0328588a681570267027f03c7000c88f7328281e8f65b0acb26de20232878010cc231dc9fa7a9538e30c3dec1fd8bd5b65e05118af70e8cd59631a5fe5314631e5c401cf8461b907cf83970e73c3724c17e8119d89734a5eaca6d4e3b63457f1e809e50e6dae683686b9ea3bb0ce74dc07d7aab601b4428d65464a4610b994a5ffb22c7a3a0a8443f7c7812a8075f83de0c7c03e80c13c47849c58e434c33b40f4582ca36c21e59f3cf24ed028a4b0b119407348cfe79a39f467dc63c7d1ad0da7e15d703e484b71f8f02c8c73381dc0f91e3d2c6f6cd422d3a3abfc607b82559ba26c8361b99c5f2864e4f6a3c06dc00a8033bf8c831e01de0028c068c3e1a7b673f1f7ef01d3368544c684dc5e033c00fc1bc0f2e859b8e4ef4b01deca30233faceaf9a14f6149d00e023b8157806dc05680b3b0b462f5cbed0dff9a4a1eb571c23f200a1ae3237a9afa934ee2db19aa1b5d18dffb8ad5d6686fc4fb6b1ca6bbe91444e7ebd9ec86c58bf386e59578f009d7f858327e1e994f05280a7a0d23866aac2f04ce06d875f08aa617603a5ef1b700c58ce5306ef9b362090a6c7f19db28184318ebc1c6df0e7c0cf82c70357031d008448d437ee2028033e6bb80bf05ee064c39e6bcb069ac4dae804cf914928e5b200ec43078104d8fa638e1e7e0d6078365de6ed077f771213955c99c784c77150ad114370d97249593814439c611d053c02a805d8cb9ba4da363
93366ea7d8e8ddf2f7e904913ff436b3008eb0f2cd343ed33c16a205cbdf0528968b008abb1930c63c1f02ee02ae005602bd80a92b56c7dad408287a1c8881f34401e676d84d515834339ce7ac350536c38c8dcb86e6b21887ecb2de07fe13f821f03840a34762c3163236d633c07500e3179295230c2b058c65ed0eb7e77b0afe66fd5826cb6010fad31058e89ba6e761c92eee1300bb5b1aebfd49e011603960ca29588f62278f7c9360100f6319c6449c18e48d4f4e2e1ae3a88b13931c71311d63a31962ac28af5492bc14a8036a016e4f01dd003dcd3b00e30c636ccc62e231691883ec343f2a58166a606e33c73362e2925d25c54dfc04a087bc1c60d7753a300cd0bb52c8df038ad67b6a058423d3e3e0f969d584d9ebc3bb76e9d8873cf39e5512cf16d5cd5fa0271a8d67629619626fa19e1ccd10e39969383664be9728948f5e85c6652151e89d913f4c536ebaa89858048541a337fd19c07b672f028b01da350005c47c3c8f31c7310560dfd419bd4b1cf7cd1a162d8a1cc482679aab631ffdf66a64cf0c594d86f5e4928d9d0f726b48370d17661977611a89622b0726fdb885e6ed641e82f532e248609d1ee95ec018bdd142f3a3d0f2b8088807d6c3f2fc6e8af1103173baae2887c69b9846c85f723f1b6926981113ebca79226314d5fcf0072f863176dc0434e6c8b261ba3160c49e89548ca2a1672d6affcf046438287abe1f941da6fbace47c8d87991bc9c4d888a3b7a2565c40a67b299a75faed18918fa162fa55f178d5c8749ff41ec5dbf8e8da9874bf13d9cc91e49ef0f708bd91fd450a4752ced3e8b99a68eae9bc0ec19bb9251d794cfdf872bab2c1799233c3ca312e33b11ac544564cb04fc1986d1cc6d3db2c06be0018e3e427e78598ae4c016186c07270871c77d4793394cff2cc842136c5c34740f8a606d77137e58366c6832cc3896f07fe1558057c08a0514c64854b82c232db289ed3807f02d885512cdc6f466405c583fd856751d9006c88eed75f572db8bb1ee39385ecd2a6a9f1c62c1f1be97b67971eed69d16bae2654e1f14e74bc7d133ad82466321df7d52893b738ae08d183e50ee015e06d8043754e74d2b3f066ec12e042a00de0bd328a8a1ee79bc0abe13ab715b4318e1e5f020bf056868767a06d36ca91f777aba6534f299879ba6ca4b8f9182c5f3bc24d5c3c3c54713b979ba1dc749552132d37ba5e4939a6abe2cc373d0abb255a13408110c62808a6a7508ce732fbb86d3df01d80a22c2a1eec1bcdfcda595d5ac1f80aaf83a70a29235cc878d113afc8b061a6d626ca592e1fe798f848086ed6f2b50c0bf5b72c1fcfbe966f6652903938f751c8c84f7564c7988b2fb2afd2d5728e5faa4c43e23a243c1bb80d7801a0b7c93772438145c5438ff42c7025700b50968d90d0d5d6a6155c93745e4d0da71f50b6752dee5bf163971ebec363e161319b4f0aeaab7bd227fe8cf72dabceb944f43261a0ef0e0efa781629c063b2313e4a8277e4b7c41aea7ec584e1a7878b156c48ef4482870012fa5e98d8ec334bdea3fa238002236fba7c2c8fe5ea3265df81721e0648c4b8a31eec2f653c873781b54007701a707ab85c8a253d1205cb74f4541498e9e25ec63a8dfb5837533f6e2b6847b75cf8850ea65cbef9fe8b504c274474117fe3f152176f5b387c7558dff8d4651f9d9de90a1a1a9b7109df6aedc6eb3f7c7b63e8c07e35fbd4d3f42d8ed1c7550be62eb891711a3e60ae3fef92e93b1ce39d7e7c47f105dc65fbd6f6f5eb9f0933b182254928708072f3959baec021a6741305c0ba4d44dcf44e65e7e381468d9f418188daf14dc427bffca5e79efc8b2f7d3c934afd39ba873d78683ed6bde3750b0fc07bbcf5c0b72326106b8c1e4baf55deb6140e0defa5797dbf7edbca1ce98be1d59f7d9e97bdfea5f5eb2ed4e241fd91a492c6657a124770bd58c54c1a2e2b291fc9c7b5fce38f9bb88c9dec4d2802d631bf6cfe26cc3eb3df6c2b5b3c284317c2e5188b7e2aee33f7ddd792b5e2dff0ddcc979d64554da2be3ea85fb0c0c7a318fc0a66e947318a7aa053f533d0253d10f343386c5574571e5e73b6f1e9190bdd550a9ee77bf812c71d2fb6b773d20b1f25d75fccaf888431272f1bca66802a1cd75a231faf5c71fffde7e03b0adf4280bd9243fbaad94d5edd82f916decab075a08d862e786374a20262798c73e0f1f06a333f42eee351587c911c17986d3f8648b9f3c575ebf4e314613d299c62de63dcf3949d1363a0a48074b1799ff9fdfdfbb75c862e632d1e943f9f8fa9d6ce9fefe2a1b1e29ff99da080e875f0017308a7c74bf7f6e8cfd8c15bbd82471b576f5f77fb93ac9bf638cb96c9bf419858fb1f73aef204141e86b1513bd7f9716fc44a2b1ed8f297e8466ec5e8a705df52e4eb385eb2a19ef1c1d1dd5a8502d2dd1586e67882d143b0edf0b15708a907130c1d2b12b1efa21efaf8ed6bd6305ed3a3471e53ecf8335
091804cf5a2f1d1a7ef7de824d7716fc110ff5accbf24aa1a66f9750b17f0c30bb9afb8f2192058e1515818039979267657488b87eefdc14307f92607a70eb2f824ec83ae1d74bedcd9f91b96153d3e7f8b9d38062624a0b0ba566be49fadacf8fe968fe24353abb1ef0fe88df096aa1ef6233eb2387f545040a75040357066b92019f34de8aebabd6cff118cacf01f7d94f533df8977fc4767fb4b3c268e17db26ff8c2ea47f7a2c8e4540fa0cd085d8af45fe0dc2f2fb1fbc12ddda5a88e823140dde9d77212607b7182cdea0ed79e30dfd6918ce03359d76ba7ea81e137f013ec6a0ff4b0f2630f96af30ecb419cd379bb7e9341e29ce9219642b5386601994275b7c2d96ccc259d7bdf7df11615ff6ae0676fc1f7836671c4366bc9122f3338e8f4e1830b7c777e68ff3e55bf882f1326f43f54d18fbcba5e3fbed4b13eb374f19dfa1fcea1e78378e41fce1992a7e172d20464ce2d1a9facd8b2e564fcf3b93541d6bb1a2f14e23bd3699f5e099380363c905e8767b2f17a0f6e60c51f8e55c5befd7c7bfbaf5956b41c53b62ca71f03932e207d8a18a1b5ae19fda7bb973ff0c0c72d37588d3bfc9771848577e6b3c33ddd717ee205ddd556e5581d2f75746c635e8973348333e6cfd408283c7dc6475cc5520fc52ebff7fb7fecc463b7e3d6c8697defbcfb16466ddfdebe6eed0f75f2302d12cbb03ce44f162103ec8e0c19576d79bcf1d2ef6cf8e247aebf7eb6d9c620d9accb521828c6801515121385bfa7d40b16ab8c6c9fa90c303ec27c0eaa2fc299a96d28f5160684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006848199cdc0ff015ad992bcc9ca2eb60000000049454e44ae4260820d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246637373220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663737325324664656661756c742e637373223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a626f6479207b0a20202020666f6e742d66616d696c793a202256657264616e61223b0a20202020636f6c6f723a20233133376362390a7d0a0a61207b0a2020202023746578742d6465636f726174696f6e3a206e6f6e653b0a20202020636f6c6f723a20233133376362390a7d0a0a612e67726179207b0a20202020636f6c6f723a20677261793b0a7d0a0a6831207b0a09666f6e742d7765696768743a206e6f726d616c3b0a20202020666f6e742d73697a653a20323070783b0a7d0a0a6832207b0a20202020666f6e742d7765696768743a206e6f726d616c3b0a09666f6e742d73697a653a20313570783b0a7d0a0a7468207b0a09746578742d616c69676e3a206c6566743b0a20202020666f6e742d7765696768743a206e6f726d616c3b0a20202020666f6e742d73697a653a20313470783b0a20202020636f6c6f723a20677261793b0a7d0a0a74642e67726179207b0a20202020636f6c6f723a20677261793b0a7d0a74722e67726179207b0a20202020636f6c6f723a20677261793b0a7d0a0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696e6465782e68746d6c223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a3c21444f43545950452068746d6c3e0a3c68746d6c206c616e673d22656e223e0a3c686561643e0a202020203c7469746c653e495046533c2f7469746c653e0a202020203c6d65746120636861727365743d227574662d3822202f3e0a202020203c6c696e6b2072656c3d227374796c6573686565
742220687265663d222e2f6373732f64656661756c742e637373223e0a3c2f686561643e0a3c626f64793e0a3c703e3c696d67207372633d22696d672f6c6f676f2e706e672220616c743d226c6f676f22202f3e203c6120687265663d22524541444d452e68746d6c223e486f6d653c2f613e3c2f703e0a3c703e3c6120687265663d22636861702f636830312e68746d6c223e63686170746572206f6e653c2f613e3c2f703e0a3c2f626f64793e0a3c2f68746d6c3e0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245482d2d0d0a"); - HttpURLConnection httpConn = (HttpURLConnection) new URL("http://localhost:5001/api/v0/add?stream-channels=true&w=false&n=true").openConnection(); -// httpConn.setUseCaches(false); - httpConn.setDoOutput(true); - httpConn.setDoInput(true); - httpConn.setRequestProperty("User-Agent", "Java IPFS Client"); - httpConn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary); - httpConn.getOutputStream().write(multipartBody); - httpConn.getOutputStream().flush(); - httpConn.getOutputStream().close(); + @Test + public void add() throws IOException, URISyntaxException { + String boundary = "At7ncPkda6xyWozoimjCd6aRySM13bEH"; + byte[] multipartBody = + Base16.decode( + "2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663686170220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663686170253246636830312e68746d6c223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a3c21444f43545950452068746d6c3e0a3c68746d6c206c616e673d22656e223e0a3c686561643e0a202020203c7469746c653e495046533c2f7469746c653e0a202020203c6d65746120636861727365743d227574662d3822202f3e0a202020203c6c696e6b2072656c3d227374796c6573686565742220687265663d222e2f2e2e2f6373732f64656661756c742e637373223e0a3c2f686561643e0a3c626f64793e0a3c703e3c696d67207372633d222e2e2f696d672f6c6f676f2e706e672220616c743d226c6f676f22202f3e203c6120687265663d222e2e2f524541444d452e68746d6c223e486f6d653c2f613e3c2f703e0a3c68323e436861707465722030313c2f68323e0a3c703e5961646120796164613c2f703e0a3c2f626f64793e0a3c2f68746d6c3e0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696d67220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696d672532466c6f676f2e706e67223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a89504e470d0a1a0a0000000d4948445200000090000000900806000000e746e2b8000000017352474200aece1ce9000000097048597300000b1300000b13010
09a9c180000015969545874584d4c3a636f6d2e61646f62652e786d7000000000003c783a786d706d65746120786d6c6e733a783d2261646f62653a6e733a6d6574612f2220783a786d70746b3d22584d5020436f726520352e342e30223e0a2020203c7264663a52444620786d6c6e733a7264663d22687474703a2f2f7777772e77332e6f72672f313939392f30322f32322d7264662d73796e7461782d6e7323223e0a2020202020203c7264663a4465736372697074696f6e207264663a61626f75743d22220a202020202020202020202020786d6c6e733a746966663d22687474703a2f2f6e732e61646f62652e636f6d2f746966662f312e302f223e0a2020202020202020203c746966663a4f7269656e746174696f6e3e313c2f746966663a4f7269656e746174696f6e3e0a2020202020203c2f7264663a4465736372697074696f6e3e0a2020203c2f7264663a5244463e0a3c2f783a786d706d6574613e0a4cc227590000187b494441547801ed5d0b701dd5793ebb7b1f7a5b96e4277e90f018c084ce14525a024190804d93143253b96d42da8921a584694348681a62888c259b8c01134f6b4220b8cc94342052d292740a8c53f3340d246d99620a8618087e5b92653daeeebdfbe8f79dbb475a5fddab7baf2cd952f9ff994fbb77f79cb367bff3ed7ffe73f621a5c484016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401828c9401058adeded318565c9b4924018883060b53dfaa813f9adc2df22a42829b23e9681a8702edcbc79f6c51b367cb175e3c6469332badf6c93e5cc60604aaffef6f6769b3460e97379c986bb3fa702fff6784ded29eed0e04ecbb26fdb7af34d3f0ad31c9596dbc4a63f03532420c6396b9c6deded2e2968ddb0b1d50afcd54e3cfe8900bf7dd7cddab1589cfbbc6cf6693be174fcfcc61b9fe5ef56c447c8e7619549c5a63903932e2076475d2b575200ea931b377ed87783359eeb7e3e565d6df9e9b4ef7baeb26cc70e3ccf8788949d4cda6e2a8575e71f3ce5b73f7bf3cdbb98375a0e7f8b4d4f06264d406d6d6d4e575717bbaaa0adbd3dd15dd77053e0fadfb413f1068847d52d5ce841344eaaa747f9998cb2130955ddd4ac2cc7f2fa77ef71bce1616eef0b9cf8fafe86eabb7f79dd75598ed4da56aeb451ae16e4f4a4f0835dab631710e29cb6d7ceb2baba725ee7d2bbbffbd920ebae7512896556cc51b5f3e6b9c9d9b31d1c68e45841e02bc43f23cc07b0e1c3bdded0befd31dff3959749ff8f72d4addbbefef59f30515b1bbcdab21d810a63a9918cb272c2191869d409d484f3392371ce251bee395ff9feadca569f8278e05d9adcaa961607718f05cf83e24d48630e19fee6c2b2e0891c08271ba4ba0f79c33d3d312f9d5681653d613baae3e737ddf40bd60fc793f888444c2333ad595195a2f149eba64d8bacb4b71ab1cdaa5832194f3634f83573e628745b361c0b2266f46a104849635adb46524b31261a3a7040a5fbfb6d379dced84eec079613744248bb594ef4f825cb950453ca40192d3b7afc767457edfc89ae84ebff5ed7f015e5f9ab63f17893934cea38275157e72076c168bd4ce18c169f5b435e0b42a2650606bc81bd7b1d7a232f9be9568eb376dbd7beba092a6377a6eb827ae829825c66f97bbc19284f408c73ce429c138eae2eb9f39ee581ef75a07b3acf46d7533d779e5bdd84380786ed3887f28a1dff6429246a310810787b43fbf7c702c4479801f88513b36e85377a8af9b537da21f1d1f85c4eddde522d7d749c73d7a6df0a7c77b5e5fb7fe85455a9aac646af7ace1c1b314f2ece6137544e7755eef984e5313ef2d3197ff0e08120ddd7a7476ccab11fb503bb73ebcd37becae25a73f1919e772ab7784977ec0c14155034ceb862d3a639c369ef6f02cfbd1e5d5575bcae2ea89b37cf479ce35414e74cb4be1452181f655343dee0befd767670d0427c947262ce665ff97760c47688c547eb3dd1c349bef21938eae6a6ce86b997768cb137df70838e2d5a37dc759d9bf51e7162b1cbe2d5d5f1fa458bbcdab97311d7c66c1de730d3647a9d427537e563f80f6f67271b1b2d88d7f3329984f2830b30dff4850f2f5f7e64d7d34ffd6a4757978e8fd4b66d854a2ab68d41172f260328b6a0e5a733e90b2d5900b797634c5749d9f9c72b565f736c966d60f2725ff49866dde4296b79f409f2110b06a8b04befbce7e398365e8791d5c7f81b5d955bd3dcec20c0459c73e2e35606dabeef07a94387bcd4c183318ad8cda49fc729dcf2cc5fdff41ceb0ce3
f9952257279cc23fa6614ec464288f4dabb4c1e858c85bc97ca3020ac573d5962d8d7ddd3d7721eb2acee7e02af79acf3cd38ad7d6da9c41d6663c42eed789f9cb6e0dc619edecc080dffdbfaf074e22e9e03e9bc264e40fdcde83373f7fc71dbdfab9a3f0a218a7a2a7625f0b403ed815ee04f22d860de700bc87574a9469a4e906f601262e63d944a1463905dbe7866947db041b4a18d35298af01a9bcb41481112dcbff6de06c60095003f07cb87f08e0f4c88e102c2b6c68ac953016a2adadabcbee428199b4774e5563d3aa4c7f3f7fb8a02ac6db0f0c6429a8d149c14aced31c65929691e0dac52d10d40f579ac51bb32e048fd19a7b8d6727fe1147dbaa6f858c12995f019e04c5b006f85cb8f3612caf0678f5b2b14d9a7aac3f01cc07c62398e571ff11e000c049d01f035b01ee8b36ac29fb366cff53802230c7c5eab8c6b212c05ee0626017c0bcdc4ea338ce03be015c023403a58c62a290fe197808f80d60ea88d5b13622a0653bda0208480531c57b569e0e909309c74d0dab81f7df57981de63c0f465eb3f53ccd89ecc628661e3f75f0901ad8b757719e88961d1cc29c514afb262b1133579fde57e20fbd8a3136422123916c30eeaf2a9420b2ad1aebb380c5c0b9c0f5c04f81eb803d405444f8a9bd1a9749a0d8f1b9bf9851ac34231e2ebf026c0458ef7ce37e22ff58f44c141dc10b8a75a7a88b8a68444048a42df0e0ef031fa3abdcc517abaad69e07b3c3eaf05b6fabaad98daa76c10285094354e118260ccd01cb5de258b909468b33d46a70ef5e953902ded09d524099817ece1961b066630890cf4bc98318e29930ba9e9f91a41863f7d40b500cd13c249bbc3601e6a13976639f069e052e017865b392269f59322fbbbed701963b9eb12e14eaab405f989065f2c25905dc136e338bed58791a60f7bc1fe055c70b611eb014a058884500ebc172291e96193d6ffc1cb53102e22e5eddc959b354cddcb90a13786ab8af4f2106426ce129ccc3a8341a8eb72bb89f020b02d4d9070753111bd19f70080f50c483b8c531dcdd833ae2e10f805dad9fcd42d8b3555573b31a3ed48d6d10967616a3275ac11ac92b66d17d1d48f47700afdaa8b7a318c82b1be542e0af00c61ec3c029c066e03300d3b13c2e8df1f70bc09580116674bf496796263fd318f12cc47aa74980650f400ff868645bb1558a9e75be1678304c143de770d3e862ac80b0257003857b5a0a733daa0a424a7577abfe3d7b5480064c603be20c354861f5f6f26ebbaa6e6951763c3ec9f1518e5f2b16d302193c78500d017e3603f1042a3b3408410da9784d9daa3f6991f688ac833b30088143f063cf6cf4ac27678d5e854661e45fa18c81e845881f014f001703141a3dd145c073001b3ddf4c834545999fc6fc8e8acbe4bb0a3b19a7b17e6481dde66300f71baf56281f8f47b1fd4b082cb48d5b8fa234d30b7144c32b9f22e1153eb06f9f1a04184c13f40847de7b8f41ac8e8fe8b57833940d7c6c36721b03dea65be17e9862b0cc8b358b63621251e101341c7391ae176fa7e8fac243eaae975499e63db68a8c97db34bef114f969590bf2db0fdc00fc1760f25c867523a0620dc4b4e51069d298e5f9c847e3b1ff1b30e2e1b652acb0ce3c2e2f08531e568b5b5101e9ee885d12e31c340cafee594b972acc05a9230caae17d6235351052527b82de9d3b55555393aa9b3f5f77773a1fe78b2ae9d64c778567857023558b35dd7718b5479c03af970de31c3c2aa26a5ae628dec0a570f4c81055c524964e5bdea91727a5cc3d86602ecd7a342bb7d113b1521c1aff12308dbb04ebb442f9727b72fbc6db6fd271c963182f3827b283c7a545f7e7b614fecbe3151374c11cc505144d6e84048fc458a8f9f4d355aab747e14942ddd0b8b5a1bd551aa2ca2046626cc41889f7cb74e35218512115f9cd07d0dc541a5dd5018509429d97710ee679b48012f50dbadc91001ec21e117ab4bed3739da32f6375e18a6974b37d3296a69b6259669d029a122b4f40e6d01041ee6ab75475730be2a4593a2e6117c311908e8f3022e26f0efb6be08de8b172c36e0a1b3e82232423487a286e85707cd753437bf76bf1d0dbd0b330c6c90e0da958b24a352c5e82c0be01f973dd552ee394f1a28b9fe43f1ca21b4b852b937502f41ca6ebe128ced80558a907d88d72aa825d58b95e0d494b5b65026279a12709c2f8a80e437a765d140d476cec56383bec313e7af75d1dc3d472fe08f11145c1f885a326a6c15c138ab3d01d1ed6c3728a85e60e23ce81d7e1e8ab76de7c5d3e1e1dc9755793f6b8883ed4f1f8c306a32730dd168fc96095564a40dc5f6e839bb25e429e3f01d87d2e06389cbf06c80234a6a3d8682cdb406fa8f44fe5023247304242371283181a4f3e195ea959f5233e4a1f3eac62e8ea6c888c416fef9b6fe25e5a8b16401a62d186fcc94606ddb6f656dc4661713e87c17b7216e69b384d0091d1bb8
d3e676478ca15338dffb2a2e4970d77167006c0c6e276cef61633738266592c5d743bcb35dd21477d5f0328588a681570267027f03c7000c88f7328281e8f65b0acb26de20232878010cc231dc9fa7a9538e30c3dec1fd8bd5b65e05118af70e8cd59631a5fe5314631e5c401cf8461b907cf83970e73c3724c17e8119d89734a5eaca6d4e3b63457f1e809e50e6dae683686b9ea3bb0ce74dc07d7aab601b4428d65464a4610b994a5ffb22c7a3a0a8443f7c7812a8075f83de0c7c03e80c13c47849c58e434c33b40f4582ca36c21e59f3cf24ed028a4b0b119407348cfe79a39f467dc63c7d1ad0da7e15d703e484b71f8f02c8c73381dc0f91e3d2c6f6cd422d3a3abfc607b82559ba26c8361b99c5f2864e4f6a3c06dc00a8033bf8c831e01de0028c068c3e1a7b673f1f7ef01d3368544c684dc5e033c00fc1bc0f2e859b8e4ef4b01deca30233faceaf9a14f6149d00e023b8157806dc05680b3b0b462f5cbed0dff9a4a1eb571c23f200a1ae3237a9afa934ee2db19aa1b5d18dffb8ad5d6686fc4fb6b1ca6bbe91444e7ebd9ec86c58bf386e59578f009d7f858327e1e994f05280a7a0d23866aac2f04ce06d875f08aa617603a5ef1b700c58ce5306ef9b362090a6c7f19db28184318ebc1c6df0e7c0cf82c70357031d008448d437ee2028033e6bb80bf05ee064c39e6bcb069ac4dae804cf914928e5b200ec43078104d8fa638e1e7e0d6078365de6ed077f771213955c99c784c77150ad114370d97249593814439c611d053c02a805d8cb9ba4da36393366ea7d8e8ddf2f7e904913ff436b3008eb0f2cd343ed33c16a205cbdf0528968b008abb1930c63c1f02ee02ae005602bd80a92b56c7dad408287a1c8881f34401e676d84d515834339ce7ac350536c38c8dcb86e6b21887ecb2de07fe13f821f03840a34762c3163236d633c07500e3179295230c2b058c65ed0eb7e77b0afe66fd5826cb6010fad31058e89ba6e761c92eee1300bb5b1aebfd49e011603960ca29588f62278f7c9360100f6319c6449c18e48d4f4e2e1ae3a88b13931c71311d63a31962ac28af5492bc14a8036a016e4f01dd003dcd3b00e30c636ccc62e231691883ec343f2a58166a606e33c73362e2925d25c54dfc04a087bc1c60d7753a300cd0bb52c8df038ad67b6a058423d3e3e0f969d584d9ebc3bb76e9d8873cf39e5512cf16d5cd5fa0271a8d67629619626fa19e1ccd10e39969383664be9728948f5e85c6652151e89d913f4c536ebaa89858048541a337fd19c07b672f028b01da350005c47c3c8f31c7310560dfd419bd4b1cf7cd1a162d8a1cc482679aab631ffdf66a64cf0c594d86f5e4928d9d0f726b48370d17661977611a89622b0726fdb885e6ed641e82f532e248609d1ee95ec018bdd142f3a3d0f2b8088807d6c3f2fc6e8af1103173baae2887c69b9846c85f723f1b6926981113ebca79226314d5fcf0072f863176dc0434e6c8b261ba3160c49e89548ca2a1672d6affcf046438287abe1f941da6fbace47c8d87991bc9c4d888a3b7a2565c40a67b299a75faed18918fa162fa55f178d5c8749ff41ec5dbf8e8da9874bf13d9cc91e49ef0f708bd91fd450a4752ced3e8b99a68eae9bc0ec19bb9251d794cfdf872bab2c1799233c3ca312e33b11ac544564cb04fc1986d1cc6d3db2c06be0018e3e427e78598ae4c016186c07270871c77d4793394cff2cc842136c5c34740f8a606d77137e58366c6832cc3896f07fe1558057c08a0514c64854b82c232db289ed3807f02d885512cdc6f466405c583fd856751d9006c88eed75f572db8bb1ee39385ecd2a6a9f1c62c1f1be97b67971eed69d16bae2654e1f14e74bc7d133ad82466321df7d52893b738ae08d183e50ee015e06d8043754e74d2b3f066ec12e042a00de0bd328a8a1ee79bc0abe13ab715b4318e1e5f020bf056868767a06d36ca91f777aba6534f299879ba6ca4b8f9182c5f3bc24d5c3c3c54713b979ba1dc749552132d37ba5e4939a6abe2cc373d0abb255a13408110c62808a6a7508ce732fbb86d3df01d80a22c2a1eec1bcdfcda595d5ac1f80aaf83a70a29235cc878d113afc8b061a6d626ca592e1fe798f848086ed6f2b50c0bf5b72c1fcfbe966f6652903938f751c8c84f7564c7988b2fb2afd2d5728e5faa4c43e23a243c1bb80d7801a0b7c93772438145c5438ff42c7025700b50968d90d0d5d6a6155c93745e4d0da71f50b6752dee5bf163971ebec363e161319b4f0aeaab7bd227fe8cf72dabceb944f43261a0ef0e0efa781629c063b2313e4a8277e4b7c41aea7ec584e1a7878b156c48ef4482870012fa5e98d8ec334bdea3fa238002236fba7c2c8fe5ea3265df81721e0648c4b8a31eec2f653c873781b54007701a707ab85c8a253d1205cb74f4541498e9e25ec63a8dfb5837533f6e2b6847b75cf8850ea65cbef9fe8b504c274474117fe3f152176f5b387c7558dff8d4651f9d9de9
0a1a1a9b7109df6aedc6eb3f7c7b63e8c07e35fbd4d3f42d8ed1c7550be62eb891711a3e60ae3fef92e93b1ce39d7e7c47f105dc65fbd6f6f5eb9f0933b182254928708072f3959baec021a6741305c0ba4d44dcf44e65e7e381468d9f418188daf14dc427bffca5e79efc8b2f7d3c934afd39ba873d78683ed6bde3750b0fc07bbcf5c0b72326106b8c1e4baf55deb6140e0defa5797dbf7edbca1ce98be1d59f7d9e97bdfea5f5eb2ed4e241fd91a492c6657a124770bd58c54c1a2e2b291fc9c7b5fce38f9bb88c9dec4d2802d631bf6cfe26cc3eb3df6c2b5b3c284317c2e5188b7e2aee33f7ddd792b5e2dff0ddcc979d64554da2be3ea85fb0c0c7a318fc0a66e947318a7aa053f533d0253d10f343386c5574571e5e73b6f1e9190bdd550a9ee77bf812c71d2fb6b773d20b1f25d75fccaf888431272f1bca66802a1cd75a231faf5c71fffde7e03b0adf4280bd9243fbaad94d5edd82f916decab075a08d862e786374a20262798c73e0f1f06a333f42eee351587c911c17986d3f8648b9f3c575ebf4e314613d299c62de63dcf3949d1363a0a48074b1799ff9fdfdfbb75c862e632d1e943f9f8fa9d6ce9fefe2a1b1e29ff99da080e875f0017308a7c74bf7f6e8cfd8c15bbd82471b576f5f77fb93ac9bf638cb96c9bf419858fb1f73aef204141e86b1513bd7f9716fc44a2b1ed8f297e8466ec5e8a705df52e4eb385eb2a19ef1c1d1dd5a8502d2dd1586e67882d143b0edf0b15708a907130c1d2b12b1efa21efaf8ed6bd6305ed3a3471e53ecf8335091804cf5a2f1d1a7ef7de824d7716fc110ff5accbf24aa1a66f9750b17f0c30bb9afb8f2192058e1515818039979267657488b87eefdc14307f92607a70eb2f824ec83ae1d74bedcd9f91b96153d3e7f8b9d38062624a0b0ba566be49fadacf8fe968fe24353abb1ef0fe88df096aa1ef6233eb2387f545040a75040357066b92019f34de8aebabd6cff118cacf01f7d94f533df8977fc4767fb4b3c268e17db26ff8c2ea47f7a2c8e4540fa0cd085d8af45fe0dc2f2fb1fbc12ddda5a88e823140dde9d77212607b7182cdea0ed79e30dfd6918ce03359d76ba7ea81e137f013ec6a0ff4b0f2630f96af30ecb419cd379bb7e9341e29ce9219642b5386601994275b7c2d96ccc259d7bdf7df11615ff6ae0676fc1f7836671c4366bc9122f3338e8f4e1830b7c777e68ff3e55bf882f1326f43f54d18fbcba5e3fbed4b13eb374f19dfa1fcea1e78378e41fce1992a7e172d20464ce2d1a9facd8b2e564fcf3b93541d6bb1a2f14e23bd3699f5e099380363c905e8767b2f17a0f6e60c51f8e55c5befd7c7bfbaf5956b41c53b62ca71f03932e207d8a18a1b5ae19fda7bb973ff0c0c72d37588d3bfc9771848577e6b3c33ddd717ee205ddd556e5581d2f75746c635e8973348333e6cfd408283c7dc6475cc5520fc52ebff7fb7fecc463b7e3d6c8697defbcfb16466ddfdebe6eed0f75f2302d12cbb03ce44f162103ec8e0c19576d79bcf1d2ef6cf8e247aebf7eb6d9c620d9accb521828c6801515121385bfa7d40b16ab8c6c9fa90c303ec27c0eaa2fc299a96d28f5160684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006840161401810068401614018100684016140181006848199cdc0ff015ad992bcc9ca2eb60000000049454e44ae4260820d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246637373220d0a436f6e74656e742d547970653a206170706c69636174696f6e2f782d6469726563746f72790d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c25324663737325324664656661756c742e637373223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a626f6479207b0a20202020666f6e742d66616d696c793a202256657264616e61223b0a20202020636f6c6f723a20233133376362390a7d0a0a61207b0a2020202023746578742d6465636f726174696f6e3a206e6f6e653b0a20202020636f6c6f723a20233133376362390a7d0a0a612e677
26179207b0a20202020636f6c6f723a20677261793b0a7d0a0a6831207b0a09666f6e742d7765696768743a206e6f726d616c3b0a20202020666f6e742d73697a653a20323070783b0a7d0a0a6832207b0a20202020666f6e742d7765696768743a206e6f726d616c3b0a09666f6e742d73697a653a20313570783b0a7d0a0a7468207b0a09746578742d616c69676e3a206c6566743b0a20202020666f6e742d7765696768743a206e6f726d616c3b0a20202020666f6e742d73697a653a20313470783b0a20202020636f6c6f723a20677261793b0a7d0a0a74642e67726179207b0a20202020636f6c6f723a20677261793b0a7d0a74722e67726179207b0a20202020636f6c6f723a20677261793b0a7d0a0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245480d0a436f6e74656e742d446973706f736974696f6e3a2066696c653b2066696c656e616d653d2268746d6c253246696e6465782e68746d6c223b0d0a436f6e74656e742d547970653a206170706c69636174696f6e2f6f637465742d73747265616d0d0a436f6e74656e742d5472616e736665722d456e636f64696e673a2062696e6172790d0a0d0a3c21444f43545950452068746d6c3e0a3c68746d6c206c616e673d22656e223e0a3c686561643e0a202020203c7469746c653e495046533c2f7469746c653e0a202020203c6d65746120636861727365743d227574662d3822202f3e0a202020203c6c696e6b2072656c3d227374796c6573686565742220687265663d222e2f6373732f64656661756c742e637373223e0a3c2f686561643e0a3c626f64793e0a3c703e3c696d67207372633d22696d672f6c6f676f2e706e672220616c743d226c6f676f22202f3e203c6120687265663d22524541444d452e68746d6c223e486f6d653c2f613e3c2f703e0a3c703e3c6120687265663d22636861702f636830312e68746d6c223e63686170746572206f6e653c2f613e3c2f703e0a3c2f626f64793e0a3c2f68746d6c3e0a0d0a2d2d4174376e63506b6461367879576f7a6f696d6a436436615279534d31336245482d2d0d0a"); + HttpURLConnection httpConn = + (HttpURLConnection) + new URL("http://localhost:5001/api/v0/add?stream-channels=true&w=false&n=true") + .openConnection(); + // httpConn.setUseCaches(false); + httpConn.setDoOutput(true); + httpConn.setDoInput(true); + httpConn.setRequestProperty("User-Agent", "Java IPFS Client"); + httpConn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary); + httpConn.getOutputStream().write(multipartBody); + httpConn.getOutputStream().flush(); + httpConn.getOutputStream().close(); - int status = httpConn.getResponseCode(); - StringBuilder b = new StringBuilder(); - if (status == HttpURLConnection.HTTP_OK) { - BufferedReader reader = new BufferedReader(new InputStreamReader(httpConn.getInputStream())); - String line; - while ((line = reader.readLine()) != null) { - b.append(line); - } - reader.close(); - httpConn.disconnect(); - } - if (b.toString().contains("rror")) - throw new IllegalStateException("Error returned from IPFS: " + b.toString()); + int status = httpConn.getResponseCode(); + StringBuilder b = new StringBuilder(); + if (status == HttpURLConnection.HTTP_OK) { + BufferedReader reader = new BufferedReader(new InputStreamReader(httpConn.getInputStream())); + String line; + while ((line = reader.readLine()) != null) { + b.append(line); + } + reader.close(); + httpConn.disconnect(); } + if (b.toString().contains("rror")) + throw new IllegalStateException("Error returned from IPFS: " + b.toString()); + } - public static void main(String[] a) throws Exception { - new AddTest().add(); - } + public static void main(String[] a) throws Exception { + new AddTest().add(); + } } diff --git a/src/test/java/io/ipfs/api/RecursiveAddTest.java b/src/test/java/io/ipfs/api/RecursiveAddTest.java index 4a6845da..d4739783 100644 --- a/src/test/java/io/ipfs/api/RecursiveAddTest.java +++ b/src/test/java/io/ipfs/api/RecursiveAddTest.java @@ -1,106 +1,105 @@ package io.ipfs.api; +import 
io.ipfs.multiaddr.MultiAddress; import java.io.File; -import java.nio.file.*; -import java.util.*; - +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Random; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; -import io.ipfs.multiaddr.MultiAddress; - public class RecursiveAddTest { - private final IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); - - static File TMPDATA = new File("target/tmpdata"); - - @BeforeClass - public static void createTmpData() { - TMPDATA.mkdirs(); - } - - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(TMPDATA); - - @Test - public void testAdd() throws Exception { - System.out.println("ipfs version: " + ipfs.version()); - - String EXPECTED = "QmX5fZ6aUxNTAS7ZfYc8f4wPoMx6LctuNbMjuJZ9EmUSr6"; - - Path base = tempFolder.newFolder().toPath(); - Files.write(base.resolve("index.html"), "".getBytes()); - Path js = base.resolve("js"); - js.toFile().mkdirs(); - Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes()); - - List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile())); - MerkleNode node = add.get(add.size() - 1); - Assert.assertEquals(EXPECTED, node.hash.toBase58()); - } - - @Test - public void binaryRecursiveAdd() throws Exception { - String EXPECTED = "Qmd1dTx4Z1PHxSHDR9jYoyLJTrYsAau7zLPE3kqo14s84d"; - - Path base = tempFolder.newFolder().toPath(); - base.toFile().mkdirs(); - byte[] bindata = new byte[1024*1024]; - new Random(28).nextBytes(bindata); - Files.write(base.resolve("data.bin"), bindata); - Path js = base.resolve("js"); - js.toFile().mkdirs(); - Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes()); - - List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile())); - MerkleNode node = add.get(add.size() - 1); - Assert.assertEquals(EXPECTED, node.hash.toBase58()); - } - - @Test - public void largeBinaryRecursiveAdd() throws Exception { - String EXPECTED = "QmZdfdj7nfxE68fBPUWAGrffGL3sYGx1MDEozMg73uD2wj"; - - Path base = tempFolder.newFolder().toPath(); - base.toFile().mkdirs(); - byte[] bindata = new byte[100 * 1024*1024]; - new Random(28).nextBytes(bindata); - Files.write(base.resolve("data.bin"), bindata); - new Random(496).nextBytes(bindata); - Files.write(base.resolve("data2.bin"), bindata); - Path js = base.resolve("js"); - js.toFile().mkdirs(); - Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes()); - - List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile())); - MerkleNode node = add.get(add.size() - 1); - Assert.assertEquals(EXPECTED, node.hash.toBase58()); - } - - @Test - public void largeBinaryInSubdirRecursiveAdd() throws Exception { - String EXPECTED = "QmUYuMwCpgaxJhNxRA5Pmje8EfpEgU3eQSB9t3VngbxYJk"; - - Path base = tempFolder.newFolder().toPath(); - base.toFile().mkdirs(); - Path bindir = base.resolve("moredata"); - bindir.toFile().mkdirs(); - byte[] bindata = new byte[100 * 1024*1024]; - new Random(28).nextBytes(bindata); - Files.write(bindir.resolve("data.bin"), bindata); - new Random(496).nextBytes(bindata); - Files.write(bindir.resolve("data2.bin"), bindata); - - Path js = base.resolve("js"); - js.toFile().mkdirs(); - Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes()); - - List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile())); - MerkleNode node = add.get(add.size() - 1); - Assert.assertEquals(EXPECTED, 
node.hash.toBase58());
-    }
+  private final IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001"));
+
+  static File TMPDATA = new File("target/tmpdata");
+
+  @BeforeClass
+  public static void createTmpData() {
+    TMPDATA.mkdirs();
+  }
+
+  @Rule public TemporaryFolder tempFolder = new TemporaryFolder(TMPDATA);
+
+  @Test
+  public void testAdd() throws Exception {
+    System.out.println("ipfs version: " + ipfs.version());
+
+    String EXPECTED = "QmX5fZ6aUxNTAS7ZfYc8f4wPoMx6LctuNbMjuJZ9EmUSr6";
+
+    Path base = tempFolder.newFolder().toPath();
+    Files.write(base.resolve("index.html"), "".getBytes());
+    Path js = base.resolve("js");
+    js.toFile().mkdirs();
+    Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes());
+
+    List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile()));
+    MerkleNode node = add.get(add.size() - 1);
+    Assert.assertEquals(EXPECTED, node.hash.toBase58());
+  }
+
+  @Test
+  public void binaryRecursiveAdd() throws Exception {
+    String EXPECTED = "Qmd1dTx4Z1PHxSHDR9jYoyLJTrYsAau7zLPE3kqo14s84d";
+
+    Path base = tempFolder.newFolder().toPath();
+    base.toFile().mkdirs();
+    byte[] bindata = new byte[1024 * 1024];
+    new Random(28).nextBytes(bindata);
+    Files.write(base.resolve("data.bin"), bindata);
+    Path js = base.resolve("js");
+    js.toFile().mkdirs();
+    Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes());
+
+    List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile()));
+    MerkleNode node = add.get(add.size() - 1);
+    Assert.assertEquals(EXPECTED, node.hash.toBase58());
+  }
+
+  @Test
+  public void largeBinaryRecursiveAdd() throws Exception {
+    String EXPECTED = "QmZdfdj7nfxE68fBPUWAGrffGL3sYGx1MDEozMg73uD2wj";
+
+    Path base = tempFolder.newFolder().toPath();
+    base.toFile().mkdirs();
+    byte[] bindata = new byte[100 * 1024 * 1024];
+    new Random(28).nextBytes(bindata);
+    Files.write(base.resolve("data.bin"), bindata);
+    new Random(496).nextBytes(bindata);
+    Files.write(base.resolve("data2.bin"), bindata);
+    Path js = base.resolve("js");
+    js.toFile().mkdirs();
+    Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes());
+
+    List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile()));
+    MerkleNode node = add.get(add.size() - 1);
+    Assert.assertEquals(EXPECTED, node.hash.toBase58());
+  }
+
+  @Test
+  public void largeBinaryInSubdirRecursiveAdd() throws Exception {
+    String EXPECTED = "QmUYuMwCpgaxJhNxRA5Pmje8EfpEgU3eQSB9t3VngbxYJk";
+
+    Path base = tempFolder.newFolder().toPath();
+    base.toFile().mkdirs();
+    Path bindir = base.resolve("moredata");
+    bindir.toFile().mkdirs();
+    byte[] bindata = new byte[100 * 1024 * 1024];
+    new Random(28).nextBytes(bindata);
+    Files.write(bindir.resolve("data.bin"), bindata);
+    new Random(496).nextBytes(bindata);
+    Files.write(bindir.resolve("data2.bin"), bindata);
+
+    Path js = base.resolve("js");
+    js.toFile().mkdirs();
+    Files.write(js.resolve("func.js"), "function() {console.log('Hey');}".getBytes());
+
+    List add = ipfs.add(new NamedStreamable.FileWrapper(base.toFile()));
+    MerkleNode node = add.get(add.size() - 1);
+    Assert.assertEquals(EXPECTED, node.hash.toBase58());
+  }
 }
diff --git a/src/test/java/io/ipfs/api/SimpleAddTest.java b/src/test/java/io/ipfs/api/SimpleAddTest.java
index 0d7bb30c..b8628498 100644
--- a/src/test/java/io/ipfs/api/SimpleAddTest.java
+++ b/src/test/java/io/ipfs/api/SimpleAddTest.java
@@ -1,116 +1,110 @@
 package io.ipfs.api;
+import io.ipfs.api.NamedStreamable.FileWrapper;
+import io.ipfs.multiaddr.MultiAddress;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.junit.Assert;
-import org.junit.Ignore;
 import org.junit.Test;
-import io.ipfs.api.NamedStreamable.FileWrapper;
-import io.ipfs.multiaddr.MultiAddress;
-
 /**
- *
  * ipfs daemon --enable-pubsub-experiment &
  *
- * ipfs pin rm `ipfs pin ls -qt recursive`
- *
- * ipfs --api=/ip4/127.0.0.1/tcp/5001 add -r src/test/resources/html
+ *
+ * ipfs pin rm `ipfs pin ls -qt recursive`
+ *
+ *
ipfs --api=/ip4/127.0.0.1/tcp/5001 add -r src/test/resources/html */ public class SimpleAddTest { - static final Map cids = new LinkedHashMap<>(); - static { - cids.put("index.html", "QmVts3YjmhsCSqMv8Thk1CCy1nnpCbqEFjbkjS7PEzthZE"); - cids.put("html", "QmUQvDumYa8najL94EnGhmGobyMyNzAmCSpfAxYnYcQHZD"); - } - - IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); - - @Test - public void testSingle() throws Exception { - Path path = Paths.get("src/test/resources/html/index.html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file); - - Assert.assertEquals(1, tree.size()); - Assert.assertEquals("index.html", tree.get(0).name.get()); - Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58()); - } - - @Test - public void testAddArgs() throws Exception { - Path path = Paths.get("src/test/resources/html/index.html"); - NamedStreamable file = new FileWrapper(path.toFile()); - AddArgs args = AddArgs.Builder.newInstance() - .setInline() - .setCidVersion(1) - .build(); - List tree = ipfs.add(file, args); - - Assert.assertEquals(1, tree.size()); - Assert.assertEquals("index.html", tree.get(0).name.get()); - } - @Test - public void testFilenameEncoding() throws Exception { - Path path = Paths.get("src/test/resources/folder/你好.html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file); - - Assert.assertEquals(1, tree.size()); - Assert.assertEquals("你好.html", tree.get(0).name.get()); - } - - @Test - public void testSingleWrapped() throws Exception { - - Path path = Paths.get("src/test/resources/html/index.html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file, true); - - Assert.assertEquals(2, tree.size()); - Assert.assertEquals("index.html", tree.get(0).name.get()); - Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58()); - } - - @Test - public void testSingleOnlyHash() throws Exception { - - Path path = Paths.get("src/test/resources/html/index.html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file, false, true); - - Assert.assertEquals(1, tree.size()); - Assert.assertEquals("index.html", tree.get(0).name.get()); - Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58()); - } - - @Test - public void testRecursive() throws Exception { - - Path path = Paths.get("src/test/resources/html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file); - - Assert.assertEquals(8, tree.size()); - Assert.assertEquals("html", tree.get(7).name.get()); - Assert.assertEquals(cids.get("html"), tree.get(7).hash.toBase58()); - } - - @Test - public void testRecursiveOnlyHash() throws Exception { - - Path path = Paths.get("src/test/resources/html"); - NamedStreamable file = new FileWrapper(path.toFile()); - List tree = ipfs.add(file, false, true); - - Assert.assertEquals(8, tree.size()); - Assert.assertEquals("html", tree.get(7).name.get()); - Assert.assertEquals(cids.get("html"), tree.get(7).hash.toBase58()); - } + static final Map cids = new LinkedHashMap<>(); + + static { + cids.put("index.html", "QmVts3YjmhsCSqMv8Thk1CCy1nnpCbqEFjbkjS7PEzthZE"); + cids.put("html", "QmUQvDumYa8najL94EnGhmGobyMyNzAmCSpfAxYnYcQHZD"); + } + + IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); + + @Test + public void testSingle() throws Exception { + Path path = Paths.get("src/test/resources/html/index.html"); + NamedStreamable file = new FileWrapper(path.toFile()); + List tree = 
ipfs.add(file);
+
+    Assert.assertEquals(1, tree.size());
+    Assert.assertEquals("index.html", tree.get(0).name.get());
+    Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58());
+  }
+
+  @Test
+  public void testAddArgs() throws Exception {
+    Path path = Paths.get("src/test/resources/html/index.html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    AddArgs args = AddArgs.Builder.newInstance().setInline().setCidVersion(1).build();
+    List tree = ipfs.add(file, args);
+
+    Assert.assertEquals(1, tree.size());
+    Assert.assertEquals("index.html", tree.get(0).name.get());
+  }
+
+  @Test
+  public void testFilenameEncoding() throws Exception {
+    Path path = Paths.get("src/test/resources/folder/你好.html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    List tree = ipfs.add(file);
+
+    Assert.assertEquals(1, tree.size());
+    Assert.assertEquals("你好.html", tree.get(0).name.get());
+  }
+
+  @Test
+  public void testSingleWrapped() throws Exception {
+
+    Path path = Paths.get("src/test/resources/html/index.html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    List tree = ipfs.add(file, true);
+
+    Assert.assertEquals(2, tree.size());
+    Assert.assertEquals("index.html", tree.get(0).name.get());
+    Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58());
+  }
+
+  @Test
+  public void testSingleOnlyHash() throws Exception {
+
+    Path path = Paths.get("src/test/resources/html/index.html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    List tree = ipfs.add(file, false, true);
+
+    Assert.assertEquals(1, tree.size());
+    Assert.assertEquals("index.html", tree.get(0).name.get());
+    Assert.assertEquals(cids.get("index.html"), tree.get(0).hash.toBase58());
+  }
+
+  @Test
+  public void testRecursive() throws Exception {
+
+    Path path = Paths.get("src/test/resources/html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    List tree = ipfs.add(file);
+
+    Assert.assertEquals(8, tree.size());
+    Assert.assertEquals("html", tree.get(7).name.get());
+    Assert.assertEquals(cids.get("html"), tree.get(7).hash.toBase58());
+  }
+
+  @Test
+  public void testRecursiveOnlyHash() throws Exception {
+
+    Path path = Paths.get("src/test/resources/html");
+    NamedStreamable file = new FileWrapper(path.toFile());
+    List tree = ipfs.add(file, false, true);
+
+    Assert.assertEquals(8, tree.size());
+    Assert.assertEquals("html", tree.get(7).name.get());
+    Assert.assertEquals(cids.get("html"), tree.get(7).hash.toBase58());
+  }
 }
diff --git a/src/test/java/io/ipfs/api/VersionsTest.java b/src/test/java/io/ipfs/api/VersionsTest.java
index be1e3deb..18f546b4 100644
--- a/src/test/java/io/ipfs/api/VersionsTest.java
+++ b/src/test/java/io/ipfs/api/VersionsTest.java
@@ -1,19 +1,23 @@
 package io.ipfs.api;
-import org.junit.*;
-
-import java.util.*;
-import java.util.stream.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.junit.Assert;
+import org.junit.Test;
 public class VersionsTest {
-    @Test
-    public void sorting(){
-        List original = Arrays.asList("1.0.3", "0.4.9", "0.4.10", "0.5.1-rc1", "0.5.1-rc2", "0.5.1-rc2+meta");
-        List versions = original.stream().map(Version::parse).collect(Collectors.toList());
-        Collections.sort(versions);
-        List sorted = versions.stream().map(Object::toString).collect(Collectors.toList());
-        List correct = Arrays.asList("0.4.9", "0.4.10", "0.5.1-rc1", "0.5.1-rc2", "0.5.1-rc2+meta", "1.0.3");
-        Assert.assertTrue("Correct version sorting", sorted.equals(correct));
-    }
+  @Test
+  public void sorting() {
+    List original =
+        Arrays.asList("1.0.3", "0.4.9", "0.4.10", "0.5.1-rc1", "0.5.1-rc2", "0.5.1-rc2+meta");
+    List versions = original.stream().map(Version::parse).collect(Collectors.toList());
+    Collections.sort(versions);
+    List sorted = versions.stream().map(Object::toString).collect(Collectors.toList());
+    List correct =
+        Arrays.asList("0.4.9", "0.4.10", "0.5.1-rc1", "0.5.1-rc2", "0.5.1-rc2+meta", "1.0.3");
+    Assert.assertTrue("Correct version sorting", sorted.equals(correct));
+  }
 }
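
A minimal usage sketch of the client API exercised by the tests in this patch (the AddArgs builder, add, cat, and the timeout setter). It assumes a local IPFS daemon listening on /ip4/127.0.0.1/tcp/5001, the same endpoint the tests use; the AddExample class name and the example.txt path are illustrative placeholders, not files from this repository.

    import io.ipfs.api.AddArgs;
    import io.ipfs.api.IPFS;
    import io.ipfs.api.MerkleNode;
    import io.ipfs.api.NamedStreamable;
    import io.ipfs.multiaddr.MultiAddress;
    import java.io.File;
    import java.util.List;

    public class AddExample {
      public static void main(String[] args) throws Exception {
        // Connect to a local node; time out API calls after one second, as testTimeoutOK does.
        IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000);

        // Same options as addArgsTest/testAddArgs: inline small blocks and emit CIDv1.
        AddArgs options = AddArgs.Builder.newInstance().setInline().setCidVersion(1).build();

        // "example.txt" is a placeholder path, not part of the test resources.
        List<MerkleNode> added =
            ipfs.add(new NamedStreamable.FileWrapper(new File("example.txt")), options);
        MerkleNode root = added.get(added.size() - 1);

        // Read the bytes back by hash, mirroring what the cat-based tests verify.
        byte[] content = ipfs.cat(root.hash);
        System.out.println(root.hash + " -> " + content.length + " bytes");
      }
    }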