Skip to content

Commit e764dd8

Browse files
committed
修改删除的方法 (Modified the delete/remove methods)
1 parent f9f6fb7 commit e764dd8

File tree

3 files changed

+35
-13
lines changed

3 files changed

+35
-13
lines changed

src/main/java/io/udfs/api/MerkleNode.java

+8-8
Original file line number | Diff line number | Diff line change
@@ -19,8 +19,8 @@ public class MerkleNode {
1919
public final List<MerkleNode> links;
2020
public final Optional<byte[]> data;
2121
public final List<String> backup;
22-
public static String backdirhash="";
23-
public static String backfilehash="";
22+
//public static String backdirhash="";
23+
//public static String backfilehash="";
2424

2525
public MerkleNode(String hash,
2626
Optional<String> name,
@@ -72,16 +72,16 @@ public static MerkleNode fromJSON(Object rawjson) {
7272
hash = (String)json.get("Key");
7373
if (hash == null && json.containsKey("Cid"))
7474
hash = (String) (((Map) json.get("Cid")).get("/"));
75-
if(null!=hash){
75+
/* if(null!=hash){
7676
backdirhash=hash;
77-
}
77+
}*/
7878
Optional<String> name = json.containsKey("Name") ?
7979
Optional.of((String) json.get("Name")):
8080
Optional.empty();
8181
String fileName=(String) json.get("Name");
82-
if(StringUtils.isNotEmpty(fileName)){
82+
/* if(StringUtils.isNotEmpty(fileName)){
8383
backfilehash=hash;
84-
}
84+
}*/
8585
Object rawSize = json.get("Size");
8686
Optional<Integer> size = rawSize instanceof Integer ?
8787
Optional.of((Integer) rawSize) :
@@ -102,8 +102,8 @@ public static MerkleNode fromJSON(Object rawjson) {
102102
if(null!=job&&job.size()!=0){
103103
JSONArray list = job.getJSONArray("Success");
104104
if(null!=list){
105-
hash=backdirhash;//备份信息将存放的节点的hash做为hash值
106-
name=Optional.of(backfilehash);//备份信息将文件的hash值作为name值
105+
//hash=backdirhash;//备份信息将存放的节点的hash做为hash值
106+
//name=Optional.of(backfilehash);//备份信息将文件的hash值作为name值
107107
Iterator<Object> it = list.iterator();
108108
while (it.hasNext()) {
109109
JSONObject ob = (JSONObject) it.next();

src/main/java/io/udfs/api/UDFS.java

+8
Original file line number | Diff line number | Diff line change
@@ -225,12 +225,20 @@ public Map<Multihash, Object> ls(PinType type) throws IOException {
225225
public List<Multihash> rm(Multihash hash) throws IOException {
226226
return rm(hash, true);
227227
}
228+
public List<Multihash> rmlocal(Multihash hash) throws IOException {
229+
return rmlocal(hash, true);
230+
}
228231

229232
public List<Multihash> rm(Multihash hash, boolean recursive) throws IOException {
230233
Map json = retrieveMap("pin/rm?stream-channels=true&r=" + recursive + "&arg=" + hash);
231234
return ((List<Object>) json.get("Pins")).stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList());
232235
}
233236

237+
public List<Multihash> rmlocal(Multihash hash, boolean recursive) throws IOException {
238+
Map json = retrieveMap("localrm?stream-channels=true&r=" + recursive + "&arg=" + hash);
239+
return ((List<Object>) json.get("Pins")).stream().map(x -> Cid.decode((String) x)).collect(Collectors.toList());
240+
}
241+
234242
public List<MultiAddress> update(Multihash existing, Multihash modified, boolean unpin) throws IOException {
235243
return ((List<Object>)((Map)retrieveAndParse("pin/update?stream-channels=true&arg=" + existing + "&arg=" + modified + "&unpin=" + unpin)).get("Pins"))
236244
.stream()

src/test/java/io/udfs/api/APITest.java

+19-5
Original file line number | Diff line number | Diff line change
@@ -17,9 +17,9 @@
1717

1818
public class APITest {
1919
//http访问方式
20-
//private final UDFS udfs = new UDFS("132.232.99.150",5001,false);
20+
private final UDFS udfs = new UDFS("132.232.99.150",5001,false);
2121
//https访问方式
22-
private final UDFS udfs = new UDFS("114.67.37.76",443,false);
22+
//private final UDFS udfs = new UDFS("114.67.37.76",443,false);
2323
private final Random r = new Random(33550336); // perfect
2424

2525
@Test
@@ -123,13 +123,27 @@ public void pushSingleFileTest() throws IOException {
123123
@Test
124124
public void pushFileTest() throws IOException {
125125
//要添加文件使用
126-
NamedStreamable.FileWrapper file = new NamedStreamable.FileWrapper(new File("F:/test/20181116.txt"));
126+
/* NamedStreamable.FileWrapper file = new NamedStreamable.FileWrapper(new File("F:/test/20181116.txt"));
127127
//添加文件到IPFS返回HASH值
128128
List<MerkleNode> addParts = udfs.add(file);
129+
System.out.println("内容为:"+addParts.get(0).toJSONString());*/
130+
/* String hash="QmPQJ6CTMxxGintCKKeQ38gxE8P9nnUdq5nM8nifLT1aQh";
131+
//Multihash hash=new Multihash("QmPQJ6CTMxxGintCKKeQ38gxE8P9nnUdq5nM8nifLT1aQh");
132+
List<Multihash> pinRm=udfs.pin.rm(Multihash.fromBase58("QmPQJ6CTMxxGintCKKeQ38gxE8P9nnUdq5nM8nifLT1aQh"));
133+
System.out.println("删除结果:"+pinRm.get(0).toString());*/
134+
135+
Multihash filePointer = Multihash.fromBase58("Qme7KYZZTkARzkwE4x3vLKC4zB1jtNdw5HwuCxqABE7Kgc");
136+
byte[] fileContents = udfs.cat(filePointer);
137+
String str=new String(fileContents);
138+
System.out.println("查询的内容为:"+str);
139+
List<Multihash> pinRm=udfs.pin.rmlocal(filePointer);
140+
System.out.println("删除结果:"+pinRm.get(0).toString());
129141
//输出HASH值
130-
131142
//List<MerkleNode> addParts = udfs.push(file);
132-
System.out.println(addParts.get(0).hash);
143+
/* System.out.println("哈希值:"+addParts.get(0).hash);
144+
System.out.println("名称:"+addParts.get(0).name);
145+
System.out.println("文件大小"+addParts.get(0).largeSize);
146+
System.out.println("备份节点信息"+addParts.get(0).backup);*/
133147
}
134148

135149
@Test

0 commit comments

Comments (0)