
Commit 1f19d65

Cleaning up datatypes

1 parent d579086

File tree: 3 files changed (+28, -44 lines)


modules/hivemq-edge-module-s7/src/main/java/com/hivemq/edge/adapters/s7/S7Client.java

Lines changed: 23 additions & 27 deletions

@@ -1,9 +1,7 @@
 package com.hivemq.edge.adapters.s7;

 import com.github.xingshuangs.iot.protocol.s7.enums.EPlcType;
-import com.github.xingshuangs.iot.protocol.s7.service.MultiAddressRead;
 import com.github.xingshuangs.iot.protocol.s7.service.S7PLC;
-import com.github.xingshuangs.iot.protocol.s7.utils.AddressUtil;
 import com.hivemq.adapter.sdk.api.data.DataPoint;
 import com.hivemq.adapter.sdk.api.factories.DataPointFactory;
 import com.hivemq.edge.adapters.s7.config.S7AdapterConfig;
@@ -17,11 +15,6 @@
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;

-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1200;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1500;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_300;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_400;
-
 public class S7Client {

     private static final Logger log = LoggerFactory.getLogger(S7Client.class);
@@ -40,29 +33,29 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
             log.trace("Reading data from addresses {} with type {}", addresses, type);
         }
         switch (type) {
-            case BOOL: return combine(dataPointFactory, addresses, s7PLC.readBoolean(addresses));
+            case BOOL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readBoolean(addresses));
             case BYTE: return addresses.stream().map(address -> dataPointFactory
                             .create(address, s7PLC.readByte(address)))
                             .collect(Collectors.toList());
-            case WORD: return combine(dataPointFactory, addresses, s7PLC.readInt16(addresses));
-            case DWORD: return combine(dataPointFactory, addresses, s7PLC.readInt32(addresses));
-            case LWORD: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+            case WORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 2));
+            case DWORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 4));
+            case LWORD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, readBytes(addresses, 8));
             case USINT: return addresses.stream().map(address -> dataPointFactory
                             .create(address, Byte.toUnsignedInt(s7PLC.readByte(address))))
                             .collect(Collectors.toList());
-            case UINT: return combine(dataPointFactory, addresses, s7PLC.readUInt16(addresses));
-            case UDINT: return combine(dataPointFactory, addresses, s7PLC.readUInt32(addresses));
+            case UINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readUInt16(addresses));
+            case UDINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readUInt32(addresses));
             case ULINT: return addresses.stream()
                             .map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
                             .collect(Collectors.toList());
             case SINT: return addresses.stream().map(address -> dataPointFactory
                             .create(address, ((Byte)s7PLC.readByte(address)).shortValue()))
                             .collect(Collectors.toList());
-            case INT: return combine(dataPointFactory, addresses, s7PLC.readInt16(addresses));
-            case DINT: return combine(dataPointFactory, addresses, s7PLC.readInt32(addresses));
-            case LINT: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
-            case REAL: return combine(dataPointFactory, addresses, s7PLC.readFloat32(addresses));
-            case LREAL: return combine(dataPointFactory, addresses, s7PLC.readFloat64(addresses));
+            case INT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt16(addresses));
+            case DINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt32(addresses));
+            case LINT: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+            case REAL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readFloat32(addresses));
+            case LREAL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readFloat64(addresses));
             case CHAR: return addresses.stream().map(address -> dataPointFactory
                             .create(address, s7PLC.readByte(address)))
                             .collect(Collectors.toList());
@@ -74,11 +67,11 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
                 })
                 .collect(Collectors.toList());
             case STRING:
-            case WSTRING: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readString).collect(Collectors.toList()));
-            case TIME: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTime).collect(Collectors.toList()));
-            case LTIME: return combine(dataPointFactory, addresses, s7PLC.readInt64(addresses));
-            case DATE: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDate).collect(Collectors.toList()));
-            case TOD: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTimeOfDay).collect(Collectors.toList()));
+            case WSTRING: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readString).collect(Collectors.toList()));
+            case TIME: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTime).collect(Collectors.toList()));
+            case LTIME: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, s7PLC.readInt64(addresses));
+            case DATE: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDate).collect(Collectors.toList()));
+            case TOD: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readTimeOfDay).collect(Collectors.toList()));
             case LTOD: return addresses.stream()
                             .map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
                             .collect(Collectors.toList());
@@ -88,16 +81,19 @@ public List<DataPoint> read(final @NotNull S7DataType type, final @NotNull List<
             case LDT:return addresses.stream()
                             .map(address -> dataPointFactory.create(address, new BigInteger(Long.toUnsignedString(s7PLC.readInt64(address)))))
                             .collect(Collectors.toList());
-            case DTL: return combine(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDTL).collect(Collectors.toList()));
-            case ARRAY: throw new IllegalArgumentException("Arrays not supported");
+            case DTL: return createDatapointsFromAddressesAndValues(dataPointFactory, addresses, addresses.stream().map(s7PLC::readDTL).collect(Collectors.toList()));
             default: {
                 log.error("Unspported tag-type {} at address {}", type, addresses);
                 throw new IllegalArgumentException("Unspported tag-type " + type + " at address " + addresses);
             }
         }
     }
-
-    public static List<DataPoint> combine(final @NotNull DataPointFactory dataPointFactory, final @NotNull List<String> addresses, final @NotNull List<?> values) {
+
+    public List<byte[]> readBytes(final List<String> addresses, final int count) {
+        return addresses.stream().map(address -> s7PLC.readByte(address, count)).collect(Collectors.toList());
+    }
+
+    public static List<DataPoint> createDatapointsFromAddressesAndValues(final @NotNull DataPointFactory dataPointFactory, final @NotNull List<String> addresses, final @NotNull List<?> values) {
         return IntStream
                 .range(0, addresses.size())
                 .mapToObj(i -> dataPointFactory.create(addresses.get(i), values.get(i)))
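
For context on the renamed helper: createDatapointsFromAddressesAndValues (formerly combine) attaches the value at index i to the address at index i, while the new readBytes helper fetches a fixed-width raw byte array per address (2, 4 and 8 bytes for WORD, DWORD and LWORD in the switch above). Below is a minimal, self-contained sketch of that index-pairing pattern; the Map.Entry stand-in for the project's DataPoint/DataPointFactory types, the class name, and the example addresses and values are illustrative assumptions, not part of the commit.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class AddressValuePairingSketch {

    // Mirrors the pairing done by createDatapointsFromAddressesAndValues:
    // element i of 'values' is attached to element i of 'addresses'.
    static List<Map.Entry<String, Object>> pair(final List<String> addresses, final List<?> values) {
        return IntStream.range(0, addresses.size())
                .mapToObj(i -> Map.entry(addresses.get(i), (Object) values.get(i)))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Illustrative addresses and pre-read values; in S7Client the values
        // would come from the s7PLC bulk reads or the new readBytes(addresses, count).
        List<String> addresses = List.of("DB1.0", "DB1.2");
        List<Short> values = List.of((short) 7, (short) 42);
        System.out.println(pair(addresses, values));
    }
}

As the diff shows, WORD/DWORD/LWORD values now arrive as raw byte arrays from readBytes(addresses, n) rather than the signed integers previously produced by s7PLC.readInt16/readInt32/readInt64.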

modules/hivemq-edge-module-s7/src/main/java/com/hivemq/edge/adapters/s7/config/S7DataType.java

Lines changed: 1 addition & 2 deletions

@@ -20,12 +20,10 @@
 import java.math.BigInteger;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
-import java.time.LocalTime;
 import java.util.List;

 import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1200;
 import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_1500;
-import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_200;
 import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_300;
 import static com.hivemq.edge.adapters.s7.config.S7Versions.S7_400;

@@ -66,6 +64,7 @@ public enum S7DataType {
     LDT(LocalDateTime.class, 64, List.of(S7_1500), "Date and time (year-month-day-hour:minute:second:nanoseconds)", "https://support.industry.siemens.com/cs/mdm/109054417?c=71834521483&lc=en-GE"),
     DTL(LocalDateTime.class, 64, List.of(S7_1500), "Date and time (year-month-day-hour:minute:second:nanoseconds)", "https://support.industry.siemens.com/cs/mdm/109054417?c=64682916235&lc=en-GE"),
     ARRAY(Byte[].class, -1, List.of(S7_300, S7_400, S7_1200, S7_1500), "Array of type", "https://support.industry.siemens.com/cs/mdm/109054417?c=52352205963&lc=en-GE");
+
     //RAW_BYTE_ARRAY TODO: it's not an actual type but is there in the old implementation

     S7DataType(final @NotNull Class<?> javaType, final @NotNull int lengthInBits, final @NotNull List<S7Versions> availableOn, final @NotNull String description, final @NotNull String docs){

modules/hivemq-edge-module-s7/src/test/java/com/hivemq/edge/adapters/s7/config/S7AdapterConfigTest.java

Lines changed: 4 additions & 15 deletions

@@ -18,6 +18,7 @@
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.nio.file.Path;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;

@@ -140,21 +141,9 @@ public void unconvertConfigObject_full_valid() {
     }

     private @NotNull HiveMQConfigEntity loadConfig(final @NotNull File configFile) {
-        final ConfigFileReaderWriter readerWriter = new ConfigFileReaderWriter(new ConfigurationFile(configFile),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock(),
-                mock());
+        final ConfigFileReaderWriter readerWriter = new ConfigFileReaderWriter(
+                new ConfigurationFile(configFile),
+                List.of());
         return readerWriter.applyConfig();
     }

0 commit comments
