Skip to content

Commit b3b8609

Browse files
committed
Add Grok hive storage format and table properties
1 parent 18d2588 commit b3b8609

File tree

13 files changed

+237
-2
lines changed

13 files changed

+237
-2
lines changed

Diff for: lib/trino-hive-formats/src/main/java/io/trino/hive/formats/HiveClassNames.java

+1
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ public final class HiveClassNames
2121
public static final String COLUMNAR_SERDE_CLASS = "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe";
2222
public static final String FILE_INPUT_FORMAT_CLASS = "org.apache.hadoop.mapred.FileInputFormat";
2323
public static final String FILE_OUTPUT_FORMAT_CLASS = "org.apache.hadoop.mapred.FileOutputFormat";
24+
public static final String GROK_SERDE_CLASS = "com.amazonaws.serde.GrokSerDe";
2425
public static final String HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat";
2526
public static final String HIVE_SEQUENCEFILE_OUTPUT_FORMAT_CLASS = "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat";
2627
public static final String HUDI_PARQUET_INPUT_FORMAT = "org.apache.hudi.hadoop.HoodieParquetInputFormat";

Diff for: lib/trino-hive-formats/src/main/java/io/trino/hive/formats/line/grok/GrokDeserializerFactory.java

+2-1
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
import java.util.Set;
2424

2525
import static com.google.common.base.Preconditions.checkArgument;
26+
import static io.trino.hive.formats.HiveClassNames.GROK_SERDE_CLASS;
2627

2728
public class GrokDeserializerFactory
2829
implements LineDeserializerFactory
@@ -33,7 +34,7 @@ public class GrokDeserializerFactory
3334
@Override
3435
public Set<String> getHiveSerDeClassNames()
3536
{
36-
return ImmutableSet.of("com.amazonaws.serde.GrokSerDe");
37+
return ImmutableSet.of(GROK_SERDE_CLASS);
3738
}
3839

3940
@Override

Diff for: plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java

+37
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,8 @@
232232
import static io.trino.plugin.hive.HiveTableProperties.CSV_QUOTE;
233233
import static io.trino.plugin.hive.HiveTableProperties.CSV_SEPARATOR;
234234
import static io.trino.plugin.hive.HiveTableProperties.EXTERNAL_LOCATION_PROPERTY;
235+
import static io.trino.plugin.hive.HiveTableProperties.GROK_CUSTOM_PATTERNS;
236+
import static io.trino.plugin.hive.HiveTableProperties.GROK_INPUT_FORMAT;
235237
import static io.trino.plugin.hive.HiveTableProperties.NULL_FORMAT_PROPERTY;
236238
import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_COLUMNS;
237239
import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_FPP;
@@ -251,6 +253,8 @@
251253
import static io.trino.plugin.hive.HiveTableProperties.getExternalLocation;
252254
import static io.trino.plugin.hive.HiveTableProperties.getExtraProperties;
253255
import static io.trino.plugin.hive.HiveTableProperties.getFooterSkipCount;
256+
import static io.trino.plugin.hive.HiveTableProperties.getGrokCustomPatterns;
257+
import static io.trino.plugin.hive.HiveTableProperties.getGrokInputFormat;
254258
import static io.trino.plugin.hive.HiveTableProperties.getHeaderSkipCount;
255259
import static io.trino.plugin.hive.HiveTableProperties.getHiveStorageFormat;
256260
import static io.trino.plugin.hive.HiveTableProperties.getNullFormat;
@@ -387,6 +391,9 @@ public class HiveMetadata
387391
private static final String REGEX_KEY = "input.regex";
388392
private static final String REGEX_CASE_SENSITIVE_KEY = "input.regex.case.insensitive";
389393

394+
private static final String GROK_INPUT_FORMAT_KEY = "input.format";
395+
private static final String GROK_CUSTOM_PATTERNS_KEY = "input.grokCustomPatterns";
396+
390397
private static final String AUTO_PURGE_KEY = "auto.purge";
391398

392399
public static final String MODIFYING_NON_TRANSACTIONAL_TABLE_MESSAGE = "Modifying Hive table rows is only supported for transactional tables";
@@ -776,6 +783,12 @@ else if (isTrinoView || isTrinoMaterializedView) {
776783
getSerdeProperty(table, REGEX_CASE_SENSITIVE_KEY)
777784
.ifPresent(regexCaseInsensitive -> properties.put(REGEX_CASE_INSENSITIVE, parseBoolean(regexCaseInsensitive)));
778785

786+
// GROK specific properties
787+
getSerdeProperty(table, GROK_INPUT_FORMAT_KEY)
788+
.ifPresent(inputFormat -> properties.put(GROK_INPUT_FORMAT_KEY, inputFormat));
789+
getSerdeProperty(table, GROK_CUSTOM_PATTERNS_KEY)
790+
.ifPresent(grokCustomPattern -> properties.put(GROK_CUSTOM_PATTERNS_KEY, grokCustomPattern));
791+
779792
Optional<String> comment = Optional.ofNullable(table.getParameters().get(Table.TABLE_COMMENT));
780793

781794
String autoPurgeProperty = table.getParameters().get(AUTO_PURGE_KEY);
@@ -1265,6 +1278,30 @@ else if (avroSchemaLiteral != null) {
12651278
tableProperties.put(REGEX_CASE_SENSITIVE_KEY, String.valueOf(regexCaseInsensitive));
12661279
});
12671280

1281+
// GROK specific properties
1282+
getGrokInputFormat(tableMetadata.getProperties())
1283+
.ifPresentOrElse(
1284+
inputFormat -> {
1285+
checkFormatForProperty(hiveStorageFormat, HiveStorageFormat.GROK, GROK_INPUT_FORMAT);
1286+
// Note: inputFormat is deliberately NOT validated with java.util.regex.Pattern.compile().
// Grok input formats such as %{"(?<name_underscore>\\S+)"} use underscores in named
// capture groups, which java.util.regex rejects, so compiling here would fail on
// perfectly valid Grok patterns.
1292+
tableProperties.put(GROK_INPUT_FORMAT_KEY, inputFormat);
1293+
},
1294+
() -> {
1295+
if (hiveStorageFormat == HiveStorageFormat.GROK) {
1296+
throw new TrinoException(INVALID_TABLE_PROPERTY, format("GROK format requires the '%s' table property", GROK_INPUT_FORMAT));
1297+
}
1298+
});
1299+
getGrokCustomPatterns(tableMetadata.getProperties())
1300+
.ifPresent(grokCustomPatterns -> {
1301+
checkFormatForProperty(hiveStorageFormat, HiveStorageFormat.GROK, GROK_CUSTOM_PATTERNS);
1302+
tableProperties.put(GROK_CUSTOM_PATTERNS_KEY, grokCustomPatterns);
1303+
});
1304+
12681305
// Set bogus table stats to prevent Hive 2.x from gathering these stats at table creation.
12691306
// These stats are not useful by themselves and can take a very long time to collect when creating an
12701307
// external table over a large data set.

Diff for: plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveModule.java

+2
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import io.trino.plugin.hive.fs.TransactionScopeCachingDirectoryListerFactory;
2929
import io.trino.plugin.hive.line.CsvFileWriterFactory;
3030
import io.trino.plugin.hive.line.CsvPageSourceFactory;
31+
import io.trino.plugin.hive.line.GrokFileWriterFactory;
3132
import io.trino.plugin.hive.line.GrokPageSourceFactory;
3233
import io.trino.plugin.hive.line.JsonFileWriterFactory;
3334
import io.trino.plugin.hive.line.JsonPageSourceFactory;
@@ -132,6 +133,7 @@ public void configure(Binder binder)
132133
fileWriterFactoryBinder.addBinding().to(CsvFileWriterFactory.class).in(Scopes.SINGLETON);
133134
fileWriterFactoryBinder.addBinding().to(JsonFileWriterFactory.class).in(Scopes.SINGLETON);
134135
fileWriterFactoryBinder.addBinding().to(RegexFileWriterFactory.class).in(Scopes.SINGLETON);
136+
fileWriterFactoryBinder.addBinding().to(GrokFileWriterFactory.class).in(Scopes.SINGLETON);
135137
fileWriterFactoryBinder.addBinding().to(OpenXJsonFileWriterFactory.class).in(Scopes.SINGLETON);
136138
fileWriterFactoryBinder.addBinding().to(SimpleTextFileWriterFactory.class).in(Scopes.SINGLETON);
137139
fileWriterFactoryBinder.addBinding().to(SimpleSequenceFileWriterFactory.class).in(Scopes.SINGLETON);

Diff for: plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveStorageFormat.java

+6-1
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
import static io.trino.hive.formats.HiveClassNames.AVRO_CONTAINER_OUTPUT_FORMAT_CLASS;
3434
import static io.trino.hive.formats.HiveClassNames.AVRO_SERDE_CLASS;
3535
import static io.trino.hive.formats.HiveClassNames.COLUMNAR_SERDE_CLASS;
36+
import static io.trino.hive.formats.HiveClassNames.GROK_SERDE_CLASS;
3637
import static io.trino.hive.formats.HiveClassNames.HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS;
3738
import static io.trino.hive.formats.HiveClassNames.HIVE_SEQUENCEFILE_OUTPUT_FORMAT_CLASS;
3839
import static io.trino.hive.formats.HiveClassNames.JSON_SERDE_CLASS;
@@ -101,6 +102,10 @@ public enum HiveStorageFormat
101102
REGEX(
102103
REGEX_SERDE_CLASS,
103104
TEXT_INPUT_FORMAT_CLASS,
105+
HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS),
106+
GROK(
107+
GROK_SERDE_CLASS,
108+
TEXT_INPUT_FORMAT_CLASS,
104109
HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS);
105110

106111
private final String serde;
@@ -134,7 +139,7 @@ public boolean isSplittable(String path)
134139
// Only uncompressed text input format is splittable
135140
return switch (this) {
136141
case ORC, PARQUET, AVRO, RCBINARY, RCTEXT, SEQUENCEFILE -> true;
137-
case JSON, OPENX_JSON, TEXTFILE, CSV, REGEX -> CompressionKind.forFile(path).isEmpty();
142+
case JSON, OPENX_JSON, TEXTFILE, CSV, REGEX, GROK -> CompressionKind.forFile(path).isEmpty();
138143
};
139144
}
140145

Diff for: plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java

+14
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,8 @@ public class HiveTableProperties
7070
public static final String PARQUET_BLOOM_FILTER_COLUMNS = "parquet_bloom_filter_columns";
7171
public static final String REGEX_PATTERN = "regex";
7272
public static final String REGEX_CASE_INSENSITIVE = "regex_case_insensitive";
73+
public static final String GROK_INPUT_FORMAT = "grok_input_format";
74+
public static final String GROK_CUSTOM_PATTERNS = "grok_custom_patterns";
7375
public static final String TRANSACTIONAL = "transactional";
7476
public static final String AUTO_PURGE = "auto_purge";
7577
public static final String EXTRA_PROPERTIES = "extra_properties";
@@ -174,6 +176,8 @@ public HiveTableProperties(
174176
stringProperty(CSV_ESCAPE, "CSV escape character", null, false),
175177
stringProperty(REGEX_PATTERN, "REGEX pattern", null, false),
176178
booleanProperty(REGEX_CASE_INSENSITIVE, "REGEX pattern is case insensitive", null, false),
179+
stringProperty(GROK_INPUT_FORMAT, "GROK input format", null, false),
180+
stringProperty(GROK_CUSTOM_PATTERNS, "GROK custom patterns", null, false),
177181
booleanProperty(TRANSACTIONAL, "Table is transactional", null, false),
178182
booleanProperty(AUTO_PURGE, "Skip trash when table or partition is deleted", config.isAutoPurge(), false),
179183
booleanProperty(
@@ -343,6 +347,16 @@ public static Optional<Boolean> isRegexCaseInsensitive(Map<String, Object> table
343347
return Optional.ofNullable((Boolean) tableProperties.get(REGEX_CASE_INSENSITIVE));
344348
}
345349

350+
public static Optional<String> getGrokInputFormat(Map<String, Object> tableProperties)
351+
{
352+
return Optional.ofNullable((String) tableProperties.get(GROK_INPUT_FORMAT));
353+
}
354+
355+
public static Optional<String> getGrokCustomPatterns(Map<String, Object> tableProperties)
356+
{
357+
return Optional.ofNullable((String) tableProperties.get(GROK_CUSTOM_PATTERNS));
358+
}
359+
346360
public static Optional<Boolean> isTransactional(Map<String, Object> tableProperties)
347361
{
348362
return Optional.ofNullable((Boolean) tableProperties.get(TRANSACTIONAL));
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
/*
2+
* Licensed under the Apache License, Version 2.0 (the "License");
3+
* you may not use this file except in compliance with the License.
4+
* You may obtain a copy of the License at
5+
*
6+
* http://www.apache.org/licenses/LICENSE-2.0
7+
*
8+
* Unless required by applicable law or agreed to in writing, software
9+
* distributed under the License is distributed on an "AS IS" BASIS,
10+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11+
* See the License for the specific language governing permissions and
12+
* limitations under the License.
13+
*/
14+
package io.trino.plugin.hive.line;
15+
16+
import io.trino.filesystem.Location;
17+
import io.trino.metastore.StorageFormat;
18+
import io.trino.plugin.hive.FileWriter;
19+
import io.trino.plugin.hive.HiveCompressionCodec;
20+
import io.trino.plugin.hive.HiveFileWriterFactory;
21+
import io.trino.plugin.hive.WriterKind;
22+
import io.trino.plugin.hive.acid.AcidTransaction;
23+
import io.trino.spi.TrinoException;
24+
import io.trino.spi.connector.ConnectorSession;
25+
26+
import java.util.List;
27+
import java.util.Map;
28+
import java.util.Optional;
29+
import java.util.OptionalInt;
30+
31+
import static io.trino.hive.formats.HiveClassNames.GROK_SERDE_CLASS;
32+
import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR;
33+
34+
public class GrokFileWriterFactory
35+
implements HiveFileWriterFactory
36+
{
37+
@Override
38+
public Optional<FileWriter> createFileWriter(
39+
Location location,
40+
List<String> inputColumnNames,
41+
StorageFormat storageFormat,
42+
HiveCompressionCodec compressionCodec,
43+
Map<String, String> schema,
44+
ConnectorSession session,
45+
OptionalInt bucketNumber,
46+
AcidTransaction transaction,
47+
boolean useAcidSchema,
48+
WriterKind writerKind)
49+
{
50+
if (GROK_SERDE_CLASS.equals(storageFormat.getSerde())) {
51+
throw new TrinoException(HIVE_WRITER_OPEN_ERROR, "GROK format is read-only");
52+
}
53+
return Optional.empty();
54+
}
55+
}

Diff for: plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java

+5
Original file line numberDiff line numberDiff line change
@@ -5701,6 +5701,7 @@ private boolean isMappingByName(HiveStorageFormat format)
57015701
case TEXTFILE -> false;
57025702
case CSV -> false;
57035703
case REGEX -> false;
5704+
case GROK -> false;
57045705
};
57055706
}
57065707

@@ -9505,6 +9506,10 @@ private List<TestingHiveStorageFormat> getAllTestingHiveStorageFormat()
95059506
// REGEX format is read-only
95069507
continue;
95079508
}
9509+
if (hiveStorageFormat == HiveStorageFormat.GROK) {
9510+
// GROK format is read-only
9511+
continue;
9512+
}
95089513

95099514
formats.add(new TestingHiveStorageFormat(getSession(), hiveStorageFormat));
95109515
}

Diff for: plugin/trino-hive/src/test/java/io/trino/plugin/hive/HiveTestUtils.java

+4
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@
3838
import io.trino.plugin.hive.avro.AvroPageSourceFactory;
3939
import io.trino.plugin.hive.line.CsvFileWriterFactory;
4040
import io.trino.plugin.hive.line.CsvPageSourceFactory;
41+
import io.trino.plugin.hive.line.GrokFileWriterFactory;
42+
import io.trino.plugin.hive.line.GrokPageSourceFactory;
4143
import io.trino.plugin.hive.line.JsonFileWriterFactory;
4244
import io.trino.plugin.hive.line.JsonPageSourceFactory;
4345
import io.trino.plugin.hive.line.OpenXJsonFileWriterFactory;
@@ -183,6 +185,7 @@ public static Set<HivePageSourceFactory> getDefaultHivePageSourceFactories(Trino
183185
.add(new JsonPageSourceFactory(fileSystemFactory, hiveConfig))
184186
.add(new OpenXJsonPageSourceFactory(fileSystemFactory, hiveConfig))
185187
.add(new RegexPageSourceFactory(fileSystemFactory, hiveConfig))
188+
.add(new GrokPageSourceFactory(fileSystemFactory, hiveConfig))
186189
.add(new SimpleTextFilePageSourceFactory(fileSystemFactory, hiveConfig))
187190
.add(new SimpleSequenceFilePageSourceFactory(fileSystemFactory, hiveConfig))
188191
.add(new AvroPageSourceFactory(fileSystemFactory))
@@ -204,6 +207,7 @@ public static Set<HiveFileWriterFactory> getDefaultHiveFileWriterFactories(HiveC
204207
.add(new CsvFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
205208
.add(new JsonFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
206209
.add(new RegexFileWriterFactory())
210+
.add(new GrokFileWriterFactory())
207211
.add(new OpenXJsonFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
208212
.add(new SimpleTextFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
209213
.add(new SimpleSequenceFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER, nodeVersion))
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,104 @@
1+
/*
2+
* Licensed under the Apache License, Version 2.0 (the "License");
3+
* you may not use this file except in compliance with the License.
4+
* You may obtain a copy of the License at
5+
*
6+
* http://www.apache.org/licenses/LICENSE-2.0
7+
*
8+
* Unless required by applicable law or agreed to in writing, software
9+
* distributed under the License is distributed on an "AS IS" BASIS,
10+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11+
* See the License for the specific language governing permissions and
12+
* limitations under the License.
13+
*/
14+
package io.trino.plugin.hive;
15+
16+
import com.google.common.collect.ImmutableMap;
17+
import com.google.common.io.Resources;
18+
import io.trino.filesystem.Location;
19+
import io.trino.filesystem.TrinoFileSystem;
20+
import io.trino.filesystem.TrinoFileSystemFactory;
21+
import io.trino.spi.security.ConnectorIdentity;
22+
import io.trino.testing.AbstractTestQueryFramework;
23+
import io.trino.testing.MaterializedResult;
24+
import io.trino.testing.MaterializedRow;
25+
import io.trino.testing.QueryRunner;
26+
import org.intellij.lang.annotations.Language;
27+
import org.junit.jupiter.api.Test;
28+
29+
import java.io.IOException;
30+
import java.io.OutputStream;
31+
import java.net.URL;
32+
import java.util.Arrays;
33+
import java.util.List;
34+
import java.util.UUID;
35+
36+
import static io.trino.plugin.hive.TestingHiveUtils.getConnectorService;
37+
import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
38+
39+
public class TestGrokTable
40+
extends AbstractTestQueryFramework
41+
{
42+
@Override
43+
protected QueryRunner createQueryRunner()
44+
throws Exception
45+
{
46+
return HiveQueryRunner.builder()
47+
.setHiveProperties(ImmutableMap.of("hive.non-managed-table-writes-enabled", "true"))
48+
.build();
49+
}
50+
51+
@Test
52+
public void testCreateExternalTableWithData()
53+
throws IOException
54+
{
55+
URL resourceLocation = Resources.getResource("grok/commonapachelog_test");
56+
TrinoFileSystem fileSystem = getConnectorService(getQueryRunner(), TrinoFileSystemFactory.class).create(ConnectorIdentity.ofUser("test"));
57+
58+
// Create a temporary directory for the table data
59+
Location tempDir = Location.of("local:///temp_" + UUID.randomUUID());
60+
fileSystem.createDirectory(tempDir);
61+
Location dataFile = tempDir.appendPath("commonapachelog_test");
62+
63+
try (OutputStream out = fileSystem.newOutputFile(dataFile).create()) {
64+
Resources.copy(resourceLocation, out);
65+
}
66+
67+
String inputFormat = "%{COMMONAPACHELOG:access_log}";
68+
69+
// GROK format is read-only, so create data files using the text file format
70+
@Language("SQL") String createTableSql =
71+
"""
72+
CREATE TABLE test_grok_table (
73+
access_log VARCHAR)
74+
WITH (
75+
format = 'grok',
76+
grok_input_format = '%s',
77+
external_location = '%s')
78+
""".formatted(inputFormat, dataFile.parentDirectory());
79+
80+
assertUpdate(createTableSql);
81+
MaterializedResult result = computeActual("SELECT * FROM test_grok_table");
82+
List<MaterializedRow> expected = List.of(
83+
new MaterializedRow(Arrays.asList(
84+
"64.242.88.10 - - [07/Mar/2004:16:05:49 -0800] \"GET /twiki/bin/edit/Main/Double_bounce_sender?topicparent=Main.ConfigurationVariables HTTP/1.1\" 401 12846"
85+
))
86+
);
87+
assertEqualsIgnoreOrder(result.getMaterializedRows(), expected);
88+
assertQueryFails("INSERT INTO test_grok_table VALUES ('grok fails writes')", "GROK format is read-only");
89+
90+
assertUpdate("DROP TABLE test_grok_table");
91+
}
92+
93+
@Test
94+
public void testGrokInputFormatPropertyIsRequired()
95+
{
96+
assertQueryFails(
97+
"""
98+
CREATE TABLE test_regex_property_required (
99+
name VARCHAR)
100+
WITH (format = 'grok')
101+
""",
102+
"GROK format requires the 'grok_input_format' table property");
103+
}
104+
}

Diff for: plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java

+4
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,10 @@ void testAllFormats()
122122
// REGEX format is read-only
123123
continue;
124124
}
125+
if (format == HiveStorageFormat.GROK) {
126+
// GROK format is read-only
127+
continue;
128+
}
125129
config.setHiveStorageFormat(format);
126130
config.setHiveCompressionCodec(NONE);
127131
long uncompressedLength = writeTestFile(fileSystemFactory, config, sortingFileWriterConfig, metastore, makeFileName(config));
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
64.242.88.10 - - [07/Mar/2004:16:05:49 -0800] "GET /twiki/bin/edit/Main/Double_bounce_sender?topicparent=Main.ConfigurationVariables HTTP/1.1" 401 12846

0 commit comments

Comments
 (0)