@@ -179,38 +179,58 @@ public long getLagMillis(){
     String lc_trackerUrl = trackerUrl.toLowerCase( Locale.US );
 
     int position = trackerUrl.lastIndexOf('/');
-    if (    position >= 0 &&
-            trackerUrl.length() >= position+9 &&
-            trackerUrl.substring(position+1,position+9).equals("announce")) {
-
-        this.scrapeURL = trackerUrl.substring(0,position+1) + "scrape" + trackerUrl.substring(position+9);
-        // System.out.println( "url = " + trackerUrl + ", scrape =" + scrapeURL );
-
-    }else if ( lc_trackerUrl.startsWith("udp:")){
-        // UDP scrapes aren't based on URL rewriting, just carry on
-
-        scrapeURL = trackerUrl;
-    }else if ( lc_trackerUrl.startsWith( "ws:" ) || lc_trackerUrl.startsWith( "wss:" )){
-
-        // websocket trackers
-
-        scrapeURL = trackerUrl;
-
-        bSingleHashScrapes = true;
-
-    }else if ( position >= 0 && trackerUrl.lastIndexOf('.') < position ){
-
-        // some trackers support /scrape appended but don't have an /announce
-        // don't do this though it the URL ends with .php (or infact .<anything>)
-
-        scrapeURL = trackerUrl + (trackerUrl.endsWith("/")?"":"/") + "scrape";
-
-    } else {
-        if (!logged_invalid_urls.contains(trackerUrl)) {
+
+    List<String> replaces = new ArrayList<>(2);
+
+    replaces.add( "announce" );
+
+    // https://github.com/i2p/i2p.plugins.zzzot - accepts /a as well as /announce...
+
+    if ( AENetworkClassifier.categoriseAddress( tracker_url.getHost()) == AENetworkClassifier.AT_I2P ){
+
+        replaces.add( "a" );
+    }
+
+    for ( String rep : replaces ){
+        int repLen = rep.length();
+        if (    position >= 0 &&
+                trackerUrl.length() >= position+repLen+1 &&
+                trackerUrl.substring(position+1,position+repLen+1).equals( rep )) {
+
+            scrapeURL = trackerUrl.substring(0,position+1) + "scrape" + trackerUrl.substring(position+repLen+1);
+            // System.out.println( "url = " + trackerUrl + ", scrape =" + scrapeURL );
+
+            break;
+        }
+    }
 
-            logged_invalid_urls.add(trackerUrl);
-            // Error logging is done by the caller, since it has the hash/torrent info
-        }
+    if ( scrapeURL == null ){
+        if ( lc_trackerUrl.startsWith("udp:")){
+            // UDP scrapes aren't based on URL rewriting, just carry on
+
+            scrapeURL = trackerUrl;
+        }else if ( lc_trackerUrl.startsWith( "ws:" ) || lc_trackerUrl.startsWith( "wss:" )){
+
+            // websocket trackers
+
+            scrapeURL = trackerUrl;
+
+            bSingleHashScrapes = true;
+
+        }else if ( position >= 0 && trackerUrl.lastIndexOf('.') < position ){
+
+            // some trackers support /scrape appended but don't have an /announce
+            // don't do this though it the URL ends with .php (or infact .<anything>)
+
+            scrapeURL = trackerUrl + (trackerUrl.endsWith("/")?"":"/") + "scrape";
+
+        } else {
+            if (!logged_invalid_urls.contains(trackerUrl)) {
+
+                logged_invalid_urls.add(trackerUrl);
+                // Error logging is done by the caller, since it has the hash/torrent info
+            }
+        }
     }
 } catch (Throwable e) {
     Debug.printStackTrace( e );
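
For reference, a minimal standalone sketch of the announce-to-scrape rewrite that this change generalises. It is illustrative only, not BiglyBT code: the class name ScrapeUrlSketch, the helper deriveScrapeUrl and the sample URLs are made up. It compares the last path component against the accepted announce names ("announce", plus the zzzot-style "a" alias for I2P hosts) and, on a match, swaps it for "scrape" while keeping anything that follows (query string, ".php" suffix, etc.).

import java.util.ArrayList;
import java.util.List;

public class ScrapeUrlSketch {

    // Hypothetical helper, not part of BiglyBT: rewrite ".../announce..." (or ".../a..."
    // on I2P hosts) to ".../scrape...", returning null when no rewrite applies.
    static String deriveScrapeUrl(String announceUrl, boolean isI2PHost) {
        List<String> names = new ArrayList<>();
        names.add("announce");
        if (isI2PHost) {
            names.add("a");          // zzzot trackers accept /a as an announce alias
        }
        int position = announceUrl.lastIndexOf('/');
        if (position < 0) {
            return null;
        }
        for (String name : names) {
            int end = position + 1 + name.length();
            if (announceUrl.length() >= end
                    && announceUrl.substring(position + 1, end).equals(name)) {
                // keep whatever follows the announce component, e.g. "?key=..." or ".php"
                return announceUrl.substring(0, position + 1) + "scrape" + announceUrl.substring(end);
            }
        }
        return null;                 // caller would fall back to the udp:/ws: handling above
    }

    public static void main(String[] args) {
        // prints http://tracker.example.com/scrape?key=abc
        System.out.println(deriveScrapeUrl("http://tracker.example.com/announce?key=abc", false));
        // prints http://zzzot.example.i2p/scrape
        System.out.println(deriveScrapeUrl("http://zzzot.example.i2p/a", true));
    }
}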