@@ -13,7 +13,7 @@ def __init__(self, db_dir=None, max_query_workers=None, chain=None, reorg_limit=
                  blocking_channel_ids=None, filtering_channel_ids=None, peer_hubs=None, peer_announce=None,
                  index_address_status=None, address_history_cache_size=None, daemon_ca_path=None,
                  merkle_cache_size=None, resolved_url_cache_size=None, tx_cache_size=None,
-                 history_tx_cache_size=None):
+                 history_tx_cache_size=None, largest_address_history_cache_size=None):
         super().__init__(db_dir, max_query_workers, chain, reorg_limit, prometheus_port, cache_all_tx_hashes,
                          cache_all_claim_txos, blocking_channel_ids, filtering_channel_ids, index_address_status)
         self.daemon_url = daemon_url if daemon_url is not None else self.required('DAEMON_URL')
@@ -54,6 +54,9 @@ def __init__(self, db_dir=None, max_query_workers=None, chain=None, reorg_limit=
             (float(self.integer('QUERY_TIMEOUT_MS', 10000)) / 1000.0)
         self.hashX_history_cache_size = address_history_cache_size if address_history_cache_size is not None \
             else self.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096)
+        self.largest_hashX_history_cache_size = largest_address_history_cache_size if largest_address_history_cache_size is not None \
+            else self.integer('LARGEST_ADDRESS_HISTORY_CACHE_SIZE', 256)
+
         self.daemon_ca_path = daemon_ca_path if daemon_ca_path else None
         self.merkle_cache_size = merkle_cache_size if merkle_cache_size is not None else self.integer('MERKLE_CACHE_SIZE', 32768)
         self.resolved_url_cache_size = resolved_url_cache_size if resolved_url_cache_size is not None else self.integer(
@@ -110,6 +113,10 @@ def contribute_to_arg_parser(cls, parser):
         parser.add_argument('--daily_fee', default=cls.default('DAILY_FEE', '0'), type=str)
         parser.add_argument('--query_timeout_ms', type=int, default=cls.integer('QUERY_TIMEOUT_MS', 10000),
                             help="Elasticsearch query timeout, in ms. Can be set in env with 'QUERY_TIMEOUT_MS'")
+        parser.add_argument('--largest_address_history_cache_size', type=int,
+                            default=cls.integer('LARGEST_ADDRESS_HISTORY_CACHE_SIZE', 256),
+                            help="Size of the largest value cache for address histories. "
+                                 "Can be set in the env with 'LARGEST_ADDRESS_HISTORY_CACHE_SIZE'")
         parser.add_argument('--address_history_cache_size', type=int,
                             default=cls.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096),
                             help="Size of the lru cache of address histories. "
@@ -148,5 +155,6 @@ def from_arg_parser(cls, args):
             elastic_notifier_port=args.elastic_notifier_port, index_address_status=args.index_address_statuses,
             address_history_cache_size=args.address_history_cache_size, daemon_ca_path=args.daemon_ca_path,
             merkle_cache_size=args.merkle_cache_size, resolved_url_cache_size=args.resolved_url_cache_size,
-            tx_cache_size=args.tx_cache_size, history_tx_cache_size=args.history_tx_cache_size
+            tx_cache_size=args.tx_cache_size, history_tx_cache_size=args.history_tx_cache_size,
+            largest_address_history_cache_size=args.largest_address_history_cache_size
         )
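The new setting resolves the same way as the other cache sizes in this file: an explicit value passed to the constructor (or via the `--largest_address_history_cache_size` flag) wins, otherwise the `LARGEST_ADDRESS_HISTORY_CACHE_SIZE` environment variable is read, and otherwise the default of 256 applies. Below is a minimal standalone sketch of that precedence; the helper function is illustrative only and is not the class's `integer()` method used in the diff above.

```python
import os

def resolve_largest_address_history_cache_size(explicit=None, default=256):
    """Illustrative sketch of the precedence: explicit value, then env var, then default."""
    if explicit is not None:
        return explicit
    return int(os.environ.get('LARGEST_ADDRESS_HISTORY_CACHE_SIZE', default))

# An explicit value (e.g. from --largest_address_history_cache_size) wins:
assert resolve_largest_address_history_cache_size(512) == 512
# Otherwise the environment variable is consulted:
os.environ['LARGEST_ADDRESS_HISTORY_CACHE_SIZE'] = '1024'
assert resolve_largest_address_history_cache_size() == 1024
# With neither set, the default of 256 applies:
del os.environ['LARGEST_ADDRESS_HISTORY_CACHE_SIZE']
assert resolve_largest_address_history_cache_size() == 256
```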