Commit 7bf92d9

DPDV-4287: Pass the proxy settings (#92)

* DPDV-4287: Pass the proxy settings
* Include proxies also for get and delete queries
* Fix the synchronization script and update the client as well
* Fix the conversion
* Fix the proxy configuration
* Store the release and output folder
* Make HTTP protocol explicit in the UI
1 parent 78219a2 commit 7bf92d9

12 files changed: +178 -74 lines changed

.github/workflows/validate.yaml

Lines changed: 16 additions & 0 deletions
@@ -40,3 +40,19 @@ jobs:
           --input TA_dataset \
           --output output \
           --release release
+
+      - name: Store Release Folder
+        uses: actions/upload-artifact@v3
+        if: always()
+        with:
+          name: release
+          path: release
+          retention-days: 30
+
+      - name: Store Output Folder
+        uses: actions/upload-artifact@v3
+        if: always()
+        with:
+          name: output
+          path: output
+          retention-days: 30
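Both new steps run with `if: always()`, so the `release` and `output` folders are uploaded as build artifacts and kept for 30 days even when an earlier step fails, which makes broken packaging runs easier to inspect.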

CONTRIBUTING.md

Lines changed: 7 additions & 3 deletions
@@ -125,10 +125,14 @@ Note that build cleans previously created configuration. To prevent removal of c
 * Remove Splunk container - `make docker-splunk-remove`
 * Restore configuration - `make dev-config-backup`
 * Backup configuration - `make dev-config-restore` - it's not clear whether it really works
-* Tail Splunk logs - Splunkd - `make docker-tail-logs-splunk`
-* Tail Splunk logs - Python - `make docker-tail-logs-python`
-* Tail Splunk logs - Inputs - `make docker-tail-logs-inputs`
+* To see all available logs - `make docker-splunk-list-logs`
+* To see a particular log, you may use - `make docker-splunk-tail-log LOG_NAME=log-file`
+* Logs related to Splunk Python - `make docker-splunk-tail-logs-python` calls `make docker-splunk-tail-log LOG_NAME=python.log`
+* Logs related to Search command - `make docker-splunk-tail-logs-app-search-command` calls `make docker-splunk-tail-log LOG_NAME="TA_dataset_search_command.log"`
 
+#### Where are errors:
+
+* `search_messages.log` - error message that is shown in the UI, no stack trace :/
 # E2E Testing
 
 We are using Playwright - https://playwright.dev/

Makefile

Lines changed: 28 additions & 15 deletions
@@ -51,26 +51,34 @@ docker-splunk-show-app:
 		sudo -u splunk \
 		ls -l /opt/splunk/etc/apps/TA_dataset/
 
-.PHONY: docker-splunk-tail-logs-splunkd
-docker-splunk-tail-logs-splunkd:
+.PHONY: docker-splunk-list-logs
+docker-splunk-list-logs:
 	docker exec $(CONTAINER_NAME) \
 		sudo -u splunk \
-		tail -f \
-		/opt/splunk/var/log/splunk/splunkd.log
+		ls -lrt \
+		/opt/splunk/var/log/splunk/
+
+.PHONY: docker-splunk-tail-log
+docker-splunk-tail-log:
+	docker exec $(CONTAINER_NAME) \
+		bash -l -c "sudo -u splunk tail -f /opt/splunk/var/log/splunk/${LOG_NAME}"
+
+.PHONY: docker-splunk-tail-logs-splunkd
+docker-splunk-tail-logs-splunkd:
+	make docker-splunk-tail-log LOG_NAME=splunkd.log
 
 .PHONY: docker-splunk-tail-logs-python
 docker-splunk-tail-logs-python:
-	docker exec $(CONTAINER_NAME) \
-		sudo -u splunk \
-		tail -f \
-		/opt/splunk/var/log/splunk/python.log
+	make docker-splunk-tail-log LOG_NAME=python.log
 
-.PHONY: docker-splunk-tail-logs-input
-docker-splunk-tail-logs-input:
-	docker exec $(CONTAINER_NAME) \
-		sudo -u splunk \
-		tail -f \
-		/opt/splunk/var/log/splunk/TA_dataset_input.log
+# TODO: Figure out, how to make this work!
+.PHONY: docker-splunk-tail-logs-app-all
+docker-splunk-tail-logs-app-all:
+	make docker-splunk-tail-log LOG_NAME="TA_dataset*"
+
+.PHONY: docker-splunk-tail-logs-app-search-command
+docker-splunk-tail-logs-app-search-command:
+	make docker-splunk-tail-log LOG_NAME="TA_dataset_search_command.log"
 
 .PHONY: docker-splunk-tail-logs
 docker-splunk-tail-logs-count:
@@ -101,7 +109,7 @@ inspect:
 	splunk-appinspect inspect TA_dataset --included-tags splunk_appinspect
 
 .PHONY: pack
-pack:
+pack: clean
 	find $(SOURCE_PACKAGE) -name __pycache__ -exec rm -rfv {} \;
 	version=$$(jq -r '.meta.version' globalConfig.json) && \
 	scripts/pack.sh \
@@ -137,6 +145,7 @@ dev-config-restore:
 dev-update-source:
 	rsync -av $(SOURCE_PACKAGE)/bin/ $(OUTPUT_PACKAGE)/bin/
 	rsync -av $(SOURCE_PACKAGE)/default/ $(OUTPUT_PACKAGE)/default/
+	rsync -av $(SOURCE_PACKAGE)/lib/dataset_query_api_client/ $(OUTPUT_PACKAGE)/lib/dataset_query_api_client/
 
 dev-install-dependencies-pack:
 	pip install --upgrade-strategy only-if-needed -r requirements-pack.txt
@@ -162,3 +171,7 @@ e2e-test-headed:
 	npm run playwright:headed
 e2e-test-ui:
 	npm run playwright:ui
+
+.PHONY: clean
+clean:
+	find $(SOURCE_PACKAGE) -name '.DS_Store' -exec rm -rfv {} \;
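The parameterized `docker-splunk-tail-log` target does the real work here: any file under `/opt/splunk/var/log/splunk/` can now be tailed with, for example, `make docker-splunk-tail-log LOG_NAME=splunkd.log`, while the named targets (`docker-splunk-tail-logs-python`, `docker-splunk-tail-logs-app-search-command`, ...) simply delegate to it with a fixed `LOG_NAME`.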

TA_dataset/bin/dataset_api.py

Lines changed: 30 additions & 7 deletions
@@ -24,9 +24,26 @@
 )
 
 
+# TODO: Convert to the expected format
+# https://www.python-httpx.org/advanced/#http-proxying
+def convert_proxy(proxy):
+    if not proxy:
+        return {}
+    new_proxy = {}
+    if "http" in proxy:
+        new_proxy["http://"] = proxy["http"]
+    if "https" in proxy:
+        new_proxy["https://"] = proxy["https"]
+    return new_proxy
+
+
 # Executes Dataset LongRunningQuery for log events
-def ds_lrq_log_query(base_url, api_key, start_time, end_time, filter_expr, limit):
-    client = AuthenticatedClient(base_url=base_url, token=api_key)
+def ds_lrq_log_query(
+    base_url, api_key, start_time, end_time, filter_expr, limit, proxy
+):
+    client = AuthenticatedClient(
+        base_url=base_url, token=api_key, proxy=convert_proxy(proxy)
+    )
     body = PostQueriesLaunchQueryRequestBody(
         query_type=PostQueriesLaunchQueryRequestBodyQueryType.LOG,
         start_time=start_time,
@@ -37,8 +54,10 @@ def ds_lrq_log_query(base_url, api_key, start_time, end_time, filter_expr, limit
 
 
 # Executes Dataset LongRunningQuery using PowerQuery language
-def ds_lrq_power_query(base_url, api_key, start_time, end_time, query):
-    client = AuthenticatedClient(base_url=base_url, token=api_key)
+def ds_lrq_power_query(base_url, api_key, start_time, end_time, query, proxy):
+    client = AuthenticatedClient(
+        base_url=base_url, token=api_key, proxy=convert_proxy(proxy)
+    )
     body = PostQueriesLaunchQueryRequestBody(
         query_type=PostQueriesLaunchQueryRequestBodyQueryType.PQ,
         start_time=start_time,
@@ -50,9 +69,11 @@ def ds_lrq_power_query(base_url, api_key, start_time, end_time, query):
 
 # Executes Dataset LongRunningQuery to fetch facet values
 def ds_lrq_facet_values(
-    base_url, api_key, start_time, end_time, filter, name, max_values
+    base_url, api_key, start_time, end_time, filter, name, max_values, proxy
 ):
-    client = AuthenticatedClient(base_url=base_url, token=api_key)
+    client = AuthenticatedClient(
+        base_url=base_url, token=api_key, proxy=convert_proxy(proxy)
+    )
     body = PostQueriesLaunchQueryRequestBody(
         query_type=PostQueriesLaunchQueryRequestBodyQueryType.FACET_VALUES,
         start_time=start_time,
@@ -66,7 +87,9 @@ def ds_lrq_facet_values(
 
 # Executes LRQ run loop of launch-ping-remove API requests until the query completes
 # with a result
-def ds_lrq_run_loop(client, body: PostQueriesLaunchQueryRequestBody):
+def ds_lrq_run_loop(
+    client: AuthenticatedClient, body: PostQueriesLaunchQueryRequestBody
+):
     body.query_priority = PostQueriesLaunchQueryRequestBodyQueryPriority.HIGH
     response = post_queries.sync_detailed(client=client, json_body=body)
     result = response.parsed
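For orientation, a minimal sketch of what the new `convert_proxy` helper does (the proxy URLs below are made-up sample values, not part of the commit): it re-keys the `requests`-style dict produced by `get_proxy` into the scheme-prefixed mapping that httpx expects, per the linked httpx proxying docs.

# Illustration only - sample values, not from the commit.
requests_style = {
    "http": "http://user:secret@proxy.example.com:3128",
    "https": "http://user:secret@proxy.example.com:3128",
}

# convert_proxy() re-keys the same URLs the way httpx expects:
assert convert_proxy(requests_style) == {
    "http://": "http://user:secret@proxy.example.com:3128",
    "https://": "http://user:secret@proxy.example.com:3128",
}

# When get_proxy() returns None (proxy disabled), the client gets an
# empty mapping and connects directly:
assert convert_proxy(None) == {}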

TA_dataset/bin/dataset_common.py

Lines changed: 29 additions & 20 deletions
@@ -1,12 +1,13 @@
 # -*- coding: utf-8 -*-
 import json
+import logging
 import os.path as op
 import sys
 import time
 
 # adjust paths to make the Splunk app working
 import import_declare_test  # noqa: F401
-from solnlib import conf_manager
+from solnlib import conf_manager, log
 
 APP_NAME = __file__.split(op.sep)[-3]
 CONF_NAME = "ta_dataset"
@@ -28,6 +29,15 @@ def get_url(base_url, ds_method):
     return base_url + "/api/" + ds_api_endpoint
 
 
+# returns logger that logs data into file
+# /opt/splunk/var/log/splunk/${APP_NAME}/${suffix}
+def get_logger(session_key, suffix: str):
+    logger = log.Logs().get_logger("{}_{}".format(APP_NAME, suffix))
+    log_level = get_log_level(session_key, logging)
+    logger.setLevel(log_level)
+    return logger
+
+
 # one conf manager to rule them all
 def get_conf_manager(session_key, logger):
     try:
@@ -84,7 +94,6 @@ def get_proxy(session_key, logger):
         # MM: it does not have key `proxy_enabled`, it has key - disabled
         # {'disabled': '0', 'eai:appName': 'TA_dataset' ...
         proxy_details = cfm.get_conf(CONF_NAME + "_settings").get("proxy")
-        logger.info("MM PROXY: " + repr(proxy_details))
         proxy_enabled = proxy_details.get("proxy_enabled", 0)
     except Exception as e:
         logger.debug("No proxy information defined: {}".format(e))
@@ -93,30 +102,30 @@ def get_proxy(session_key, logger):
         if int(proxy_enabled) == 0:
             return None
         else:
-            proxy_url = proxy_details.get("proxy_url")
+            proxy_type = proxy_details.get("proxy_type")
+            proxy_host = proxy_details.get("proxy_url")
             proxy_port = proxy_details.get("proxy_port")
             proxy_username = proxy_details.get("proxy_username")
             proxy_password = proxy_details.get("proxy_password")
-            proxy_type = proxy_details.get("proxy_type")
             proxies = {}
-            if proxy_username and proxy_password:
-                proxies["http"] = (
-                    proxy_username
-                    + ":"
-                    + proxy_password
-                    + "@"
-                    + proxy_url
-                    + ":"
-                    + proxy_port
-                )
-            elif proxy_username:
-                proxies["http"] = proxy_username + "@" + proxy_url + ":" + proxy_port
+            proxy_url = ""
+            if proxy_type:
+                proxy_url += proxy_type
             else:
-                proxies["http"] = proxy_url + ":" + proxy_port
-            if proxy_type and proxy_type != "http":
-                proxies["http"] = proxy_type + "://" + proxies["http"]
-
+                proxy_url += "http"
+            proxy_url += "://"
+            if proxy_username:
+                proxy_url += proxy_username
+            if proxy_password:
+                proxy_url += ":" + proxy_password
+            if proxy_username:
+                proxy_url += "@"
+            proxy_url += proxy_host
+            proxy_url += ":" + proxy_port
+
+            proxies["http"] = proxy_url
             proxies["https"] = proxies["http"]
+
             return proxies
 
     except Exception as e:
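To make the rewritten URL assembly easier to follow, here is a minimal standalone sketch of the same string-building logic; the helper name and sample values are illustrative, not part of the commit:

def build_proxy_url(proxy_type, proxy_host, proxy_port,
                    proxy_username=None, proxy_password=None):
    # Mirrors get_proxy(): the scheme falls back to "http" when
    # proxy_type is unset; the port stays a string, as in the settings.
    url = (proxy_type if proxy_type else "http") + "://"
    if proxy_username:
        url += proxy_username
    if proxy_password:
        url += ":" + proxy_password
    if proxy_username:
        url += "@"
    return url + proxy_host + ":" + proxy_port

# build_proxy_url("http", "proxy.example.com", "3128", "user", "secret")
# -> "http://user:secret@proxy.example.com:3128"
# get_proxy() stores this URL under both proxies["http"] and proxies["https"].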

TA_dataset/bin/dataset_query.py

Lines changed: 2 additions & 7 deletions
@@ -14,13 +14,12 @@
 from dataset_api import build_payload, get_maxcount, parse_query, query_api_max
 from dataset_common import (
     get_acct_info,
-    get_log_level,
+    get_logger,
     get_proxy,
     get_url,
     relative_to_epoch,
 )
 from dataset_query_api_client.client import get_user_agent
-from solnlib import log
 from solnlib.modular_input import checkpointer
 from splunklib import modularinput as smi
 
@@ -115,11 +114,7 @@ def stream_events(self, inputs, ew):
 
         # Generate logger with input name
         _, input_name = input_name.split("//", 2)
-        logger = log.Logs().get_logger("{}_input".format(APP_NAME))
-
-        # Log level configuration
-        log_level = get_log_level(session_key, logger)
-        logger.setLevel(log_level)
+        logger = get_logger(session_key, "input")
 
         logger.debug("Modular input invoked.")
 
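Net effect of this change: the shared `get_logger` helper added in `dataset_common.py` collapses the removed three-step logger setup into a single call, roughly:

# New call in dataset_query.py:
logger = get_logger(session_key, "input")

# ...which stands in for the removed inline setup:
#   logger = log.Logs().get_logger("{}_input".format(APP_NAME))
#   log_level = get_log_level(session_key, logger)
#   logger.setLevel(log_level)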