feat: modal implementation for data ingestion table (#1244)
**Issue number:**
[ADDON-70993](https://splunk.atlassian.net/browse/ADDON-70993),
[ADDON-72969](https://splunk.atlassian.net/browse/ADDON-72969)

## Summary
### Changes

> Added a modal to the Data Ingestion table view that shows data specific to the selected input.

### User experience

> Users can open a modal popup from any row of the Data Ingestion table to view charts of the data volume and number of events for that row.

## Checklist

If an item doesn't apply to your change, leave it unchecked.

* [x] I have performed a self-review of this change
* [x] Changes have been tested
* [x] Changes are documented
* [x] PR title follows [conventional commit
semantics](https://www.conventionalcommits.org/en/v1.0.0/)

---------

Co-authored-by: sgoral <[email protected]>
Co-authored-by: rohanm-crest <[email protected]>
Co-authored-by: srv-rr-github-token <[email protected]>
Co-authored-by: rohanm-crest <[email protected]>
5 people authored Oct 16, 2024
1 parent 139a47a commit 8246870
Showing 24 changed files with 1,416 additions and 36 deletions.
4 changes: 4 additions & 0 deletions docs/dashboard.md
@@ -435,3 +435,7 @@ e.g. of globalConfig.json:

the above configuration will create the following filter query:
`...source=*license_usage.log type=Usage (st IN ("*addon123*","my_custom_condition*"))...`

> Note:
> * In the Data Ingestion table, the first column lists the `View by` options. Clicking any row in this column opens a modal with detailed information, such as `Data volume` and `Number of events` over time, visualized in charts. A dropdown in the modal lets you switch between `View by` options, so you can explore data trends for different selected inputs (see the configuration sketch below).
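
For orientation, a minimal sketch of the `globalConfig.json` stanza that enables this monitoring dashboard; the `default` panel set shown here is an assumption for illustration, not part of this change:

```json
{
    "pages": {
        "dashboard": {
            "panels": [
                {
                    "name": "default"
                }
            ]
        }
    }
}
```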
16 changes: 16 additions & 0 deletions splunk_add_on_ucc_framework/dashboard.py
@@ -47,6 +47,7 @@
"data_ingestion_tab": "data_ingestion_tab_definition.json",
"errors_tab": "errors_tab_definition.json",
"resources_tab": "resources_tab_definition.json",
"data_ingestion_modal_definition": "data_ingestion_modal_definition.json",
}

data_ingestion = (
@@ -242,6 +243,21 @@ def generate_dashboard_content(
)
)

    if (
        definition_json_name
        == default_definition_json_filename["data_ingestion_modal_definition"]
    ):
        content = (
            utils.get_j2_env()
            .get_template(definition_json_name)
            .render(
                data_ingestion=data_ingestion.format(
                    lic_usg_condition=lic_usg_condition, determine_by=determine_by
                ),
                events_count=events_count.format(addon_name=addon_name.lower()),
            )
        )

    return content
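
For illustration only (this is not the framework's code; the template text and query skeletons below are hypothetical), the render step above boils down to filling the `{{data_ingestion}}` and `{{events_count}}` placeholders of a Jinja2 definition template with pre-formatted SPL strings:

```python
# Hypothetical sketch of the render pattern used in generate_dashboard_content:
# a Jinja2 template with {{data_ingestion}} / {{events_count}} placeholders is
# rendered with pre-formatted search strings and written out as a definition JSON.
from jinja2 import Environment

template_src = """{
    "data_volume_query": "{{ data_ingestion }}",
    "events_count_query": "{{ events_count }}"
}"""

# Simplified query skeletons; the real ones are defined in dashboard.py.
data_ingestion = (
    "index=_internal source=*license_usage.log {lic_usg_condition} "
    "| timechart sum({determine_by})"
)
events_count = "index=* sourcetype={addon_name}* | timechart count"

rendered = Environment().from_string(template_src).render(
    data_ingestion=data_ingestion.format(lic_usg_condition="type=Usage", determine_by="b"),
    events_count=events_count.format(addon_name="splunk_ta_uccexample"),
)
print(rendered)
```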


@@ -0,0 +1,148 @@
{
  "visualizations": {
    "data_ingestion_modal_timerange_label_start_viz": {
      "type": "splunk.singlevalue",
      "options": {
        "majorFontSize": 12,
        "backgroundColor": "transparent",
        "majorColor": "#9fa4af"
      },
      "dataSources": {
        "primary": "data_ingestion_modal_data_time_label_start_ds"
      }
    },
    "data_ingestion_modal_timerange_label_end_viz": {
      "type": "splunk.singlevalue",
      "options": {
        "majorFontSize": 12,
        "backgroundColor": "transparent",
        "majorColor": "#9fa4af"
      },
      "dataSources": {
        "primary": "data_ingestion_modal_data_time_label_end_ds"
      }
    },
    "data_ingestion_modal_data_volume_viz": {
      "type": "splunk.line",
      "options": {
        "xAxisVisibility": "hide",
        "seriesColors": ["#A870EF"],
        "yAxisTitleText": "Volume (bytes)",
        "xAxisTitleText": "Time"
      },
      "title": "Data volume",
      "dataSources": {
        "primary": "data_ingestion_modal_data_volume_ds"
      }
    },
    "data_ingestion_modal_events_count_viz": {
      "type": "splunk.line",
      "options": {
        "xAxisVisibility": "hide",
        "xAxisTitleText": "Time",
        "seriesColors": ["#A870EF"],
        "yAxisTitleText": "Number of events"
      },
      "title": "Number of events",
      "dataSources": {
        "primary": "ds_search_1"
      }
    }
  },
  "dataSources": {
    "data_ingestion_modal_data_time_label_start_ds": {
      "type": "ds.search",
      "options": {
        "query": "| makeresults | addinfo | eval StartDate = strftime(info_min_time, \"%e %b %Y %I:%M%p\") | table StartDate",
        "queryParameters": {
          "earliest": "$data_ingestion_modal_time.earliest$",
          "latest": "$data_ingestion_modal_time.latest$"
        }
      }
    },
    "data_ingestion_modal_data_time_label_end_ds": {
      "type": "ds.search",
      "options": {
        "query": "| makeresults | addinfo | eval EndDate = strftime(info_max_time, \"%e %b %Y %I:%M%p\") | table EndDate",
        "queryParameters": {
          "earliest": "$data_ingestion_modal_time.earliest$",
          "latest": "$data_ingestion_modal_time.latest$"
        }
      }
    },
    "data_ingestion_modal_data_volume_ds": {
      "type": "ds.search",
      "options": {
        "query": "{{data_ingestion}}",
        "queryParameters": {
          "earliest": "$data_ingestion_modal_time.earliest$",
          "latest": "$data_ingestion_modal_time.latest$"
        }
      }
    },
    "ds_search_1": {
      "type": "ds.search",
      "options": {
        "query": "{{events_count}}",
        "queryParameters": {
          "earliest": "$data_ingestion_modal_time.earliest$",
          "latest": "$data_ingestion_modal_time.latest$"
        }
      },
      "name": "Security Score vs Spend"
    }
  },
  "defaults": {},
  "inputs": {
    "data_ingestion_modal_time_window": {
      "options": {
        "defaultValue": "-24h,now",
        "token": "data_ingestion_modal_time"
      },
      "title": "Time Window",
      "type": "input.timerange"
    }
  },
  "layout": {
    "type": "grid",
    "globalInputs": ["data_ingestion_modal_time_window"],
    "structure": [
      {
        "item": "data_ingestion_modal_timerange_label_start_viz",
        "position": {
          "x": 0,
          "y": 50,
          "w": 100,
          "h": 20
        }
      },
      {
        "item": "data_ingestion_modal_timerange_label_end_viz",
        "position": {
          "x": 100,
          "y": 50,
          "w": 100,
          "h": 20
        }
      },
      {
        "item": "data_ingestion_modal_data_volume_viz",
        "position": {
          "x": 0,
          "y": 80,
          "w": 300,
          "h": 400
        }
      },
      {
        "item": "data_ingestion_modal_events_count_viz",
        "position": {
          "x": 0,
          "y": 500,
          "w": 300,
          "h": 400
        }
      }
    ]
  }
}
@@ -103,7 +103,12 @@
}
},
"count": 10
}
},
"eventHandlers": [
{
"type": "table.click.handler"
}
]
}
},
"dataSources": {
@@ -27,6 +27,7 @@
"appserver/static/js/build/globalConfig.json created\u001b[0m": "INFO",
"appserver/static/openapi.json created\u001b[0m": "INFO",
"metadata/default.meta created\u001b[0m": "INFO",
"appserver/static/js/build/custom/data_ingestion_modal_definition.json created\u001b[0m": "INFO",
"appserver/static/js/build/custom/data_ingestion_tab_definition.json created\u001b[0m": "INFO",
"appserver/static/js/build/custom/errors_tab_definition.json created\u001b[0m": "INFO",
"appserver/static/js/build/custom/overview_definition.json created\u001b[0m": "INFO",
@@ -2,8 +2,12 @@

import json
import sys
from time import time

from splunklib import modularinput as smi
from solnlib import log

logger = log.Logs().get_logger('splunk_ta_uccexample_four')


class EXAMPLE_INPUT_FOUR(smi.Script):
@@ -32,13 +36,23 @@ def validate_input(self, definition: smi.ValidationDefinition):

    def stream_events(self, inputs: smi.InputDefinition, ew: smi.EventWriter):
        input_items = [{'count': len(inputs.inputs)}]
        input_name_1 = ""
        for input_name, input_item in inputs.inputs.items():
            input_item['name'] = input_name
            input_name_1 = input_name
            input_items.append(input_item)

        sourcetype = f'example_input_four-st--{input_name_1.split("://")[-1]}'
        host = f'host--{input_name_1.split("://")[-1]}'

        event = smi.Event(
            data=json.dumps(input_items),
            sourcetype='example_input_four',
            sourcetype=sourcetype,
            host=host,
            source=input_name_1,
        )
        log.events_ingested(logger, input_name_1, sourcetype,
                            str(time())[-3:], "main", "no_account_4", host)
        ew.write_event(event)
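
For readers unfamiliar with the `log.events_ingested` call above, here is a commented, self-contained sketch of the same call pattern; all values are hypothetical and the argument order simply mirrors the example in this diff:

```python
# Illustrative only: names the positional arguments used by the call above.
# Assumes the solnlib package and a Splunk add-on runtime for log file setup.
from solnlib import log

logger = log.Logs().get_logger("splunk_ta_uccexample_four")

log.events_ingested(
    logger,
    "example_input_four://my_input",    # modular input name (hypothetical)
    "example_input_four-st--my_input",  # sourcetype of the written events
    42,                                 # number of events ingested (hypothetical count)
    "main",                             # index
    "no_account_4",                     # account
    "host--my_input",                   # host
)
```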


@@ -2,8 +2,12 @@

import json
import sys
from time import time

from splunklib import modularinput as smi
from solnlib import log

logger = log.Logs().get_logger('splunk_ta_uccexample_three')


class EXAMPLE_INPUT_THREE(smi.Script):
@@ -32,13 +36,24 @@ def validate_input(self, definition: smi.ValidationDefinition):

    def stream_events(self, inputs: smi.InputDefinition, ew: smi.EventWriter):
        input_items = [{'count': len(inputs.inputs)}]
        input_name_1 = ""
        for input_name, input_item in inputs.inputs.items():
            input_item['name'] = input_name
            input_name_1 = input_name
            input_items.append(input_item)

        sourcetype = f'example_input_three-st--{input_name_1.split("://")[-1]}'
        host = f'host--{input_name_1.split("://")[-1]}'
        source = f'example_input_three-s--{input_name_1.split("://")[-1]}'

        event = smi.Event(
            data=json.dumps(input_items),
            sourcetype='example_input_three',
            sourcetype=sourcetype,
            host=host,
            source=source,
        )
        log.events_ingested(logger, input_name_1, sourcetype,
                            str(time())[-3:], "main", "no_account_4", host)
        ew.write_event(event)


@@ -0,0 +1,61 @@
import import_declare_test

import json
import sys
from time import time

from splunklib import modularinput as smi
from solnlib import log

logger = log.Logs().get_logger('splunk_ta_uccexample_four')


class EXAMPLE_INPUT_FOUR(smi.Script):
    def __init__(self):
        super(EXAMPLE_INPUT_FOUR, self).__init__()

    def get_scheme(self):
        scheme = smi.Scheme('example_input_four')
        scheme.description = 'Example Input Four'
        scheme.use_external_validation = True
        scheme.streaming_mode_xml = True
        scheme.use_single_instance = False

        scheme.add_argument(
            smi.Argument(
                'name',
                title='Name',
                description='Name',
                required_on_create=True
            )
        )
        return scheme

    def validate_input(self, definition: smi.ValidationDefinition):
        return

    def stream_events(self, inputs: smi.InputDefinition, ew: smi.EventWriter):
        input_items = [{'count': len(inputs.inputs)}]
        input_name_1 = ""
        for input_name, input_item in inputs.inputs.items():
            input_item['name'] = input_name
            input_name_1 = input_name
            input_items.append(input_item)

        sourcetype = f'example_input_four-st--{input_name_1.split("://")[-1]}'
        host = f'host--{input_name_1.split("://")[-1]}'

        event = smi.Event(
            data=json.dumps(input_items),
            sourcetype=sourcetype,
            host=host,
            source=input_name_1,
        )
        log.events_ingested(logger, input_name_1, sourcetype,
                            str(time())[-3:], "main", "no_account_4", host)
        ew.write_event(event)


if __name__ == '__main__':
    exit_code = EXAMPLE_INPUT_FOUR().run(sys.argv)
    sys.exit(exit_code)
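
As a usage note, a hypothetical `inputs.conf` stanza that would schedule this example input once the add-on is installed (stanza name, interval, and index are illustrative):

```conf
[example_input_four://my_input]
interval = 300
index = main
disabled = 0
```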