`. If unknown will return: 'Unknown'.
Meant to be used in places where we want to display the version. If you want to perform logic based upon the version,
use: is_demisto_version_ge.
@@ -8396,6 +8391,14 @@ def is_xsiam():
return demisto.demistoVersion().get("platform") == "x2"
+def is_using_engine():
+    """Determines whether or not the platform is using an engine.
+    :return: True iff the platform is using an engine.
+ :rtype: ``bool``
+ """
+ return demisto.demistoVersion().get("engine")
+
+
class DemistoHandler(logging.Handler):
"""
Handler to route logging messages to an IntegrationLogger or demisto.debug if not supplied
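A minimal usage sketch for the new is_using_engine() helper above (assumed caller-side code, not part of the library): the truthiness of the "engine" key returned by demisto.demistoVersion() drives the branch.

if is_using_engine():
    demisto.debug('Execution is routed through an engine')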
@@ -9305,7 +9308,7 @@ def _return_execution_metrics_results(self):
def generic_http_request(method,
server_url,
- timeout=10,
+ timeout=60,
verify=True,
proxy=False,
client_headers=None,
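A hedged usage sketch for the new 60-second default: callers that relied on the previous behavior can keep passing timeout=10 explicitly. The URL below is illustrative only, and the remaining parameters are assumed to keep their defaults.

res = generic_http_request(method='GET', server_url='https://example.com', timeout=10)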
@@ -11636,12 +11639,14 @@ def split_data_to_chunks(data, target_chunk_size):
data = data.split('\n')
for data_part in data:
if chunk_size >= target_chunk_size:
+ demisto.debug("reached max chunk size, sending chunk with size: {size}".format(size=chunk_size))
yield chunk
chunk = []
chunk_size = 0
chunk.append(data_part)
chunk_size += sys.getsizeof(data_part)
if chunk_size != 0:
+ demisto.debug("sending the remaining chunk with size: {size}".format(size=chunk_size))
yield chunk
@@ -11936,7 +11941,7 @@ def data_error_handler(res):
xsiam_api_call_with_retries(client=client, events_error_handler=data_error_handler,
error_msg=header_msg, headers=headers,
num_of_attempts=num_of_attempts, xsiam_url=xsiam_url,
- zipped_data=zipped_data, is_json_response=True)
+ zipped_data=zipped_data, is_json_response=True, data_type=data_type)
if should_update_health_module:
demisto.updateModuleHealth({'{data_type}Pulled'.format(data_type=data_type): data_size})
@@ -12016,6 +12021,36 @@ def is_time_sensitive():
return hasattr(demisto, 'isTimeSensitive') and demisto.isTimeSensitive()
+def parse_json_string(json_string):
+ """
+ Parse a JSON string into a Python dictionary.
+
+ :type json_string: ``str``
+ :param json_string: The JSON string to be parsed.
+
+ :rtype: ``dict``
+ :return: A Python dictionary representing the parsed JSON data.
+ """
+ try:
+ data = json.loads(json_string)
+ return data
+ except json.JSONDecodeError as error: # type: ignore[attr-defined]
+ demisto.error("Error decoding JSON: {error}".format(error=error))
+ return {}
+
+
+def get_server_config():
+ """
+ Retrieves XSOAR server configuration.
+
+ :rtype: ``dict``
+ :return: The XSOAR server configuration.
+ """
+ response = demisto.internalHttpRequest(method='GET', uri='/system/config')
+ body = parse_json_string(response.get('body'))
+ server_config = body.get('sysConf', {})
+ return server_config
+
from DemistoClassApiModule import * # type:ignore [no-redef] # noqa:E402
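A hedged usage sketch of the new helpers: get_server_config() returns {} when the response body is not valid JSON, so reading keys with .get() and a default is a safe pattern. The 'incident.closereasons' key is only an illustrative example (it also appears in the test below).

server_config = get_server_config()  # {} when the response body is not valid JSON
close_reasons = server_config.get('incident.closereasons', '')
demisto.debug('Configured close reasons: {}'.format(close_reasons))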
diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
index 8eea3bbcc911..8efb56a5b1d3 100644
--- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
+++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
@@ -6,6 +6,7 @@
import re
import sys
import urllib
+import uuid
import warnings
import dateparser
@@ -17,18 +18,21 @@
import CommonServerPython
import demistomock as demisto
-from CommonServerPython import xml2json, json2xml, entryTypes, formats, tableToMarkdown, underscoreToCamelCase, \
- flattenCell, date_to_timestamp, datetime, timedelta, camelize, pascalToSpace, argToList, \
- remove_nulls_from_dictionary, is_error, get_error, hash_djb2, fileResult, is_ip_valid, get_demisto_version, \
- IntegrationLogger, parse_date_string, IS_PY3, PY_VER_MINOR, DebugLogger, b64_encode, parse_date_range, \
- return_outputs, is_filename_valid, convert_dict_values_bytes_to_str, \
- argToBoolean, ipv4Regex, ipv4cidrRegex, ipv6cidrRegex, urlRegex, ipv6Regex, domainRegex, batch, FeedIndicatorType, \
- encode_string_results, safe_load_json, remove_empty_elements, aws_table_to_markdown, is_demisto_version_ge, \
- appendContext, auto_detect_indicator_type, handle_proxy, get_demisto_version_as_str, get_x_content_info_headers, \
- url_to_clickable_markdown, WarningsHandler, DemistoException, SmartGetDict, JsonTransformer, \
- remove_duplicates_from_list_arg, DBotScoreType, DBotScoreReliability, Common, send_events_to_xsiam, ExecutionMetrics, \
- response_to_context, is_integration_command_execution, is_xsiam_or_xsoar_saas, is_xsoar, is_xsoar_on_prem, \
- is_xsoar_hosted, is_xsoar_saas, is_xsiam, send_data_to_xsiam, censor_request_logs, censor_request_logs, safe_sleep
+from CommonServerPython import (xml2json, json2xml, entryTypes, formats, tableToMarkdown, underscoreToCamelCase,
+ flattenCell, date_to_timestamp, datetime, timedelta, camelize, pascalToSpace, argToList,
+ remove_nulls_from_dictionary, is_error, get_error, hash_djb2, fileResult, is_ip_valid,
+ get_demisto_version, IntegrationLogger, parse_date_string, IS_PY3, PY_VER_MINOR, DebugLogger,
+ b64_encode, parse_date_range, return_outputs, is_filename_valid, convert_dict_values_bytes_to_str,
+ argToBoolean, ipv4Regex, ipv4cidrRegex, ipv6cidrRegex, urlRegex, ipv6Regex, domainRegex, batch,
+ FeedIndicatorType, encode_string_results, safe_load_json, remove_empty_elements,
+ aws_table_to_markdown, is_demisto_version_ge, appendContext, auto_detect_indicator_type,
+ handle_proxy, get_demisto_version_as_str, get_x_content_info_headers, url_to_clickable_markdown,
+ WarningsHandler, DemistoException, SmartGetDict, JsonTransformer, remove_duplicates_from_list_arg,
+ DBotScoreType, DBotScoreReliability, Common, send_events_to_xsiam, ExecutionMetrics,
+ response_to_context, is_integration_command_execution, is_xsiam_or_xsoar_saas, is_xsoar,
+ is_xsoar_on_prem, is_xsoar_hosted, is_xsoar_saas, is_xsiam, send_data_to_xsiam,
+ censor_request_logs, censor_request_logs, safe_sleep, get_server_config
+ )
EVENTS_LOG_ERROR = \
"""Error sending new events into XSIAM.
@@ -1265,29 +1269,29 @@ def test_get_error_need_raise_error_on_non_error_input():
assert False
-# TODO: Enable this unittest once it is fixed in CIAC-10650
-# @mark.parametrize('data,data_expected,filename', [
-# ("this is a test", b"this is a test", "test.txt"),
-# ("this is a test", b"this is a test", "../../../test.txt"),
-# (u"עברית", u"עברית".encode('utf-8'), "test.txt"),
-# (b"binary data\x15\x00", b"binary data\x15\x00", "test.txt"),
-# ]) # noqa: E124
-# def test_fileResult(mocker, request, data, data_expected, filename):
-# mocker.patch.object(demisto, 'uniqueFile', return_value="test_file_result")
-# mocker.patch.object(demisto, 'investigation', return_value={'id': '1'})
-# file_name = "1_test_file_result"
+@mark.parametrize('data,data_expected,filename', [
+ ("this is a test", b"this is a test", "test.txt"),
+ ("this is a test", b"this is a test", "../../../test.txt"),
+ (u"עברית", u"עברית".encode('utf-8'), "test.txt"),
+ (b"binary data\x15\x00", b"binary data\x15\x00", "test.txt"),
+]) # noqa: E124
+def test_fileResult(mocker, request, data, data_expected, filename):
+ file_id = str(uuid.uuid4())
+ mocker.patch.object(demisto, 'uniqueFile', return_value="fileresult")
+ mocker.patch.object(demisto, 'investigation', return_value={'id': file_id})
+ file_name = "{}_fileresult".format(file_id)
-# def cleanup():
-# try:
-# os.remove(file_name)
-# except OSError:
-# pass
+ def cleanup():
+ try:
+ os.remove(file_name)
+ except OSError:
+ pass
-# request.addfinalizer(cleanup)
-# res = fileResult(filename, data)
-# assert res['File'] == "test.txt"
-# with open(file_name, 'rb') as f:
-# assert f.read() == data_expected
+ request.addfinalizer(cleanup)
+ res = fileResult(filename, data)
+ assert res['File'] == "test.txt"
+ with open(file_name, 'rb') as f:
+ assert f.read() == data_expected
# Error that always returns a unicode string as its str representation
@@ -1566,6 +1570,48 @@ def test_is_mac_address():
assert (is_mac_address(mac_address_true))
+def test_return_error_truncated_message(mocker):
+ """
+ Given
+ - invalid error message due to longer than max length (50,000)
+
+ When
+ - return_error function is called
+
+ Then
+ - Return a truncated message that contains clarification about the truncation
+ """
+ from CommonServerPython import return_error, MAX_ERROR_MESSAGE_LENGTH
+ err_msg = "1" * (MAX_ERROR_MESSAGE_LENGTH + 1)
+ results = mocker.spy(demisto, 'results')
+ mocker.patch.object(sys, 'exit')
+ return_error(err_msg)
+ assert len(results.call_args[0][0]["Contents"]) == MAX_ERROR_MESSAGE_LENGTH + \
+ len("...This error body was truncated...")
+ assert "This error body was truncated" in results.call_args[0][0]["Contents"]
+
+
+def test_return_error_valid_message(mocker):
+ """
+ Given
+ - A valid error message
+
+ When
+ - return_error function is called
+
+ Then
+ - Ensure the same message is returned
+ - Ensure the error message does not contain clarification about a truncation
+ """
+ from CommonServerPython import return_error, MAX_ERROR_MESSAGE_LENGTH
+ err_msg = "1" * int(MAX_ERROR_MESSAGE_LENGTH * 0.9)
+ results = mocker.spy(demisto, 'results')
+ mocker.patch.object(sys, 'exit')
+ return_error(err_msg)
+ assert len(results.call_args[0][0]["Contents"]) == len(err_msg)
+ assert "This error body was truncated" not in results.call_args[0][0]["Contents"]
+
+
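A minimal sketch of the truncation behavior these two tests imply; _truncate_error_message is a hypothetical helper, not the actual return_error implementation, and only the marker text and MAX_ERROR_MESSAGE_LENGTH are taken from the tests and from CommonServerPython.

def _truncate_error_message(message, max_length=MAX_ERROR_MESSAGE_LENGTH):
    # Sketch only: append the truncation marker when the message exceeds the limit.
    marker = "...This error body was truncated..."
    if len(message) > max_length:
        return message[:max_length] + marker
    return message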
def test_return_error_command(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
@@ -9726,3 +9772,51 @@ def test_sleep_mocked_time(mocker):
# Verify sleep duration based on mocked time difference
assert sleep_mocker.call_count == 2
+
+
+def test_get_server_config(mocker):
+ mock_response = {
+ 'body': '{"sysConf":{"incident.closereasons":"CustomReason1, CustomReason 2, Foo","versn":40},"defaultMap":{}}\n',
+ 'headers': {
+ 'Content-Length': ['104'],
+ 'X-Xss-Protection': ['1; mode=block'],
+ 'X-Content-Type-Options': ['nosniff'],
+ 'Strict-Transport-Security': ['max-age=10886400000000000; includeSubDomains'],
+ 'Vary': ['Accept-Encoding'],
+ 'Server-Timing': ['7'],
+ 'Date': ['Wed, 03 Jul 2010 09:11:35 GMT'],
+ 'X-Frame-Options': ['DENY'],
+ 'Content-Type': ['application/json']
+ },
+ 'status': '200 OK',
+ 'statusCode': 200
+ }
+
+ mocker.patch.object(demisto, 'internalHttpRequest', return_value=mock_response)
+ server_config = get_server_config()
+ assert server_config == {'incident.closereasons': 'CustomReason1, CustomReason 2, Foo', 'versn': 40}
+
+
+@pytest.mark.skipif(not IS_PY3, reason='test not supported in py2')
+def test_get_server_config_fail(mocker):
+ mock_response = {
+ 'body': 'NOT A VALID JSON',
+ 'headers': {
+ 'Content-Length': ['104'],
+ 'X-Xss-Protection': ['1; mode=block'],
+ 'X-Content-Type-Options': ['nosniff'],
+ 'Strict-Transport-Security': ['max-age=10886400000000000; includeSubDomains'],
+ 'Vary': ['Accept-Encoding'],
+ 'Server-Timing': ['7'],
+ 'Date': ['Wed, 03 Jul 2010 09:11:35 GMT'],
+ 'X-Frame-Options': ['DENY'],
+ 'Content-Type': ['application/json']
+ },
+ 'status': '200 OK',
+ 'statusCode': 200
+ }
+
+ mocker.patch.object(demisto, 'internalHttpRequest', return_value=mock_response)
+ mocked_error = mocker.patch.object(demisto, 'error')
+ assert get_server_config() == {}
+ assert mocked_error.call_args[0][0] == 'Error decoding JSON: Expecting value: line 1 column 1 (char 0)'
diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.py b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.py
index d3b7051a3fe3..4069eaedd7a6 100644
--- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.py
+++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.py
@@ -14,10 +14,30 @@
import tempfile
from http.server import HTTPServer
+
+def find_unused_port() -> int: # pragma: no cover
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ sock.bind(('localhost', 0)) # tries to bind any available port on the os
+ return sock.getsockname()[1]
+ except Exception:
+ start_port, end_port = 10000, 30000
+ for port in range(start_port, end_port + 1):
+ is_connection_success = sock.connect_ex(('localhost', port))
+ if is_connection_success == 0:
+ demisto.debug(f'Port {port} is already used')
+ else:
+ demisto.debug(f'Port {port} is free')
+ return port
+ raise RuntimeError("Could not find available ports")
+ finally:
+ sock.close()
+
+
WORKING_DIR = Path("/app")
DISABLE_LOGOS = True # Bugfix before sane-reports can work with image files.
MD_IMAGE_PATH = '/markdown/image'
-MD_HTTP_PORT = 10888
+MD_HTTP_PORT = find_unused_port()
SERVER_OBJECT = None
MD_IMAGE_SUPPORT_MIN_VER = '6.5'
TABLE_TEXT_MAX_LENGTH_SUPPORT_MIN_VER = '7.0'
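A standalone illustration of the bind-to-port-0 idiom that find_unused_port() above relies on: the OS assigns any free ephemeral port, which the markdown HTTP server then reuses. This snippet is not part of the script.

import socket

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind(('localhost', 0))          # port 0 asks the OS for any free ephemeral port
    print(s.getsockname()[1])         # e.g. 52814, the value find_unused_port() would return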
@@ -44,7 +64,7 @@ def find_zombie_processes():
return zombies, ps_out
-def quit_driver_and_reap_children(killMarkdownServer):
+def quit_driver_and_reap_children(killMarkdownServer): # pragma: no cover
try:
if killMarkdownServer:
# Kill Markdown artifacts server
@@ -66,7 +86,7 @@ def quit_driver_and_reap_children(killMarkdownServer):
demisto.error(f'Failed checking for zombie processes: {e}. Trace: {traceback.format_exc()}')
-def startServer():
+def startServer(): # pragma: no cover
class fileHandler(http.server.BaseHTTPRequestHandler):
# See: https://docs.python.org/3/library/http.server.html#http.server.BaseHTTPRequestHandler.log_message
# Need to override otherwise messages are logged to stderr
@@ -114,10 +134,12 @@ def do_GET(self):
global SERVER_OBJECT
SERVER_OBJECT = HTTPServer(server_address=('', MD_HTTP_PORT), RequestHandlerClass=fileHandler)
# Start the web server
+ demisto.debug(f"starting markdown server on port {MD_HTTP_PORT}")
SERVER_OBJECT.serve_forever()
-def main():
+def main(): # pragma: no cover
+ isMDImagesSupported = is_demisto_version_ge(MD_IMAGE_SUPPORT_MIN_VER)
try:
sane_json_b64 = demisto.args().get('sane_pdf_report_base64', '').encode(
'utf-8')
@@ -175,6 +197,13 @@ def main():
with open(input_file, 'wb') as f:
f.write(base64.b64decode(sane_json_b64))
+ if headerLeftImage:
+ customer_logo_file_path = tmpdir + "/customer-logo-base64.txt"
+ with open(customer_logo_file_path, "w") as f:
+ f.write(headerLeftImage)
+ extra_cmd = extra_cmd.replace(headerLeftImage, customer_logo_file_path)
+ headerLeftImage = customer_logo_file_path
+
cmd = ['./reportsServer', input_file, output_file, dist_dir] + shlex.split(
extra_cmd)
@@ -189,15 +218,14 @@ def main():
if isMDImagesSupported:
params += f', markdownArtifactsServerAddress="{mdServerAddress}"'
- LOG(f"Sane-pdf parameters: {params}]")
+ demisto.debug(f"Sane-PDF parameters: {params}]")
cmd_string = " ".join(cmd)
- LOG(f"Sane-pdf cmd: {cmd_string}")
- LOG.print_log()
+            demisto.debug(f'Sane-PDF report command: {cmd_string}')
# Execute the report creation
out = subprocess.check_output(cmd, cwd=WORKING_DIR,
stderr=subprocess.STDOUT)
- LOG(f"Sane-pdf output: {str(out)}")
+ demisto.debug(f"Sane-pdf output: {str(out)}")
with open(output_file, 'rb') as f:
encoded = base64.b64encode(f.read()).decode('utf-8', 'ignore')
diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
index 04f6a4f5f1f5..ff78d143951b 100644
--- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
+++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
@@ -51,7 +51,7 @@ tags:
- pdf
timeout: '0'
type: python
-dockerimage: demisto/sane-pdf-reports:1.0.0.95847
+dockerimage: demisto/sane-pdf-reports:1.0.0.99838
runas: DBotWeakRole
tests:
- No Test
diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport_test.py b/Packs/Base/Scripts/SanePdfReport/SanePdfReport_test.py
index d29b3c904daa..2a2bb332d1bf 100644
--- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport_test.py
+++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport_test.py
@@ -50,6 +50,7 @@ def test_sane_pdf_report(mocker):
def test_markdown_image_server(mocker, capfd):
+ from SanePdfReport import MD_HTTP_PORT
with capfd.disabled():
mocker.patch.object(demisto, 'results')
fileName = '1234-5678-9012-3456.png'
@@ -63,7 +64,7 @@ def test_markdown_image_server(mocker, capfd):
time.sleep(5)
# wrong path
- conn = http.client.HTTPConnection("localhost", 10888)
+ conn = http.client.HTTPConnection("localhost", MD_HTTP_PORT)
conn.request("GET", "/wrong/path")
res1 = conn.getresponse()
assert res1.status == 400
diff --git a/Packs/Base/Scripts/ValidateContent/ValidateContent.yml b/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
index e9078451d7b6..f87c431442e2 100644
--- a/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
+++ b/Packs/Base/Scripts/ValidateContent/ValidateContent.yml
@@ -43,6 +43,6 @@ scripttarget: 0
timeout: 600ns
runas: DBotWeakRole
fromversion: 5.5.0
-dockerimage: demisto/xsoar-tools:1.0.0.90942
+dockerimage: demisto/xsoar-tools:1.0.0.99061
tests:
- ValidateContent - Test
diff --git a/Packs/Base/TestPlaybooks/playbook-IPv6_extraction_test.yml b/Packs/Base/TestPlaybooks/playbook-IPv6_extraction_test.yml
index 5181dd28bc52..9dc77fec247a 100644
--- a/Packs/Base/TestPlaybooks/playbook-IPv6_extraction_test.yml
+++ b/Packs/Base/TestPlaybooks/playbook-IPv6_extraction_test.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 1b434045-2df3-4f80-8b9f-157dca93b8cf
+ taskid: 2156b8d3-82ba-4c2e-835e-766238e62366
type: start
task:
- id: 1b434045-2df3-4f80-8b9f-157dca93b8cf
+ id: 2156b8d3-82ba-4c2e-835e-766238e62366
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: 20f8a1c6-e3d8-44ec-81b7-1edbd55f9afe
+ taskid: df39c0da-9a4a-46dd-86c4-86cdaf6d66ba
type: regular
task:
- id: 20f8a1c6-e3d8-44ec-81b7-1edbd55f9afe
+ id: df39c0da-9a4a-46dd-86c4-86cdaf6d66ba
version: -1
name: DeleteContext
description: Delete field from context
@@ -71,10 +71,10 @@ tasks:
isautoswitchedtoquietmode: false
"49":
id: "49"
- taskid: e05746d3-898e-419a-8f1e-5e7be71f6bdd
+ taskid: fe913f29-c515-4092-8c4b-cb7c06a5b5f4
type: title
task:
- id: e05746d3-898e-419a-8f1e-5e7be71f6bdd
+ id: fe913f29-c515-4092-8c4b-cb7c06a5b5f4
version: -1
name: IPv6
type: title
@@ -102,10 +102,10 @@ tasks:
isautoswitchedtoquietmode: false
"51":
id: "51"
- taskid: d52862ae-52e1-4df5-8f5d-8d862b07c1be
+ taskid: 059ed22d-567e-4cb0-8b40-f0f2b595aadd
type: regular
task:
- id: d52862ae-52e1-4df5-8f5d-8d862b07c1be
+ id: 059ed22d-567e-4cb0-8b40-f0f2b595aadd
version: -1
name: Print valid IPv6s
description: Prints text to war room (Markdown supported)
@@ -146,10 +146,10 @@ tasks:
isautoswitchedtoquietmode: false
"53":
id: "53"
- taskid: 49575ef2-c65e-49b8-8e47-a19e729b8c59
+ taskid: fe235c83-0846-4744-8cf0-1a18f4ed3cc7
type: regular
task:
- id: 49575ef2-c65e-49b8-8e47-a19e729b8c59
+ id: fe235c83-0846-4744-8cf0-1a18f4ed3cc7
version: -1
name: Set invalid IPv6s
description: Sets a value into the context with the given context key
@@ -164,7 +164,7 @@ tasks:
key:
simple: invalid_ipv6s
value:
- simple: '"2001:db8:::1","fe80::123::5678","2001::db8::1","::fffff:192.0.2.1","2001:db8:::1%eth0","fe80::1%eth0%vlan0"'
+ simple: '"2001:db8:::1","fe80::123::5678","2001::db8::1","::fffff:192.0.2.1","2001:db8:::1%eth0","fe80::1%eth0%vlan0","Event::Endpoint::Denc::Encrypt"'
separatecontext: false
continueonerrortype: ""
view: |-
@@ -183,10 +183,10 @@ tasks:
isautoswitchedtoquietmode: false
"54":
id: "54"
- taskid: 1eb6d5b6-14b7-4593-87fe-f29d597448aa
+ taskid: a34570bc-879a-454c-8f46-1e8099825eab
type: regular
task:
- id: 1eb6d5b6-14b7-4593-87fe-f29d597448aa
+ id: a34570bc-879a-454c-8f46-1e8099825eab
version: -1
name: Print invalid IPv6s
description: Prints text to war room (Markdown supported)
@@ -203,6 +203,7 @@ tasks:
- "95"
- "96"
- "97"
+ - "98"
scriptarguments:
value:
simple: ${invalid_ipv6s}
@@ -225,10 +226,10 @@ tasks:
isautoswitchedtoquietmode: false
"55":
id: "55"
- taskid: 2640d463-ee23-4878-8118-73e423df644c
+ taskid: 06ac2615-9f2a-4f1a-8959-4a91ac3b6290
type: condition
task:
- id: 2640d463-ee23-4878-8118-73e423df644c
+ id: 06ac2615-9f2a-4f1a-8959-4a91ac3b6290
version: -1
name: 2001:db8:::1 - Double colons in the middle
type: condition
@@ -266,10 +267,10 @@ tasks:
isautoswitchedtoquietmode: false
"73":
id: "73"
- taskid: 3e66fd61-0094-4b4f-8943-a6a94a2524e1
+ taskid: 24631531-2e39-476f-8ba3-99ad7a897d46
type: regular
task:
- id: 3e66fd61-0094-4b4f-8943-a6a94a2524e1
+ id: 24631531-2e39-476f-8ba3-99ad7a897d46
version: -1
name: Set valid IPv6s
description: Sets a value into the context with the given context key
@@ -284,7 +285,7 @@ tasks:
key:
simple: valid_ipv6s
value:
- simple: '"fe80::1","::1","2001:0db8:0000:0000:0000:0000:0000:0001","2001:db8:1:0:0:0:0:1","::ffff:192.0.2.1","fe80::7:8%eth0","ff02::1:ff00:1","ff02:1::ff00:1","ff02:1:ff00::1"'
+ simple: '"fe80::1","::1","2001:0db8:0000:0000:0000:0000:0000:0001","2001:db8:1:0:0:0:0:1","::ffff:192.0.2.1","ff02::1:ff00:1","ff02:1::ff00:1","ff02:1:ff00::1","fe80::7:8%eth0"'
separatecontext: false
continueonerrortype: ""
view: |-
@@ -303,10 +304,10 @@ tasks:
isautoswitchedtoquietmode: false
"80":
id: "80"
- taskid: cdd1e174-e0e3-4b6d-856e-fdb35a79d9e5
+ taskid: 82471283-39e7-4911-8e5b-f6b07e36a3fa
type: regular
task:
- id: cdd1e174-e0e3-4b6d-856e-fdb35a79d9e5
+ id: 82471283-39e7-4911-8e5b-f6b07e36a3fa
version: -1
name: DeleteContext
description: Delete field from context
@@ -335,10 +336,10 @@ tasks:
isautoswitchedtoquietmode: false
"81":
id: "81"
- taskid: e0243c9f-cecc-43b2-8185-3340cd216916
+ taskid: 0cf20ef1-5c52-4078-86fa-98e38f16df8b
type: condition
task:
- id: e0243c9f-cecc-43b2-8185-3340cd216916
+ id: 0cf20ef1-5c52-4078-86fa-98e38f16df8b
version: -1
name: fe80::123::5678 - Multiple consecutive colons
type: condition
@@ -376,10 +377,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 27750af8-3990-42e0-8239-4045b1e09c2b
+ taskid: 42efe980-0d4d-422e-8bb4-46c0082183e7
type: condition
task:
- id: 27750af8-3990-42e0-8239-4045b1e09c2b
+ id: 42efe980-0d4d-422e-8bb4-46c0082183e7
version: -1
name: Collapsed address - ff02::1:ff00:1
type: condition
@@ -417,10 +418,10 @@ tasks:
isautoswitchedtoquietmode: false
"85":
id: "85"
- taskid: 27e609d6-d063-486e-8e98-309cd09059e8
+ taskid: 70325c9b-121a-45e1-8334-e7044946f672
type: condition
task:
- id: 27e609d6-d063-486e-8e98-309cd09059e8
+ id: 70325c9b-121a-45e1-8334-e7044946f672
version: -1
name: local IPv6 with zone identifier - fe80::7:8%eth0
type: condition
@@ -458,10 +459,10 @@ tasks:
isautoswitchedtoquietmode: false
"86":
id: "86"
- taskid: a757a4ae-747f-45c4-8ca4-b270efccf8a5
+ taskid: 7943746b-af8d-4123-8ab9-90dfcf759036
type: condition
task:
- id: a757a4ae-747f-45c4-8ca4-b270efccf8a5
+ id: 7943746b-af8d-4123-8ab9-90dfcf759036
version: -1
name: IPv4-compatible IPv6 address - ::ffff:192.0.2.1
type: condition
@@ -499,10 +500,10 @@ tasks:
isautoswitchedtoquietmode: false
"87":
id: "87"
- taskid: ee664d04-7d2d-4bf9-8353-64dae4dfc546
+ taskid: 7e7f9ae4-d8a8-4657-88c9-a3ec82ac4ef4
type: condition
task:
- id: ee664d04-7d2d-4bf9-8353-64dae4dfc546
+ id: 7e7f9ae4-d8a8-4657-88c9-a3ec82ac4ef4
version: -1
name: Local Address - fe80::1
type: condition
@@ -540,10 +541,10 @@ tasks:
isautoswitchedtoquietmode: false
"88":
id: "88"
- taskid: a2218277-526e-479f-8d5d-03cc26f0d5d8
+ taskid: bcadde02-28cd-4a28-8715-4d7ed08cc32c
type: condition
task:
- id: a2218277-526e-479f-8d5d-03cc26f0d5d8
+ id: bcadde02-28cd-4a28-8715-4d7ed08cc32c
version: -1
name: Collapsed zeros - 2001:db8:1:0:0:0:0:1
type: condition
@@ -581,10 +582,10 @@ tasks:
isautoswitchedtoquietmode: false
"89":
id: "89"
- taskid: 8f8fb385-6766-4113-8d85-1316d760bb07
+ taskid: a317ea7b-7123-47cd-849c-4832157392b2
type: condition
task:
- id: 8f8fb385-6766-4113-8d85-1316d760bb07
+ id: a317ea7b-7123-47cd-849c-4832157392b2
version: -1
name: Leading zeros - 2001:0db8:0000:0000:0000:0000:0000:0001
type: condition
@@ -622,10 +623,10 @@ tasks:
isautoswitchedtoquietmode: false
"90":
id: "90"
- taskid: 75aae3f7-5224-4ea2-8226-16a3d6511b60
+ taskid: 1be167b6-ab37-423b-8905-26dfdc7d5f62
type: condition
task:
- id: 75aae3f7-5224-4ea2-8226-16a3d6511b60
+ id: 1be167b6-ab37-423b-8905-26dfdc7d5f62
version: -1
name: Loopback - ::1
type: condition
@@ -663,10 +664,10 @@ tasks:
isautoswitchedtoquietmode: false
"91":
id: "91"
- taskid: d6d9ce83-d0a7-4dff-8a1b-3ae028232253
+ taskid: d5b94802-7b18-424c-8b92-1fefe85b2112
type: condition
task:
- id: d6d9ce83-d0a7-4dff-8a1b-3ae028232253
+ id: d5b94802-7b18-424c-8b92-1fefe85b2112
version: -1
name: Collapsed address mid - ff02:1::ff00:1
type: condition
@@ -704,10 +705,10 @@ tasks:
isautoswitchedtoquietmode: false
"92":
id: "92"
- taskid: 2db343e3-4a96-41e2-8501-826d5996ac36
+ taskid: a8f026df-0874-4fec-8410-ef1edae1dc50
type: condition
task:
- id: 2db343e3-4a96-41e2-8501-826d5996ac36
+ id: a8f026df-0874-4fec-8410-ef1edae1dc50
version: -1
name: Collapsed address mid - ff02:1:ff00::1
type: condition
@@ -745,10 +746,10 @@ tasks:
isautoswitchedtoquietmode: false
"93":
id: "93"
- taskid: 4ccb6e56-a3d3-4573-8058-cda5d34a6f1b
+ taskid: 67fa7fdc-1648-410a-8b49-61b8cfc78597
type: condition
task:
- id: 4ccb6e56-a3d3-4573-8058-cda5d34a6f1b
+ id: 67fa7fdc-1648-410a-8b49-61b8cfc78597
version: -1
name: 2001::db8::1 - Double colons not at the start or end
type: condition
@@ -786,10 +787,10 @@ tasks:
isautoswitchedtoquietmode: false
"94":
id: "94"
- taskid: 9395253d-35a2-4c28-86f5-4dc8885e1b62
+ taskid: 05e30fc9-c3cd-4e75-8e9f-1b83f3ce214d
type: condition
task:
- id: 9395253d-35a2-4c28-86f5-4dc8885e1b62
+ id: 05e30fc9-c3cd-4e75-8e9f-1b83f3ce214d
version: -1
name: ::fffff:192.0.2.1 - Too many hexadecimal digits in one group
type: condition
@@ -827,10 +828,10 @@ tasks:
isautoswitchedtoquietmode: false
"95":
id: "95"
- taskid: 9a79bb83-5353-4c1a-85a1-cdc49bc94aee
+ taskid: a6c27f45-947b-4a56-8ce9-b46b5dcc24a4
type: condition
task:
- id: 9a79bb83-5353-4c1a-85a1-cdc49bc94aee
+ id: a6c27f45-947b-4a56-8ce9-b46b5dcc24a4
version: -1
name: 2001:db8:::1%eth0 - Double colons with zone identifier
type: condition
@@ -868,10 +869,10 @@ tasks:
isautoswitchedtoquietmode: false
"96":
id: "96"
- taskid: 7c000ee0-fb0b-4728-8dba-4ed982597055
+ taskid: 45fc9842-24f6-47e8-8f9a-e6b5ebda5b0e
type: condition
task:
- id: 7c000ee0-fb0b-4728-8dba-4ed982597055
+ id: 45fc9842-24f6-47e8-8f9a-e6b5ebda5b0e
version: -1
name: fe80::1%eth0%vlan0 - Multiple zone identifiers
type: condition
@@ -909,10 +910,10 @@ tasks:
isautoswitchedtoquietmode: false
"97":
id: "97"
- taskid: 8b3df2c7-76d8-4af7-8117-a28e17ad8515
+ taskid: 7f26cb6f-4bd8-4ee1-8f25-62b2ab3a07ff
type: condition
task:
- id: 8b3df2c7-76d8-4af7-8117-a28e17ad8515
+ id: 7f26cb6f-4bd8-4ee1-8f25-62b2ab3a07ff
version: -1
name: 00:1A:2B:3C:4D:5E - MAC Address
type: condition
@@ -948,13 +949,54 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "98":
+ id: "98"
+ taskid: f7c5985a-764f-4052-8879-9be0e62e5430
+ type: condition
+ task:
+ id: f7c5985a-764f-4052-8879-9be0e62e5430
+ version: -1
+ name: Event::Endpoint::Denc::Encrypt - Event
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "80"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: notContainsGeneral
+ left:
+ value:
+ simple: IPv6.Address
+ iscontext: true
+ right:
+ value:
+ simple: c::E
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3510,
+ "y": 1390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
"height": 1610,
- "width": 3820,
+ "width": 3840,
"x": 50,
"y": 50
}
diff --git a/Packs/Base/pack_metadata.json b/Packs/Base/pack_metadata.json
index 772b3d69651a..7809b945f12f 100644
--- a/Packs/Base/pack_metadata.json
+++ b/Packs/Base/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Base",
"description": "The base pack for Cortex XSOAR.",
"support": "xsoar",
- "currentVersion": "1.34.9",
+ "currentVersion": "1.34.27",
"author": "Cortex XSOAR",
"serverMinVersion": "6.0.0",
"url": "https://www.paloaltonetworks.com/cortex",
diff --git a/Packs/BitDam/Integrations/BitDam/BitDam.yml b/Packs/BitDam/Integrations/BitDam/BitDam.yml
index 978661ce70ed..6ad1c6a90936 100644
--- a/Packs/BitDam/Integrations/BitDam/BitDam.yml
+++ b/Packs/BitDam/Integrations/BitDam/BitDam.yml
@@ -13,7 +13,7 @@ configuration:
required: true
- display: API Token
name: apitoken
- defaultvalue: ""
+ defaultvalue: ''
type: 4
required: true
- display: Trust any certificate (not secure)
@@ -77,7 +77,7 @@ script:
type: string
description: Returns the verdict for a scanned file.
runonce: false
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
tests:
- Detonate File - BitDam Test
fromversion: 5.0.0
diff --git a/Packs/BitDam/ReleaseNotes/1_0_14.md b/Packs/BitDam/ReleaseNotes/1_0_14.md
new file mode 100644
index 000000000000..840d3942b629
--- /dev/null
+++ b/Packs/BitDam/ReleaseNotes/1_0_14.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### BitDam
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/BitDam/doc_files/Detonate_File_-_BitDam.png b/Packs/BitDam/doc_files/Detonate_File_-_BitDam.png
new file mode 100644
index 000000000000..5861ecbaee4e
Binary files /dev/null and b/Packs/BitDam/doc_files/Detonate_File_-_BitDam.png differ
diff --git a/Packs/BitDam/pack_metadata.json b/Packs/BitDam/pack_metadata.json
index 7d2cf47dbe65..7d9484582b64 100644
--- a/Packs/BitDam/pack_metadata.json
+++ b/Packs/BitDam/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "BitDam",
"description": "BitDam secure email gateway protects from advanced content-borne threats with the most accurate prevention of known and unknown threats, at their source.",
"support": "community",
- "currentVersion": "1.0.13",
+ "currentVersion": "1.0.14",
"author": "BitDam",
"url": "https://www.bitdam.com",
"email": "support@bitdam.com",
diff --git a/Packs/BitcoinAbuse/ReleaseNotes/1_0_42.md b/Packs/BitcoinAbuse/ReleaseNotes/1_0_42.md
new file mode 100644
index 000000000000..e13d10e60e74
--- /dev/null
+++ b/Packs/BitcoinAbuse/ReleaseNotes/1_0_42.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### BitcoinAbuse Feed (Deprecated)
+
+Maintenance and stability enhancements.
\ No newline at end of file
diff --git a/Packs/BitcoinAbuse/pack_metadata.json b/Packs/BitcoinAbuse/pack_metadata.json
index 162f442e0cb5..5415bc0ddd34 100644
--- a/Packs/BitcoinAbuse/pack_metadata.json
+++ b/Packs/BitcoinAbuse/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Deprecated. No available replacement.",
"serverMinVersion": "5.5.0",
"support": "xsoar",
- "currentVersion": "1.0.41",
+ "currentVersion": "1.0.42",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/BluelivThreatContext/Integrations/BluelivThreatContext/BluelivThreatContext.yml b/Packs/BluelivThreatContext/Integrations/BluelivThreatContext/BluelivThreatContext.yml
index f704970b21c8..1605d231b7c6 100644
--- a/Packs/BluelivThreatContext/Integrations/BluelivThreatContext/BluelivThreatContext.yml
+++ b/Packs/BluelivThreatContext/Integrations/BluelivThreatContext/BluelivThreatContext.yml
@@ -637,7 +637,7 @@ script:
- contextPath: BluelivThreatContext.signature.type
description: Signature type.
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/BluelivThreatContext/ReleaseNotes/1_0_10.md b/Packs/BluelivThreatContext/ReleaseNotes/1_0_10.md
new file mode 100644
index 000000000000..f4f03e975627
--- /dev/null
+++ b/Packs/BluelivThreatContext/ReleaseNotes/1_0_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Blueliv ThreatContext
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/BluelivThreatContext/pack_metadata.json b/Packs/BluelivThreatContext/pack_metadata.json
index d107e8e5d43e..492910a21878 100644
--- a/Packs/BluelivThreatContext/pack_metadata.json
+++ b/Packs/BluelivThreatContext/pack_metadata.json
@@ -2,8 +2,8 @@
"name": "Blueliv ThreatContext",
"description": "The Threat Context module provides SOC, Incident Response and Threat Intelligence teams with continuously updated and intuitive information around threat actors, campaigns, malware indicators, attack patterns, tools, signatures and CVEs. Analysts can rapidly gather enriched, contextualized information to enhance cybersecurity processes before, during and after an attack.",
"support": "community",
- "currentVersion": "1.0.9",
- "author": "Oriol Campderr\u00f3s",
+ "currentVersion": "1.0.10",
+ "author": "Oriol Campderrós",
"url": " ",
"email": "",
"created": "2020-06-08T15:37:54Z",
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
index ecb3b8105e6f..29690795fd97 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.py
@@ -3,7 +3,7 @@
# type: ignore
# mypy: ignore-errors
from copy import deepcopy
-from typing import Callable, Tuple
+from collections.abc import Callable
from datetime import datetime
@@ -351,9 +351,8 @@ def retrieve_access_token(self, username: str, password: str) -> str:
"""
integration_context = get_integration_context()
now = int(datetime.now().timestamp())
- if integration_context.get("token") and integration_context.get("expires_in"):
- if now < integration_context["expires_in"]:
- return integration_context["token"]
+ if integration_context.get("token") and integration_context.get("expires_in") and now < integration_context["expires_in"]:
+ return integration_context["token"]
try:
token = self._http_request(
@@ -2914,7 +2913,7 @@ def get_paginated_records_with_hr(
limit: Optional[int],
page: int = None,
page_size: int = None,
-) -> Tuple[list, str]:
+) -> tuple[list, str]:
"""
Retrieve the required page either with Automatic or Manual pagination,
and the matching readable output header.
@@ -2995,7 +2994,7 @@ def validate_related_arguments_provided(**related_args):
def extract_args_from_additional_fields_arg(additional_fields: str,
- field_name: str) -> Tuple[Any, List[str]]:
+ field_name: str) -> tuple[Any, List[str]]:
"""
Extract dictionary structure from additional field argument.
@@ -3298,7 +3297,7 @@ def fetch_relevant_tickets(
impact_filter: List[str],
urgency_filter: List[str],
custom_query: str,
-) -> Tuple[list, dict]:
+) -> tuple[list, dict]:
"""
Fetch the relevant tickets according to the provided filter arguments.
The Tickets are fetched Iteratively, by their ticket type until the capacity
@@ -3340,9 +3339,9 @@ def fetch_relevant_tickets(
tickets_capacity -= tickets_amount
if fetched_tickets:
- last_ticket_create_time = total_tickets[-1].get("CreateDate")
- ticket_type_to_last_epoch[ticket_type] = date_to_epoch_for_fetch(
- arg_to_datetime(last_ticket_create_time))
+ ticket_type_to_last_epoch[ticket_type] = max(
+ [date_to_epoch_for_fetch(arg_to_datetime(ticket.get("CreateDate")))
+ for ticket in total_tickets])
if tickets_capacity <= 0: # no more tickets to retrieve in the current fetch
break
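A hedged illustration of the fix above: when tickets are not returned in ascending order, taking the CreateDate of the last element can move the fetch cursor backwards, while max() over all fetched tickets cannot. date_to_epoch_for_fetch and arg_to_datetime are the helpers already used in this function; the ticket values are sample data.

tickets = [
    {"CreateDate": "2024-06-29T14:38:36.000+0000"},
    {"CreateDate": "2022-06-29T14:38:36.000+0000"},  # an older ticket returned last
]
last_epoch = max(
    date_to_epoch_for_fetch(arg_to_datetime(ticket.get("CreateDate"))) for ticket in tickets
)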
@@ -3425,10 +3424,7 @@ def all_keys_empty(dict_obj: Dict[str, Any]) -> bool:
Returns:
        bool: Whether or not all keys have a None value.
"""
- for value in dict_obj.values():
- if value:
- return False
- return True
+ return all(not value for value in dict_obj.values())
def gen_multi_filters_statement(filter_mapper: Dict[str, Any], oper_in_filter: str,
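A quick sanity check of the all()-based rewrite of all_keys_empty above; the behavior matches the loop it replaces.

assert all_keys_empty({"a": None, "b": ""}) is True
assert all_keys_empty({"a": None, "b": "x"}) is False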
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
index d378dd20956b..224b419e8cf4 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM.yml
@@ -169,7 +169,7 @@ script:
script: ""
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.14.100715
commands:
- name: bmc-itsm-user-list
description: Retrieves a list of user profiles from BMC Helix ITSM. The records are retrieved by the query argument or by the filtering arguments. When using filtering arguments, each one defines a 'LIKE' operation and an 'AND' operator is used between them. To see the entire JSON then you can use the raw_response=true at the end of the command.
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
index 675c49954956..2173fd904a4f 100644
--- a/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
+++ b/Packs/BmcITSM/Integrations/BmcITSM/BmcITSM_test.py
@@ -2,6 +2,8 @@
import os
import pytest
from unittest.mock import patch
+from CommonServerPython import *
+
"""MOCK PARAMETERS """
CREDENTIALS = "credentials"
ACCOUNT_ID = "account_id"
@@ -22,7 +24,7 @@ def load_mock_response(file_name: str) -> str:
Returns:
str: Mock file content.
"""
- with open(os.path.join("test_data", file_name), mode="r", encoding="utf-8") as mock_file:
+ with open(os.path.join("test_data", file_name), encoding="utf-8") as mock_file:
return json.loads(mock_file.read())
@@ -1480,3 +1482,35 @@ def test_ticket_list_work_order_command(
assert result.outputs_prefix == "BmcITSM.Ticket"
assert len(outputs) == expected_outputs_len
assert outputs[0]["DisplayID"] == expected_name
+
+
+def test_fetch_command(
+ mocker
+):
+ """
+ Given:
+ - List tickets.
+ When:
+ - fetch_incidents command called.
+ Then:
+        - Ensure that the *last_create_time* in *last_run_result* is the latest among all incidents.
+ """
+ import BmcITSM
+ mock_response = load_mock_response("list_tickets_not_sorted.json")
+ expected_result = 1719671916
+ mocker.patch.object(demisto, 'getLastRun', return_value={"SRM:Request": {"last_create_time": '2021-06-29T14:38:36.000+0000'}})
+ mocker.patch.object(BmcITSM, "fetch_relevant_tickets_by_ticket_type", return_value=mock_response)
+ incidents_result, last_run_result = BmcITSM.fetch_incidents(mock_client,
+ max_fetch=2,
+ first_fetch="2022-06-29T14:38:36.000+0000",
+ last_run={"SRM:Request": {
+ "last_create_time": '2021-06-29T14:38:36.000+0000'}},
+ ticket_type_filter=["SRM:Request"],
+ status_filter=[],
+ impact_filter=[],
+ urgency_filter=[],
+ custom_query=("'Submit Date' <= \"1657032797\" AND 'Submit Date'"
+ ">\"1657032797\" AND 'Urgency' = \"4-Low\""),
+ mirror_direction="both",
+ )
+ assert last_run_result["SRM:Request"]["last_create_time"] == expected_result
diff --git a/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_not_sorted.json b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_not_sorted.json
new file mode 100644
index 000000000000..c4125a410a58
--- /dev/null
+++ b/Packs/BmcITSM/Integrations/BmcITSM/test_data/list_tickets_not_sorted.json
@@ -0,0 +1,112 @@
+[
+ {
+ "values": {
+ "SysRequestID": "000000000000402",
+ "Submitter": "appadmin",
+ "Submit Date": "2024-06-29T14:38:36.000+0000",
+ "System Assignee": null,
+ "Status": "Planning",
+ "Status-History": {
+ "Draft": {
+ "user": "appadmin",
+ "timestamp": "2022-06-29T14:38:37.000+0000"
+ },
+ "Submitted": {
+ "user": "appadmin",
+ "timestamp": "2022-06-29T14:38:37.000+0000"
+ },
+ "Waiting Approval": {
+ "user": "Remedy Application Service",
+ "timestamp": "2022-06-29T14:39:49.000+0000"
+ },
+ "Planning": {
+ "user": "Remedy Application Service",
+ "timestamp": "2022-06-29T14:39:49.000+0000"
+ }
+ },
+ "Assignee Groups": "1000000003;'appadmin';",
+ "InstanceId": "AGGAI7ZXDK9WFAR4IUA2R3J3RVIB6V",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": "",
+ "z1D_WorkInfoType": null,
+ "z1D_WorkInfoSummary": null,
+ "z1D_WorkInfoDetails": null,
+ "z1D_WorkInfoSecureLog": null,
+ "z1D_WorkInfoViewAccess": null,
+ "z1D_WorkInfoDate": null,
+ "z1D_CommunicationSource": null,
+ "Assignee Group": "Backoffice Support",
+ "Assignee": "Mary Mann",
+ "Recurring Price Basis": null,
+ "Location Company": "Calbro Services",
+ "Organization": null,
+ "Assigned Support Organization": "IT Support",
+ "Last Name": "Admin",
+ "First Name": "App",
+ "Service Location Address": "1114 Eighth Avenue, 31st Floor. \r\nNew York, New York 10036 \r\nUnited States",
+ "Internet E-mail": null,
+ "Phone Number": "###",
+ "Navigation Tier 1": "File & Print",
+ "Navigation Tier 2": null,
+ "Navigation Tier 3": null,
+ "z1D Action": null,
+ "Request Manager Group ID": null,
+ "Company": "Calbro Services",
+ "Status_Reason": null,
+ "Details": null,
+ "Urgency": "2-High",
+ "Impact": "1-Extensive/Widespread",
+ "Assigned Group": null,
+ "Request Manager": null,
+ "Assigned Support Company": "Calbro Services",
+ "Request Manager Login": null,
+ "Total Escalation Level": 0,
+ "Request Number": "REQ000000000401",
+ "Date Required": null,
+ "Next Target Date": null,
+ "SLM Status": null,
+ "Customer First Name": "App",
+ "Customer Last Name": "Admin",
+ "Customer Company": "Calbro Services",
+ "Customer Organization": null,
+ "Customer Department": null,
+ "Customer Internet E-mail": null,
+ "Customer Phone Number": "###"
+ },
+ "CreateDate": "2024-06-29T14:38:36.000+0000"
+ },
+ {
+ "values": {
+ "SysRequestID": "000000000000403",
+ "Submitter": "appadmin",
+ "Submit Date": "2022-06-29T14:38:36.000+0000",
+ "System Assignee": null,
+ "Status": "Planning",
+ "Status-History": {
+ "Draft": {
+ "user": "appadmin",
+ "timestamp": "2022-06-29T14:38:37.000+0000"
+ },
+ "Submitted": {
+ "user": "appadmin",
+ "timestamp": "2022-06-29T14:38:37.000+0000"
+ },
+ "Waiting Approval": {
+ "user": "Remedy Application Service",
+ "timestamp": "2022-06-29T14:39:43.000+0000"
+ },
+ "Planning": {
+ "user": "Remedy Application Service",
+ "timestamp": "2022-06-29T14:39:43.000+0000"
+ }
+ },
+ "Assignee Groups": "1000000003;'appadmin';",
+ "InstanceId": "AGGAI7ZXDK9WFAR4IUA2R3J3RVIB6S",
+ "Vendor Assignee Groups": null,
+ "Vendor Assignee Groups_parent": null,
+ "Assignee Groups_parent": ""
+ },
+ "CreateDate": "2022-06-29T14:38:36.000+0000"
+ }
+ ]
diff --git a/Packs/BmcITSM/ReleaseNotes/1_0_23.md b/Packs/BmcITSM/ReleaseNotes/1_0_23.md
new file mode 100644
index 000000000000..35e13be316f4
--- /dev/null
+++ b/Packs/BmcITSM/ReleaseNotes/1_0_23.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### BMC Helix ITSM
+
+- Fixed an issue in the ***fetch-incidents*** command where duplicate incidents were fetched due to the incorrect assumption that tickets pulled from BMC Helix ITSM are sorted in ascending order.
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/BmcITSM/pack_metadata.json b/Packs/BmcITSM/pack_metadata.json
index 75cb746144cf..66cfdbd19553 100644
--- a/Packs/BmcITSM/pack_metadata.json
+++ b/Packs/BmcITSM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "BMC Helix ITSM",
"description": "BMC Helix ITSM allows customers to manage service request, incident, change request, task, problem investigation, known error and work order tickets.",
"support": "xsoar",
- "currentVersion": "1.0.22",
+ "currentVersion": "1.0.23",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Bonusly/Integrations/Bonusly/Bonusly.yml b/Packs/Bonusly/Integrations/Bonusly/Bonusly.yml
index 05258e9c40ae..86d0cea6c318 100644
--- a/Packs/Bonusly/Integrations/Bonusly/Bonusly.yml
+++ b/Packs/Bonusly/Integrations/Bonusly/Bonusly.yml
@@ -59,9 +59,9 @@ script:
- defaultValue: '20'
description: 'Number of bonuses to retrieve (min: 1, max: 100) Default: 20.'
name: limit
- - description: 'The start time by which to filter returned bonuses. e.g., 2015-10-28T21:26:50Z.'
+ - description: The start time by which to filter returned bonuses. e.g., 2015-10-28T21:26:50Z.
name: start-time
- - description: 'The end time by which to filter returned bonuses, e.g., 2015-10-28T21:26:50Z.'
+ - description: The end time by which to filter returned bonuses, e.g., 2015-10-28T21:26:50Z.
name: end-time
- description: Email address of the bonus giver by which to filter results.
name: giver-email
@@ -71,7 +71,7 @@ script:
name: user-email
- description: 'Filter to get a list of bonuses by a hashtag Example: %23teamwork'
name: hashtag
- - description: 'Whether to include child responses of the bonus.'
+ - description: Whether to include child responses of the bonus.
name: include-children
- description: Whether to show private bonuses. Requires Admin API key.
name: show-private-bonuses
@@ -82,7 +82,7 @@ script:
description: Bonus ID.
type: String
- contextPath: Bonusly.Bonus.created_at
- description: 'Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.'
+ description: Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.
type: Date
- contextPath: Bonusly.Bonus.reason
description: 'The bonus message, e.g., "For signing up for the world''s favorite employee recognition solution! #problem-solving"'
@@ -196,7 +196,7 @@ script:
- description: "The bonus message, e.g., \n+10 @george and @john for #execution with that customer #collaboration with the team, and #integrity on the known vulnerabilities to the application. \n\n+10 @francesco because he is fast and detailed\n"
name: reason
required: true
- - description: 'The parent bonus ID with which to associate this bonus, e.g., 24abcdef1234567890abcdef.'
+ - description: The parent bonus ID with which to associate this bonus, e.g., 24abcdef1234567890abcdef.
name: parent-bonus-id
description: Creates a Bonusly bonus.
name: bonusly-create-bonus
@@ -205,7 +205,7 @@ script:
description: Bonus ID.
type: String
- contextPath: Bonusly.Bonus.created_at
- description: 'Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.'
+ description: Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.
type: Date
- contextPath: Bonusly.Bonus.reason
description: 'The bonus message, e.g., "For signing up for the world''s favorite employee recognition solution! #problem-solving"'
@@ -313,7 +313,7 @@ script:
description: Family bonus balance.
type: Number
- arguments:
- - description: 'ID of the bonus to get information for.'
+ - description: ID of the bonus to get information for.
name: id
required: true
description: Gets a bonus by bonus ID.
@@ -323,7 +323,7 @@ script:
description: Bonus ID.
type: String
- contextPath: Bonusly.Bonus.created_at
- description: 'Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.'
+ description: Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.
type: Date
- contextPath: Bonusly.Bonus.reason
description: 'The bonus message, e.g., "For signing up for the world''s favorite employee recognition solution! #problem-solving"'
@@ -444,7 +444,7 @@ script:
description: Bonus ID.
type: String
- contextPath: Bonusly.Bonus.created_at
- description: 'Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.'
+ description: Date the bonus was created (given), e.g., 2015-10-28T21:26:50Z.
type: Date
- contextPath: Bonusly.Bonus.reason
description: 'The bonus message, e.g., "For signing up for the world''s favorite employee recognition solution! #problem-solving"'
@@ -561,7 +561,7 @@ script:
- contextPath: Bonusly.Bonus.message
description: Message
type: Unknown
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Bonusly/ReleaseNotes/1_0_15.md b/Packs/Bonusly/ReleaseNotes/1_0_15.md
new file mode 100644
index 000000000000..cd9f63c7980e
--- /dev/null
+++ b/Packs/Bonusly/ReleaseNotes/1_0_15.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### Bonusly
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+
+#### Scripts
+
+##### IncOwnerToBonuslyUser
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/Bonusly/Scripts/IncOwnerToBonuslyUser/IncOwnerToBonuslyUser.yml b/Packs/Bonusly/Scripts/IncOwnerToBonuslyUser/IncOwnerToBonuslyUser.yml
index 68060ba74572..a6bbe222236e 100644
--- a/Packs/Bonusly/Scripts/IncOwnerToBonuslyUser/IncOwnerToBonuslyUser.yml
+++ b/Packs/Bonusly/Scripts/IncOwnerToBonuslyUser/IncOwnerToBonuslyUser.yml
@@ -25,7 +25,7 @@ tags:
- Utilities
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
runas: DBotWeakRole
fromversion: 5.0.0
tests:
diff --git a/Packs/Bonusly/pack_metadata.json b/Packs/Bonusly/pack_metadata.json
index a89dcaa402d3..6f357b014e29 100644
--- a/Packs/Bonusly/pack_metadata.json
+++ b/Packs/Bonusly/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Bonusly",
"description": "Bonus.ly is an employee recognition platform which enterprises use to for employee recognition. We're building tools to help people feel a sense of purpose and progress at work. The platform which also has an API enables employees to recognize each other by providing a point based bonus system. Bonus.ly helps your employees feel connected, engaged, and aligned is mission critical right now. Bonusly makes employee recognition easy and fun, fostering community and creating company-wide alignment. It also provides employees with positive feedback in the work that they are doing. ",
"support": "community",
- "currentVersion": "1.0.14",
+ "currentVersion": "1.0.15",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.py b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.py
index cb813fb46f4e..6a151eb69c1d 100644
--- a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.py
+++ b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.py
@@ -8,7 +8,7 @@
from cryptography import exceptions
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
-from pydantic import Field, parse_obj_as
+from pydantic import ConfigDict, Field, parse_obj_as
from SiemApiModule import * # noqa: E402
@@ -34,9 +34,7 @@ class BoxAppSettings(BaseModel):
clientID: str
clientSecret: str
appAuth: AppAuth
-
- class Config:
- arbitrary_types_allowed = True
+ model_config = ConfigDict(arbitrary_types_allowed=True)
class BoxCredentials(BaseModel):
@@ -69,9 +67,7 @@ class BoxEventsParams(BaseModel):
_normalize_after = validator('created_after', pre=True, allow_reuse=True)(
get_box_events_timestamp_format
)
-
- class Config:
- validate_assignment = True
+ model_config = ConfigDict(validate_assignment=True)
def not_gate(v):
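A minimal sketch of the pydantic v2 pattern adopted above: the nested class Config is replaced by a model_config attribute built with ConfigDict. ExampleSettings is a hypothetical model, not part of the integration.

from pydantic import BaseModel, ConfigDict


class ExampleSettings(BaseModel):
    # Equivalent to the old class Config with arbitrary_types_allowed / validate_assignment.
    model_config = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)

    name: str = 'box'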
diff --git a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
index 0fb0fd89d9a4..b1dfe24f2c9e 100644
--- a/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
+++ b/Packs/Box/Integrations/BoxEventsCollector/BoxEventsCollector.yml
@@ -57,7 +57,7 @@ script:
defaultValue: 3 days
description: Get events.
name: box-get-events
- dockerimage: demisto/auth-utils:1.0.0.91447
+ dockerimage: demisto/auth-utils:1.0.0.94075
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Box/ReleaseNotes/3_2_0.md b/Packs/Box/ReleaseNotes/3_2_0.md
new file mode 100644
index 000000000000..baaaa73a3869
--- /dev/null
+++ b/Packs/Box/ReleaseNotes/3_2_0.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Box Event Collector
+
+- Documentation and metadata improvements.
+- Updated the Docker image to: *demisto/auth-utils:1.0.0.94075*.
\ No newline at end of file
diff --git a/Packs/Box/pack_metadata.json b/Packs/Box/pack_metadata.json
index 40a037ac6cec..2c003f3c70ed 100644
--- a/Packs/Box/pack_metadata.json
+++ b/Packs/Box/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Box",
"description": "Manage Box users",
"support": "xsoar",
- "currentVersion": "3.1.44",
+ "currentVersion": "3.2.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/C2sec/doc_files/C2SEC_Domain_Scan.png b/Packs/C2sec/doc_files/C2SEC_Domain_Scan.png
new file mode 100644
index 000000000000..046049c0ab9b
Binary files /dev/null and b/Packs/C2sec/doc_files/C2SEC_Domain_Scan.png differ
diff --git a/Packs/CIRCL/Integrations/CirclCVESearch/CirclCVESearch.yml b/Packs/CIRCL/Integrations/CirclCVESearch/CirclCVESearch.yml
index 1f4fe4f8d484..786b1fa198e6 100644
--- a/Packs/CIRCL/Integrations/CirclCVESearch/CirclCVESearch.yml
+++ b/Packs/CIRCL/Integrations/CirclCVESearch/CirclCVESearch.yml
@@ -111,18 +111,18 @@ script:
description: A dictionary of CPE strings with `cpe` key for a grid.
type: String
- contextPath: DBotScore.Indicator
- description: The indicator value.
+ description: The indicator that was tested.
type: String
- contextPath: DBotScore.Score
- description: The indicator score.
+ description: The actual score.
type: Number
- contextPath: DBotScore.Type
description: The indicator type.
type: String
- contextPath: DBotScore.Vendor
- description: The vendor reporting the score of the indicator.
+ description: The vendor used to calculate the score.
type: String
- dockerimage: demisto/python3:3.10.13.83255
+ dockerimage: demisto/python3:3.10.14.99865
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/CIRCL/Integrations/CirclCVESearch/README.md b/Packs/CIRCL/Integrations/CirclCVESearch/README.md
index 7a3bd402336b..68f05e6fc132 100644
--- a/Packs/CIRCL/Integrations/CirclCVESearch/README.md
+++ b/Packs/CIRCL/Integrations/CirclCVESearch/README.md
@@ -88,10 +88,10 @@ Returns CVE information by CVE ID.
| CVE.vulnerableproduct | Dict | Vulnerable products in CPE format |
| CVE.Tags | List | A list of tags |
| CVE.Relationships | List | List of relationships for the CVE |
-| DBotScore.Indicator | String | The indicator value. |
-| DBotScore.Score | Number | The indicator score. |
+| DBotScore.Indicator | String | The indicator that was tested. |
+| DBotScore.Score | Number | The actual score. |
| DBotScore.Type | String | The indicator type. |
-| DBotScore.Vendor | String | The vendor reporting the score of the indicator. |
+| DBotScore.Vendor | String | The vendor used to calculate the score. |
##### Command Example
diff --git a/Packs/CIRCL/ReleaseNotes/1_0_23.md b/Packs/CIRCL/ReleaseNotes/1_0_23.md
new file mode 100644
index 000000000000..a327a36345e6
--- /dev/null
+++ b/Packs/CIRCL/ReleaseNotes/1_0_23.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### CIRCL CVE Search
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+- Documentation and metadata improvements.
diff --git a/Packs/CIRCL/pack_metadata.json b/Packs/CIRCL/pack_metadata.json
index 836c143c69f5..83f45f765fa9 100644
--- a/Packs/CIRCL/pack_metadata.json
+++ b/Packs/CIRCL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CIRCL",
"description": "The Computer Incident Response Center Luxembourg (CIRCL) is a government-driven initiative designed to provide a systematic response facility to computer security threats and incidents.\nThis pack includes:\n# CIRCL Passive DNS which is a database storing historical DNS records from various resources.\n# CIRCL Passive SSL is a database storing historical X.509 certificates seen per IP address. The Passive SSL historical data is indexed per IP address.\n# CIRCL CVE Search, interface to search publicly known information from security vulnerabilities in software and hardware along with their corresponding exposures.",
"support": "xsoar",
- "currentVersion": "1.0.22",
+ "currentVersion": "1.0.23",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CIRCLHashlookup/Integrations/CIRCLEHashlookup/CIRCLEHashlookup.yml b/Packs/CIRCLHashlookup/Integrations/CIRCLEHashlookup/CIRCLEHashlookup.yml
index 4e1161fb59ec..7181022fdab2 100644
--- a/Packs/CIRCLHashlookup/Integrations/CIRCLEHashlookup/CIRCLEHashlookup.yml
+++ b/Packs/CIRCLHashlookup/Integrations/CIRCLEHashlookup/CIRCLEHashlookup.yml
@@ -42,38 +42,38 @@ name: CIRCLEHashlookup
script:
commands:
- arguments: []
- description: Get information about the hash lookup database
+ description: Get information about the hash lookup database.
name: circl-info
outputs:
- contextPath: Circl.Info
- description: Info about the hashlookup database
+ description: Info about the hashlookup database.
type: string
- arguments:
- - description: List of MD5s to query
+ - description: List of MD5s to query.
isArray: true
name: md5_list
required: true
type: textArea
- description: Bulk search of MD5 hashes
+ description: Bulk search of MD5 hashes.
name: circl-bulk-md5
outputs:
- contextPath: Circl.MD5
- description: Results of bulk MD5 query
+ description: Results of bulk MD5 query.
type: string
- arguments:
- - description: List of SHA1 to search
+ - description: List of SHA1 to search.
isArray: true
name: sha1_list
required: true
type: textArea
- description: Bulk search of SHA1 hashes
+ description: Bulk search of SHA1 hashes.
name: circl-bulk-sha1
outputs:
- contextPath: Circl.SHA1
- description: Results of bulk SHA1 query
+ description: Results of bulk SHA1 query.
type: string
- arguments:
- - description: Hash to query
+ - description: Hash to query.
isArray: true
name: file
required: true
@@ -83,25 +83,25 @@ script:
name: file
outputs:
- contextPath: File.Name
- description: Name of the file
+ description: Name of the file.
type: string
- contextPath: File.Size
- description: Size of the file
+ description: Size of the file.
type: number
- contextPath: File.MD5
- description: MD5 hash of the file
+ description: MD5 hash of the file.
type: string
- contextPath: File.SHA1
- description: SHA1 hash of the file
+ description: SHA1 hash of the file.
type: string
- contextPath: File.SHA256
- description: SHA256 hash of the file
+ description: SHA256 hash of the file.
type: string
- contextPath: File.SHA512
- description: SHA512 hash of the file
+ description: SHA512 hash of the file.
type: string
- contextPath: File.SSDeep
- description: SSDeep of the file
+ description: SSDeep of the file.
type: string
- contextPath: DBotScore.Indicator
description: The indicator that was tested.
@@ -110,34 +110,34 @@ script:
description: The actual score.
type: Number
- contextPath: DBotScore.Type
- description: Type of indicator.
+ description: The indicator type.
type: String
- contextPath: DBotScore.Vendor
- description: Vendor used to calculate the score.
+ description: The vendor used to calculate the score.
type: String
- contextPath: DbotScore.Indicator
description: The indicator value.
type: string
- contextPath: DbotScore.Reliability
- description: The reliability of the source providing the intelligence data
+ description: The reliability of the source providing the intelligence data.
type: string
- contextPath: DbotScore.Score
- description: An integer regarding the status of the indicator
+ description: An integer regarding the status of the indicator.
type: number
- contextPath: DbotScore.Type
- description: The indicator type
+ description: The indicator type.
type: string
- contextPath: DbotScore.Vendor
- description: The vendor used to calculate the score
+ description: The vendor used to calculate the score.
type: string
- arguments: []
description: Return the top 100 of most queried values.
name: circl-top
outputs:
- contextPath: Circl.Top
- description: The top 100 of most queried values
+ description: The top 100 of most queried values.
type: string
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.99865
script: ''
subtype: python3
type: python
diff --git a/Packs/CIRCLHashlookup/ReleaseNotes/1_0_5.md b/Packs/CIRCLHashlookup/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..3bb9979920d1
--- /dev/null
+++ b/Packs/CIRCLHashlookup/ReleaseNotes/1_0_5.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### CIRCLEHashlookup
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+- Documentation and metadata improvements.
diff --git a/Packs/CIRCLHashlookup/pack_metadata.json b/Packs/CIRCLHashlookup/pack_metadata.json
index 847aa3f91269..9d2422303a40 100644
--- a/Packs/CIRCLHashlookup/pack_metadata.json
+++ b/Packs/CIRCLHashlookup/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CIRCL hashlookup (hashlookup.circl.lu)",
"description": "CIRCL hash lookup is a public API to lookup hash values against known database of files. NSRL RDS database is included and many others are also included. The API is accessible via HTTP ReST API and the API is also described as an OpenAPI. The service is free and served as a best-effort basis.",
"support": "community",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Harri Ruuttila",
"url": "",
"email": "",
diff --git a/Packs/CSCDomainManager/.pack-ignore b/Packs/CSCDomainManager/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CSCDomainManager/.secrets-ignore b/Packs/CSCDomainManager/.secrets-ignore
new file mode 100644
index 000000000000..d7ecf763cced
--- /dev/null
+++ b/Packs/CSCDomainManager/.secrets-ignore
@@ -0,0 +1 @@
+https://www.cscdbs.com
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Author_image.png b/Packs/CSCDomainManager/Author_image.png
new file mode 100644
index 000000000000..2e9a5a58e388
Binary files /dev/null and b/Packs/CSCDomainManager/Author_image.png differ
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.py b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.py
new file mode 100644
index 000000000000..927e9d1a8013
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.py
@@ -0,0 +1,610 @@
+import copy
+from functools import partial
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+from CommonServerUserPython import * # noqa
+
+''' CONSTANTS '''
+DEFAULT_PAGE = 1
+DEFAULT_PAGE_SIZE_SEARCH = 50
+DEFAULT_PAGE_SIZE_CONFI = 1
+DEFAULT_LIMIT = 50
+MAX_QUALIFIED_DOMAIN_NAMES = 50
+ACCEPT_VAL = "application/json"
+BEARER_PREFIX = 'Bearer '
+URL_SUFFIX = '/dbs/api/v2'
+HR_HEADERS_FOR_DOMAINS_SEARCH = ['Qualified Domain Name',
+ 'Domain',
+ 'Managed Status',
+ 'Registration Date',
+ 'Registry Expiry Date',
+ 'Paid Through Date',
+ 'Name Servers',
+ 'Dns Type',
+ 'Whois Contact first Name',
+ 'Whois Contact last Name',
+ 'Whois Contact email'
+ ]
+HR_HEADERS_FOR_DOMAIN_CONFI_LIST = ['Domain',
+ 'Domain Label',
+ 'Domain Status Code',
+ 'Domain extension',
+ 'Country',
+ 'Admin Email',
+ 'Admin Name',
+ 'Account Number',
+ 'Account Name'
+ ]
+HR_HEADERS_FOR_DOMAIN = ['Qualified Domain Name',
+ 'Domain',
+ 'Idn',
+ 'Generic top-level domains',
+ 'Managed Status',
+ 'Registration Date',
+ 'Registry Expiry Date',
+ 'Paid Through Date',
+ 'Country Code',
+ 'Server Delete Prohibited',
+ 'Server Transfer Prohibited',
+ 'Server Update Prohibited',
+ 'Name Servers',
+ 'Dns Type',
+ 'Whois Contact first Name',
+ 'Whois Contact last Name',
+ 'Whois Contact email'
+ ]
+HR_HEADERS_FOR_AVAILABILITY = ['Qualified Domain Name',
+ 'Code',
+ 'Message',
+ 'Price',
+ 'Currency',
+ 'List of the terms (months) available for registration'
+ ]
+SEARCH_OPERATORS = ["gt=", "ge=", "lt=", "le=", "in=", "like="]
+SELECTORS_MAPPING = {'domain_name': 'domain',
+ 'registration_date': 'registrationDate',
+ 'registration_org': 'regOrg',
+ 'admin_email': 'adminEmail',
+ 'email': 'email',
+ 'organization': 'organization',
+ 'registry_expiry_date': 'registryExpiryDate',
+ 'filter': 'filter',
+ 'sort': 'sort',
+ 'page': 'page',
+ 'page_size': 'size',
+ }
+
+''' CLIENT CLASS '''
+
+
+class Client(BaseClient):
+ """Client class to interact with the service API
+
+    This Client implements the API calls and does not contain any XSOAR logic:
+    it should only perform requests and return data.
+    It inherits from BaseClient defined in CommonServerPython.
+    Most calls use _http_request(), which handles proxy, SSL verification, etc.
+    For this implementation, no special attributes are defined.
+ """
+
+ def __init__(self, base_url, verify: bool, token: str = "", apikey: str = ""):
+ headers = {
+ 'Authorization': f'{BEARER_PREFIX}{token}',
+ 'apikey': apikey,
+ 'Accept': ACCEPT_VAL
+ }
+ super().__init__(base_url=base_url, verify=verify, headers=headers)
+
+ def send_get_request(self, url_suffix, params) -> Any:
+ try:
+ results = self._http_request(
+ method="GET",
+ url_suffix=url_suffix,
+ params=params,
+ headers=self._headers
+ )
+ except DemistoException as e:
+ if e.res is not None and e.res.status_code == 404:
+ results = CommandResults(
+ readable_output="No results were found",
+ outputs=None,
+ raw_response=None,
+ )
+ else:
+ raise e
+ return results
+
+ def get_qualified_domain_name(self, qualified_domain_name):
+ result = self.send_get_request(f"/domains/{qualified_domain_name}", "")
+ return result
+
+ def get_domains(self, params):
+ result = self.send_get_request("/domains", params)
+ return result
+
+ def get_available_domains(self, params):
+ return self.send_get_request("/availability", params)
+
+ def get_configurations(self, params):
+ return self.send_get_request("/domains/configuration", params)
+
+
+def parse_and_format_date(value: str) -> str:
+ date = dateparser.parse(value)
+ if date is None: # not a date
+ return_error(f'Failed to execute {demisto.command()} command. Invalid Date')
+
+ else:
+ formatted_date = date.strftime("%d-%b-%Y")
+ return formatted_date
+
+
+def create_params_string(args) -> str:
+ """
+    Build the query string (filter and paging parameters) for the HTTP request from the given arguments
+
+    Args:
+        args: demisto.args()
+
+    Returns:
+        A query string built from the given filter and paging arguments
+ """
+ param_for_filter: list[str] = []
+ additional_params: list[str] = []
+
+ for arg_key, param_key in SELECTORS_MAPPING.items():
+ if args.get(arg_key):
+ value = args[arg_key]
+ if arg_key == 'filter':
+ param_for_filter.append(value)
+            elif isinstance(value, str) and any(value.startswith(op) for op in SEARCH_OPERATORS):
+                if arg_key in ['registration_date', 'registry_expiry_date']:
+                    op_len = value.index('=') + 1  # supports both short (gt=, in=) and longer (like=) operators
+                    value = value[:op_len] + parse_and_format_date(value[op_len:])
+                param_for_filter.append(f"{param_key}={value}")
+ elif arg_key in ['sort', 'page', 'page_size']:
+ additional_params.append(f"{param_key}={value}")
+ else:
+ if arg_key in ['registration_date', 'registry_expiry_date']:
+ value = parse_and_format_date(value)
+ param_for_filter.append(f"{param_key}=={value}")
+
+ params_str = 'filter='
+ if param_for_filter:
+ params_str += ','.join(param_for_filter)
+
+ if additional_params:
+ params_str += "&" + "&".join(additional_params)
+
+ return params_str
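+
+# Illustration only (not executed), assuming hypothetical arguments:
+#   create_params_string({"domain_name": "example", "registration_date": "ge=22-Apr-2024", "page": "2"})
+# should return:
+#   "filter=domain==example,registrationDate=ge=22-Apr-2024&page=2"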
+
+
+def get_domains_search_hr_fields(domains_list) -> list:
+ """
+    Create a list of domains with the fields used for the human-readable output, based on the domains_list argument
+
+    Args:
+        domains_list: the domains list returned by the HTTP request
+
+    Returns:
+        A list of domains with the fields used for the human-readable output
+ """
+ hr_formatted_domains = []
+ if not isinstance(domains_list, list):
+ domains_list = [domains_list]
+
+ for domain in domains_list:
+ filtered_domain = {
+ 'Qualified Domain Name': domain.get('qualifiedDomainName'),
+ 'Domain': domain.get('domain'),
+ 'Managed Status': domain.get('managedStatus'),
+ 'Registration Date': domain.get('registrationDate'),
+ 'Registry Expiry Date': domain.get('registryExpiryDate'),
+ 'Paid Through Date': domain.get('paidThroughDate'),
+ 'Name Servers': domain.get('nameServers'),
+ 'Dns Type': domain.get('dnsType'),
+ 'Whois Contact first Name': domain.get('whoisContacts')[0].get('firstName'),
+ 'Whois Contact last Name': domain.get('whoisContacts')[0].get('lastName'),
+ 'Whois Contact email': domain.get('whoisContacts')[0].get('email')
+ }
+
+ hr_formatted_domains.append(filtered_domain)
+
+ return hr_formatted_domains
+
+
+def get_domains_configurations_hr_fields(configurations) -> list:
+ """
+    Create a list of domain configurations with the fields used for the human-readable output, based on the configurations argument
+
+    Args:
+        configurations: the domain configurations list returned by the HTTP request
+
+    Returns:
+        A list of domain configurations with the fields used for the human-readable output
+ """
+ hr_formatted_configurations = []
+
+ for config in configurations:
+ filtered = {
+ 'Domain': config.get('domain'),
+ 'Domain Label': config.get('domainLabel'),
+ 'Domain Status Code': config.get('domainStatusCode'),
+ 'Domain extension': config.get('extension'),
+ 'Country': config.get('country'),
+ 'Admin Email': config.get('adminEmail'),
+            'Admin Name': config.get('adminName'),
+ 'Account Number': config.get('account').get('accountNumber'),
+ 'Account Name': config.get('account').get('accountName')
+ }
+
+ hr_formatted_configurations.append(filtered)
+
+ return hr_formatted_configurations
+
+
+def get_domains_availability_check_hr_fields(available_domains) -> list:
+ """
+    Create a list of available domains with the fields used for the human-readable output, based on the available_domains argument
+
+    Args:
+        available_domains: the available domains list returned by the HTTP request
+
+    Returns:
+        A list of available domains with the fields used for the human-readable output
+ """
+ hr_formatted_available_domains = []
+
+ for domain in available_domains:
+ filtered = {
+ 'Qualified Domain Name': domain.get('qualifiedDomainName'),
+ 'Code': domain.get('result').get('code'),
+ 'Message': domain.get('result').get('message'),
+ 'Price': domain.get('basePrice').get('price'),
+ 'Currency': domain.get('basePrice').get('currency'),
+ 'List of the terms (months) available for registration': domain.get('availableTerms')
+ }
+
+ hr_formatted_available_domains.append(filtered)
+
+ return hr_formatted_available_domains
+
+
+def get_domain_hr_fields(domain) -> dict:
+ """
+    Create a dict of the domain with the fields used for the human-readable output, based on the domain argument
+
+    Args:
+        domain: the domain dict returned by the HTTP request
+
+    Returns:
+        A dict of the domain with the fields used for the human-readable output
+ """
+ hr_formatted_domain = {'Qualified Domain Name': domain.get('qualifiedDomainName'),
+ 'Domain': domain.get('domain'),
+ 'Idn': domain.get('idn'),
+ 'Generic top-level domains': domain.get('newGtld'),
+ 'Managed Status': domain.get('managedStatus'),
+ 'Registration Date': domain.get('registrationDate'),
+ 'Registry Expiry Date': domain.get('registryExpiryDate'),
+ 'Paid Through Date': domain.get('paidThroughDate'),
+ 'Country Code': domain.get('countryCode'),
+                           'Server Delete Prohibited': domain.get('serverDeleteProhibited'),
+                           'Server Transfer Prohibited': domain.get('serverTransferProhibited'),
+                           'Server Update Prohibited': domain.get('serverUpdateProhibited'),
+ 'Name Servers': domain.get('nameServers'),
+ 'Dns Type': domain.get('dnsType'),
+ 'Whois Contact first Name': domain.get('whoisContacts')[0].get('firstName'),
+ 'Whois Contact last Name': domain.get('whoisContacts')[0].get('lastName'),
+ 'Whois Contact email': domain.get('whoisContacts')[0].get('email')
+ }
+
+    return hr_formatted_domain
+
+
+def get_whois_contacts_fields_for_domain(whois_contact, field_names: List[str], contact_type_condition: str) -> list:
+ """
+    Build, for the domain command, a list of joined field values for every contact whose contactType matches the given type
+
+    Args:
+        whois_contact: list of WHOIS contacts
+        field_names: the fields to join (space-separated) for each matching contact
+        contact_type_condition: the contactType to filter by (e.g. REGISTRANT, ADMINISTRATIVE, TECHNICAL)
+
+    Returns:
+        A list with one joined string per contact of the requested contact type
+ """
+ results = []
+
+ for contact in whois_contact:
+ if contact.get('contactType') == contact_type_condition:
+ combined_fields = ' '.join(contact[field] for field in field_names)
+ results.append(combined_fields)
+
+ return results
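+
+# Illustration only (not executed), assuming a hypothetical contact list:
+#   contacts = [{"contactType": "REGISTRANT", "firstName": "Domain", "lastName": "Administrator"}]
+#   get_whois_contacts_fields_for_domain(contacts, ["firstName", "lastName"], "REGISTRANT")
+# should return ["Domain Administrator"].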
+
+
+def create_common_domain(domain_json, dbot_score):
+ """
+ Create Common.Domain for domain command
+
+ Args:
+        domain_json: the domain JSON object returned by the HTTP request
+ dbot_score: Common.DBotScore object
+
+ Returns:
+ A Common.Domain object
+ """
+ whois_contacts = domain_json.get('whoisContacts')
+ get_contact_fields = partial(get_whois_contacts_fields_for_domain, whois_contacts)
+ domain_context = Common.Domain(
+ domain=domain_json.get('domain'),
+ creation_date=domain_json.get('registrationDate'),
+ domain_idn_name=domain_json.get('idn'),
+ expiration_date=domain_json.get('registryExpiryDate'),
+ name_servers=domain_json.get('nameServers'),
+ registrant_name=get_contact_fields(['firstName', 'lastName'], 'REGISTRANT'),
+ registrant_email=get_contact_fields(['email'], 'REGISTRANT'),
+ registrant_phone=get_contact_fields(['phone'], 'REGISTRANT'),
+ registrant_country=get_contact_fields(['country'], 'REGISTRANT'),
+ admin_name=get_contact_fields(['firstName', 'lastName'], 'ADMINISTRATIVE'),
+ admin_email=get_contact_fields(['email'], 'ADMINISTRATIVE'),
+ admin_phone=get_contact_fields(['phone'], 'ADMINISTRATIVE'),
+ admin_country=get_contact_fields(['country'], 'ADMINISTRATIVE'),
+ tech_country=get_contact_fields(['country'], 'TECHNICAL'),
+ tech_name=get_contact_fields(['firstName', 'lastName'], 'TECHNICAL'),
+ tech_organization=get_contact_fields(['organization'], 'TECHNICAL'),
+ tech_email=get_contact_fields(['email'], 'TECHNICAL'),
+ dbot_score=dbot_score
+ )
+
+ return domain_context
+
+
+def create_common_dbot_score(domain_name, reliability):
+ dbot_score = Common.DBotScore(
+ indicator=domain_name,
+ indicator_type=DBotScoreType.DOMAIN,
+ integration_name="CSCDomainManager",
+ score=Common.DBotScore.NONE,
+ reliability=reliability
+ )
+ return dbot_score
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def test_module(client: Client) -> str:
+ """
+    Returning 'ok' indicates that the integration works as expected and that the connection to the service is successful.
+
+ Args:
+ client: CSCDomainManager client
+
+ Returns:
+ 'ok' if test passed, anything else will fail the test
+ """
+ message: str = ''
+ try:
+ client.send_get_request("/domains", "")
+ message = 'ok'
+ except DemistoException as e:
+ if 'Forbidden' in str(e) or 'Authorization' in str(e):
+ message = 'Authorization Error: make sure API Key and Token are correctly set'
+ else:
+ raise e
+ return message
+
+
+def csc_domains_search_command(client: Client, args) -> CommandResults:
+ """
+    Returns a list of domains matching the applied filters
+
+    Args:
+        client: CSCDomainManager client
+        args: demisto.args()
+
+    Returns:
+        A list of domains matching the applied filters
+ """
+
+ domains_results = {}
+ qualified_domain_name = args.get('domain_name')
+ if qualified_domain_name and '.' in qualified_domain_name:
+ domains_list = client.get_qualified_domain_name(qualified_domain_name)
+ if isinstance(domains_list, CommandResults):
+ return domains_list
+
+ else:
+ args_copy = copy.deepcopy(args)
+ if args_copy.get('limit'):
+ args_copy['page'] = '1'
+ args_copy['page_size'] = args_copy.get('limit')
+
+ params_results = create_params_string(args_copy)
+ domains_results = client.get_domains(params_results)
+ if isinstance(domains_results, CommandResults):
+ return domains_results
+ domains_list = domains_results.get('domains', [])
+
+ domains_with_required_fields = get_domains_search_hr_fields(domains_list)
+
+ results = CommandResults(
+ readable_output=tableToMarkdown('Filtered Domains', domains_with_required_fields,
+ headers=HR_HEADERS_FOR_DOMAINS_SEARCH,
+ removeNull=True),
+ outputs_prefix='CSCDomainManager.Domain',
+ outputs_key_field='QualifiedDomainName',
+ outputs=domains_list
+ )
+ return results
+
+
+def csc_domains_availability_check_command(client: Client, args) -> CommandResults:
+ """
+    Returns a list of available domains matching the applied filters
+
+    Args:
+        client: CSCDomainManager client
+        args: demisto.args()
+
+    Returns:
+        A list of available domains matching the applied filters
+ """
+ domain_names = args.get('domain_name')
+ params = f'qualifiedDomainNames={domain_names}'
+    available_domains_response = client.get_available_domains(params)
+    if isinstance(available_domains_response, CommandResults):  # e.g. 404 - no results were found
+        return available_domains_response
+    available_domains_results = available_domains_response.get('results')
+
+ hr_output = get_domains_availability_check_hr_fields(available_domains_results)
+
+ results = CommandResults(
+ readable_output=tableToMarkdown('Domains Availability', hr_output,
+ headers=HR_HEADERS_FOR_AVAILABILITY,
+ removeNull=True),
+ outputs_prefix='CSCDomainManager.Domain.Availability',
+ outputs=available_domains_results
+ )
+ return results
+
+
+def csc_domains_configuration_search_command(client: Client, args) -> CommandResults:
+ """
+    Returns a list of domain configurations matching the applied filters
+
+    Args:
+        client: CSCDomainManager client
+        args: demisto.args()
+
+    Returns:
+        A list of domain configurations matching the applied filters
+ """
+ args_copy = copy.deepcopy(args)
+    args_copy['page'] = args_copy.get('page') or DEFAULT_PAGE
+ args_copy['page_size'] = args_copy.get('page_size') or DEFAULT_PAGE_SIZE_SEARCH
+
+ if args_copy.get('limit'):
+ args_copy['page'] = '1'
+ args_copy['page_size'] = args_copy.get('limit')
+
+ params_results = create_params_string(args_copy)
+ configurations_results = client.get_configurations(params_results)
+ if isinstance(configurations_results, CommandResults):
+ return configurations_results
+
+ configurations_list = configurations_results.get('configurations', [])
+ configurations_with_required_fields = get_domains_configurations_hr_fields(configurations_list)
+
+ results = CommandResults(
+ readable_output=tableToMarkdown('Filtered Configurations',
+ configurations_with_required_fields,
+ headers=HR_HEADERS_FOR_DOMAIN_CONFI_LIST,
+ removeNull=True),
+ outputs_prefix='CSCDomainManager.Domain.Configuration',
+ outputs_key_field='CSCDomainManager.Domain.Configuration.Domain',
+ outputs=configurations_list
+ )
+ return results
+
+
+def domain(client: Client, args, reliability):
+ """
+    Gets data for one or more domains (the domain reputation command)
+
+    Args:
+        client: CSCDomainManager client
+        args: demisto.args()
+        reliability: The source reliability. Defaults to A.
+
+    Returns:
+        A list of CommandResults, one per domain
+ """
+ domains_name = args.get('domain').split(",")
+ final_data = []
+
+ for name in domains_name:
+ domain_json = client.get_qualified_domain_name(name)
+ if isinstance(domain_json, CommandResults): # domain not found, continue to next name
+ continue
+
+ hr_data = get_domain_hr_fields(domain_json)
+
+ dbot_score = create_common_dbot_score(name, reliability)
+ domain_context = create_common_domain(domain_json, dbot_score)
+ results = CommandResults(
+ readable_output=tableToMarkdown('Domain', hr_data, headers=HR_HEADERS_FOR_DOMAIN),
+ outputs_prefix='CSCDomainManager.Domain',
+ indicator=domain_context,
+ outputs=domain_json
+ )
+ final_data.append(results)
+
+ if final_data == []: # if no domains were found
+ final_data.append(CommandResults(
+ readable_output="No results were found",
+ outputs=None,
+ raw_response=None,
+ ))
+
+ return final_data
+
+
+def main():
+ """main function, parses params and runs command functions
+
+ :return:
+ :rtype:
+ """
+
+ params = demisto.params()
+ args = demisto.args()
+
+ demisto.debug(f'Command being called is {demisto.command()}')
+
+ try:
+ base_url = f'{params.get("base_url")}{URL_SUFFIX}'
+ verify = not params.get('insecure', False)
+ token = params.get('token', {}).get('password')
+ api_key = params.get('credentials', {}).get('password')
+
+ reliability = params.get('integrationReliability')
+ reliability = reliability if reliability else DBotScoreReliability.A
+
+ if DBotScoreReliability.is_valid_type(reliability):
+ reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
+ else:
+ raise Exception("Please provide a valid value for the Source Reliability parameter.")
+
+ client = Client(
+ base_url=base_url,
+ verify=verify,
+ token=token,
+ apikey=api_key
+ )
+
+ if demisto.command() == 'test-module':
+ return_results(test_module(client))
+
+ elif demisto.command() == 'csc-domains-search':
+ return_results(csc_domains_search_command(client, args))
+
+ elif demisto.command() == 'csc-domains-availability-check':
+ return_results(csc_domains_availability_check_command(client, args))
+
+ elif demisto.command() == 'csc-domains-configuration-search':
+ return_results(csc_domains_configuration_search_command(client, args))
+
+ elif demisto.command() == 'domain':
+ return_results(domain(client, args, reliability))
+
+ else:
+ raise NotImplementedError(f'Command {demisto.command()} is not implemented')
+
+ except Exception as e:
+ return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.yml b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.yml
new file mode 100644
index 000000000000..1886436ea93a
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager.yml
@@ -0,0 +1,673 @@
+category: IT Services
+commonfields:
+ id: CSCDomainManager
+ version: -1
+configuration:
+- defaultvalue: https://example.com/
+ display: Base URL
+ name: base_url
+ required: true
+ additionalinfo: The endpoint URL.
+ type: 0
+- displaypassword: Token
+ additionalinfo: The token to use for connection.
+ name: token
+ required: true
+ hiddenusername: true
+ type: 9
+- displaypassword: API Key
+ additionalinfo: The API Key to use for connection.
+ name: credentials
+ required: true
+ hiddenusername: true
+ type: 9
+- additionalinfo: Reliability of the source providing the intelligence data.
+ defaultvalue: A - Completely reliable
+ display: Source Reliability
+ name: integrationReliability
+ options:
+ - A+ - 3rd party enrichment
+ - A - Completely reliable
+ - B - Usually reliable
+ - C - Fairly reliable
+ - D - Not usually reliable
+ - E - Unreliable
+ - F - Reliability cannot be judged
+ required: false
+ type: 15
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+description: CSCDomainManager is an integration that supports querying and enriching domains through the CSCDomainManager API.
+display: CSCDomainManager
+name: CSCDomainManager
+script:
+ commands:
+ - arguments:
+ - description: Domain name to filter by. Can start with like=, in=.
+ name: domain_name
+ required: false
+ - description: 'Registration date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024, 22/4/24, 22-4-24.'
+ name: registration_date
+ required: false
+ - description: Email to filter by. Can start with like=, in=.
+ name: email
+ required: false
+ - description: Organization to filter by. Can start with like=, in=.
+ name: organization
+ required: false
+ - description: 'Registry expiry date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024, 22/4/24, 22-4-24.'
+ name: registry_expiry_date
+ required: false
+ - description: 'Create a filter using selectors such as: accountName, accountNumber,
+ brandName, businessUnit, city, country, countryCode, criticalDomain, dnssecActivated,
+ dnsType, domain, email, extension, fax, firstName, idnReferenceName, lastModifiedDate,
+ lastModifiedDescription, lastModifiedReason, lastName, localAgent, managedStatus,
+ nameServers, newGtld, organization, paidThroughDate, phone, phoneExtn, postalCode,
+ qualifiedDomainName, redirectType, registrationDate, registryExpiryDate, serverDeleteProhibited,
+ serverTransferProhibited, serverUpdateProhibited, stateProvince, street1, street2, urlForwarding, whoisPrivacy. For example: filter=lastName==Administrator.'
+ name: filter
+ required: false
+ - description: 'Sort the output by a selector and the direction to sort by (desc/asc). For example: propertyName,asc.'
+ name: sort
+ required: false
+ - description: Page number.
+ name: page
+ required: false
+ - description: The number of rows in a page.
+ name: page_size
+ required: false
+ - description: The maximum number of rows.
+ name: limit
+ required: false
+ description: Gets the domains by the applied filters.
+ name: csc-domains-search
+ outputs:
+ - contextPath: CSCDomainManager.Domain.qualifiedDomainName
+ description: The qualified domain name.
+ type: String
+ - contextPath: CSCDomainManager.Domain.domain
+ description: The domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.managedStatus
+ description: The managed status.
+ type: String
+ - contextPath: CSCDomainManager.Domain.registrationDate
+ description: The registration date.
+ type: String
+ - contextPath: CSCDomainManager.Domain.registryExpiryDate
+ description: The registry expiry date.
+ type: String
+ - contextPath: CSCDomainManager.Domain.paidThroughDate
+ description: Paid through date.
+ type: String
+ - contextPath: CSCDomainManager.Domain.nameServers
+ description: "Server's names."
+ type: String
+ - contextPath: CSCDomainManager.Domain.dnsType
+ description: DNS type.
+ type: String
+ - contextPath: CSCDomainManager.Domain.account.accountName
+ description: The name of the account associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.account.accountNumber
+ description: The account number associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.brandName
+ description: The brand name associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.businessUnit
+ description: The business unit associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.countryCode
+ description: The country code associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.criticalDomain
+ description: Indicates if the domain is critical.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.customFields.name
+ description: The name of the custom field.
+ type: String
+ - contextPath: CSCDomainManager.Domain.customFields.value
+ description: The value of the custom field.
+ type: String
+ - contextPath: CSCDomainManager.Domain.dnssecActivated
+ description: Indicates if DNSSEC is activated for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.extension
+ description: The extension of the domain, such as .com, .net, etc.
+ type: String
+ - contextPath: CSCDomainManager.Domain.idn
+ description: Indicates if the domain is an Internationalized Domain Name (IDN).
+ type: String
+ - contextPath: CSCDomainManager.Domain.idnReferenceName
+ description: The reference name for the IDN.
+ type: String
+ - contextPath: CSCDomainManager.Domain.lastModifiedDate
+ description: The date when the domain was last modified.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.lastModifiedDescription
+ description: A description of the last modification made to the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.lastModifiedReason
+ description: The reason for the last modification made to the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.localAgent
+ description: Indicates if the domain has a local agent.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.newGtld
+ description: Indicates if the domain is a new gTLD (Generic Top-Level Domain).
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.serverDeleteProhibited
+ description: Indicates if the domain is prohibited from deletion by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.serverTransferProhibited
+ description: Indicates if the domain is prohibited from transfer by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.serverUpdateProhibited
+ description: Indicates if the domain is prohibited from updates by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.urlf.redirectType
+ description: The type of redirect used in URL forwarding for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.urlf.urlForwarding
+ description: Indicates if URL forwarding is enabled for the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.whoisContacts.city
+ description: The city of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.contactType
+ description: The type of WHOIS contact (e.g., registrant, admin, tech).
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.country
+ description: The country of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.email
+ description: The email address of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.fax
+ description: The fax number of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.firstName
+ description: The first name of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.lastName
+ description: The last name of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.organization
+ description: The organization of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.phone
+ description: The phone number of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.phoneExtn
+ description: The phone extension of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.postalCode
+ description: The postal code of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.stateProvince
+ description: The state or province of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.street1
+ description: The street address of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.street2
+ description: The secondary street address of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisPrivacy
+ description: Indicates if WHOIS privacy protection is enabled for the domain.
+ type: Boolean
+ - arguments:
+    - description: A comma-separated list of domain names to check.
+ name: domain_name
+ required: true
+ description: Check registration availability for one or more domain names.
+ name: csc-domains-availability-check
+ outputs:
+ - contextPath: CSCDomainManager.Domain.Availability.qualifiedDomainName
+ description: The fully qualified domain name (FQDN) being checked for availability.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.result.code
+ description: The result code indicating the availability status of the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.result.message
+ description: A message providing additional information about the availability status.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.basePrice.price
+ description: The base price for registering the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.basePrice.currency
+ description: The currency of the base price.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.listOfTheTerms
+ description: A list of terms related to the availability of the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Availability.availableTerms
+ description: The terms available for the domain registration.
+ type: Unknown
+ - arguments:
+ - description: Domain name to filter by. Can start with like=, in=.
+ name: domain_name
+ required: false
+ - description: 'Registration date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024, 22/4/24, 22-4-24.'
+ name: registration_date
+ required: false
+ - description: Email to filter by. Can start with like=, in=.
+ name: domain_email
+ required: false
+ - description: 'Create a filter using selectors such as: accountName, accountNumber,
+ brandName, businessUnit, city, country, countryCode, criticalDomain, dnssecActivated,
+ dnsType, domain, email, extension, fax, firstName, idnReferenceName, lastModifiedDate,
+ lastModifiedDescription, lastModifiedReason, lastName, localAgent, managedStatus, nameServers,
+ newGtld, organization, paidThroughDate, phone, phoneExtn, postalCode, qualifiedDomainName,
+ redirectType, registrationDate, registryExpiryDate, serverDeleteProhibited, serverTransferProhibited,
+ serverUpdateProhibited, stateProvince, street1, street2, urlForwarding, whoisPrivacy. For example: filter=lastName==Administrator.'
+ name: filter
+ required: false
+ - description: Page number.
+ name: page
+ required: false
+ - description: The number of rows in a page.
+ name: page_size
+ required: false
+ - description: The maximum number of rows to present.
+ name: limit
+ required: false
+ description: Get configuration information for owned domains with optional filtering.
+ name: csc-domains-configuration-search
+ outputs:
+ - contextPath: CSCDomainManager.Domain.Configuration.domain
+ description: The domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.domainLabel
+ description: The domain label.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.domainStatusCode
+ description: The domain status code.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.domainExtension
+ description: The domain extension.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.country
+ description: Country associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.adminEmail
+ description: Domain email.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.adminName
+ description: Admin name associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.accountNumber
+ description: The account number associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.accountName
+ description: The account name associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.account.accountName
+ description: The name of the account associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.account.accountNumber
+ description: The account number associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.adminOrg
+ description: The administrative organization managing the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.businessUnit
+ description: The business unit associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.dnsData.dnsDomain
+ description: The DNS domain information.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.dnsData.dnsProvider
+ description: The DNS provider for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.dnsHostingType
+ description: The type of DNS hosting used for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.dnsTraffic12moAve
+ description: The average DNS traffic over the last 12 months.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.extension
+ description: The extension of the domain, such as .com, .net, etc.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.hasCscUrlf
+ description: Indicates if the domain has CSC URL forwarding enabled.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.hasDkim
+ description: Indicates if DKIM is configured for the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.hasDmarc
+ description: Indicates if DMARC is configured for the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.hasDnssecDs
+ description: Indicates if the domain has DNSSEC DS records.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.hasSpf
+ description: Indicates if SPF is configured for the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.hasWww
+ description: Indicates if the domain has a WWW record.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.isGtld
+ description: Indicates if the domain is a gTLD (Generic Top-Level Domain).
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.isLive
+ description: Indicates if the domain is live.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.isLiveType
+ description: The type of live status for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.isMultilockEligible
+ description: Indicates if the domain is eligible for multilock.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.isVital
+ description: Indicates if the domain is considered vital.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.multiLocked
+ description: Indicates if the domain is multilocked.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.numLiveMx
+ description: The number of live MX records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.numRootA
+ description: The number of root A records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.numRootTxt
+ description: The number of root TXT records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.numSslNetcraft
+ description: The number of SSL certificates detected by Netcraft for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.numWwwA
+ description: The number of WWW A records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.numWwwCname
+ description: The number of WWW CNAME records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.regEmail
+ description: The registration email address for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.regName
+ description: The registration name for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.regOrg
+ description: The registration organization for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.registryExpiryDate
+ description: The expiration date of the domain registration in the registry.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.Configuration.rootHttpCode
+ description: The HTTP response code for the root domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.rootHttpUrl
+ description: The HTTP URL for the root domain.
+ type: Unknown
+ - contextPath: CSCDomainManager.Domain.Configuration.rootIsUrlf
+ description: Indicates if the root domain is URL forwarding enabled.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.serverDeleteProhibited
+ description: Indicates if the domain is prohibited from deletion by the server.
+ type: Unknown
+ - contextPath: CSCDomainManager.Domain.Configuration.serverTransferProhibited
+ description: Indicates if the domain is prohibited from transfer by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.serverUpdateProhibited
+ description: Indicates if the domain is prohibited from updates by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.Configuration.techEmail
+ description: The technical contact email address for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.techName
+ description: The technical contact name for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.techOrg
+ description: The technical contact organization for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.tld
+ description: The top-level domain (TLD) of the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.urlfTraffic12moAve
+ description: The average URL forwarding traffic over the last 12 months.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.valueRootA
+ description: The value of root A records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.valueRootMx
+ description: The value of root MX records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.valueRootTxt
+ description: The value of root TXT records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.valueWwwA
+ description: The value of WWW A records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.valueWwwCname
+ description: The value of WWW CNAME records for the domain.
+ type: Number
+ - contextPath: CSCDomainManager.Domain.Configuration.wwwHttpCode
+ description: The HTTP response code for the WWW domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.wwwHttpUrl
+ description: The HTTP URL for the WWW domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.Configuration.wwwIsUrlf
+ description: Indicates if the WWW domain is URL forwarding enabled.
+ type: Boolean
+ - arguments:
+ - description: A comma-separated list of domains.
+ name: domain
+ isArray: true
+ required: true
+ default: true
+    description: Gets domain information for the specified domains.
+ name: domain
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: Domain.Name
+ description: 'The domain name, for example: "google.com".'
+ type: String
+ - contextPath: Domain.CreationDate
+ description: Creation date.
+ type: String
+ - contextPath: Domain.DomainIDNName
+ description: Domain ID name.
+ type: String
+ - contextPath: Domain.ExpirationDate
+ description: Expiration date.
+ type: String
+  - contextPath: Domain.UpdatedDate
+ description: Updated date.
+ type: String
+ - contextPath: Domain.NameServers
+ description: "Server's name."
+ type: String
+ - contextPath: Domain.Registrant.Name
+ description: Registrant name.
+ type: String
+ - contextPath: Domain.Registrant.Email
+ description: Registrant email.
+ type: String
+ - contextPath: Domain.Registrant.Phone
+ description: Registrant phone.
+ type: String
+ - contextPath: Domain.Registrant.Country
+ description: Registrant country.
+ type: String
+ - contextPath: Domain.Admin.Name
+ description: Admin name.
+ type: String
+ - contextPath: Domain.Admin.Email
+ description: Admin email.
+ type: String
+ - contextPath: Domain.Admin.Phone
+ description: Admin phone.
+ type: String
+ - contextPath: Domain.Admin.Country
+ description: Admin country.
+ type: String
+ - contextPath: Domain.Tech.Country
+ description: Tech country.
+ type: String
+ - contextPath: Domain.Tech.Name
+ description: Tech name.
+ type: String
+ - contextPath: Domain.Tech.Organization
+ description: Tech organization.
+ type: String
+ - contextPath: Domain.Tech.Email
+ description: Tech email.
+ type: String
+ - contextPath: CSCDomainManager.Domain.account.accountName
+ description: Domain account name.
+ type: String
+ - contextPath: CSCDomainManager.Domain.account.accountNumber
+ description: Domain account number.
+ type: String
+ - contextPath: CSCDomainManager.Domain.brandName
+ description: Domain brand name.
+ type: String
+ - contextPath: CSCDomainManager.Domain.businessUnit
+ description: Domain business unit.
+ type: String
+ - contextPath: CSCDomainManager.Domain.countryCode
+ description: Domain country code.
+ type: String
+ - contextPath: CSCDomainManager.Domain.criticalDomain
+ description: Domain critical domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.customFields.name
+ description: Domain custom fields name.
+ type: String
+ - contextPath: CSCDomainManager.Domain.customFields.value
+ description: The value of custom fields associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.dnsType
+ description: The type of DNS used by the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.dnssecActivated
+ description: Indicates whether DNSSEC is activated for the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.domain
+ description: The domain name.
+ type: String
+ - contextPath: CSCDomainManager.Domain.extension
+ description: The extension of the domain, such as .com, .net, etc.
+ type: String
+ - contextPath: CSCDomainManager.Domain.idn
+ description: Indicates if the domain is an Internationalized Domain Name (IDN).
+ type: String
+ - contextPath: CSCDomainManager.Domain.idnReferenceName
+ description: The reference name for the Internationalized Domain Name (IDN).
+ type: String
+ - contextPath: CSCDomainManager.Domain.lastModifiedDate
+ description: The date when the domain was last modified.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.lastModifiedDescription
+ description: A description of the last modification made to the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.lastModifiedReason
+ description: The reason for the last modification of the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.localAgent
+ description: Indicates if a local agent is associated with the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.managedStatus
+ description: The managed status of the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.nameServers
+ description: The name of the servers associated with the domain.
+ type: String
+ - contextPath: CSCDomainManager.Domain.newGtld
+ description: Indicates if the domain is a new gTLD (Generic Top-Level Domain).
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.paidThroughDate
+ description: The date through which the domain has been paid.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.qualifiedDomainName
+ description: The fully qualified domain name (FQDN).
+ type: String
+ - contextPath: CSCDomainManager.Domain.registrationDate
+ description: The date when the domain was registered.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.registryExpiryDate
+ description: The expiration date of the domain registration in the registry.
+ type: Date
+ - contextPath: CSCDomainManager.Domain.serverDeleteProhibited
+ description: Indicates if the domain is prohibited from deletion by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.serverTransferProhibited
+ description: Indicates if the domain is prohibited from transfer by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.serverUpdateProhibited
+ description: Indicates if the domain is prohibited from updates by the server.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.urlf.redirectType
+ description: The type of URL forwarding redirect.
+ type: String
+ - contextPath: CSCDomainManager.Domain.urlf.urlForwarding
+ description: Indicates if URL forwarding is enabled for the domain.
+ type: Boolean
+ - contextPath: CSCDomainManager.Domain.whoisContacts.city
+ description: The city of the WHOIS contact.
+ type: String
+ - contextPath: CSCDomainManager.Domain.whoisContacts.contactType
+ description: The type of WHOIS contact (e.g., Registrant, Admin, Tech).
+ type: String
+ - contextPath: Domain.WHOIS.Admin.Country
+ description: The country of the admin contact in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Admin.Email
+ description: The email of the admin contact in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Admin.Name
+ description: The name of the admin contact in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Admin.Phone
+ description: The phone number of the admin contact in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.CreationDate
+ description: The creation date of the domain in the WHOIS record.
+ type: Date
+ - contextPath: Domain.WHOIS.ExpirationDate
+ description: The expiration date of the domain in the WHOIS record.
+ type: Date
+ - contextPath: Domain.WHOIS.NameServers
+ description: The name of the servers listed in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Country
+ description: The country of the registrant in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Email
+ description: The email of the registrant in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Name
+ description: The name of the registrant in the WHOIS record.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Phone
+ description: The phone number of the registrant in the WHOIS record.
+ type: String
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.98471
+fromversion: 5.0.0
+tests:
+- CSCDomainManager_Test
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_description.md b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_description.md
new file mode 100644
index 000000000000..7d9879d1eeda
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_description.md
@@ -0,0 +1,22 @@
+1. Name: Give a name to the integration instance.
+2. Base URL: Enter the endpoint URL.
+3. Token: Enter the token.
+4. API Key: Enter the API key.
+
+## Access and Security
+You request access through the CSC service team. The service team will gather the details for the service account that will be used to access the API, and the API administrator(s) (one or more authorized client users) who will manage the credentials through the CSCDomainManagerSM web portal.
+
+Please see the attached API guide for reference.
+
+CSC generates the API key and creates the service account, with requested permissions, that will be used to access the API.
+
+The client API administrator then logs into the CSCDomainManagerSM at https://weblogin.cscglobal.com to retrieve the key and generate the bearer token for the API service account.
+
+### Token Refresh
+The token will expire after 30 consecutive days of no activity. You can reactivate it by using the [token refresh endpoint](https://www.cscglobal.com/cscglobal/docs/dbs/domainmanager/api-v2/#/token/put_token_refresh).
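+
+A minimal sketch of a refresh call with Python `requests`, assuming the refresh endpoint lives under the same `/dbs/api/v2` base path and accepts the same `apikey` and `Bearer` headers as the rest of the API (the base URL below is a placeholder; verify the exact contract against the linked documentation):
+
+```python
+import requests
+
+BASE_URL = "https://<your-csc-host>/dbs/api/v2"  # placeholder, use your configured Base URL
+headers = {
+    "apikey": "<api-key>",
+    "Authorization": "Bearer <current-token>",
+    "Accept": "application/json",
+}
+# PUT /token/refresh is taken from the API reference linked above.
+response = requests.put(f"{BASE_URL}/token/refresh", headers=headers, timeout=30)
+response.raise_for_status()
+```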
+
+### For more information
+- [Visit the CSC website](https://www.cscdbs.com/)
+- [See the API documentation](https://www.cscglobal.com/cscglobal/docs/dbs/domainmanager/api-v2/#/)
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_image.png b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_image.png
new file mode 100644
index 000000000000..2e9a5a58e388
Binary files /dev/null and b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_image.png differ
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_test.py b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_test.py
new file mode 100644
index 000000000000..dc670b595c00
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/CSCDomainManager_test.py
@@ -0,0 +1,193 @@
+import json
+from CSCDomainManager import Client
+from CSCDomainManager import csc_domains_search_command
+from CSCDomainManager import csc_domains_availability_check_command
+from CSCDomainManager import csc_domains_configuration_search_command
+from CSCDomainManager import domain
+from CSCDomainManager import create_params_string
+from CSCDomainManager import get_domains_search_hr_fields
+from CSCDomainManager import get_domains_configurations_hr_fields
+from CSCDomainManager import get_domains_availability_check_hr_fields
+from CSCDomainManager import get_domain_hr_fields
+from CSCDomainManager import get_whois_contacts_fields_for_domain
+from CommonServerPython import DBotScoreReliability
+
+
+EXAMPLE_BASE_URL = 'https://test.com/api'
+VERIFY = True
+ACCEPT_VAL = "example"
+
+
+def util_load_json(path):
+ with open(path, encoding='utf-8') as f:
+ return json.loads(f.read())
+
+
+GET_REQUEST_EXAMPLE1 = util_load_json('./test_data/get_domain.json')
+GET_REQUEST_QUALIFIED_DOMAIN_NAME = util_load_json('./test_data/get_domain_qualified_domain_name.json')
+GET_DOMAINS_AVAILABILITY_CHECK = util_load_json('./test_data/get_domain_availability_check.json')
+GET_DOMAINS_CONFI_LIST = util_load_json('./test_data/get_domains_configuration_list.json')
+DOMAIN_DOMAIN = util_load_json('./test_data/domain_domain.json')
+DOMAINS_LIST = util_load_json('./test_data/domains_list_for_get_domains_search_hr_fields.json')
+CONFIGURATIONS_LIST = util_load_json('./test_data/configurations_list_for_get_domains_search_hr_fields.json')
+AVAILABLE_DOMAINS_LIST = util_load_json('./test_data/available_domains_list_for_get_domains_availability_check_hr_fields.json')
+WHOIS_CONTACTS = util_load_json('./test_data/whois_contacts.json')
+
+
+def create_mock_client():
+ return Client(
+ base_url=EXAMPLE_BASE_URL,
+ verify=VERIFY,
+ apikey='test',
+ token='test'
+ )
+
+
+def test_create_params_string():
+ args = {
+ 'domain_name': 'csc-panw',
+ 'registry_expiry_date': '22-Apr-2025'
+ }
+ params_str = create_params_string(args)
+ assert params_str == 'filter=domain==csc-panw,registryExpiryDate==22-Apr-2025'
+
+ args = {
+ 'domain_name': 'csc-panw',
+ 'registry_expiry_date': '22/04/2025'
+ }
+
+ params_str = create_params_string(args)
+ assert params_str == 'filter=domain==csc-panw,registryExpiryDate==22-Apr-2025'
+
+ args = {
+ 'domain_name': 'csc-panw',
+ 'registry_expiry_date': '22-Apr-2025',
+ 'page': '2'
+ }
+ params_str = create_params_string(args)
+ assert params_str == 'filter=domain==csc-panw,registryExpiryDate==22-Apr-2025&page=2'
+
+ args = {
+ 'admin_email': 'example@panw.com',
+ 'email': 'example@panwcom',
+ 'organization': 'panw'
+ }
+ params_str = create_params_string(args)
+ assert params_str == 'filter=adminEmail==example@panw.com,email==example@panwcom,organization==panw'
+
+
+def test_get_domains_search_hr_fields():
+ results = get_domains_search_hr_fields(DOMAINS_LIST)
+ assert len(results) == 1
+ assert len(results[0]) == 11
+ assert results[0].get('Dns Type') == "CSC_BASIC"
+
+
+def test_get_domains_configurations_hr_fields():
+ results = get_domains_configurations_hr_fields(CONFIGURATIONS_LIST)
+ assert len(results) == 1
+ assert len(results[0]) == 9
+ assert results[0].get('Domain extension') == 'biz'
+
+
+def test_get_domains_availability_check_hr_fields():
+ results = get_domains_availability_check_hr_fields(AVAILABLE_DOMAINS_LIST)
+ assert len(results) == 1
+ assert len(results[0]) == 6
+ assert results[0].get('Message') == 'Domain already in portfolio'
+
+
+def test_get_domain_hr_fields():
+ results = get_domain_hr_fields(DOMAIN_DOMAIN)
+ assert len(results) == 17
+ assert results.get('Domain') == 'example'
+ assert results.get('Generic top-level domains') is False
+
+
+def test_get_whois_contacts_fields_for_domain():
+ results = get_whois_contacts_fields_for_domain(WHOIS_CONTACTS, ['firstName', 'lastName'], 'REGISTRANT')
+ assert results == ['Domain Administrator']
+
+
+def test_csc_domains_search(mocker):
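+    """
+    Given:
+        - Arguments with a domain name and a registry expiry date, and a mocked API response.
+    When:
+        - Running csc_domains_search_command.
+    Then:
+        - The returned context contains the expected domains.
+    """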
+ client = create_mock_client()
+ args = {
+ 'domain_name': 'csc-panw',
+ 'registry_expiry_date': '22-Apr-2025 UTC'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=GET_REQUEST_EXAMPLE1)
+ result = csc_domains_search_command(client, args)
+ result_output = result.to_context().get('Contents')
+ assert len(result_output) == 2
+ assert result_output[1].get('qualifiedDomainName') == 'csc-panw.com'
+ assert result_output[1].get('registrationDate') == '22-Apr-2024 UTC'
+ assert result_output[1].get('extension') == 'com'
+
+
+def test_csc_domains_search_with_operator(mocker):
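+    """
+    Given:
+        - A registration_date argument that uses the ge= operator, and a mocked API response.
+    When:
+        - Running csc_domains_search_command.
+    Then:
+        - The returned context contains the expected domains.
+    """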
+ client = create_mock_client()
+ args = {
+ 'registration_date': 'ge=22-Apr-2024'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=GET_REQUEST_EXAMPLE1)
+ result = csc_domains_search_command(client, args)
+ result_output = result.to_context().get('Contents')
+ assert len(result_output) == 2
+ assert result_output[1].get('qualifiedDomainName') == 'csc-panw.com'
+ assert result_output[1].get('registrationDate') == '22-Apr-2024 UTC'
+ assert result_output[1].get('extension') == 'com'
+
+
+def test_csc_domains_search_with_qualified_domain_name(mocker):
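+    """
+    Given:
+        - A fully qualified domain name argument and a mocked API response.
+    When:
+        - Running csc_domains_search_command.
+    Then:
+        - The returned context contains the single matching domain.
+    """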
+ client = create_mock_client()
+ args = {
+ 'domain_name': 'csc-panw.com'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=GET_REQUEST_QUALIFIED_DOMAIN_NAME)
+ result = csc_domains_search_command(client, args)
+ result_output = result.to_context().get('Contents')
+ assert result_output.get('qualifiedDomainName') == 'csc-panw.com'
+ assert result_output.get('registrationDate') == '22-Apr-2024 UTC'
+ assert result_output.get('extension') == 'com'
+
+
+def test_csc_domains_availability_check(mocker):
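+    """
+    Given:
+        - A comma-separated list of domain names and a mocked API response.
+    When:
+        - Running csc_domains_availability_check_command.
+    Then:
+        - The returned context contains an availability result per domain.
+    """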
+ client = create_mock_client()
+ args = {
+ 'domain_name': 'cscpanw.org,csc-panw.info'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=GET_DOMAINS_AVAILABILITY_CHECK)
+ result = csc_domains_availability_check_command(client, args)
+ result_output = result.to_context().get('Contents')
+ assert len(result_output) == 2
+ assert result_output[1].get('qualifiedDomainName') == 'csc-panw.info'
+ assert result_output[1].get('result').get('message') == 'Domain already in portfolio'
+ assert result_output[1].get('result').get('code') == 'DOMAIN_IN_PORTFOLIO'
+
+
+def test_csc_domains_configuration_list(mocker):
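+    """
+    Given:
+        - A domain name argument and a mocked API response.
+    When:
+        - Running csc_domains_configuration_search_command.
+    Then:
+        - The returned context contains the expected configuration.
+    """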
+ client = create_mock_client()
+ args = {
+ 'domain_name': 'csc-panw.biz'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=GET_DOMAINS_CONFI_LIST)
+ result = csc_domains_configuration_search_command(client, args)
+ result_output = result.to_context().get('Contents')
+ assert len(result_output) == 1
+ assert result_output[0].get('domain') == 'csc-panw.biz'
+ assert result_output[0].get('domainLabel') == 'csc-panw'
+
+
+def test_domain(mocker):
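+    """
+    Given:
+        - A domain argument, a source reliability, and a mocked API response.
+    When:
+        - Running the domain reputation command.
+    Then:
+        - The returned context contains the expected domain details.
+    """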
+ client = create_mock_client()
+ args = {
+ 'domain': 'example.com'
+ }
+ mocker.patch.object(client, 'send_get_request', return_value=DOMAIN_DOMAIN)
+ reliability = DBotScoreReliability.A
+ result = domain(client, args, reliability)
+ result_output = result[0].to_context()
+ result_output = result_output.get('Contents')
+ assert result_output.get('qualifiedDomainName') == 'example.com'
+ assert result_output.get('domain') == 'example'
+ assert result_output.get('registrationDate') == '09-Dec-2011 UTC'
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/README.md b/Packs/CSCDomainManager/Integrations/CSCDomainManager/README.md
new file mode 100644
index 000000000000..5af9ae542040
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/README.md
@@ -0,0 +1,765 @@
+CSCDomainManager uses rules-based technology, customizable reporting, granular user management, and more. It is the world's first multilingual domain management tool, available in English, French, and German.
+This integration was integrated and tested with version 2.0.0 of CSCDomainManager.
+
+## Configure CSCDomainManager on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for CSCDomainManager.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Base URL | The endpoint URL | True |
+ | Token | The token to use for connection | True |
+ | API Key | The API Key to use for connection | True |
+ | Source Reliability | Reliability of the source providing the intelligence data. | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Access and Security
+
+Customers request access through their CSC service team. The service team gathers the details for the service account that will be used to access the API, and for the API administrator(s) (one or more authorized client users) who will manage the credentials through the CSCDomainManager web portal.
+
+Refer to the CSC API guide for more information.
+
+CSC generates the API key and creates the service account, with the requested permissions, that will be used to access the API.
+
+The client API administrator then logs in to CSCDomainManager at https://weblogin.cscglobal.com to retrieve the key and generate the bearer token for the API service account.
+
+### Token Refresh
+The token expires after 30 consecutive days of inactivity. You can reactivate it by using the [token refresh endpoint](https://www.cscglobal.com/cscglobal/docs/dbs/domainmanager/api-v2/#/token/put_token_refresh).
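+
+A minimal sketch of refreshing the token from a script is shown below, assuming the refresh endpoint is `PUT {base URL}/token/refresh` and that the API expects `apikey` and `Authorization: Bearer <token>` headers; verify the exact path and header names against the linked API documentation:
+
+```python
+import requests
+
+BASE_URL = "https://apis.cscglobal.com/dbs/api/v2"  # illustrative; use your environment's base URL
+
+
+def refresh_token(api_key, bearer_token):
+    """Call the token refresh endpoint to keep the bearer token active."""
+    response = requests.put(
+        "{}/token/refresh".format(BASE_URL),
+        headers={
+            "apikey": api_key,
+            "Authorization": "Bearer {}".format(bearer_token),
+            "Accept": "application/json",
+        },
+        timeout=60,
+    )
+    response.raise_for_status()
+    return response.json()
+```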
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### csc-domains-search
+
+***
+Gets the domains by the applied filters.
+
+#### Base Command
+
+`csc-domains-search`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain_name | Domain name to filter by. Can start with like=, in=. | Optional |
+| registration_date | Registration date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024. | Optional |
+| email | Email to filter by. Can start with like=, in=. | Optional |
+| organization | Organization to filter by. Can start with like=, in=. | Optional |
+| registry_expiry_date | Registry expiry date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024. | Optional |
+| filter | A custom filter expression built from selectors such as accountName, accountNumber, brandName, businessUnit, city, country, countryCode, criticalDomain, dnssecActivated, dnsType, domain, email, extension, fax, firstName, idnReferenceName, lastModifiedDate, lastModifiedDescription, lastModifiedReason, lastName, localAgent, managedStatus, nameServers, newGtld, organization, paidThroughDate, phone, phoneExtn, postalCode, qualifiedDomainName, redirectType, registrationDate, registryExpiryDate, serverDeleteProhibited, serverTransferProhibited, serverUpdateProhibited, stateProvince, street1, street2, urlForwarding, whoisPrivacy. | Optional |
+| sort | Sort the output by a selector and asc/desc, for example: propertyName,asc. | Optional |
+| page | Page number. | Optional |
+| page_size | The number of rows per page. | Optional |
+| limit | The maximum number of rows to return. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CSCDomainManager.Domain.qualifiedDomainName | String | The qualified domain name. |
+| CSCDomainManager.Domain.domain | String | The domain. |
+| CSCDomainManager.Domain.managedStatus | String | The managed status. |
+| CSCDomainManager.Domain.registrationDate | String | The registration date. |
+| CSCDomainManager.Domain.registryExpiryDate | String | The registry expiry date. |
+| CSCDomainManager.Domain.paidThroughDate | String | The paid through date. |
+| CSCDomainManager.Domain.nameServers | String | The name servers. |
+| CSCDomainManager.Domain.dnsType | String | The DNS type. |
+| CSCDomainManager.Domain.account.accountName | String | The name of the account associated with the domain. |
+| CSCDomainManager.Domain.account.accountNumber | String | The account number associated with the domain. |
+| CSCDomainManager.Domain.brandName | String | The brand name associated with the domain. |
+| CSCDomainManager.Domain.businessUnit | String | The business unit associated with the domain. |
+| CSCDomainManager.Domain.countryCode | String | The country code associated with the domain. |
+| CSCDomainManager.Domain.criticalDomain | Boolean | Indicates if the domain is critical. |
+| CSCDomainManager.Domain.customFields.name | String | The name of the custom field. |
+| CSCDomainManager.Domain.customFields.value | String | The value of the custom field. |
+| CSCDomainManager.Domain.dnssecActivated | String | Indicates if DNSSEC is activated for the domain. |
+| CSCDomainManager.Domain.extension | String | The extension of the domain, such as .com, .net, etc. |
+| CSCDomainManager.Domain.idn | String | Indicates if the domain is an Internationalized Domain Name \(IDN\). |
+| CSCDomainManager.Domain.idnReferenceName | String | The reference name for the IDN. |
+| CSCDomainManager.Domain.lastModifiedDate | Date | The date when the domain was last modified. |
+| CSCDomainManager.Domain.lastModifiedDescription | String | A description of the last modification made to the domain. |
+| CSCDomainManager.Domain.lastModifiedReason | String | The reason for the last modification made to the domain. |
+| CSCDomainManager.Domain.localAgent | Boolean | Indicates if the domain has a local agent. |
+| CSCDomainManager.Domain.newGtld | Boolean | Indicates if the domain is a new gTLD \(Generic Top-Level Domain\). |
+| CSCDomainManager.Domain.serverDeleteProhibited | Boolean | Indicates if the domain is prohibited from deletion by the server. |
+| CSCDomainManager.Domain.serverTransferProhibited | Boolean | Indicates if the domain is prohibited from transfer by the server. |
+| CSCDomainManager.Domain.serverUpdateProhibited | Boolean | Indicates if the domain is prohibited from updates by the server. |
+| CSCDomainManager.Domain.urlf.redirectType | String | The type of redirect used in URL forwarding for the domain. |
+| CSCDomainManager.Domain.urlf.urlForwarding | Boolean | Indicates if URL forwarding is enabled for the domain. |
+| CSCDomainManager.Domain.whoisContacts.city | String | The city of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.contactType | String | The type of WHOIS contact \(e.g., registrant, admin, tech\). |
+| CSCDomainManager.Domain.whoisContacts.country | String | The country of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.email | String | The email address of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.fax | String | The fax number of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.firstName | String | The first name of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.lastName | String | The last name of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.organization | String | The organization of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.phone | String | The phone number of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.phoneExtn | String | The phone extension of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.postalCode | String | The postal code of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.stateProvince | String | The state or province of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.street1 | String | The street address of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.street2 | String | The secondary street address of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisPrivacy | Boolean | Indicates if WHOIS privacy protection is enabled for the domain. |
+
+#### Command example
+```!csc-domains-search domain_name=csc-panw.biz```
+#### Context Example
+```json
+{
+ "CSCDomainManager": {
+ "Domain": {
+ "account": {
+ "accountName": "Palo Alto Networks - Integration",
+ "accountNumber": "8601230"
+ },
+ "brandName": "",
+ "businessUnit": "Cortex",
+ "countryCode": "",
+ "criticalDomain": false,
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "Xpanse"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XR-586"
+ }
+ ],
+ "dnsType": "CSC_BASIC",
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "domain": "csc-panw",
+ "extension": "biz",
+ "idn": "",
+ "idnReferenceName": "",
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedDescription": "Domain registered",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "localAgent": false,
+ "managedStatus": "ACTIVE",
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "newGtld": false,
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "qualifiedDomainName": "csc-panw.biz",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "whoisContacts": [
+ {
+ "city": "Wilmington",
+ "contactType": "REGISTRANT",
+ "country": "US",
+ "email": "admin@internationaladmin.com",
+ "fax": "",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ },
+ {
+ "city": "Wilmington",
+ "contactType": "ADMINISTRATIVE",
+ "country": "US",
+ "email": "admin@internationaladmin.com",
+ "fax": "",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ },
+ {
+ "city": "Wilmington",
+ "contactType": "TECHNICAL",
+ "country": "US",
+ "email": "dns-admin@cscglobal.com",
+ "fax": "",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ }
+ ],
+ "whoisPrivacy": false
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Filtered Domains
+>|Qualified Domain Name|Domain|Managed Status|Registration Date|Registry Expiry Date|Paid Through Date|Name Servers|Dns Type|Whois Contact first Name|Whois Contact last Name|Whois Contact email|
+>|---|---|---|---|---|---|---|---|---|---|---|
+>| csc-panw.biz | csc-panw | ACTIVE | 22-Apr-2024 UTC | 22-Apr-2025 UTC | 22-Apr-2025 UTC | dns1.cscdns.net, dns2.cscdns.net | CSC_BASIC | Domain, Domain, DNS | Administrator, Administrator, Administrator | admin@internationaladmin.com, admin@internationaladmin.com, dns-admin@cscglobal.com |
+
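+The operator prefixes described in the Input section (like=, in=, gt=, ge=, lt=, le=) can be combined in a single call. A hedged example with illustrative argument values:
+
+```!csc-domains-search registration_date="ge=22-Apr-2024" organization="like=panw" sort="qualifiedDomainName,asc" limit=10```
+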
+
+### csc-domains-availability-check
+
+***
+Check registration availability for one or more domain names.
+
+#### Base Command
+
+`csc-domains-availability-check`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain_name | The domain name(s) to check. Supports a comma-separated list. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CSCDomainManager.Domain.Availability.qualifiedDomainName | String | The fully qualified domain name \(FQDN\) being checked for availability. |
+| CSCDomainManager.Domain.Availability.result.code | String | The result code indicating the availability status of the domain. |
+| CSCDomainManager.Domain.Availability.result.message | String | A message providing additional information about the availability status. |
+| CSCDomainManager.Domain.Availability.basePrice.price | String | The base price for registering the domain. |
+| CSCDomainManager.Domain.Availability.basePrice.currency | String | The currency of the base price. |
+| CSCDomainManager.Domain.Availability.listOfTheTerms | String | A list of terms related to the availability of the domain. |
+| CSCDomainManager.Domain.Availability.availableTerms | Unknown | The terms available for the domain registration. |
+
+#### Command example
+```!csc-domains-availability-check domain_name=csc-panw.biz```
+#### Context Example
+```json
+{
+ "CSCDomainManager": {
+ "Domain": {
+ "Availability": [
+ {
+ "availableTerms": [],
+ "basePrice": {
+ "currency": "",
+ "price": null
+ },
+ "qualifiedDomainName": "csc-panw.biz",
+ "result": {
+ "code": "DOMAIN_IN_PORTFOLIO",
+ "message": "Domain already in portfolio"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Domains Availability
+>|Qualified Domain Name|Code|Message|Price|Currency|List of the terms (months) available for registration|
+>|---|---|---|---|---|---|
+>| csc-panw.biz | DOMAIN_IN_PORTFOLIO | Domain already in portfolio | | | |
+
+
+### csc-domains-configuration-list
+
+***
+Get domain configuration information for owned domains, with optional filtering.
+
+#### Base Command
+
+`csc-domains-configuration-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain_name | Domain name to filter by. Can start with like=, in=. | Optional |
+| registration_date | Registration date to filter by. Can start with gt=, ge=, lt=, le=. Date example: 22-Apr-2024. | Optional |
+| domain_email | Email to filter by. Can start with like=, in=. | Optional |
+| filter | A custom filter expression built from selectors such as accountName, accountNumber, brandName, businessUnit, city, country, countryCode, criticalDomain, dnssecActivated, dnsType, domain, email, extension, fax, firstName, idnReferenceName, lastModifiedDate, lastModifiedDescription, lastModifiedReason, lastName, localAgent, managedStatus, nameServers, newGtld, organization, paidThroughDate, phone, phoneExtn, postalCode, qualifiedDomainName, redirectType, registrationDate, registryExpiryDate, serverDeleteProhibited, serverTransferProhibited, serverUpdateProhibited, stateProvince, street1, street2, urlForwarding, whoisPrivacy. | Optional |
+| page | Page number. | Optional |
+| page_size | The number of rows per page. | Optional |
+| limit | The maximum number of rows to return. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CSCDomainManager.Domain.Configuration.domain | String | The domain. |
+| CSCDomainManager.Domain.Configuration.domainLabel | String | The domain label. |
+| CSCDomainManager.Domain.Configuration.domainStatusCode | String | The domain status code. |
+| CSCDomainManager.Domain.Configuration.domainExtension | String | The domain extension. |
+| CSCDomainManager.Domain.Configuration.country | String | The country. |
+| CSCDomainManager.Domain.Configuration.adminEmail | String | The admin email address. |
+| CSCDomainManager.Domain.Configuration.adminName | String | The admin name. |
+| CSCDomainManager.Domain.Configuration.accountNumber | String | The account number. |
+| CSCDomainManager.Domain.Configuration.accountName | String | The account name. |
+| CSCDomainManager.Domain.Configuration.account.accountName | String | The name of the account associated with the domain. |
+| CSCDomainManager.Domain.Configuration.account.accountNumber | String | The account number associated with the domain. |
+| CSCDomainManager.Domain.Configuration.adminOrg | String | The administrative organization managing the domain. |
+| CSCDomainManager.Domain.Configuration.businessUnit | String | The business unit associated with the domain. |
+| CSCDomainManager.Domain.Configuration.dnsData.dnsDomain | String | The DNS domain information. |
+| CSCDomainManager.Domain.Configuration.dnsData.dnsProvider | String | The DNS provider for the domain. |
+| CSCDomainManager.Domain.Configuration.dnsHostingType | String | The type of DNS hosting used for the domain. |
+| CSCDomainManager.Domain.Configuration.dnsTraffic12moAve | Number | The average DNS traffic over the last 12 months. |
+| CSCDomainManager.Domain.Configuration.extension | String | The extension of the domain, such as .com, .net, etc. |
+| CSCDomainManager.Domain.Configuration.hasCscUrlf | Boolean | Indicates if the domain has CSC URL forwarding enabled. |
+| CSCDomainManager.Domain.Configuration.hasDkim | Boolean | Indicates if DKIM is configured for the domain. |
+| CSCDomainManager.Domain.Configuration.hasDmarc | Boolean | Indicates if DMARC is configured for the domain. |
+| CSCDomainManager.Domain.Configuration.hasDnssecDs | Boolean | Indicates if the domain has DNSSEC DS records. |
+| CSCDomainManager.Domain.Configuration.hasSpf | Boolean | Indicates if SPF is configured for the domain. |
+| CSCDomainManager.Domain.Configuration.hasWww | Boolean | Indicates if the domain has a WWW record. |
+| CSCDomainManager.Domain.Configuration.isGtld | Boolean | Indicates if the domain is a gTLD \(Generic Top-Level Domain\). |
+| CSCDomainManager.Domain.Configuration.isLive | Boolean | Indicates if the domain is live. |
+| CSCDomainManager.Domain.Configuration.isLiveType | String | The type of live status for the domain. |
+| CSCDomainManager.Domain.Configuration.isMultilockEligible | Boolean | Indicates if the domain is eligible for multilock. |
+| CSCDomainManager.Domain.Configuration.isVital | Boolean | Indicates if the domain is considered vital. |
+| CSCDomainManager.Domain.Configuration.multiLocked | Boolean | Indicates if the domain is multilocked. |
+| CSCDomainManager.Domain.Configuration.numLiveMx | Number | The number of live MX records for the domain. |
+| CSCDomainManager.Domain.Configuration.numRootA | Number | The number of root A records for the domain. |
+| CSCDomainManager.Domain.Configuration.numRootTxt | Number | The number of root TXT records for the domain. |
+| CSCDomainManager.Domain.Configuration.numSslNetcraft | Number | The number of SSL certificates detected by Netcraft for the domain. |
+| CSCDomainManager.Domain.Configuration.numWwwA | Number | The number of WWW A records for the domain. |
+| CSCDomainManager.Domain.Configuration.numWwwCname | Number | The number of WWW CNAME records for the domain. |
+| CSCDomainManager.Domain.Configuration.regEmail | String | The registration email address for the domain. |
+| CSCDomainManager.Domain.Configuration.regName | String | The registration name for the domain. |
+| CSCDomainManager.Domain.Configuration.regOrg | String | The registration organization for the domain. |
+| CSCDomainManager.Domain.Configuration.registryExpiryDate | Date | The expiration date of the domain registration in the registry. |
+| CSCDomainManager.Domain.Configuration.rootHttpCode | Number | The HTTP response code for the root domain. |
+| CSCDomainManager.Domain.Configuration.rootHttpUrl | Unknown | The HTTP URL for the root domain. |
+| CSCDomainManager.Domain.Configuration.rootIsUrlf | Boolean | Indicates if URL forwarding is enabled for the root domain. |
+| CSCDomainManager.Domain.Configuration.serverDeleteProhibited | Unknown | Indicates if the domain is prohibited from deletion by the server. |
+| CSCDomainManager.Domain.Configuration.serverTransferProhibited | Unknown | Indicates if the domain is prohibited from transfer by the server. |
+| CSCDomainManager.Domain.Configuration.serverUpdateProhibited | Unknown | Indicates if the domain is prohibited from updates by the server. |
+| CSCDomainManager.Domain.Configuration.techEmail | String | The technical contact email address for the domain. |
+| CSCDomainManager.Domain.Configuration.techName | String | The technical contact name for the domain. |
+| CSCDomainManager.Domain.Configuration.techOrg | String | The technical contact organization for the domain. |
+| CSCDomainManager.Domain.Configuration.tld | String | The top-level domain \(TLD\) of the domain. |
+| CSCDomainManager.Domain.Configuration.urlfTraffic12moAve | Number | The average URL forwarding traffic over the last 12 months. |
+| CSCDomainManager.Domain.Configuration.valueRootA | Unknown | The value of root A records for the domain. |
+| CSCDomainManager.Domain.Configuration.valueRootMx | Unknown | The value of root MX records for the domain. |
+| CSCDomainManager.Domain.Configuration.valueRootTxt | Unknown | The value of root TXT records for the domain. |
+| CSCDomainManager.Domain.Configuration.valueWwwA | Unknown | The value of WWW A records for the domain. |
+| CSCDomainManager.Domain.Configuration.valueWwwCname | Unknown | The value of WWW CNAME records for the domain. |
+| CSCDomainManager.Domain.Configuration.wwwHttpCode | Number | The HTTP response code for the WWW domain. |
+| CSCDomainManager.Domain.Configuration.wwwHttpUrl | Unknown | The HTTP URL for the WWW domain. |
+| CSCDomainManager.Domain.Configuration.wwwIsUrlf | Boolean | Indicates if URL forwarding is enabled for the WWW domain. |
+
+#### Command example
+```!csc-domains-configuration-list domain_name=csc-panw.biz```
+#### Context Example
+```json
+{
+ "CSCDomainManager": {
+ "Domain": {
+ "Configuration": {
+ "account": {
+ "accountName": "Palo Alto Networks - Integration",
+ "accountNumber": "8601230"
+ },
+ "adminEmail": "admin@internationaladmin.com",
+ "adminName": "Domain Administrator",
+ "adminOrg": "CSC Corporate Domains, Inc.",
+ "businessUnit": "Cortex",
+ "country": "GTLD",
+ "dnsData": [
+ {
+ "dnsDomain": "ns1.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ },
+ {
+ "dnsDomain": "ns2.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ }
+ ],
+ "dnsHostingType": "THIRDPARTY",
+ "dnsTraffic12moAve": 790,
+ "domain": "csc-panw.biz",
+ "domainLabel": "csc-panw",
+ "domainStatusCode": "ACT",
+ "extension": "biz",
+ "hasCscUrlf": false,
+ "hasDkim": false,
+ "hasDmarc": false,
+ "hasDnssecDs": false,
+ "hasSpf": false,
+ "hasWww": false,
+ "isGtld": false,
+ "isLive": false,
+ "isLiveType": "Not Live",
+ "isMultilockEligible": true,
+ "isVital": false,
+ "multiLocked": false,
+ "numLiveMx": 0,
+ "numRootA": 0,
+ "numRootTxt": 0,
+ "numSslNetcraft": 0,
+ "numWwwA": 0,
+ "numWwwCname": 0,
+ "regEmail": "admin@internationaladmin.com",
+ "regName": "Domain Administrator",
+ "regOrg": "CSC Corporate Domains, Inc.",
+ "registryExpiryDate": "2025-04-22",
+ "rootHttpCode": 0,
+ "rootHttpUrl": null,
+ "rootIsUrlf": false,
+ "serverDeleteProhibited": null,
+ "serverTransferProhibited": null,
+ "serverUpdateProhibited": null,
+ "techEmail": "dns-admin@cscglobal.com",
+ "techName": "Domain Administrator",
+ "techOrg": "CSC Corporate Domains, Inc.",
+ "tld": "biz",
+ "urlfTraffic12moAve": 0,
+ "valueRootA": null,
+ "valueRootMx": null,
+ "valueRootTxt": null,
+ "valueWwwA": null,
+ "valueWwwCname": null,
+ "wwwHttpCode": 0,
+ "wwwHttpUrl": null,
+ "wwwIsUrlf": true
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Filtered Configurations
+>|Domain|Domain Label|Domain Status Code|Domain extension|Country|Admin Email|Admin Name|Account Number|Account Name|
+>|---|---|---|---|---|---|---|---|---|
+>| csc-panw.biz | csc-panw | ACT | biz | GTLD | admin@internationaladmin.com | admin@internationaladmin.com | 8601230 | Palo Alto Networks - Integration |
+
+
+### domain
+
+***
+Get domain data by qualified domain name.
+
+#### Base Command
+
+`domain`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain | The domain name(s) to enrich. For example, for the indicator example.com, the value is example.com. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| DBotScore.Indicator | String | The indicator that was tested. |
+| DBotScore.Type | String | The indicator type. |
+| DBotScore.Vendor | String | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
+| Domain.Name | String | The domain name, for example: "google.com". |
+| Domain.CreationDate | String | The creation date. |
+| Domain.DomainIDNName | String | The domain IDN name. |
+| Domain.ExpirationDate | String | The expiration date. |
+| Domain.UpdatedDate | String | The updated date. |
+| Domain.NameServers | String | The name servers. |
+| Domain.Registrant.Name | String | Registrant name |
+| Domain.Registrant.Email | String | Registrant email |
+| Domain.Registrant.Phone | String | Registrant phone |
+| Domain.Registrant.Country | String | Registrant country |
+| Domain.Admin.Name | String | Admin name |
+| Domain.Admin.Email | String | Admin email |
+| Domain.Admin.Phone | String | Admin phone |
+| Domain.Admin.Country | String | Admin country |
+| Domain.Tech.Country | String | Tech country |
+| Domain.Tech.Name | String | Tech name |
+| Domain.Tech.Organization | String | Tech organization |
+| Domain.Tech.Email | String | Tech email |
+| CSCDomainManager.Domain.account.accountName | String | Domain account name |
+| CSCDomainManager.Domain.account.accountNumber | String | Domain account number |
+| CSCDomainManager.Domain.brandName | String | Domain brand name |
+| CSCDomainManager.Domain.businessUnit | String | Domain business unit |
+| CSCDomainManager.Domain.countryCode | String | Domain country code |
+| CSCDomainManager.Domain.criticalDomain | Boolean | Domain critical domain |
+| CSCDomainManager.Domain.customFields.name | String | Domain custom fields name |
+| CSCDomainManager.Domain.customFields.value | String | The value of custom fields associated with the domain. |
+| CSCDomainManager.Domain.dnsType | String | The type of DNS used by the domain. |
+| CSCDomainManager.Domain.dnssecActivated | String | Indicates whether DNSSEC is activated for the domain. |
+| CSCDomainManager.Domain.domain | String | The domain name. |
+| CSCDomainManager.Domain.extension | String | The extension of the domain, such as .com, .net, etc. |
+| CSCDomainManager.Domain.idn | String | Indicates if the domain is an Internationalized Domain Name \(IDN\). |
+| CSCDomainManager.Domain.idnReferenceName | String | The reference name for the Internationalized Domain Name \(IDN\). |
+| CSCDomainManager.Domain.lastModifiedDate | Date | The date when the domain was last modified. |
+| CSCDomainManager.Domain.lastModifiedDescription | String | A description of the last modification made to the domain. |
+| CSCDomainManager.Domain.lastModifiedReason | String | The reason for the last modification of the domain. |
+| CSCDomainManager.Domain.localAgent | Boolean | Indicates if a local agent is associated with the domain. |
+| CSCDomainManager.Domain.managedStatus | String | The managed status of the domain. |
+| CSCDomainManager.Domain.nameServers | String | The name servers associated with the domain. |
+| CSCDomainManager.Domain.newGtld | Boolean | Indicates if the domain is a new gTLD \(Generic Top-Level Domain\). |
+| CSCDomainManager.Domain.paidThroughDate | Date | The date through which the domain has been paid. |
+| CSCDomainManager.Domain.qualifiedDomainName | String | The fully qualified domain name \(FQDN\). |
+| CSCDomainManager.Domain.registrationDate | Date | The date when the domain was registered. |
+| CSCDomainManager.Domain.registryExpiryDate | Date | The expiration date of the domain registration in the registry. |
+| CSCDomainManager.Domain.serverDeleteProhibited | Boolean | Indicates if the domain is prohibited from deletion by the server. |
+| CSCDomainManager.Domain.serverTransferProhibited | Boolean | Indicates if the domain is prohibited from transfer by the server. |
+| CSCDomainManager.Domain.serverUpdateProhibited | Boolean | Indicates if the domain is prohibited from updates by the server. |
+| CSCDomainManager.Domain.urlf.redirectType | String | The type of URL forwarding redirect. |
+| CSCDomainManager.Domain.urlf.urlForwarding | Boolean | Indicates if URL forwarding is enabled for the domain. |
+| CSCDomainManager.Domain.whoisContacts.city | String | The city of the WHOIS contact. |
+| CSCDomainManager.Domain.whoisContacts.contactType | String | The type of WHOIS contact \(e.g., Registrant, Admin, Tech\). |
+| Domain.WHOIS.Admin.Country | String | The country of the admin contact in the WHOIS record. |
+| Domain.WHOIS.Admin.Email | String | The email of the admin contact in the WHOIS record. |
+| Domain.WHOIS.Admin.Name | String | The name of the admin contact in the WHOIS record. |
+| Domain.WHOIS.Admin.Phone | String | The phone number of the admin contact in the WHOIS record. |
+| Domain.WHOIS.CreationDate | Date | The creation date of the domain in the WHOIS record. |
+| Domain.WHOIS.ExpirationDate | Date | The expiration date of the domain in the WHOIS record. |
+| Domain.WHOIS.NameServers | String | The name servers listed in the WHOIS record. |
+| Domain.WHOIS.Registrant.Country | String | The country of the registrant in the WHOIS record. |
+| Domain.WHOIS.Registrant.Email | String | The email of the registrant in the WHOIS record. |
+| Domain.WHOIS.Registrant.Name | String | The name of the registrant in the WHOIS record. |
+| Domain.WHOIS.Registrant.Phone | String | The phone number of the registrant in the WHOIS record. |
+
+#### Command example
+```!domain domain=csc-panw.biz```
+#### Context Example
+```json
+{
+ "CSCDomainManager": {
+ "Domain": {
+ "account": {
+ "accountName": "Palo Alto Networks - Integration",
+ "accountNumber": "8601230"
+ },
+ "brandName": "",
+ "businessUnit": "Cortex",
+ "countryCode": "",
+ "criticalDomain": false,
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "Xpanse"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XR-586"
+ }
+ ],
+ "dnsType": "CSC_BASIC",
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "domain": "csc-panw",
+ "extension": "biz",
+ "idn": "",
+ "idnReferenceName": "",
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedDescription": "Domain registered",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "localAgent": false,
+ "managedStatus": "ACTIVE",
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "newGtld": false,
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "qualifiedDomainName": "csc-panw.biz",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "whoisContacts": [
+ {
+ "city": "Wilmington",
+ "contactType": "REGISTRANT",
+ "country": "US",
+ "email": "admin@internationaladmin.com",
+ "fax": "",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ },
+ {
+ "city": "Wilmington",
+ "contactType": "ADMINISTRATIVE",
+ "country": "US",
+ "email": "admin@internationaladmin.com",
+ "fax": "",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ },
+ {
+ "city": "Wilmington",
+ "contactType": "TECHNICAL",
+ "country": "US",
+ "email": "dns-admin@cscglobal.com",
+ "fax": "",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "postalCode": "19808",
+ "stateProvince": "DE",
+ "street1": "251 Little Falls Drive",
+ "street2": ""
+ }
+ ],
+ "whoisPrivacy": false
+ }
+ },
+ "DBotScore": {
+ "Indicator": "csc-panw.biz",
+ "Reliability": "A - Completely reliable",
+ "Score": 0,
+ "Type": "domain",
+ "Vendor": "CSCDomainManager"
+ },
+ "Domain": {
+ "Admin": {
+ "Country": [
+ "US"
+ ],
+ "Email": [
+ "admin@internationaladmin.com"
+ ],
+ "Name": [
+ "Domain Administrator"
+ ],
+ "Phone": [
+ "+1.3026365400"
+ ]
+ },
+ "CreationDate": "22-Apr-2024 UTC",
+ "ExpirationDate": "22-Apr-2025 UTC",
+ "Name": "csc-panw",
+ "NameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "Registrant": {
+ "Country": [
+ "US"
+ ],
+ "Email": [
+ "admin@internationaladmin.com"
+ ],
+ "Name": [
+ "Domain Administrator"
+ ],
+ "Phone": [
+ "+1.3026365400"
+ ]
+ },
+ "Tech": {
+ "Country": [
+ "US"
+ ],
+ "Email": [
+ "dns-admin@cscglobal.com"
+ ],
+ "Name": [
+ "DNS Administrator"
+ ],
+ "Organization": [
+ "CSC Corporate Domains, Inc."
+ ]
+ },
+ "WHOIS": {
+ "Admin": {
+ "Country": [
+ "US"
+ ],
+ "Email": [
+ "admin@internationaladmin.com"
+ ],
+ "Name": [
+ "Domain Administrator"
+ ],
+ "Phone": [
+ "+1.3026365400"
+ ]
+ },
+ "CreationDate": "22-Apr-2024 UTC",
+ "ExpirationDate": "22-Apr-2025 UTC",
+ "NameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "Registrant": {
+ "Country": [
+ "US"
+ ],
+ "Email": [
+ "admin@internationaladmin.com"
+ ],
+ "Name": [
+ "Domain Administrator"
+ ],
+ "Phone": [
+ "+1.3026365400"
+ ]
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Domain
+>|Qualified Domain Name|Domain|Idn|Generic top-level domains|Managed Status|Registration Date|Registry Expiry Date|Paid Through Date|Country Code|Server Delete Prohibited|Server Transfer Prohibited|Server Update Prohibited|Name Servers|Dns Type|Whois Contact first Name|Whois Contact last Name|Whois Contact email|
+>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| csc-panw.biz | csc-panw | | false | ACTIVE | 22-Apr-2024 UTC | 22-Apr-2025 UTC | 22-Apr-2025 UTC | | | false | | dns1.cscdns.net, dns2.cscdns.net | CSC_BASIC | Domain | Administrator | admin@internationaladmin.com |
+
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/command_examples b/Packs/CSCDomainManager/Integrations/CSCDomainManager/command_examples
new file mode 100644
index 000000000000..fe659b9608bb
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/command_examples
@@ -0,0 +1,4 @@
+!csc-domains-search domain_name=csc-panw.biz
+!csc-domains-availability-check domain_name=csc-panw.biz
+!csc-domains-configuration-list domain_name=csc-panw.biz
+!domain domain=csc-panw.biz
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/available_domains_list_for_get_domains_availability_check_hr_fields.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/available_domains_list_for_get_domains_availability_check_hr_fields.json
new file mode 100644
index 000000000000..51ca67515c25
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/available_domains_list_for_get_domains_availability_check_hr_fields.json
@@ -0,0 +1,14 @@
+[
+ {
+ "qualifiedDomainName": "csc-panw.info",
+ "result": {
+ "code": "DOMAIN_IN_PORTFOLIO",
+ "message": "Domain already in portfolio"
+ },
+ "basePrice": {
+ "price": null,
+ "currency": ""
+ },
+ "availableTerms": []
+ }
+]
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/configurations_list_for_get_domains_search_hr_fields.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/configurations_list_for_get_domains_search_hr_fields.json
new file mode 100644
index 000000000000..f7487c0f3a48
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/configurations_list_for_get_domains_search_hr_fields.json
@@ -0,0 +1,69 @@
+[
+ {
+ "domain": "csc-panw.biz",
+ "domainLabel": "csc-panw",
+ "domainStatusCode": "ACT",
+ "dnsHostingType": "THIRDPARTY",
+ "extension": "biz",
+ "tld": "biz",
+ "country": "GTLD",
+ "adminEmail": "admin@internationaladmin.com",
+ "adminName": "Domain Administrator",
+ "adminOrg": "CSC Corporate Domains, Inc.",
+ "regEmail": "admin@internationaladmin.com",
+ "regName": "Domain Administrator",
+ "regOrg": "CSC Corporate Domains, Inc.",
+ "techEmail": "dns-admin@cscglobal.com",
+ "techName": "Domain Administrator",
+ "techOrg": "CSC Corporate Domains, Inc.",
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "businessUnit": "Cortex",
+ "dnsData": [
+ {
+ "dnsDomain": "ns1.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ },
+ {
+ "dnsDomain": "ns2.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ }
+ ],
+ "dnsTraffic12moAve": 790,
+ "hasCscUrlf": false,
+ "hasDkim": false,
+ "hasDmarc": false,
+ "hasDnssecDs": false,
+ "hasSpf": false,
+ "hasWww": false,
+ "isGtld": false,
+ "isLive": false,
+ "isLiveType": "Not Live",
+ "isMultilockEligible": true,
+ "isVital": false,
+ "multiLocked": false,
+ "numLiveMx": 0,
+ "numRootA": 0,
+ "numRootTxt": 0,
+ "numSslNetcraft": 0,
+ "numWwwA": 0,
+ "numWwwCname": 0,
+ "registryExpiryDate": "2025-04-22",
+ "rootHttpCode": 0,
+ "rootHttpUrl": null,
+ "rootIsUrlf": false,
+ "serverDeleteProhibited": null,
+ "serverTransferProhibited": null,
+ "serverUpdateProhibited": null,
+ "urlfTraffic12moAve": 0,
+ "valueRootA": null,
+ "valueRootMx": null,
+ "valueRootTxt": null,
+ "valueWwwA": null,
+ "valueWwwCname": null,
+ "wwwHttpCode": 0,
+ "wwwHttpUrl": null,
+ "wwwIsUrlf": true
+ }]
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domain_domain.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domain_domain.json
new file mode 100644
index 000000000000..055d6d65941c
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domain_domain.json
@@ -0,0 +1,62 @@
+{
+ "qualifiedDomainName": "example.com",
+ "domain": "example",
+ "idn": "",
+ "extension": "com",
+ "newGtld": false,
+ "managedStatus": "ACTIVE",
+ "registrationDate": "09-Dec-2011 UTC",
+ "registryExpiryDate": "09-Dec-2030 UTC",
+ "paidThroughDate": "09-Dec-2030 UTC",
+ "countryCode": "",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "dnsType": "CSC_BASIC",
+ "whoisPrivacy": false,
+ "localAgent": false,
+ "dnssecActivated": "ENABLED",
+ "criticalDomain": true,
+ "businessUnit": "My Business Unit",
+ "brandName": "My Brand",
+ "idnReferenceName": "",
+ "customFields": [
+ {
+ "name": "Cost Center",
+ "value": "Marketing"
+ }
+ ],
+ "account": {
+ "accountNumber": "1234567",
+ "accountName": "CSC"
+ },
+ "urlf": {
+ "urlForwarding": true,
+ "redirectType": 0
+ },
+ "nameServers": [
+ "ns1.cscdns.net",
+ "ns2.cscdns.net"
+ ],
+ "whoisContacts": [
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "John",
+ "lastName": "Lee",
+ "organization": "CSC",
+ "street1": "251 Little Falls Dr",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "john.lee@cscglobal.com",
+ "phone": "+1.1234567890",
+ "phoneExtn": "1234",
+ "fax": "+1.1234567890"
+ }
+ ],
+ "lastModifiedDate": "20-May-2019 UTC",
+ "lastModifiedReason": "RENEWAL_COMPLETE",
+ "lastModifiedDescription": "Domain renewed"
+ }
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domains_list_for_get_domains_search_hr_fields.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domains_list_for_get_domains_search_hr_fields.json
new file mode 100644
index 000000000000..fd8a15f2513e
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/domains_list_for_get_domains_search_hr_fields.json
@@ -0,0 +1,103 @@
+[
+ {
+ "qualifiedDomainName": "csc-panw.biz",
+ "domain": "csc-panw",
+ "idn": "",
+ "extension": "biz",
+ "newGtld": false,
+ "managedStatus": "ACTIVE",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "countryCode": "",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "dnsType": "CSC_BASIC",
+ "whoisPrivacy": false,
+ "localAgent": false,
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "criticalDomain": false,
+ "businessUnit": "Cortex",
+ "brandName": "",
+ "idnReferenceName": "",
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "Xpanse"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XR-586"
+ }
+ ],
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "whoisContacts": [
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "ADMINISTRATIVE",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "TECHNICAL",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "dns-admin@cscglobal.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ }
+ ],
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "lastModifiedDescription": "Domain registered"
+ }]
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain.json
new file mode 100644
index 000000000000..5da91e629ce3
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain.json
@@ -0,0 +1,215 @@
+{
+ "meta": {
+ "numResults": 5,
+ "pages": 1
+ },
+ "domains": [
+ {
+ "qualifiedDomainName": "csc-panw.biz",
+ "domain": "csc-panw",
+ "idn": "",
+ "extension": "biz",
+ "newGtld": false,
+ "managedStatus": "ACTIVE",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "countryCode": "",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "dnsType": "CSC_BASIC",
+ "whoisPrivacy": false,
+ "localAgent": false,
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "criticalDomain": false,
+ "businessUnit": "Cortex",
+ "brandName": "",
+ "idnReferenceName": "",
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "Xpanse"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XR-586"
+ }
+ ],
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "whoisContacts": [
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "ADMINISTRATIVE",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "TECHNICAL",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "dns-admin@cscglobal.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ }
+ ],
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "lastModifiedDescription": "Domain registered"
+ },
+ {
+ "qualifiedDomainName": "csc-panw.com",
+ "domain": "csc-panw",
+ "idn": "",
+ "extension": "com",
+ "newGtld": false,
+ "managedStatus": "ACTIVE",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "countryCode": "",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "dnsType": "CSC_BASIC",
+ "whoisPrivacy": false,
+ "localAgent": false,
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "criticalDomain": true,
+ "businessUnit": "Cortex",
+ "brandName": "",
+ "idnReferenceName": "",
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "XSOAR"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XC-852"
+ }
+ ],
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "whoisContacts": [
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "ADMINISTRATIVE",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "TECHNICAL",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "dns-admin@cscglobal.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ }
+ ],
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "lastModifiedDescription": "Domain registered"
+ }
+ ],
+ "links": {
+ "self": "https://apis-ote.cscglobal.com/dbs/api/v2/domains?filter=domain==%22csc-panw%22"
+ }
+}
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_availability_check.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_availability_check.json
new file mode 100644
index 000000000000..42d731a556ba
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_availability_check.json
@@ -0,0 +1,31 @@
+{
+ "meta": {
+ "numResults": 2
+ },
+ "results": [
+ {
+ "qualifiedDomainName": "cscpanw.org",
+ "result": {
+ "code": "DOMAIN_IN_PORTFOLIO",
+ "message": "Domain already in portfolio"
+ },
+ "basePrice": {
+ "price": null,
+ "currency": ""
+ },
+ "availableTerms": []
+ },
+ {
+ "qualifiedDomainName": "csc-panw.info",
+ "result": {
+ "code": "DOMAIN_IN_PORTFOLIO",
+ "message": "Domain already in portfolio"
+ },
+ "basePrice": {
+ "price": null,
+ "currency": ""
+ },
+ "availableTerms": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_qualified_domain_name.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_qualified_domain_name.json
new file mode 100644
index 000000000000..1e7df7cfb670
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domain_qualified_domain_name.json
@@ -0,0 +1,102 @@
+{
+ "qualifiedDomainName": "csc-panw.com",
+ "domain": "csc-panw",
+ "idn": "",
+ "extension": "com",
+ "newGtld": false,
+ "managedStatus": "ACTIVE",
+ "registrationDate": "22-Apr-2024 UTC",
+ "registryExpiryDate": "22-Apr-2025 UTC",
+ "paidThroughDate": "22-Apr-2025 UTC",
+ "countryCode": "",
+ "serverDeleteProhibited": false,
+ "serverTransferProhibited": false,
+ "serverUpdateProhibited": false,
+ "dnsType": "CSC_BASIC",
+ "whoisPrivacy": false,
+ "localAgent": false,
+ "dnssecActivated": "USAGE_UNKNOWN",
+ "criticalDomain": true,
+ "businessUnit": "Cortex",
+ "brandName": "",
+ "idnReferenceName": "",
+ "customFields": [
+ {
+ "name": "Custom Field 2",
+ "value": "Custom-RefVal"
+ },
+ {
+ "name": "Department",
+ "value": "XSOAR"
+ },
+ {
+ "name": "PO Number",
+ "value": "2024-XC-852"
+ }
+ ],
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "urlf": {
+ "redirectType": "",
+ "urlForwarding": false
+ },
+ "nameServers": [
+ "dns1.cscdns.net",
+ "dns2.cscdns.net"
+ ],
+ "whoisContacts": [
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "ADMINISTRATIVE",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "TECHNICAL",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "dns-admin@cscglobal.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ }
+ ],
+ "lastModifiedDate": "22-Apr-2024 UTC",
+ "lastModifiedReason": "REGISTRATION_COMPLETE",
+ "lastModifiedDescription": "Domain registered"
+}
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domains_configuration_list.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domains_configuration_list.json
new file mode 100644
index 000000000000..ba3494934bbb
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/get_domains_configuration_list.json
@@ -0,0 +1,79 @@
+{
+ "meta": {
+ "numResults": 1,
+ "pages": 1
+ },
+ "configurations": [
+ {
+ "domain": "csc-panw.biz",
+ "domainLabel": "csc-panw",
+ "domainStatusCode": "ACT",
+ "dnsHostingType": "THIRDPARTY",
+ "extension": "biz",
+ "tld": "biz",
+ "country": "GTLD",
+ "adminEmail": "admin@internationaladmin.com",
+ "adminName": "Domain Administrator",
+ "adminOrg": "CSC Corporate Domains, Inc.",
+ "regEmail": "admin@internationaladmin.com",
+ "regName": "Domain Administrator",
+ "regOrg": "CSC Corporate Domains, Inc.",
+ "techEmail": "dns-admin@cscglobal.com",
+ "techName": "Domain Administrator",
+ "techOrg": "CSC Corporate Domains, Inc.",
+ "account": {
+ "accountNumber": "8601230",
+ "accountName": "Palo Alto Networks - Integration"
+ },
+ "businessUnit": "Cortex",
+ "dnsData": [
+ {
+ "dnsDomain": "ns1.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ },
+ {
+ "dnsDomain": "ns2.1-877namebid.com",
+ "dnsProvider": "1-877NameBid.com LLC, (United States)"
+ }
+ ],
+ "dnsTraffic12moAve": 790,
+ "hasCscUrlf": false,
+ "hasDkim": false,
+ "hasDmarc": false,
+ "hasDnssecDs": false,
+ "hasSpf": false,
+ "hasWww": false,
+ "isGtld": false,
+ "isLive": false,
+ "isLiveType": "Not Live",
+ "isMultilockEligible": true,
+ "isVital": false,
+ "multiLocked": false,
+ "numLiveMx": 0,
+ "numRootA": 0,
+ "numRootTxt": 0,
+ "numSslNetcraft": 0,
+ "numWwwA": 0,
+ "numWwwCname": 0,
+ "registryExpiryDate": "2025-04-22",
+ "rootHttpCode": 0,
+ "rootHttpUrl": null,
+ "rootIsUrlf": false,
+ "serverDeleteProhibited": null,
+ "serverTransferProhibited": null,
+ "serverUpdateProhibited": null,
+ "urlfTraffic12moAve": 0,
+ "valueRootA": null,
+ "valueRootMx": null,
+ "valueRootTxt": null,
+ "valueWwwA": null,
+ "valueWwwCname": null,
+ "wwwHttpCode": 0,
+ "wwwHttpUrl": null,
+ "wwwIsUrlf": true
+ }
+ ],
+ "links": {
+ "self": "https://apis-uat.cscglobal.com/dbs/api/v2/domains/configuration?filter=domain==%22csc-panw.biz%22"
+ }
+}
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/whois_contacts.json b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/whois_contacts.json
new file mode 100644
index 000000000000..143f387e17c7
--- /dev/null
+++ b/Packs/CSCDomainManager/Integrations/CSCDomainManager/test_data/whois_contacts.json
@@ -0,0 +1,50 @@
+[
+ {
+ "contactType": "REGISTRANT",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "ADMINISTRATIVE",
+ "firstName": "Domain",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "admin@internationaladmin.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ },
+ {
+ "contactType": "TECHNICAL",
+ "firstName": "DNS",
+ "lastName": "Administrator",
+ "organization": "CSC Corporate Domains, Inc.",
+ "street1": "251 Little Falls Drive",
+ "street2": "",
+ "city": "Wilmington",
+ "stateProvince": "DE",
+ "country": "US",
+ "postalCode": "19808",
+ "email": "dns-admin@cscglobal.com",
+ "phone": "+1.3026365400",
+ "phoneExtn": "",
+ "fax": ""
+ }
+]
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/README.md b/Packs/CSCDomainManager/README.md
new file mode 100644
index 000000000000..8d216f095e1c
--- /dev/null
+++ b/Packs/CSCDomainManager/README.md
@@ -0,0 +1,10 @@
+CSCDomainManager is a web-based portfolio management platform consolidating domains alongside social media usernames, SSL digital certificates, and DNS.
+
+CSC offers innovative, next-generation domain management and security solutions coupled with online brand and fraud protection. DomainSec is a comprehensive platform for domain security. Powered by a correlation engine and machine learning technology, it brings together a variety of data sets and blocking networks, enabling threat intelligence based on domain security insights.
+
+# What does this pack do?
+The actions included in this pack allow the user to manage domains from Cortex XSOAR.
+
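+For example, once an instance of the integration is configured, the pack's commands can be run from the War Room. The command and argument names below are taken from the pack's test playbook; the domain values are placeholders copied from the bundled test data:
+
+```
+!csc-domains-search domain_name="csc-panw.com"
+!csc-domains-availability-check domain_name="csc-panw.com"
+!csc-domains-configuration-search domain_name="csc-panw.biz"
+!domain domain="csc-panw.com"
+```
+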
+# For more information
+- [Visit the CSC website](https://www.cscdbs.com/)
+- [See the API page](https://www.cscglobal.com/cscglobal/docs/dbs/domainmanager/api-v2/#/)
\ No newline at end of file
diff --git a/Packs/CSCDomainManager/TestPlaybooks/CSCDomainManager_Test.yml b/Packs/CSCDomainManager/TestPlaybooks/CSCDomainManager_Test.yml
new file mode 100644
index 000000000000..e0d4c0d279c9
--- /dev/null
+++ b/Packs/CSCDomainManager/TestPlaybooks/CSCDomainManager_Test.yml
@@ -0,0 +1,558 @@
+id: CSCDomainManager_Test
+version: -1
+name: CSCDomainManager_Test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: fd9e757a-7772-4460-83ed-aa77156bdae0
+ type: start
+ task:
+ id: fd9e757a-7772-4460-83ed-aa77156bdae0
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": -20
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 0a19db37-a259-47db-8142-8effb8ff9795
+ type: regular
+ task:
+ id: 0a19db37-a259-47db-8142-8effb8ff9795
+ version: -1
+ name: DeleteContext
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 160
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 6ec3ea48-1596-4f35-8b03-d6a1be319a6f
+ type: regular
+ task:
+ id: 6ec3ea48-1596-4f35-8b03-d6a1be319a6f
+ version: -1
+ name: csc-domains-search by domain_name
+ description: Gets the domains by the applied filters
+ script: '|||csc-domains-search'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "6"
+ scriptarguments:
+ domain_name:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: qualifiedDomainName
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 550
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 253ece94-5c08-466f-8e55-7b8ff6c4c160
+ type: regular
+ task:
+ id: 253ece94-5c08-466f-8e55-7b8ff6c4c160
+ version: -1
+ name: closeInvestigation
+ description: Close the current incident
+ script: Builtin|||closeInvestigation
+ type: regular
+ iscommand: true
+ brand: Builtin
+ scriptarguments:
+ id:
+ simple: ${incident.id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 2310
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 8fc87e3c-c3ed-4b5f-8e9c-84f075d93c95
+ type: condition
+ task:
+ id: 8fc87e3c-c3ed-4b5f-8e9c-84f075d93c95
+ version: -1
+ name: Verify Results - domains search
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "17"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: CSCDomainManager.Domain.qualifiedDomainName
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: qualifiedDomainName
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: CSCDomainManager.Domain.domain
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: domain
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: 5b39d420-dcb1-46d0-8f1c-15e983a919f2
+ type: regular
+ task:
+ id: 5b39d420-dcb1-46d0-8f1c-15e983a919f2
+ version: -1
+ name: csc-domains-availability-check
+ script: CSCDomainManager|||csc-domains-availability-check
+ type: regular
+ iscommand: true
+ brand: CSCDomainManager
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ domain_name:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: qualifiedDomainName
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1480
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 69fd19fd-eed7-481d-841d-fec402988752
+ type: condition
+ task:
+ id: 69fd19fd-eed7-481d-841d-fec402988752
+ version: -1
+ name: Verify Results - availability check
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "13"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CSCDomainManager.Domain.Availability.result
+ accessor: code
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ right:
+ value:
+ simple: DOMAIN_IN_PORTFOLIO
+ - - operator: isEqualString
+ left:
+ value:
+ simple: CSCDomainManager.Domain.Availability.result.message
+ iscontext: true
+ right:
+ value:
+ simple: Domain already in portfolio
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: c4765886-a582-4844-893d-22f89c926cc5
+ type: condition
+ task:
+ id: c4765886-a582-4844-893d-22f89c926cc5
+ version: -1
+ name: Verify Results - configuration list
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "7"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: CSCDomainManager.Domain.Configuration.domain
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: CSCDomainManager.Domain.Configuration
+ accessor: domain
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ - - operator: isEqualString
+ left:
+ value:
+ simple: CSCDomainManager.Domain.Configuration.country
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: CSCDomainManager.Domain.Configuration
+ accessor: country
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1290
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 71d93173-9e4f-414f-8f1b-ed320afdad9e
+ type: regular
+ task:
+ id: 71d93173-9e4f-414f-8f1b-ed320afdad9e
+ version: -1
+ name: csc-domains-search - no input
+ description: Gets the domains by the applied filters
+ script: CSCDomainManager|||csc-domains-search
+ type: regular
+ iscommand: true
+ brand: CSCDomainManager
+ nexttasks:
+ '#none#':
+ - "2"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: aecec5d3-8b9a-4177-88e9-729daa6472b3
+ type: regular
+ task:
+ id: aecec5d3-8b9a-4177-88e9-729daa6472b3
+ version: -1
+ name: domain
+ script: CSCDomainManager|||domain
+ type: regular
+ iscommand: true
+ brand: CSCDomainManager
+ nexttasks:
+ '#none#':
+ - "14"
+ scriptarguments:
+ domain:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: qualifiedDomainName
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1895
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 7c58c1e5-4487-430b-8ac2-f4bac9a41f6e
+ type: condition
+ task:
+ id: 7c58c1e5-4487-430b-8ac2-f4bac9a41f6e
+ version: -1
+ name: Verify Results - domain
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "4"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: CSCDomainManager.Domain
+ accessor: qualifiedDomainName
+ transformers:
+ - operator: FirstArrayElement
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 2095
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: 53ef4bdd-a77a-4923-8540-52b03d493362
+ type: regular
+ task:
+ id: 53ef4bdd-a77a-4923-8540-52b03d493362
+ version: -1
+ name: csc-domains-configuration-search - no input
+ description: Get configuration information for owned domains with optional filtering.
+ script: CSCDomainManager|||csc-domains-configuration-search
+ type: regular
+ iscommand: true
+ brand: CSCDomainManager
+ nexttasks:
+ '#none#':
+ - "18"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 920
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: 20238a83-f162-4179-8e9b-36279f64a2a8
+ type: regular
+ task:
+ id: 20238a83-f162-4179-8e9b-36279f64a2a8
+ version: -1
+ name: csc-domains-configuration-search by domain_name
+ description: Get configuration information for owned domains with optional filtering.
+ script: CSCDomainManager|||csc-domains-configuration-search
+ type: regular
+ iscommand: true
+ brand: CSCDomainManager
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ domain_name:
+ complex:
+ root: CSCDomainManager.Domain.Configuration
+ accessor: domain
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1095
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 2425,
+ "width": 380,
+ "x": 480,
+ "y": -20
+ }
+ }
+ }
+inputs: []
+outputs: []
+fromversion: 5.0.0
+description: ''
diff --git a/Packs/CSCDomainManager/pack_metadata.json b/Packs/CSCDomainManager/pack_metadata.json
new file mode 100644
index 000000000000..0589a2e9ae92
--- /dev/null
+++ b/Packs/CSCDomainManager/pack_metadata.json
@@ -0,0 +1,19 @@
+{
+ "name": "CSCDomainManager",
+ "description": "CSCDomainManager is the world's first multilingual domain management tool, available in English, French, and German. It uses rules-based technology, customizable reporting, granular user management, and more to enable you to manage your domain.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "IT Services"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/CTF02/ReleaseNotes/1_0_4.md b/Packs/CTF02/ReleaseNotes/1_0_4.md
new file mode 100644
index 000000000000..4cdae4628b43
--- /dev/null
+++ b/Packs/CTF02/ReleaseNotes/1_0_4.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CTF_2_BF
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
index e08c96132c21..1376ab272056 100644
--- a/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
+++ b/Packs/CTF02/Scripts/CTF2BF/CTF2BF.yml
@@ -26,7 +26,7 @@ args:
scripttarget: 0
subtype: python3
runonce: false
-dockerimage: demisto/python3:3.10.13.87159
+dockerimage: demisto/python3:3.10.14.99865
runas: DBotWeakRole
engineinfo: {}
fromversion: 8.2.0
diff --git a/Packs/CTF02/doc_files/A.gif b/Packs/CTF02/doc_files/A.gif
new file mode 100644
index 000000000000..1983c3802b19
Binary files /dev/null and b/Packs/CTF02/doc_files/A.gif differ
diff --git a/Packs/CTF02/pack_metadata.json b/Packs/CTF02/pack_metadata.json
index 8c1e616f4ddb..c1f3c6ea9567 100644
--- a/Packs/CTF02/pack_metadata.json
+++ b/Packs/CTF02/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Capture The Flag - 02",
"description": "XSOAR's Capture the flag (CTF)",
"support": "xsoar",
- "currentVersion": "1.0.3",
+ "currentVersion": "1.0.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -15,7 +15,6 @@
"useCases": [],
"keywords": [],
"marketplaces": [
- "xsoar",
"xsoar_saas"
],
"dependencies": {
diff --git a/Packs/CVESearch/doc_files/CVE_Enrichment_Generic.png b/Packs/CVESearch/doc_files/CVE_Enrichment_Generic.png
new file mode 100644
index 000000000000..a891e3b2e26c
Binary files /dev/null and b/Packs/CVESearch/doc_files/CVE_Enrichment_Generic.png differ
diff --git a/Packs/CVE_2021_44228/doc_files/CVE-2021-44228_-_Log4j_RCE.png b/Packs/CVE_2021_44228/doc_files/CVE-2021-44228_-_Log4j_RCE.png
index 4ba3f2e5106a..8d164eaae5ac 100644
Binary files a/Packs/CVE_2021_44228/doc_files/CVE-2021-44228_-_Log4j_RCE.png and b/Packs/CVE_2021_44228/doc_files/CVE-2021-44228_-_Log4j_RCE.png differ
diff --git a/Packs/CVE_2022_30190/doc_files/CVE-2022-30190_-_MSDT_RCE.png b/Packs/CVE_2022_30190/doc_files/CVE-2022-30190_-_MSDT_RCE.png
index 39091f5a982d..96cce2496603 100644
Binary files a/Packs/CVE_2022_30190/doc_files/CVE-2022-30190_-_MSDT_RCE.png and b/Packs/CVE_2022_30190/doc_files/CVE-2022-30190_-_MSDT_RCE.png differ
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/.pack-ignore b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/.secrets-ignore b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/.secrets-ignore
new file mode 100644
index 000000000000..76baa2bd4543
--- /dev/null
+++ b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/.secrets-ignore
@@ -0,0 +1 @@
+https://www.openssh.com
\ No newline at end of file
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.yml b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.yml
new file mode 100644
index 000000000000..0c8ff74d837a
--- /dev/null
+++ b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.yml
@@ -0,0 +1,1091 @@
+id: CVE-2024-6387 - OpenSSH RegreSSHion RCE
+version: -1
+name: CVE-2024-6387 - OpenSSH RegreSSHion RCE
+description: |-
+ RegreSSHion Vulnerability (CVE-2024-6387)
+
+ On July 1, 2024, a critical signal handler race condition vulnerability was disclosed in OpenSSH servers (sshd) on glibc-based Linux systems. This vulnerability, known as RegreSSHion and tracked as CVE-2024-6387, can result in unauthenticated remote code execution (RCE) with root privileges. This vulnerability has been rated High severity (CVSS 8.1).
+
+ #### Impacted Versions
+
+ The vulnerability impacts the following OpenSSH server versions:
+
+ - OpenSSH versions between 8.5p1 and 9.8p1
+ - OpenSSH versions earlier than 4.4p1, if they have not been backport-patched against CVE-2006-5051 or patched against CVE-2008-4109
+
+ #### Unaffected Versions
+
+ The SSH features in PAN-OS are not affected by CVE-2024-6387.
+
+ ### The playbook includes the following tasks:
+
+ **Collect, Extract and Enrich Indicators**
+  * Collects known indicators from the Unit42 blog
+
+ **Threat Hunting**
+  * Searches for vulnerable endpoints using Prisma Cloud and Cortex XDR - XQL queries
+
+ **Mitigations:**
+ * OpenSSH official CVE-2024-6387 patch
+ * Unit42 recommended mitigations
+
+ **This playbook should be triggered manually or can be configured as a job.**
+
+ Please create a new incident and choose the CVE-2024-6387 - OpenSSH RegreSSHion RCE playbook and Rapid Breach Response incident type.
+
+ Reference:
+
+  [Threat Brief: CVE-2024-6387 OpenSSH RegreSSHion Vulnerability](https://unit42.paloaltonetworks.com/threat-brief-cve-2024-6387-openssh/).
+
+ Note: This is a beta playbook, which lets you implement and test pre-release software. Since the playbook is beta, it might contain bugs. Updates to the pack during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the pack to help us identify issues, fix them, and continually improve.
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 94adeb69-7e9a-4a89-8765-b6752c05df1c
+ type: start
+ task:
+ id: 94adeb69-7e9a-4a89-8765-b6752c05df1c
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -240
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 7a39d8d2-ac92-445a-8854-ebc57270e381
+ type: regular
+ task:
+ id: 7a39d8d2-ac92-445a-8854-ebc57270e381
+ version: -1
+ name: Collect Indicators from Unit42
+ description: This script will extract indicators from given HTML and will handle bad top-level domains to avoid false positives caused by file extensions.
+ scriptName: ParseHTMLIndicators
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ url:
+ simple: https://unit42.paloaltonetworks.com/threat-brief-cve-2024-6387-openssh/
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 20
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 06591089-1b00-4ab7-8b90-f11ff525fae2
+ type: title
+ task:
+ id: 06591089-1b00-4ab7-8b90-f11ff525fae2
+ version: -1
+ name: Extract and Tag Indicators
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "4"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: b38ed8c9-cd4f-46c1-83f6-0ebf43f5378c
+ type: regular
+ task:
+ id: b38ed8c9-cd4f-46c1-83f6-0ebf43f5378c
+ version: -1
+ name: Create indicators in TIM
+ description: commands.local.cmd.new.indicator
+ script: Builtin|||createNewIndicator
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "6"
+ scriptarguments:
+ tags:
+ simple: RegreSSHion, OpenSSH, RCE
+ type:
+ simple: CVE
+ value:
+ complex:
+ root: ExtractedIndicators
+ accessor: CVE
+ transformers:
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 640
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 646abc1c-8377-4cf4-861c-e6d26647af4d
+ type: regular
+ task:
+ id: 646abc1c-8377-4cf4-861c-e6d26647af4d
+ version: -1
+ name: Extract Indicators
+ description: commands.local.cmd.extract.indicators
+ script: Builtin|||extractIndicators
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "30"
+ scriptarguments:
+ text:
+ simple: ${http.parsedBlog.indicators}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 315
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: c4f179ef-d905-46f5-8743-3f39170e36cf
+ type: title
+ task:
+ id: c4f179ef-d905-46f5-8743-3f39170e36cf
+ version: -1
+ name: Collect Indicators
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -110
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 8fd54c1e-180d-45de-8def-93fc184bbf6c
+ type: title
+ task:
+ id: 8fd54c1e-180d-45de-8def-93fc184bbf6c
+ version: -1
+ name: Handle Rapid Breach Response Layout
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "7"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 810
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: d89e9319-fe26-4a4f-89e5-4a42203a3c15
+ type: playbook
+ task:
+ id: d89e9319-fe26-4a4f-89e5-4a42203a3c15
+ version: -1
+ name: Rapid Breach Response - Set Incident Info
+ description: This playbook is responsible for setting up the Rapid Breach Response Incident Info tab in the layout.
+ playbookName: Rapid Breach Response - Set Incident Info
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ SourceOfIndicators:
+ simple: ${http.parsedBlog.sourceLink}
+ countTotalIndicators:
+ complex:
+ root: ExtractedIndicators
+ accessor: CVE
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.Domain
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.IP
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.URL
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.File
+ iscontext: true
+ - operator: uniq
+ - operator: count
+ playbookDescription:
+ simple: ${inputs.PlaybookDescription}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 940
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: ef31c540-d451-4ea9-8d4a-a852e9783e45
+ type: title
+ task:
+ id: ef31c540-d451-4ea9-8d4a-a852e9783e45
+ version: -1
+ name: Threat Hunting
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "9"
+ - "11"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1100
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: 878e0ae1-e27b-48d1-8424-b64258a88ccd
+ type: title
+ task:
+ id: 878e0ae1-e27b-48d1-8424-b64258a88ccd
+ version: -1
+ name: Unit42 Managed Threat Hunting Queries
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "12"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 200,
+ "y": 1250
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 8773b606-9c32-4742-845e-bf425534ff68
+ type: title
+ task:
+ id: 8773b606-9c32-4742-845e-bf425534ff68
+ version: -1
+ name: Prisma Cloud
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "17"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 1250
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: 6bb9d8a2-00c1-4380-87c9-e46aad6c1610
+ type: condition
+ task:
+ id: 6bb9d8a2-00c1-4380-87c9-e46aad6c1610
+ version: -1
+ name: Should run XQL hunting queries?
+ description: Whether to run the XQL hunting queries.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "15"
+ "yes":
+ - "13"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: inputs.RunXQLHuntingQueries
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 200,
+ "y": 1390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 475304b6-8672-4e31-8f85-80a0c142b107
+ type: condition
+ task:
+ id: 475304b6-8672-4e31-8f85-80a0c142b107
+ version: -1
+ name: Check if Cortex XDR - XQL Query Engine is Enabled
+ description: Returns 'yes' if integration brand is available. Otherwise returns 'no'.
+ scriptName: IsIntegrationAvailable
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "no":
+ - "15"
+ "yes":
+ - "14"
+ scriptarguments:
+ brandname:
+ simple: Cortex XDR - XQL Query Engine
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10,
+ "y": 1560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: dd7a29c0-0024-4d25-83dd-90629841e2d8
+ type: regular
+ task:
+ id: dd7a29c0-0024-4d25-83dd-90629841e2d8
+ version: -1
+ name: Hunt to identify hosts vulnerable to CVE-2024-6387
+ description: |-
+ Execute an XQL query and retrieve results of an executed XQL query API. The command will be executed every 10 seconds until results are retrieved or until a timeout error is raised.
+ When more than 1000 results are retrieved, the command will return a compressed gzipped JSON format file,
+ unless the argument 'parse_result_file_to_context' is set to true and then the results will be extracted to the context.
+ script: '|||xdr-xql-generic-query'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "15"
+ scriptarguments:
+ ignore-outputs:
+ simple: "false"
+ parse_result_file_to_context:
+ simple: "true"
+ query:
+ simple: "preset = host_inventory_applications\n \n| filter endpoint_type = ENUM.AGENT_TYPE_SERVER\n \n| filter lowercase(application_name) ~= \"openssh(-server)?\"\n \n| alter product_major_version = to_number(arrayindex(split(raw_version, \".\"), 0)),\n \nproduct_minor_version_stage_1 = arrayindex(split(raw_version, \".\"), 1),\n \nproduct_rev = to_number(arrayindex(split(raw_version, \"p\"), 1))\n \n| alter product_minor_version = to_number(arrayindex(split(product_minor_version_stage_1, \"p\"), 0))\n \n// (name:\"openssh\" and version<4.4) or (name:\"openssh\" and version<9.8 and version>=8.5)\n \n| filter product_major_version < 4 or (product_major_version = 4 and product_minor_version < 4) or (product_major_version = 8 and product_minor_version >= 5) or (product_major_version = 9 and product_minor_version < 8)\n \n| fields endpoint_name, application_name, raw_version, product_major_version, product_minor_version, product_rev\n \n| dedup endpoint_name"
+ query_name:
+ simple: Hunt_CVE_-_2024_-_6387_Vulnerable_Endpoints
+ time_frame:
+ simple: 7 days
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -180,
+ "y": 1740
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "15":
+ id: "15"
+ taskid: ac1b6020-ac56-402d-8b1e-87cf2cf78e3a
+ type: condition
+ task:
+ id: ac1b6020-ac56-402d-8b1e-87cf2cf78e3a
+ version: -1
+ name: Were vulnerable endpoints found?
+      description: Checks whether vulnerable endpoints were found in the Threat Hunting phase.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "26"
+ "yes":
+ - "29"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: PaloAltoNetworksXQL.GenericQuery.results
+ accessor: endpoint_name
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PrismaCloudCompute.CIScan.entityInfo.hostname
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: Expanse.Issue.assets.id
+ iscontext: true
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1910
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: 23f98158-0f08-46a6-888f-27577dc36a5b
+ type: regular
+ task:
+ id: 23f98158-0f08-46a6-888f-27577dc36a5b
+ version: -1
+ name: Search for vulnerable resources
+ description: Retrieves all scan reports for images scanned by the Jenkins plugin or twistcli. Maps to Monitor > Vulnerabilities > Images > CI in the Console UI. The default will retrieve only the passed scans.
+ script: '|||prisma-cloud-compute-ci-scan-results-list'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "15"
+ scriptarguments:
+ all_results:
+ simple: "true"
+ search:
+ simple: vulnerability where asset.type = 'all' AND cve.id = 'CVE-2024-6387'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 1390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: 363fd235-f892-4edb-8666-b357dff8438c
+ type: title
+ task:
+ id: 363fd235-f892-4edb-8666-b357dff8438c
+ version: -1
+ name: Mitigation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "27"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2430
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: 1ef81cb4-1c73-4b84-8cc2-fe5480c470e8
+ type: collection
+ task:
+ id: 1ef81cb4-1c73-4b84-8cc2-fe5480c470e8
+ version: -1
+ name: Hold for an update regarding the endpoint mitigation
+ description: commands.local.cmd.todo.add
+ type: collection
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "26"
+ scriptarguments:
+ description:
+ simple: "The following endpoints have are potentially vulnerable to CVE-2024-6387:\n\nXQL Threat Hunting\n${PaloAltoNetworksXQL.GenericQuery.results.endpoint_name}\n\nPrisma Cloud Compute\n${PrismaCloudCompute.CIScan.entityInfo.hostname}\n\nCortex Xpanse\n${Expanse.Issue.assets.id}\n\nUnit42 recommended mitigations for CVE-2024-6387 are:\n\n1. Update to OpenSSH 9.8p1 or later\nReference: [OpenSSH 9.8](https://www.openssh.com/txt/release-9.8)\n\n2. Harden SSH Configuration\n- LoginGraceTime: Reduce the LoginGraceTime value to minimize the window during which an attacker can attempt to exploit the SSH service. \n - Open /etc/ssh/sshd_config as root\n - Add or modify the LoginGraceTime parameter:\n LoginGraceTime 0\n - Save the file\n - Restart the sshd service"
+ tags:
+ simple: CVE-2024-6387, OpenSSH, RegreSSHion
+ title:
+ simple: Mitigate Vulnerable Endpoints
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2730
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: Which endpoints have been mitigated?
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: multiSelect
+ options: []
+ optionsarg:
+ - complex:
+ root: PrismaCloudCompute.CIScan.entityInfo
+ accessor: hostname
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: PaloAltoNetworksXQL.GenericQuery.results.endpoint_name
+ iscontext: true
+ - {}
+ fieldassociated: ""
+ placeholder: ""
+ tooltip: ""
+ readonly: false
+ title: Please select mitigated endpoints
+ description: ""
+ sender: Your SOC team
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "26":
+ id: "26"
+ taskid: f4877f7c-e71f-43ae-8162-ad2c13e1fbd1
+ type: title
+ task:
+ id: f4877f7c-e71f-43ae-8162-ad2c13e1fbd1
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2890
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: bbbcc8f0-fed8-40ab-8517-d5eb2aafb994
+ type: regular
+ task:
+ id: bbbcc8f0-fed8-40ab-8517-d5eb2aafb994
+ version: -1
+ name: Recommended Mitigations
+ description: |-
+ Pretty-print data using Python's pprint library. This is useful for seeing the structure of incident and context data. Here's how to use it:
+
+ !PrettyPrint value=${incident}
+ scriptName: PrettyPrint
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "23"
+ scriptarguments:
+ value:
+ simple: |-
+ The Unit42 Research team recommends these mitigations:
+
+ 1. Upgrade OpenSSH to a patched version 9.8p1 or later.
+ 2. Harden your OpenSSH LoginGraceTime configuration
+ - Open /etc/ssh/sshd_config as root
+ - Add or modify the LoginGraceTime parameter as follows:
+ LoginGraceTime 0
+ - Save the file
+ - Restart the sshd service
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2570
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "28":
+ id: "28"
+ taskid: c77a3225-393a-4519-8eec-77d11464b604
+ type: regular
+ task:
+ id: c77a3225-393a-4519-8eec-77d11464b604
+ version: -1
+ name: Notify the SOC about the vulnerable endpoints
+ description: commands.server.mail.sendmail
+ script: '|||send-mail'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "20"
+ scriptarguments:
+ body:
+ simple: |-
+ Dear SOC,
+
+ The CVE-2024-6387 - OpenSSH RegreSSHion RCE playbook found potentially vulnerable endpoints:
+
+ Prisma Cloud
+ ${PrismaCloudCompute.CIScan.entityInfo.hostname}
+
+ Managed Threat Hunting Queries
+ ${PaloAltoNetworksXQL.GenericQuery.results.endpoint_name}
+
+ Please follow the steps described in the playbook to mitigate the threat.
+
+ Best regards,
+ Cortex XSOAR
+ subject:
+ simple: Cortex XSOAR - Incident ${incident.id} - CVE-2024-6387 Vulnerable Endpoints
+ to:
+ simple: ${inputs.SOCEmailAddress}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2270
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "29":
+ id: "29"
+ taskid: 551deed2-69b7-491a-8eb6-e4a06d4b0dcd
+ type: condition
+ task:
+ id: 551deed2-69b7-491a-8eb6-e4a06d4b0dcd
+ version: -1
+ name: Should notify the SOC by email?
+ description: Whether to notify the SOC about the affected assets by email.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "20"
+ "yes":
+ - "28"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: inputs.ShouldSendMail
+ iscontext: true
+ right:
+ value:
+ simple: "True"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 2090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "30":
+ id: "30"
+ taskid: 1ae47a6a-03c1-4664-83c7-b0133bc6c218
+ type: regular
+ task:
+ id: 1ae47a6a-03c1-4664-83c7-b0133bc6c218
+ version: -1
+ name: Enrich Indicators
+ description: commands.local.cmd.enrich.indicators
+ script: Builtin|||enrichIndicators
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ indicatorsValues:
+ complex:
+ root: ExtractedIndicators
+ accessor: CVE
+ transformers:
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.Domain
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.IP
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.URL
+ iscontext: true
+ - operator: append
+ args:
+ item:
+ value:
+ simple: ExtractedIndicators.File
+ iscontext: true
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 480
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "12_13_yes": 0.25,
+ "12_15_#default#": 0.27,
+ "13_14_yes": 0.39,
+ "13_15_no": 0.28,
+ "15_26_#default#": 0.32,
+ "15_29_yes": 0.4,
+ "29_28_yes": 0.4
+ },
+ "paper": {
+ "dimensions": {
+ "height": 3195,
+ "width": 1250,
+ "x": -180,
+ "y": -240
+ }
+ }
+ }
+inputs:
+- key: PlaybookDescription
+ value:
+ simple: |-
+ RegreSSHion Vulnerability (CVE-2024-6387)
+
+ On July 1, 2024, a critical signal handler race condition vulnerability was disclosed in OpenSSH servers (sshd) on glibc-based Linux systems. This vulnerability, known as RegreSSHion and tracked as CVE-2024-6387, can result in unauthenticated remote code execution (RCE) with root privileges. This vulnerability has been rated High severity (CVSS 8.1).
+
+ ## Impacted Versions
+
+ The vulnerability impacts the following OpenSSH server versions:
+
+ - OpenSSH versions between 8.5p1 and 9.8p1
+ - OpenSSH versions earlier than 4.4p1, if they have not been backport-patched against CVE-2006-5051 or patched against CVE-2008-4109
+
+ ## Unaffected Versions
+
+ The SSH features in PAN-OS are not affected by CVE-2024-6387.
+ required: false
+ description: The playbook description to populate the layout with.
+ playbookInputQuery:
+- key: RunXQLHuntingQueries
+ value:
+ simple: "True"
+ required: false
+ description: Whether to execute the XQL query.
+ playbookInputQuery:
+- key: ShouldSendMail
+ value:
+ simple: "False"
+ required: false
+ description: Whether to notify the SOC by email.
+ playbookInputQuery:
+- key: SOCEmailAddress
+ value: {}
+ required: false
+  description: The email address to notify.
+ playbookInputQuery:
+inputSections:
+- inputs:
+ - PlaybookDescription
+ - RunXQLHuntingQueries
+ - ShouldSendMail
+ - SOCEmailAddress
+ name: General (Inputs group)
+ description: Generic group for inputs
+outputSections:
+- outputs: []
+ name: General (Outputs group)
+ description: Generic group for outputs
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE_README.md b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE_README.md
new file mode 100644
index 000000000000..4fdb0cba6faa
--- /dev/null
+++ b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/Playbooks/playbook-CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE_README.md
@@ -0,0 +1,86 @@
+RegreSSHion Vulnerability (CVE-2024-6387)
+
+On July 1, 2024, a critical signal handler race condition vulnerability was disclosed in OpenSSH servers (sshd) on glibc-based Linux systems. This vulnerability, known as RegreSSHion and tracked as CVE-2024-6387, can result in unauthenticated remote code execution (RCE) with root privileges. This vulnerability has been rated High severity (CVSS 8.1).
+
+#### Impacted Versions
+
+The vulnerability impacts the following OpenSSH server versions:
+
+- OpenSSH versions between 8.5p1 and 9.8p1
+- OpenSSH versions earlier than 4.4p1, if they have not been backport-patched against CVE-2006-5051 or patched against CVE-2008-4109
+
+#### Unaffected Versions
+
+The SSH features in PAN-OS are not affected by CVE-2024-6387.
+
+### The playbook includes the following tasks:
+
+**Collect, Extract and Enrich Indicators**
+* Collects known indicators from the Unit42 blog
+
+**Threat Hunting**
+* Searches for vulnerable endpoints using Prisma Cloud and Cortex XDR - XQL queries
+
+**Mitigations:**
+* OpenSSH official CVE-2024-6387 patch
+* Unit42 recommended mitigations
+
+**This playbook should be triggered manually or can be configured as a job.**
+
+Please create a new incident and choose the CVE-2024-6387 - OpenSSH RegreSSHion RCE playbook and Rapid Breach Response incident type.
+
+Reference:
+
+[Threat Brief: CVE-2024-6387 OpenSSH RegreSSHion Vulnerability](https://unit42.paloaltonetworks.com/threat-brief-cve-2024-6387-openssh/).
+
+Note: This is a beta playbook, which lets you implement and test pre-release software. Since the playbook is beta, it might contain bugs. Updates to the pack during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the pack to help us identify issues, fix them, and continually improve.
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+* Rapid Breach Response - Set Incident Info
+
+### Integrations
+
+This playbook does not use any integrations.
+
+### Scripts
+
+* ParseHTMLIndicators
+* IsIntegrationAvailable
+* PrettyPrint
+
+### Commands
+
+* enrichIndicators
+* send-mail
+* xdr-xql-generic-query
+* extractIndicators
+* createNewIndicator
+* prisma-cloud-compute-ci-scan-results-list
+
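+As an illustration, the hunting command used in the Threat Hunting phase can also be run manually from the War Room. The argument names below mirror the playbook task; the XQL query body is abbreviated here:
+
+```
+!xdr-xql-generic-query query="preset = host_inventory_applications | filter ..." query_name="Hunt_CVE_-_2024_-_6387_Vulnerable_Endpoints" time_frame="7 days" parse_result_file_to_context="true"
+```
+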
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| PlaybookDescription | The playbook description to populate the layout with. | RegreSSHion Vulnerability (CVE-2024-6387)<br/>On July 1, 2024, a critical signal handler race condition vulnerability was disclosed in OpenSSH servers (sshd) on glibc-based Linux systems. This vulnerability, known as RegreSSHion and tracked as CVE-2024-6387, can result in unauthenticated remote code execution (RCE) with root privileges. This vulnerability has been rated High severity (CVSS 8.1).<br/>## Impacted Versions<br/>The vulnerability impacts the following OpenSSH server versions:<br/>- OpenSSH versions between 8.5p1 and 9.8p1<br/>- OpenSSH versions earlier than 4.4p1, if they have not been backport-patched against CVE-2006-5051 or patched against CVE-2008-4109<br/>## Unaffected Versions<br/>The SSH features in PAN-OS are not affected by CVE-2024-6387. | Optional |
+| RunXQLHuntingQueries | Whether to execute the XQL query. | True | Optional |
+| ShouldSendMail | Whether to notify the SOC by email. | False | Optional |
+| SOCEmailAddress | The email address to notify. | | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![CVE-2024-6387 - OpenSSH RegreSSHion RCE](../doc_files/CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.png)
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/README.md b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/README.md
new file mode 100644
index 000000000000..b18dcf70a85d
--- /dev/null
+++ b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/README.md
@@ -0,0 +1,25 @@
+## RegreSSHion Vulnerability (CVE-2024-6387)
+
+On July 1, 2024, a critical signal handler race condition vulnerability was disclosed in OpenSSH servers (sshd) on glibc-based Linux systems. This vulnerability, known as RegreSSHion and tracked as CVE-2024-6387, can result in unauthenticated remote code execution (RCE) with root privileges. This vulnerability has been rated High severity (CVSS 8.1).
+
+#### Impacted Versions
+
+The vulnerability impacts the following OpenSSH server versions:
+
+- OpenSSH versions between 8.5p1 and 9.8p1
+- OpenSSH versions earlier than 4.4p1, if they have not been backport-patched against CVE-2006-5051 or patched against CVE-2008-4109
+
+#### Unaffected Versions
+
+The SSH features in PAN-OS are not affected by CVE-2024-6387.
+
+### This pack will provide you with a first response kit which includes:
+
+* **Collect, Extract and Enrich Indicators**
+* **Threat Hunting using Cortex XDR - XQL and Prisma Cloud**
+* **Mitigations**
+
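+The permanent fix is to upgrade OpenSSH to 9.8p1 or later. As an illustration only, the interim sshd hardening that Unit42 recommends (and that the playbook's mitigation step quotes) might look like the following on a systemd-based host; paths, service names, and package sources are assumptions and vary by distribution:
+
+```bash
+# Minimal sketch only - verify paths, service names, and package sources for your distribution.
+
+# Check the installed OpenSSH version (affected: versions between 8.5p1 and 9.8p1,
+# and unpatched releases earlier than 4.4p1, as listed above).
+ssh -V
+
+# Interim hardening per the Unit42 guidance: set LoginGraceTime to 0.
+# Edit an existing directive if present (sshd honors the first match), otherwise append one.
+sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.bak
+if grep -q '^LoginGraceTime' /etc/ssh/sshd_config; then
+  sudo sed -i 's/^LoginGraceTime.*/LoginGraceTime 0/' /etc/ssh/sshd_config
+else
+  echo 'LoginGraceTime 0' | sudo tee -a /etc/ssh/sshd_config
+fi
+
+# Validate the configuration and restart the daemon
+# (the service may be named "ssh" rather than "sshd" on Debian/Ubuntu).
+sudo sshd -t && sudo systemctl restart sshd
+```
+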
+Reference:
+
+[Threat Brief: CVE-2024-6387 OpenSSH RegreSSHion Vulnerability](https://unit42.paloaltonetworks.com/threat-brief-cve-2024-6387-openssh/).
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/doc_files/CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.png b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/doc_files/CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.png
new file mode 100644
index 000000000000..5b074085ad14
Binary files /dev/null and b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/doc_files/CVE-2024-6387_-_OpenSSH_RegreSSHion_RCE.png differ
diff --git a/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/pack_metadata.json b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/pack_metadata.json
new file mode 100644
index 000000000000..86f6ba1d09a7
--- /dev/null
+++ b/Packs/CVE_2024_6387_-_OpenSSH_RCE_RegreSSHion/pack_metadata.json
@@ -0,0 +1,30 @@
+{
+ "name": "CVE-2024-6387 - OpenSSH RegreSSHion RCE",
+ "description": "This pack handles CVE-2024-6387 - OpenSSH RegreSSHion RCE",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "Case Management"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [
+ "RCE",
+ "OpenSSH",
+ "RegreSSHion",
+ "CVE-2024-6387",
+ "6387"
+ ],
+ "dependencies": {
+ "MajorBreachesInvestigationandResponse": {
+ "mandatory": true,
+ "display_name": "Rapid Breach Response"
+ }
+ },
+ "marketplaces": [
+ "xsoar"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/Campaign/ReleaseNotes/3_4_5.md b/Packs/Campaign/ReleaseNotes/3_4_5.md
new file mode 100644
index 000000000000..09749a680392
--- /dev/null
+++ b/Packs/Campaign/ReleaseNotes/3_4_5.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### IsIncidentPartOfCampaign
+
+- Improved the implementation for better performance. As part of the improvements, the *CampaignIncidentType* argument was deprecated.
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.py b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.py
index 70174001fbf2..11e07ef7bafe 100644
--- a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.py
+++ b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.py
@@ -1,87 +1,49 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from collections.abc import Iterable
-''' STANDALONE FUNCTION '''
+from GetIncidentsApiModule import * # noqa: E402
-def get_incidents_ids_by_type(incident_type: str) -> Iterable[str]:
+def get_campaign_ids_by_incidents(incidents_ids_set: set) -> list:
"""
- Get list of incidents ids with the given type.
+    Gets a list of campaign incident IDs linked to the given incident IDs.
Args:
- Incident_type(str): the incident type name.
+ incidents_ids_set (set): A set of incident IDs.
Returns:
List of ids as strings.
"""
search_args = {
- 'query': f'type:"{incident_type}"',
+ 'query': f'partofcampaign:* incident.id:({" ".join(incidents_ids_set)})',
'sort': {
'field': 'occurred',
'asc': False,
},
}
- incidents = execute_command("GetIncidentsByQuery", search_args)
- demisto.debug(f"Incidents getting from response: {incidents}")
-
- try:
- incidents = json.loads(incidents)
- except Exception as e:
- raise DemistoException(f'Failed loads for incidents: {incidents=}, error message: {str(e)}') from e
-
- campaign_ids = [incident.get('id') for incident in incidents]
- demisto.debug(f"Found campaing incident ids: {campaign_ids}")
+ incidents = get_incidents_by_query(search_args)
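+    # Collect the linked campaign IDs, skipping incidents whose "partofcampaign" value is the literal string "None".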
+ campaign_ids = [
+ i.get("partofcampaign") for i in incidents
+ if i.get("partofcampaign") != "None"
+ ]
+ demisto.debug(f"Found campaign incident ids: {campaign_ids}")
return campaign_ids
-''' COMMAND FUNCTION '''
-
-
-def check_incidents_ids_in_campaign(campaign_id: str, incidents_ids_set: set[str]) -> bool:
- """
- Check for each incident in the campaigns_ids_list if any of the ids in incidents_ids_set is linked.
- Args:
- campaigns_ids_list(str): campaign incident id to search in.
- incidents_ids_set(Set[str]): Set of incident ids to search for.
-
- Returns:
- True if at least one id from the incidents_ids_set is linked to the campaign incident, otherwise False.
- """
- try:
- campaign_context = execute_command("getContext", {'id': campaign_id})['context']
-
- if (connected_incidents_list := demisto.get(campaign_context, 'EmailCampaign.incidents')):
- connected_campaign_incidents_ids = {incident.get('id') for incident in connected_incidents_list}
- is_incidents_in_campaign = bool(incidents_ids_set & connected_campaign_incidents_ids)
- if is_incidents_in_campaign:
- return True
- except Exception as e:
- demisto.info(f"skipping for incident {campaign_id}, reason: {e}")
- return False
-
-
-''' MAIN FUNCTION '''
-
-
def main():
try:
args = demisto.args()
- campaign_type = args.get('CampaignIncidentType', 'Phishing Campaign')
incidents_ids_set = set(argToList(args.get('IncidentIDs', '')))
campaign_id = None
- campaigns_ids_list = get_incidents_ids_by_type(campaign_type)
-
- for campaign_id in campaigns_ids_list:
- if check_incidents_ids_in_campaign(campaign_id, incidents_ids_set):
- readable = f"Found campaign with ID - {campaign_id}"
- break
+ if campaigns_ids_list := get_campaign_ids_by_incidents(incidents_ids_set):
+ campaign_id = campaigns_ids_list[0]
+ readable = f"Found campaign with ID - {campaign_id}"
else:
# did not find a relevant campaign
campaign_id = None
- readable = "No campaign has found"
+ readable = "No campaign was found"
return CommandResults(readable_output=readable, outputs={"ExistingCampaignID": campaign_id},
raw_response=readable)
diff --git a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.yml b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.yml
index 19bfd0a4649d..bfe0626991e6 100644
--- a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.yml
+++ b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign.yml
@@ -7,23 +7,24 @@ type: python
tags:
- phishing
- campaign
-comment: Get the incident campaign's ID for the campaign that is linked to at least one of the given incidents.
+comment: Gets the ID of an incident campaign that is linked to at least one of the given incidents.
enabled: true
args:
- name: CampaignIncidentType
+ deprecated: true
description: The type of incident campaign to search in.
defaultValue: Phishing Campaign
- name: IncidentIDs
required: true
isArray: true
- description: Comma separated list of incidents ids to search for.
+  description: A comma-separated list of incident IDs to search an incident campaign for.
outputs:
- contextPath: ExistingCampaignID
- description: The incident campaign's ID for the campaign that is linked to at least one of the given incidents.
+ description: The ID of an incident campaign that is linked to at least one of the given incidents.
scripttarget: 0
subtype: python3
runonce: false
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.99865
fromversion: 5.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign_test.py b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign_test.py
index 029c21db7251..43bf3b0ac388 100644
--- a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign_test.py
+++ b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/IsIncidentPartOfCampaign_test.py
@@ -1,143 +1,45 @@
import pytest
-import json
+import re
import demistomock as demisto
-from IsIncidentPartOfCampaign import check_incidents_ids_in_campaign, get_incidents_ids_by_type, main
-
-PHISHING_CAMPAIGN_INCIDENTS = [
- {'id': '1',
- 'type': 'Phishing Campaign',
- 'EmailCampaign': {'incidents': [{'id': '11'}, {'id': '12'}]},
- },
- {'id': '2',
- 'type': 'Phishing Campaign',
- 'EmailCampaign': {'incidents': [{'id': '21'}, {'id': '22'}]},
- },
-]
-OTHER_INCIDENTS = [
- {'id': '3',
- 'type': 'Phishing',
- },
+from IsIncidentPartOfCampaign import main
+
+INCIDENTS: list[dict] = [
+ {'id': '1', 'type': 'Phishing Campaign'},
+ {'id': '2', 'type': 'Phishing Campaign'},
+ {'id': '3', 'type': 'Phishing'},
+ {'id': '4', 'type': 'Phishing', 'partofcampaign': '1'},
+ {'id': '5', 'type': 'Phishing', 'partofcampaign': '2'},
+ {'id': '6', 'type': 'Phishing'},
]
-def _wrap_mocked_get_context(raw_incident):
- return [{'Type': 'note', 'Contents': {'context': raw_incident}}]
-
-
-def _wrap_mocked_get_incident_by_query(raw_incidents):
- return [{'Type': 'note', 'Contents': json.dumps(raw_incidents)}]
-
-
-@pytest.mark.parametrize('incidents_ids_set, result',
- [({'11'}, True), ({'11', '21'}, True), ({'31'}, False)])
-def test_check_incidents_ids_in_campaign(mocker, incidents_ids_set, result):
- mocker.patch.object(demisto, 'executeCommand',
- return_value=_wrap_mocked_get_context(PHISHING_CAMPAIGN_INCIDENTS[0]))
- assert result == check_incidents_ids_in_campaign('1', incidents_ids_set)
-
-
-def test_check_incidents_ids_in_campaign_for_closed_incident(mocker):
- mocker.patch.object(demisto, 'executeCommand',
- side_effect=Exception("Item not found"))
- assert not check_incidents_ids_in_campaign('1', {"11"})
-
-
-def test_check_incidents_ids_in_campaign_no_incidents():
- assert check_incidents_ids_in_campaign([], {'11', '12'}) is False
-
-
-class TestGetIncidentsIDsByType:
- @staticmethod
- def test_sanity(mocker):
- """
- Given:
- Incident type
- When:
- Calling get_incident_ids_by_type
- Then:
- Only incidents with the given type are returned.
- """
- mocker.patch.object(demisto, 'executeCommand',
- side_effect=[
- _wrap_mocked_get_incident_by_query(PHISHING_CAMPAIGN_INCIDENTS),
- _wrap_mocked_get_context([])])
- assert list(get_incidents_ids_by_type('Phishing Campaign')) == ['1', '2']
-
- @staticmethod
- def test_get_incidents_ids_by_type_empty(mocker):
- """
- Given:
- non existing Incident type
- When:
- Calling get_incident_ids_by_type
- Then:
- Only incident with the given type are returned.
- """
- mocker.patch.object(demisto, 'executeCommand',
- return_value=_wrap_mocked_get_incident_by_query([]))
- assert list(get_incidents_ids_by_type('Phishing Campaign')) == []
-
-
-class TestMain:
- @staticmethod
- def test_sanity(mocker):
- mocker.patch.object(demisto, 'args',
- return_value={'CampaignIncidentType': 'Phishing Campaign', 'IncidentIDs': '11,21'})
- mocker.patch.object(demisto, 'executeCommand',
- side_effect=[_wrap_mocked_get_incident_by_query(PHISHING_CAMPAIGN_INCIDENTS)] + [
- _wrap_mocked_get_context(incident) for incident in PHISHING_CAMPAIGN_INCIDENTS])
- results = main()
- assert results.readable_output == "Found campaign with ID - 1"
- assert results.outputs['ExistingCampaignID'] == '1'
- mocker.patch.object(demisto, 'executeCommand',
- side_effect=[_wrap_mocked_get_incident_by_query([])])
- results = main()
- assert results.readable_output == "No campaign has found"
- assert results.outputs['ExistingCampaignID'] is None
-
- @staticmethod
- def test_main_pagination(mocker):
- """
- Given
- a campaign incident type
- incident ID part of a campaign
- When
- calling the script
- Then
- return the correct campaign ID which appears in the second incidents page
- """
- mocker.patch.object(demisto, 'args', return_value={
- 'CampaignIncidentType': 'Phishing Campaign',
- 'IncidentIDs': '123'
- })
- mocker.patch.object(demisto, 'executeCommand', side_effect=[
- _wrap_mocked_get_incident_by_query(PHISHING_CAMPAIGN_INCIDENTS),
- _wrap_mocked_get_context({'EmailCampaign': {'incidents': [{'id': '11'}, {'id': '12'}]}}),
- _wrap_mocked_get_context({'EmailCampaign': {'incidents': [{'id': '11'}, {'id': '123'}]}}),
- ])
+def get_incidents_by_query_func(args):
+ query = args["query"]
+ match = re.search(r"incident.id:\(([^\)]*)\)", query)
+ incident_ids = set(match.group(1).split(" ") if match and match.group(1) else [])
+ return [i for i in INCIDENTS if i.get("id") in incident_ids and i.get("partofcampaign")]
- results = main()
- assert results.readable_output == 'Found campaign with ID - 2'
- assert results.outputs['ExistingCampaignID'] == '2'
+@pytest.fixture(autouse=True)
+def mock_get_incidents_by_query(mocker):
+ mocker.patch(
+ "IsIncidentPartOfCampaign.get_incidents_by_query",
+ side_effect=get_incidents_by_query_func,
+ )
-def test_where_no_campaign_ids(mocker):
- """
- Given
- Incidents to check if they are part of campaign.
- When
- Getting some incidents campaign ids which are not related to the given incident ids.
- Then
- Ensure the results returned nothing.
- """
- import IsIncidentPartOfCampaign
- mocker.patch.object(demisto, 'args', return_value={})
- mocker.patch.object(IsIncidentPartOfCampaign, 'get_incidents_ids_by_type', return_value=[1, 2, 3])
- mocker.patch.object(IsIncidentPartOfCampaign, 'check_incidents_ids_in_campaign', return_value=False)
+def test_success(mocker):
+ """Given a list of incident IDs that are part of a campaign, make sure results are returned"""
+ mocker.patch.object(demisto, "args", return_value={"IncidentIDs": "3,5"})
+ results = main()
+ assert "Found campaign with ID" in results.readable_output
+ assert results.outputs["ExistingCampaignID"] in ["1", "2"]
- command_results = main()
- assert command_results.readable_output == "No campaign has found"
- assert command_results.outputs['ExistingCampaignID'] is None
+def test_no_results(mocker):
+ """Given a list of incident IDs, but they are not part of a campaign, make sure no results are returned"""
+ mocker.patch.object(demisto, "args", return_value={"IncidentIDs": "1"})
+ results = main()
+ assert "No campaign was found" in results.readable_output
+ assert not results.outputs["ExistingCampaignID"]
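
A quick standalone trace of the mocked query function on the inputs the two tests exercise. The query string format is assumed from the regex the mock parses, and the fixture is trimmed to the relevant incidents.

    import re

    INCIDENTS = [
        {'id': '1', 'type': 'Phishing Campaign'},
        {'id': '3', 'type': 'Phishing'},
        {'id': '5', 'type': 'Phishing', 'partofcampaign': '2'},
    ]

    def get_incidents_by_query_func(args):
        match = re.search(r"incident.id:\(([^\)]*)\)", args["query"])
        ids = set(match.group(1).split(" ")) if match and match.group(1) else set()
        return [i for i in INCIDENTS if i.get("id") in ids and i.get("partofcampaign")]

    # test_success: IncidentIDs "3,5" -> only incident 5 carries 'partofcampaign', so the campaign ID is '2'.
    print(get_incidents_by_query_func({"query": "incident.id:(3 5)"}))  # [{'id': '5', 'type': 'Phishing', 'partofcampaign': '2'}]
    # test_no_results: IncidentIDs "1" -> incident 1 has no 'partofcampaign', so no campaign is found.
    print(get_incidents_by_query_func({"query": "incident.id:(1)"}))  # []
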
diff --git a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/README.md b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/README.md
index b99179884bfd..c3980a604021 100644
--- a/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/README.md
+++ b/Packs/Campaign/Scripts/IsIncidentPartOfCampaign/README.md
@@ -1,4 +1,4 @@
-Get the incident campaign's ID for the campaign that is linked to at least one of the given incidents.
+Gets the ID of an incident campaign that is linked to at least one of the given incidents.
## Permissions
---
@@ -21,12 +21,11 @@ For more information, see the section about permissions here: [https://docs-cort
| **Argument Name** | **Description** |
| --- | --- |
-| CampaignIncidentType | The type of incident campaign to search in. |
-| IncidentIDs | Comma separated list of incidents ids to search for. |
+| IncidentIDs | A comma-separated list of incident IDs to search an incident campaign for. |
## Outputs
---
| **Path** | **Description** | **Type** |
| --- | --- | --- |
-| ExistingCampaignID | The incident campaign's ID for the campaign that is linked to at least one of the given incidents. | String |
+| ExistingCampaignID | The ID of an incident campaign that is linked to at least one of the given incidents. | String |
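
A usage example for the updated script, following the command conventions used elsewhere in this pack's documentation (the incident IDs are placeholders):

    !IsIncidentPartOfCampaign IncidentIDs=1234,1235

Because CampaignIncidentType is now deprecated, no campaign type needs to be supplied; the script resolves the campaign directly from each incident's partofcampaign field.
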
diff --git a/Packs/Campaign/doc_files/campaign-canvas.png b/Packs/Campaign/doc_files/campaign-canvas.png
new file mode 100644
index 000000000000..cf3a589a9b32
Binary files /dev/null and b/Packs/Campaign/doc_files/campaign-canvas.png differ
diff --git a/Packs/Campaign/pack_metadata.json b/Packs/Campaign/pack_metadata.json
index f77a831765ac..f5d1959210bb 100644
--- a/Packs/Campaign/pack_metadata.json
+++ b/Packs/Campaign/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Phishing Campaign",
"description": "This pack can help you find related phishing, spam or other types of email incidents and characterize campaigns.",
"support": "xsoar",
- "currentVersion": "3.4.4",
+ "currentVersion": "3.4.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.py b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.py
index 7a63ebdf150a..223763d7d805 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.py
+++ b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.py
@@ -1,3 +1,4 @@
+
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
@@ -9,6 +10,7 @@
# Disable insecure warnings
urllib3.disable_warnings()
+LOG_INIT = "CBEEDR - "
class Client(BaseClient):
@@ -20,30 +22,29 @@ def __init__(self, base_url: str, use_ssl: bool, use_proxy: bool, token=None, cb
'X-Auth-Token': self.token})
def test_module_request(self):
- url_suffix = f'/appservices/v6/orgs/{self.cb_org_key}/alerts/_search'
+ url_suffix = f'/api/alerts/v7/orgs/{self.cb_org_key}/alerts/_search'
body = {
"criteria": {
"group_results": True,
"minimum_severity": 3
},
- "sort": [{"field": "first_event_time", "order": "DESC"}],
- "rows": 1,
- "start": 0
+ "sort": [{"field": "first_event_timestamp", "order": "DESC"}],
+ "rows": 0,
+ "start": 1
}
return self._http_request('POST', url_suffix=url_suffix, json_data=body)
- def search_alerts_request(self, group_results: bool = None, minimum_severity: int = None, create_time: dict = None,
- device_os_version: list = None, policy_id: list = None, alert_tag: list = None,
- alert_id: list = None, device_username: list = None, device_id: list = None,
- device_os: list = None, process_sha256: list = None, policy_name: list = None,
- reputation: list = None, alert_type: list = None, alert_category: list = None,
- workflow: list = None, device_name: list = None, process_name: list = None,
- sort_field: str = None, sort_order: str = None, limit: str = None) -> dict:
- suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/alerts/_search'
+ def search_alerts_request(self, minimum_severity: None | int = None, create_time: None | dict = None,
+ device_os_version: None | list = None, policy_id: None | list = None, alert_tag: None | list = None,
+ alert_id: None | list = None, device_username: None | list = None, device_id: None | list = None,
+ device_os: None | list = None, process_sha256: None | list = None, policy_name: None | list = None,
+ reputation: None | list = None, alert_type: None | list = None, device_name: None | list = None,
+ process_name: None | list = None, sort_field: None | str = None, sort_order: None | str = None,
+ limit: None | str = None) -> dict:
+ suffix_url = f'/api/alerts/v7/orgs/{self.cb_org_key}/alerts/_search'
body = {
'criteria': assign_params(
- group_results=group_results,
minimum_severity=minimum_severity,
create_time=create_time,
device_os_version=device_os_version,
@@ -57,8 +58,6 @@ def search_alerts_request(self, group_results: bool = None, minimum_severity: in
policy_name=policy_name,
reputation=reputation,
type=alert_type,
- category=alert_category,
- workflow=workflow,
device_name=device_name,
process_name=process_name
),
@@ -69,26 +68,40 @@ def search_alerts_request(self, group_results: bool = None, minimum_severity: in
}
],
'rows': limit,
- 'start': 0
+ 'start': 1
}
return self._http_request('POST', suffix_url, json_data=body)
- def alert_workflow_update_request(self, alert_id: str = None, state: str = None, comment: str = None,
- remediation_state: str = None) -> dict:
- suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/alerts/{alert_id}/workflow'
+ def alert_workflow_update_get_request(self, request_id: str) -> dict:
+ suffix_url = f'jobs/v1/orgs/{self.cb_org_key}/jobs/{request_id}'
+ response = self._http_request('GET', suffix_url)
+ return response
+
+ def alert_workflow_update_request(self, alert_id: str, state: None | str = None, comment: None | str = None,
+ determination: None | str = None, time_range: None | str = None, start: None | str = None,
+ end: None | str = None, closure_reason: None | str = None) -> dict:
+ suffix_url = f'/api/alerts/v7/orgs/{self.cb_org_key}/alerts/workflow'
body = assign_params(
- state=state,
- comment=comment,
- remediation_state=remediation_state
+ time_range=assign_params(start=start, end=end, range=time_range),
+ criteria=assign_params(id=[alert_id]),
+ determination=determination,
+ closure_reason=closure_reason,
+ status=state,
+ note=comment,
)
- return self._http_request('POST', suffix_url, json_data=body)
+ demisto.debug(f"{LOG_INIT} {body=}")
+        response = self._http_request('POST', suffix_url, json_data=body)
+        return response
- def devices_list_request(self, device_id: list = None, status: list = None, device_os: list = None,
- last_contact_time: dict[str, Any | None] = None, ad_group_id: list = None,
- policy_id: list = None, target_priority: list = None, limit: int = None,
- sort_field: str = None, sort_order: str = None) -> dict:
+ def devices_list_request(self, device_id: None | list = None, status: None | list = None, device_os: None | list = None,
+ last_contact_time: None | dict[str, Any | None] = None, ad_group_id: None | list = None,
+ policy_id: None | list = None, target_priority: None | list = None, limit: None | int = None,
+ sort_field: None | str = None, sort_order: None | str = None) -> dict:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/devices/_search'
body = {
@@ -113,7 +126,7 @@ def devices_list_request(self, device_id: list = None, status: list = None, devi
return self._http_request('POST', suffix_url, json_data=body)
- def device_quarantine_request(self, device_id: list = None) -> None:
+ def device_quarantine_request(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -126,7 +139,7 @@ def device_quarantine_request(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_unquarantine_request(self, device_id: list = None) -> None:
+ def device_unquarantine_request(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -139,7 +152,7 @@ def device_unquarantine_request(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_bypass_request(self, device_id: list = None) -> None:
+ def device_bypass_request(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -152,7 +165,7 @@ def device_bypass_request(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_unbypass_request(self, device_id: list = None) -> None:
+ def device_unbypass_request(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -165,7 +178,7 @@ def device_unbypass_request(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_background_scan_request(self, device_id: list = None) -> None:
+ def device_background_scan_request(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -178,7 +191,7 @@ def device_background_scan_request(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_background_scan_request_stop(self, device_id: list = None) -> None:
+ def device_background_scan_request_stop(self, device_id: None | list = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -191,7 +204,7 @@ def device_background_scan_request_stop(self, device_id: list = None) -> None:
self._http_request('POST', suffix_url, json_data=body, resp_type='content')
- def device_policy_update(self, device_id: list = None, policy_id: str = None) -> None:
+ def device_policy_update(self, device_id: None | list = None, policy_id: None | str = None) -> None:
suffix_url = f'/appservices/v6/orgs/{self.cb_org_key}/device_actions'
body = {
@@ -208,28 +221,29 @@ def list_watchlists_request(self) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists'
return self._http_request('GET', suffix_url)
- def get_watchlist_by_id_request(self, watchlist_id: str = None) -> dict:
+ def get_watchlist_by_id_request(self, watchlist_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}'
return self._http_request('GET', suffix_url)
- def delete_watchlist_request(self, watchlist_id: str = None) -> None:
+ def delete_watchlist_request(self, watchlist_id: None | str = None) -> None:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}'
self._http_request('DELETE', suffix_url, resp_type='content')
- def watchlist_alert_status_request(self, watchlist_id: str = None) -> dict:
+ def watchlist_alert_status_request(self, watchlist_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}/alert'
return self._http_request('GET', suffix_url)
- def enable_watchlist_alert_request(self, watchlist_id: str = None) -> dict:
+ def enable_watchlist_alert_request(self, watchlist_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}/alert'
return self._http_request('PUT', suffix_url)
- def disable_watchlist_alert_request(self, watchlist_id: str = None) -> None:
+ def disable_watchlist_alert_request(self, watchlist_id: None | str = None) -> None:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}/alert'
self._http_request('DELETE', suffix_url, resp_type='content')
- def create_watchlist_request(self, watchlist_name: str = None, description: str = None, tags_enabled: bool = None,
- alerts_enabled: bool = None, report_ids: list = None, classifier: dict = None) -> dict:
+ def create_watchlist_request(self, watchlist_name: None | str = None, description: None | str = None,
+ tags_enabled: None | bool = None, alerts_enabled: None | bool = None,
+ report_ids: None | list = None, classifier: None | dict = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists'
body = assign_params(
name=watchlist_name,
@@ -242,9 +256,10 @@ def create_watchlist_request(self, watchlist_name: str = None, description: str
return self._http_request('POST', suffix_url, json_data=body)
- def update_watchlist_request(self, watchlist_id: str = None, watchlist_name: str = None, description: str = None,
- tags_enabled: bool = None, alerts_enabled: bool = None, report_ids: list = None,
- classifier: dict = None) -> dict:
+ def update_watchlist_request(self, watchlist_id: None | str = None, watchlist_name: None | str = None,
+ description: None | str = None, tags_enabled: None | bool = None,
+ alerts_enabled: None | bool = None, report_ids: None | list = None,
+ classifier: None | dict = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/watchlists/{watchlist_id}'
body = assign_params(
name=watchlist_name,
@@ -256,28 +271,29 @@ def update_watchlist_request(self, watchlist_id: str = None, watchlist_name: str
)
return self._http_request('PUT', suffix_url, json_data=body)
- def get_ignore_ioc_status_request(self, report_id: str = None, ioc_id: str = None) -> dict:
+ def get_ignore_ioc_status_request(self, report_id: None | str = None, ioc_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id})/iocs/{ioc_id}/ignore'
return self._http_request('GET', suffix_url)
- def ignore_ioc_request(self, report_id: str = None, ioc_id: str = None) -> dict:
+ def ignore_ioc_request(self, report_id: None | str = None, ioc_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}/iocs/{ioc_id}/ignore'
return self._http_request('PUT', suffix_url)
- def reactivate_ioc_request(self, report_id: str = None, ioc_id: str = None) -> None:
+ def reactivate_ioc_request(self, report_id: None | str = None, ioc_id: None | str = None) -> None:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id})/iocs/{ioc_id}/ignore'
self._http_request('DELETE', suffix_url, resp_type='content')
- def get_report_request(self, report_id: str = None) -> dict:
+ def get_report_request(self, report_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}'
return self._http_request('GET', suffix_url)
- def create_report_request(self, title: str = None, description: str = None, tags: list = None, severity: int = None,
- iocs: dict = None, timestamp: int = None) -> dict:
+ def create_report_request(self, title: None | str = None, description: None | str = None, tags: None | list = None,
+ severity: None | int = None,
+ iocs: None | dict = None, timestamp: None | int = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports'
body = assign_params(
@@ -290,28 +306,28 @@ def create_report_request(self, title: str = None, description: str = None, tags
)
return self._http_request('POST', suffix_url, json_data=body)
- def ignore_report_request(self, report_id: str = None) -> dict:
+ def ignore_report_request(self, report_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}/ignore'
return self._http_request('PUT', suffix_url)
- def reactivate_report_request(self, report_id: str = None) -> None:
+ def reactivate_report_request(self, report_id: None | str = None) -> None:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}/ignore'
self._http_request('DELETE', suffix_url, resp_type='content')
- def get_report_ignore_status_request(self, report_id: str = None) -> dict:
+ def get_report_ignore_status_request(self, report_id: None | str = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}/ignore'
return self._http_request('GET', suffix_url)
- def remove_report_request(self, report_id: str = None) -> None:
+ def remove_report_request(self, report_id: None | str = None) -> None:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}'
self._http_request('DELETE', suffix_url, resp_type='content')
- def update_report_request(self, report_id: str = None, title: str = None, description: str = None,
- severity: int = None, iocs: dict = None, tags: list = None,
- timestamp: int = None) -> dict:
+ def update_report_request(self, report_id: None | str = None, title: None | str = None, description: None | str = None,
+ severity: None | int = None, iocs: None | dict = None, tags: None | list = None,
+ timestamp: None | int = None) -> dict:
suffix_url = f'/threathunter/watchlistmgr/v3/orgs/{self.cb_org_key}/reports/{report_id}'
body = assign_params(
title=title,
@@ -323,15 +339,15 @@ def update_report_request(self, report_id: str = None, title: str = None, descri
)
return self._http_request('PUT', suffix_url, json_data=body)
- def get_file_device_summary_request(self, sha256: str = None) -> dict:
+ def get_file_device_summary_request(self, sha256: None | str = None) -> dict:
suffix_url = f'ubs/v1/orgs/{self.cb_org_key}/sha256/{sha256}/summary/device'
return self._http_request('GET', suffix_url)
- def get_file_metadata_request(self, sha256: str = None) -> dict:
+ def get_file_metadata_request(self, sha256: None | str = None) -> dict:
suffix_url = f'ubs/v1/orgs/{self.cb_org_key}/sha256/{sha256}/metadata'
return self._http_request('GET', suffix_url)
- def get_file_request(self, sha256: list = None, expiration_seconds: int = None) -> dict:
+ def get_file_request(self, sha256: None | list = None, expiration_seconds: None | int = None) -> dict:
suffix_url = f'/ubs/v1/orgs/{self.cb_org_key}/file/_download'
body = assign_params(
sha256=sha256,
@@ -340,13 +356,13 @@ def get_file_request(self, sha256: list = None, expiration_seconds: int = None)
return self._http_request('POST', suffix_url, json_data=body)
- def get_file_path_request(self, sha256: str = None) -> dict:
+ def get_file_path_request(self, sha256: None | str = None) -> dict:
suffix_url = f'/ubs/v1/orgs/{self.cb_org_key}/sha256/{sha256}/summary/file_path'
return self._http_request('GET', suffix_url)
def create_search_process_request(self, process_hash: str, process_name: str, event_id: str, query: str,
- limit: int, start_time: str = None,
- end_time: str = None, start: int = 0) -> dict:
+ limit: None | int = None, start_time: None | str = None,
+ end_time: None | str = None, start: None | int = 0) -> dict:
if not process_hash and not process_name and not event_id and not query:
raise Exception("To perform an process search, please provide at least one of the following: "
"'process_hash', 'process_name', 'event_id' or 'query'")
@@ -382,7 +398,7 @@ def get_search_process_request(self, job_id) -> dict:
return self._http_request('GET', suffix_url)
def create_search_event_by_process_request(self, process_guid: str, event_type: str,
- query: str, limit: int, start_time: str, end_time: str = None,
+ query: str, limit: int, start_time: str, end_time: None | str = None,
start: int = 0) -> dict:
if event_type and event_type not in ['filemod', 'netconn', 'regmod', 'modload', 'crossproc', 'childproc']:
raise Exception("Only the following event types can be searched: "
@@ -411,7 +427,7 @@ def create_search_event_by_process_request(self, process_guid: str, event_type:
response = self._http_request('POST', suffix_url, json_data=body)
return response
- def update_threat_tags(self, threat_id: str = None, tags: list = None) -> dict:
+ def update_threat_tags(self, threat_id: None | str = None, tags: None | list = None) -> dict:
suffix_url = f'api/alerts/v7/orgs/{self.cb_org_key}/threats/{threat_id}/tags'
@@ -421,7 +437,7 @@ def update_threat_tags(self, threat_id: str = None, tags: list = None) -> dict:
return self._http_request('POST', suffix_url, json_data=body)
- def create_threat_notes(self, threat_id: str = None, notes: str = None) -> dict:
+ def create_threat_notes(self, threat_id: None | str = None, notes: None | str = None) -> dict:
suffix_url = f'api/alerts/v7/orgs/{self.cb_org_key}/threats/{threat_id}/notes'
body = {
@@ -429,7 +445,7 @@ def create_threat_notes(self, threat_id: str = None, notes: str = None) -> dict:
}
return self._http_request('POST', suffix_url, json_data=body)
- def update_alert_notes(self, alert_id: str = None, notes: str = None) -> dict:
+ def update_alert_notes(self, alert_id: None | str = None, notes: None | str = None) -> dict:
suffix_url = f'api/alerts/v7/orgs/{self.cb_org_key}/alerts/{alert_id}/notes'
@@ -439,7 +455,7 @@ def update_alert_notes(self, alert_id: str = None, notes: str = None) -> dict:
return self._http_request('POST', suffix_url, json_data=body)
- def get_threat_tags(self, threat_id: str = None) -> dict:
+ def get_threat_tags(self, threat_id: None | str = None) -> dict:
suffix_url = f'api/alerts/v7/orgs/{self.cb_org_key}/threats/{threat_id}/tags'
@@ -461,7 +477,6 @@ def test_module(client):
def alert_list_command(client: Client, args: dict) -> CommandResults | str:
- group_results = args.get('group_results')
minimum_severity = args.get('minimum_severity')
create_time = assign_params(
start=args.get('start_time'),
@@ -478,8 +493,6 @@ def alert_list_command(client: Client, args: dict) -> CommandResults | str:
policy_name = argToList(args.get('policy_name'))
reputation = argToList(args.get('reputation'))
alert_type = argToList(args.get('alert_type'))
- alert_category = argToList(args.get('alert_category'))
- workflow = argToList(args.get('workflow'))
device_name = argToList(args.get('device_name'))
process_name = argToList(args.get('process_name'))
sort_field = args.get('sort_field')
@@ -489,23 +502,29 @@ def alert_list_command(client: Client, args: dict) -> CommandResults | str:
headers = ['AlertID', 'CreateTime', 'DeviceID', 'DeviceName', 'DeviceOS', 'PolicyName', 'ProcessName', 'Type',
'WorkflowState']
- result = client.search_alerts_request(group_results, minimum_severity, create_time,
+ result = client.search_alerts_request(minimum_severity, create_time,
device_os_version, policy_id, alert_tag, alert_id, device_username,
device_id, device_os, process_sha256, policy_name,
- reputation, alert_type, alert_category, workflow, device_name,
+ reputation, alert_type, device_name,
process_name, sort_field, sort_order, limit)
alerts = result.get('results', [])
if not alerts:
return 'No alerts were found'
for alert in alerts:
+
+        # The new API version returns 'status' instead of 'state' and 'first_event_timestamp'
+        # instead of 'first_event_time'; map them back so the output keeps its previous shape.
+ alert['workflow']['state'] = alert['workflow']['status']
+ alert['first_event_time'] = alert['first_event_timestamp']
+
contents.append({
'AlertID': alert.get('id'),
- 'CreateTime': alert.get('create_time'),
+ 'CreateTime': alert.get('backend_timestamp'),
'DeviceID': alert.get('device_id'),
'DeviceName': alert.get('device_name'),
'DeviceOS': alert.get('device_os'),
- 'PolicyName': alert.get('policy_name'),
+ 'PolicyName': alert.get('device_policy'),
'ProcessName': alert.get('process_name'),
'Type': alert.get('type'),
'WorkflowState': alert.get('workflow', {}).get('state')
@@ -522,32 +541,108 @@ def alert_list_command(client: Client, args: dict) -> CommandResults | str:
return results
-def alert_workflow_update_command(client: Client, args: dict) -> CommandResults:
- alert_id = args.get('alert_id')
- state = args.get('state')
- comment = args.get('comment')
- remediation_state = args.get('remediation_state')
+@polling_function(name='cb-eedr-alert-workflow-update', interval=60, requires_polling_arg=False)
+def alert_workflow_update_command_with_polling(args: dict, client: Client) -> PollResult:
+ """
+    Updates the given alert's workflow. This is a polling function.
- result = client.alert_workflow_update_request(alert_id, state, comment, remediation_state)
+ Args:
+ args (dict): Including alert_id and fields to update.
+ client (Client): The client.
- readable_output = tableToMarkdown(f'Successfully updated the alert: "{alert_id}"', result, removeNull=True)
- outputs = {
- 'AlertID': alert_id,
- 'State': result.get('state'),
- 'Remediation': result.get('remediation'),
- 'LastUpdateTime': result.get('last_update_time'),
- 'Comment': result.get('comment'),
- 'ChangedBy': result.get('changed_by')
- }
+ Returns:
+        PollResult: Stops polling once the request status is COMPLETED; otherwise polls again.
+ """
+ request_id = arg_to_number(args.get('request_id'))
+ alert_id = args['alert_id']
+
+ demisto.debug(f'{LOG_INIT} Polling is running - got {request_id=}, {alert_id=}')
+
+ if not request_id: # if this is the first time
+ demisto.debug(f'{LOG_INIT} Getting all relevant args for first run')
+ determination = args.get('remediation_state')
+ time_range = args.get('time_range')
+ start = args.get('start')
+ end = args.get('end')
+ closure_reason = args.get('closure_reason')
+ comment = args.get('comment')
+ status = args.get('state')
+
+        # The new API version (v7) does not support 'DISMISSED'; 'CLOSED' is used instead.
+        if str(status).lower() == 'dismissed':
+            status = 'CLOSED'
+        if str(status).lower() == 'open':
+            status = 'OPEN'
+
+ if not determination and not status:
+ raise DemistoException('Must specify at least one of \"remediation_state\" or \"state\".')
+
+ if start or end:
+ if not start or not end:
+ raise DemistoException('Need to specify start and end timestamps')
+ if start > end:
+ raise DemistoException('start timestamp needs to be before end timestamp')
+
+ demisto.debug(f'{LOG_INIT} calling alert_workflow_update_request function')
+ response = client.alert_workflow_update_request(
+ alert_id, status, comment, determination, time_range, start, end, closure_reason)
+
+    demisto.debug(f'{LOG_INIT} Received response: type={type(response)}, len={len(response)}')
+
+ return PollResult(
+ partial_result=CommandResults(readable_output="running polling"),
+ response=None,
+ continue_to_poll=True,
+ args_for_next_run={"request_id": response['request_id']} | args
+ )
- results = CommandResults(
- outputs_prefix='CarbonBlackEEDR.Alert',
- outputs_key_field='AlertID',
- outputs=outputs,
- readable_output=readable_output,
- raw_response=result
- )
- return results
+ request_id = args['request_id']
+
+ demisto.debug(f'{LOG_INIT} Calling the second endpoint')
+    response = client.alert_workflow_update_get_request(request_id)
+ demisto.debug(f'{LOG_INIT} {response=}')
+
+ request_status = response['status']
+ demisto.debug(f'{LOG_INIT} {request_status=}')
+
+ if request_status == 'CREATED':
+ message = CommandResults(
+ readable_output="Checking again in 60 seconds...")
+ demisto.debug(f'{LOG_INIT} returning PollResult with continue_to_poll=True')
+ return PollResult(
+ partial_result=message,
+ response=None,
+ continue_to_poll=True,
+            args_for_next_run={"request_id": request_id, **args})
+
+ elif request_status == 'COMPLETED':
+ changed_by = response['job_parameters']['job_parameters']['userWorkflowDto']['changed_by']
+ status_HR = response['job_parameters']['job_parameters']['request']['status'] if args.get('state') else None
+ message = CommandResults(
+ outputs={'AlertID': alert_id, 'ChangedBy': changed_by, 'Comment': args.get('comment'),
+ 'LastUpdateTime': response['last_update_time'], 'State': status_HR,
+ 'RemediationState': args.get('remediation_state')},
+ outputs_prefix='CarbonBlackEEDR.Alert',
+ readable_output=tableToMarkdown(f'Successfully updated the alert: "{alert_id}"',
+ {'changed_by': changed_by,
+ 'last_update_time': response['last_update_time'],
+                                                     'determination': args.get('remediation_state'),
+ 'comment': args.get('comment'),
+ 'closure reason': args.get('closure_reason'),
+ 'state': status_HR}, removeNull=True),
+ outputs_key_field='AlertID')
+ demisto.debug(f'{LOG_INIT} returning PollResult with continue_to_poll=False')
+ return PollResult(
+ response=message,
+ continue_to_poll=False)
+
+    else:  # The request status can be COMPLETED, CREATED or FAILED; reaching here means it failed.
+        raise DemistoException(f"Failed to update the alert's workflow. Request status: {request_status}, "
+                               f"response keys: {response.keys()}")
def list_devices_command(client: Client, args: dict) -> CommandResults | str:
@@ -1197,6 +1292,11 @@ def get_file_path_command(client: Client, args: dict) -> CommandResults:
def fetch_incidents(client: Client, fetch_time: str, fetch_limit: str, last_run: dict) -> tuple[list, dict]:
+    # The new API version (v7) always returns the previously fetched alert along with the new alerts,
+    # so when this is not the first fetch the limit is increased by one to compensate.
+ if not (int_fetch_limit := arg_to_number(fetch_limit)):
+ raise ValueError("limit cannot be empty.")
+ if last_run:
+ int_fetch_limit += 1
last_fetched_alert_create_time = last_run.get('last_fetched_alert_create_time')
last_fetched_alert_id = last_run.get('last_fetched_alert_id', '')
if not last_fetched_alert_create_time:
@@ -1207,7 +1307,7 @@ def fetch_incidents(client: Client, fetch_time: str, fetch_limit: str, last_run:
incidents = []
response = client.search_alerts_request(
- sort_field='first_event_time',
+ sort_field='first_event_timestamp',
sort_order='ASC',
create_time=assign_params(
start=last_fetched_alert_create_time,
@@ -1216,14 +1316,14 @@ def fetch_incidents(client: Client, fetch_time: str, fetch_limit: str, last_run:
limit=fetch_limit,
)
alerts = response.get('results', [])
-
+ demisto.debug(f'{LOG_INIT} got {len(alerts)} alerts from server')
for alert in alerts:
alert_id = alert.get('id')
if alert_id == last_fetched_alert_id:
- # got an alert we already fetched, skipping it
+ demisto.debug(f'{LOG_INIT} got previously fetched alert {alert_id}, skipping it')
continue
- alert_create_date = alert.get('create_time')
+ alert_create_date = alert.get('backend_timestamp')
incident = {
'name': f'Carbon Black Enterprise EDR alert {alert_id}',
'occurred': alert_create_date,
@@ -1235,12 +1335,89 @@ def fetch_incidents(client: Client, fetch_time: str, fetch_limit: str, last_run:
latest_alert_create_date = datetime.strftime(parsed_date + timedelta(seconds=1),
'%Y-%m-%dT%H:%M:%S.000Z')
latest_alert_id = alert_id
-
+ demisto.debug(f'{LOG_INIT} sending {len(incidents)} incidents')
res = {'last_fetched_alert_create_time': latest_alert_create_date, 'last_fetched_alert_id': latest_alert_id}
return incidents, res
-def process_search_command(client: Client, args: dict) -> CommandResults:
+@polling_function(
+ name='cb-eedr-process-search',
+ interval=arg_to_number(demisto.args().get('interval_in_seconds')) or 60,
+ timeout=arg_to_number(demisto.args().get('timeout')) or 600,
+ requires_polling_arg=False
+)
+def process_search_command_with_polling(args: dict, client: Client) -> PollResult:
+ """
+ Returns the process search results. This is a polling function.
+
+ Args:
+ args (dict): The input arguments from the user.
+ client (Client): The client.
+
+ Returns:
+ PollResult: If the job's status is COMPLETED will stop polling, otherwise will poll again.
+ """
+ job_id = args.get('job_id')
+ interval_in_seconds = arg_to_number(args.get('interval_in_seconds'))
+ demisto.debug(f'{LOG_INIT} in process_search_command_with_polling function, {job_id=}')
+
+ if not job_id: # if this is the first time
+ process_name = args.get('process_name', '')
+ process_hash = args.get('process_hash', '')
+ event_id = args.get('event_id', '')
+ query = args.get('query', '')
+ start_time = str(args.get('start_time', '1 day'))
+ end_time = str(args.get('end_time', ''))
+ limit = arg_to_number(args.get('limit'))
+ start = arg_to_number(args.get('start'))
+
+ response = client.create_search_process_request(process_name=process_name, process_hash=process_hash,
+ event_id=event_id, query=query, limit=limit,
+ start_time=start_time, end_time=end_time, start=start)
+ demisto.debug(f'{LOG_INIT} got {response=}')
+ return PollResult(partial_result=CommandResults(readable_output=f"job_id is {response.get('job_id')}."),
+ response=None,
+ continue_to_poll=True,
+ args_for_next_run={"job_id": response['job_id']} | args
+ )
+
+ # this is not the first time, there is a job_id
+ response = client.get_search_process_request(job_id=args['job_id'])
+ if response.get('contacted'):
+        # The response has no 'status' field. If 'contacted' equals 'completed' the search is done;
+        # otherwise it is still in progress. If 'contacted' is missing, something failed on the server side.
+ status = 'Completed' if response.get('contacted') == response.get('completed') else 'In Progress'
+ else:
+ status = None
+ if status == 'In Progress':
+ message = CommandResults(
+ readable_output=f"Checking again in {interval_in_seconds} seconds...")
+ return PollResult(
+ partial_result=message,
+ response=None,
+ continue_to_poll=True,
+ args_for_next_run={"job_id": job_id,
+ **args})
+
+ elif status == 'Completed':
+ output = {'status': status, 'job_id': job_id, 'results': response.get('results')}
+ title = "Completed Search Results:"
+ headers = ["process_hash", "process_name", "device_name", "device_timestamp", "process_pid", "process_username"]
+ human_readable = tableToMarkdown(name=title, t=output.get('results'), removeNull=True, headers=headers)
+ message = CommandResults(outputs_prefix='CarbonBlackEEDR.SearchProcess',
+ outputs=output,
+ outputs_key_field='job_id',
+ raw_response=response,
+ readable_output=human_readable)
+ return PollResult(
+ response=message,
+ continue_to_poll=False)
+
+ else:
+ raise DemistoException(f'Failed to run process search. response keys: {response.keys()}')
+
+
+def process_search_command_without_polling(client: Client, args: dict) -> CommandResults:
"""
Gets arguments for a process search task, and returns the task's id and status.
"""
@@ -1419,10 +1596,8 @@ def main():
token = f'{cb_custom_key}/{cb_custom_id}'
# get the service API url
base_url = demisto.params().get('url')
-
verify_certificate = not demisto.params().get('insecure', False)
proxy = demisto.params().get('proxy', False)
-
LOG(f'Command being called is {demisto.command()}')
try:
client = Client(
@@ -1449,7 +1624,8 @@ def main():
return_results(alert_list_command(client, demisto.args()))
elif demisto.command() == 'cb-eedr-alert-workflow-update':
- return_results(alert_workflow_update_command(client, demisto.args()))
+ # args have to be sent before client because this is a polling function!
+ return_results(alert_workflow_update_command_with_polling(demisto.args(), client))
elif demisto.command() == 'cb-eedr-devices-list':
return_results(list_devices_command(client, demisto.args()))
@@ -1542,7 +1718,12 @@ def main():
return_results(get_file_path_command(client, demisto.args()))
elif demisto.command() == 'cb-eedr-process-search':
- return_results(process_search_command(client, demisto.args()))
+ polling = argToBoolean(demisto.args().get('polling'))
+ if polling:
+ # args have to be sent before client because this is a polling function!!
+ return return_results(process_search_command_with_polling(demisto.args(), client))
+ else:
+ return return_results(process_search_command_without_polling(client, demisto.args()))
elif demisto.command() == 'cb-eedr-process-search-results':
for command_result_item in process_search_get_command(client, demisto.args()):
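
Both new commands above rely on CommonServerPython's scheduled polling, so a minimal sketch of the contract they follow may help reviewers. Only polling_function, PollResult and CommandResults come from CommonServerPython as used in this file; the command name and the client calls are placeholders, not real Carbon Black endpoints.

    import demistomock as demisto  # noqa: F401
    from CommonServerPython import *  # noqa: F401


    @polling_function(name='example-command', interval=60, timeout=600, requires_polling_arg=False)
    def example_command_with_polling(args: dict, client) -> PollResult:
        job_id = args.get('job_id')
        if not job_id:
            # First run: start the asynchronous job, then ask the server to re-run this
            # command later with the job id injected into the arguments.
            job_id = client.start_job(args)  # placeholder call
            return PollResult(
                partial_result=CommandResults(readable_output=f'Started job {job_id}.'),
                response=None,
                continue_to_poll=True,
                args_for_next_run={'job_id': job_id, **args},
            )
        # Subsequent runs: check the job and either keep polling or return the final result.
        status, results = client.get_job(job_id)  # placeholder call
        if status != 'COMPLETED':
            return PollResult(
                partial_result=CommandResults(readable_output='Still running, checking again soon...'),
                response=None,
                continue_to_poll=True,
                args_for_next_run={'job_id': job_id, **args},
            )
        return PollResult(response=CommandResults(outputs=results), continue_to_poll=False)

The decorated function takes (args, client) in that order, which is why main() now passes demisto.args() before client for these two commands.
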
diff --git a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.yml b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.yml
index 8c33477343dd..8b4f5d496f20 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.yml
+++ b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR.yml
@@ -83,13 +83,39 @@ script:
description: Workflow state to update.
name: state
predefined:
- - dismissed
- - open
+ - IN_PROGRESS
+ - CLOSED
+ - DISMISSED
+ - OPEN
+ - description: Value judgement of whether the alert(s) represent a true or false positive.
+ name: remediation_state
+ auto: PREDEFINED
+ predefined:
+ - TRUE_POSITIVE
+ - FALSE_POSITIVE
+ - NONE
+ - description: Relative time range for the request. Should not be provided if using `start` and `end` arguments.
+ name: time_range
+ defaultValue: -2w
+ - name: start
+ description: The lower bound of the time range. Requires end and must be a timestamp before end.
+ - name: end
+ description: The upper bound of the time range. Requires start and must be a timestamp after start.
+ - name: closure_reason
+ description: The closure reason.
+ auto: PREDEFINED
+ predefined:
+ - NO_REASON
+ - RESOLVED
+ - RESOLVED_BENIGN_KNOWN_GOOD
+ - DUPLICATE_CLEANUP
+ - OTHER
- description: Comment to include with the operation.
name: comment
- - description: Description of the changes done in the workflow state.
- name: remediation_state
- description: Updates the workflow of a single event.
+  - description: The request ID of the workflow update job. Used internally for polling.
+ name: request_id
+ hidden: true
+ description: Updates the workflow of a single alert.
name: cb-eedr-alert-workflow-update
outputs:
- contextPath: CarbonBlackEEDR.Alert.AlertID
@@ -110,6 +136,7 @@ script:
- contextPath: CarbonBlackEEDR.Alert.State
description: The alert state.
type: String
+ polling: true
- arguments:
- description: The devices on which to perform the action. Get the ID from the devices-list command. Supports comma-separated values.
name: device_id
@@ -371,13 +398,6 @@ script:
description: The MAC address of the endpoint.
type: String
- arguments:
- - auto: PREDEFINED
- defaultValue: 'true'
- description: Whether to group results.
- name: group_results
- predefined:
- - 'true'
- - 'false'
- description: Alert minimum severity (In range of 1-10).
name: minimum_severity
- description: Device OS version. Supports comma-separated values.
@@ -402,31 +422,27 @@ script:
name: reputation
- description: Alert type. Supports comma-separated values.
name: alert_type
- - description: Alert category. Supports comma-separated values.
- name: alert_category
- - description: Alert workflow. Supports comma-separated values.
- name: workflow
- - description: Device name. Supports comma-separated values.
+  - description: A comma-separated list of device names.
name: device_name
- - description: Process name. Supports comma-separated values.
+  - description: A comma-separated list of process names, for example "C:\\Users\\example_user\\Example" or "/home/example_user/Example".
name: process_name
- - auto: PREDEFINED
- defaultValue: first_event_time
- description: Field by which to sort the results. Can be "first_event_time", or "last_event_time".
+ - description: Field by which to sort the results.
name: sort_field
+ auto: PREDEFINED
+ defaultValue: first_event_timestamp
predefined:
- - first_event_time
- - last_event_time
- - auto: PREDEFINED
- defaultValue: DESC
- description: How to order the results. Can be "ASC" (ascending) or "DESC" (descending).
+ - first_event_timestamp
+ - last_event_timestamp
+ - description: How to order the results. Can be "ASC" (ascending) or "DESC" (descending).
name: sort_order
+ auto: PREDEFINED
+ defaultValue: DESC
predefined:
- ASC
- DESC
- - defaultValue: '10'
- description: The maximum number of results to return.
+ - description: The maximum number of results to return.
name: limit
+ defaultValue: '10'
- description: Alert start time.
name: start_time
- description: Alert end time.
@@ -548,9 +564,6 @@ script:
- contextPath: CarbonBlackEEDR.Alert.severity
description: Alert severity.
type: Number
- - contextPath: CarbonBlackEEDR.Alert.category
- description: Alert category.
- type: String
- contextPath: CarbonBlackEEDR.Alert.threat_cause_vector
description: Threat cause vector.
type: String
@@ -570,7 +583,7 @@ script:
description: Alert workflow - remediation.
type: String
- contextPath: CarbonBlackEEDR.Alert.workflow.state
- description: Alert workflow - state.
+ description: Alert workflow state.
type: String
- description: Retrieves all watchlists.
name: cb-eedr-watchlist-list
@@ -1126,7 +1139,23 @@ script:
name: end_time
- description: Index of first records to fetch. Default is 0.
name: start
- description: Creates a process search job.
+  - description: The job ID of the process search. Used internally for polling.
+ name: job_id
+ hidden: true
+  - description: Whether to run the command with polling.
+ name: polling
+ defaultValue: true
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - description: The time in seconds to wait between polling.
+ name: interval_in_seconds
+ defaultValue: '60'
+ - description: The timeout duration in seconds for polling retries.
+    name: timeout
+    defaultValue: '600'
+ description: Creates a process search job and returns results if 'polling' argument is True.
name: cb-eedr-process-search
outputs:
- contextPath: CarbonBlackEEDR.SearchProcess.job_id
@@ -1135,6 +1164,94 @@ script:
- contextPath: CarbonBlackEEDR.SearchProcess.status
description: The status of the job found by the search.
type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.device_id
+ description: The device ID that is guaranteed to be unique within each PSC environment.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.process_username
+ description: The user names related to the process.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.backend_timestamp
+ description: A date/time field formatted as an ISO-8601 string based on the UTC timezone. For example, device_timestamp:2018-03-14T21:06:45.183Z.
+ type: Date
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.childproc_count
+ description: The cumulative count of child-process creations since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.crossproc_count
+ description: The cumulative count of cross-process events since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.device_group_id
+ description: The ID of the sensor group where the device belongs.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.device_name
+ description: The name of the device.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.device_policy_id
+ description: The ID of the policy applied to the device.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.device_timestamp
+ description: The time displayed on the sensor based on the sensor’s clock. The time is an ISO-8601 formatted time string based on the UTC timezone.
+ type: Date
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.enriched
+ description: True if the process document came from the CBD data stream.
+ type: Boolean
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.enriched_event_type
+ description: The CBD enriched event type.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.event_type
+ description: 'The CBD event type (valid only for events coming through analytics). Possible values are: CREATE_PROCESS, DATA_ACCESS, FILE_CREATE, INJECT_CODE, NETWORK, POLICY_ACTION, REGISTRY_ACCESS, and SYSTEM_API_CALL.'
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.filemod_count
+ description: The cumulative count of file modifications since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.ingress_time
+ description: Unknown.
+ type: Date
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.legacy
+ description: True if the process document came from the legacy data stream (deprecated, use enriched).
+ type: Boolean
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.modload_count
+ description: The cumulative count of module loads since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.netconn_count
+ description: The cumulative count of network connections since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.org_id
+ description: The globally unique organization key. This will most likely be the PSC organization ID + PSC environment ID or some other unique token used across environments.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.parent_guid
+ description: The process GUID of the parent process.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.parent_pid
+ description: The PID of the parent process.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.process_guid
+ description: Unique ID of the solr document. Appears as process_guid + server-side timestamp in epoch ms (1/1/1970 based).
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.process_hash
+ description: The MD5 and SHA-256 hashes of the process's main module in a multi-valued field.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.process_name
+ description: The tokenized file path of the process's main module.
+ type: String
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.process_pid
+ description: The PID of a process. Can be multi-valued in case of exec/fork on Linux/OSX.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.regmod_count
+ description: The cumulative count of registry modifications since process tracking started.
+ type: Number
+ - contextPath: CarbonBlackEEDR.SearchProcess.results.scriptload_count
+ description: The cumulative count of loaded scripts since process tracking started.
+ type: Number
+ polling: true
- arguments:
- description: The process GUID to search. Must be focused on a single process.
name: process_guid
@@ -1358,7 +1475,7 @@ script:
description: Threat ID.
- contextPath: CarbonBlackEEDR.Threat.Tags
description: Threat tags.
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.14.101217
isfetch: true
runonce: false
script: ''
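
For reference, the body that the updated search_alerts_request assembles for the v7 _search endpoint looks roughly as follows. This is a sketch pieced together from the Python changes above; the concrete criteria values are placeholders.

    # POST /api/alerts/v7/orgs/{org_key}/alerts/_search
    body = {
        'criteria': {
            # 'group_results', 'category' and 'workflow' were dropped from the criteria in v7.
            'minimum_severity': 3,
            'device_name': ['example-host'],  # placeholder
        },
        # v6 sorted on 'first_event_time'; v7 uses 'first_event_timestamp'.
        'sort': [{'field': 'first_event_timestamp', 'order': 'DESC'}],
        'rows': 10,
        'start': 1,  # changed from 0 in the v6 request
    }
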
diff --git a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR_test.py b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR_test.py
index 77d29f1128cb..84729205f6e1 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR_test.py
+++ b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/CarbonBlackEnterpriseEDR_test.py
@@ -9,6 +9,13 @@
import demistomock as demisto
from freezegun import freeze_time
+CLIENT = cbe.Client(
+ base_url='https://server_url.com',
+ use_ssl=False,
+ use_proxy=False,
+ token=None,
+ cb_org_key="123")
+
PROCESS_CASES = [
(
{'process_hash': '63d423ea882264dbb157a965c200306212fc5e1c6ddb8cbbb0f1d3b51ecd82e6',
@@ -44,15 +51,9 @@ def test_create_process_search_body(mocker, demisto_args, expected_results):
"""
mocker.patch.object(demisto, 'args', return_value=demisto_args)
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
- m = mocker.patch.object(client, '_http_request', return_value={})
-
- client.create_search_process_request(**demisto_args)
+ m = mocker.patch.object(CLIENT, '_http_request', return_value={})
+
+ CLIENT.create_search_process_request(**demisto_args)
assert m.call_args[1].get('json_data') == expected_results
@@ -81,16 +82,10 @@ def test_create_process_search_failing(mocker, requests_mock, demisto_args, expe
"""
mocker.patch.object(demisto, 'args', return_value=demisto_args)
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
- mocker.patch.object(client, '_http_request', return_value={})
+ mocker.patch.object(CLIENT, '_http_request', return_value={})
with pytest.raises(Exception) as e:
- client.create_search_process_request(**demisto_args)
+ CLIENT.create_search_process_request(**demisto_args)
assert str(e.value) == expected_error_msg
@@ -125,15 +120,9 @@ def test_create_event_by_process_search_body(mocker, demisto_args, expected_resu
"""
mocker.patch.object(demisto, 'args', return_value=demisto_args)
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
- m = mocker.patch.object(client, '_http_request', return_value={})
-
- client.create_search_event_by_process_request(**demisto_args)
+ m = mocker.patch.object(CLIENT, '_http_request', return_value={})
+
+ CLIENT.create_search_event_by_process_request(**demisto_args)
assert m.call_args[1].get('json_data') == expected_results
@@ -166,16 +155,10 @@ def test_event_by_process_failing(mocker, requests_mock, demisto_args, expected_
"""
mocker.patch.object(demisto, 'args', return_value=demisto_args)
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
- mocker.patch.object(client, '_http_request', return_value={})
+ mocker.patch.object(CLIENT, '_http_request', return_value={})
with pytest.raises(Exception) as e:
- client.create_search_event_by_process_request(**demisto_args)
+ CLIENT.create_search_event_by_process_request(**demisto_args)
assert str(e.value) == expected_error_msg
@@ -196,17 +179,10 @@ def test_add_threat_tags_command(mocker):
- validate that the returned results were parsed as expected.
"""
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
-
- mocker.patch.object(client, '_http_request', return_value=MOCK_UPDATE_THREAT_TAGS_RESPONSE)
+ mocker.patch.object(CLIENT, '_http_request', return_value=MOCK_UPDATE_THREAT_TAGS_RESPONSE)
args = {'threat_id': '123456', 'tags': ['tag1', 'tag2']}
- result = add_threat_tags_command(client, args)
+ result = add_threat_tags_command(CLIENT, args)
assert result.outputs == {'ThreatID': '123456', 'Tags': ['tag1', 'tag2']}
assert result.outputs_prefix == 'CarbonBlackEEDR.Threat'
@@ -233,17 +209,10 @@ def test_add_threat_notes_command(mocker):
- validate that the returned results were parsed as expected.
"""
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
-
- mocker.patch.object(client, '_http_request', return_value=MOCK_CREATE_THREAT_NOTES_RESPONSE)
+ mocker.patch.object(CLIENT, '_http_request', return_value=MOCK_CREATE_THREAT_NOTES_RESPONSE)
args = {'threat_id': '123456', 'notes': 'These are threat notes'}
- result = add_threat_notes_command(client, args)
+ result = add_threat_notes_command(CLIENT, args)
assert result.outputs == {'ThreatID': '123456', 'Notes': 'These are threat notes'}
assert result.outputs_prefix == 'CarbonBlackEEDR.Threat'
@@ -273,17 +242,10 @@ def test_get_threat_tags_command(mocker):
- validate that the returned results was parsed as expected.
"""
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
-
- mocker.patch.object(client, '_http_request', return_value=MOCK_GET_THREAT_TAGS_RESPONSE)
+ mocker.patch.object(CLIENT, '_http_request', return_value=MOCK_GET_THREAT_TAGS_RESPONSE)
args = {'threat_id': '123456'}
- result = get_threat_tags_command(client, args)
+ result = get_threat_tags_command(CLIENT, args)
assert result.outputs == {'ThreatID': '123456', 'Tags': [{'tag': 'malware'}, {'tag': 'suspicious'}]}
assert result.outputs_prefix == 'CarbonBlackEEDR.Threat'
@@ -310,17 +272,10 @@ def test_add_alert_notes_command(mocker):
- validate that the returned results were parsed as expected.
"""
- client = cbe.Client(
- base_url='https://server_url.com',
- use_ssl=False,
- use_proxy=False,
- token=None,
- cb_org_key="123")
-
- mocker.patch.object(client, '_http_request', return_value=MOCK_UPDATE_ALERT_NOTES_RESPONSE)
+ mocker.patch.object(CLIENT, '_http_request', return_value=MOCK_UPDATE_ALERT_NOTES_RESPONSE)
args = {'alert_id': '789012', 'notes': 'These are alert notes'}
- result = add_alert_notes_command(client, args)
+ result = add_alert_notes_command(CLIENT, args)
assert result.outputs == {'AlertID': '789012', 'Notes': 'These are alert notes'}
assert result.outputs_prefix == 'CarbonBlackEEDR.Threat'
@@ -328,3 +283,159 @@ def test_add_alert_notes_command(mocker):
assert "Successfully added notes to alert: \"789012\"" in result.readable_output
assert result.raw_response == MOCK_UPDATE_ALERT_NOTES_RESPONSE
+
+
+def test_test_module(mocker):
+ """
+ Given:
+ - All relevant parameters for the integration.
+
+ When:
+ - testing the configuration of the integration.
+
+ Then:
+ - The http request is called with the right API version.
+ - The 'start' field in the body of the request equals 1.
+ """
+ from CarbonBlackEnterpriseEDR import test_module
+ http_request = mocker.patch.object(CLIENT, '_http_request', return_value=[])
+ test_module(client=CLIENT)
+ assert 'api/alerts/v7/orgs' in http_request.call_args.kwargs['url_suffix']
+ assert http_request.call_args.kwargs['json_data']['start'] == 1
+
+
+def test_search_alerts_request(mocker):
+ """
+ Given:
+ - All arguments needed for search_alerts_request.
+
+ When:
+ - Calling the search_alerts_request function.
+
+ Then:
+ - The http request is called with the right API version.
+ - The 'start' field in the body of the request equals 1.
+ """
+ http_request = mocker.patch.object(CLIENT, '_http_request', return_value=[])
+ CLIENT.search_alerts_request()
+ assert 'api/alerts/v7/orgs' in http_request.call_args[0][1]
+ assert http_request.call_args.kwargs['json_data']['start'] == 1
+
+
+def test_alert_workflow_update_get_request(mocker):
+ """
+ Given:
+ - A request_id
+
+ When:
+ - Calling alert_workflow_update_get_request function
+
+ Then:
+ - The http request is called with the request_id.
+ """
+ http_request = mocker.patch.object(CLIENT, '_http_request', return_value=[])
+ CLIENT.alert_workflow_update_get_request('1234')
+ assert '1234' in http_request.call_args[0][1]
+
+
+def test_alert_workflow_update_request_good_arguments(mocker):
+ """
+ Given:
+ - All required arguments.
+
+ When:
+ - Calling alert_workflow_update_request function.
+
+ Then:
+ - The http request is called with the right version.
+ - The http request is called with the right json body.
+ """
+ http_request = mocker.patch.object(CLIENT, '_http_request', return_value=[])
+ CLIENT.alert_workflow_update_request(alert_id='1234', state='OPEN', comment='bla1', determination='NONE',
+ time_range='-2w', start='1', end='2', closure_reason='bla2')
+ assert 'api/alerts/v7/orgs' in http_request.call_args[0][1]
+ assert http_request.call_args.kwargs['json_data'] == {'time_range': {'start': '1', 'end': '2', 'range': '-2w'},
+ 'criteria': {'id': ['1234']}, 'determination': 'NONE',
+ 'closure_reason': 'bla2', 'status': 'OPEN', 'note': 'bla1'}
+
+
+alert_workflow_update_command_func_called_data = [
+ ({'alert_id': '123', 'state': 'OPEN'}, # case first time polling (no request_id).
+ 'alert_workflow_update_request', # func to be called.
+ {'request_id': '123456789'} # response
+ ),
+ ({'alert_id': '123', 'request_id': '12345'}, # case there is a request_id.
+ 'alert_workflow_update_get_request', # func to be called.
+ {'status': 'COMPLETED',
+ 'job_parameters': {'job_parameters': {'request': {'state': 'OPEN'}, 'userWorkflowDto': {'changed_by': 'bla'}}},
+ 'last_update_time': 'now'})
+]
+
+
+@pytest.mark.parametrize('args, func_to_be_called, response', alert_workflow_update_command_func_called_data)
+def test_alert_workflow_update_command_func_called(mocker, args, func_to_be_called, response):
+ """
+ Given:
+ - All arguments needed.
+
+ When:
+ - Running 'cb-eedr-alert-workflow-update' command.
+
+ Then:
+ - The correct client method is called, depending on whether this is the first call or a polling retry.
+ """
+ from CarbonBlackEnterpriseEDR import alert_workflow_update_command_with_polling
+ execute_command = mocker.patch.object(CLIENT, func_to_be_called, return_value=response)
+ alert_workflow_update_command_with_polling(args, CLIENT)
+ assert execute_command.called is True
+
+
+alert_workflow_update_command_bad_argument_data = [
+ ({'alert_id': '123'}), # case no status and no determination.
+ ({'alert_id': '123', 'start': '2019-01-01T11:00:00.157Z'}), # case there is start but no end.
+ ({'alert_id': '123', 'start': '2019-01-01T11:00:00.157Z', 'end': '2018-01-01T11:00:00.157Z'}) # case end is before start
+]
+
+
+@pytest.mark.parametrize('args', alert_workflow_update_command_bad_argument_data)
+def test_alert_workflow_update_command_bad_arguments(args):
+ """
+ Given:
+ - Invalid command's input.
+ When:
+ - Running 'cb-eedr-alert-workflow-update' command.
+
+ Then:
+ - The expected exception is raised.
+ """
+ from CarbonBlackEnterpriseEDR import alert_workflow_update_command_with_polling
+ from CommonServerPython import DemistoException
+ with pytest.raises(DemistoException):
+ alert_workflow_update_command_with_polling(args, CLIENT)
+
+
+process_search_command_func_called_data = [
+ ({'process_name': 'bla1'}, # case first time polling (no job_id).
+ 'create_search_process_request' # func to be called.
+ ),
+ ({'process_name': 'bla2', 'job_id': '12345'}, # case there is a job_id.
+ 'get_search_process_request' # func to be called.
+ )]
+
+
+@pytest.mark.parametrize('args, func_to_be_called', process_search_command_func_called_data)
+def test_alert_process_search_command_func_called(mocker, args, func_to_be_called):
+ """
+ Given:
+ - All arguments needed.
+
+ When:
+ - Running 'cb-eedr-process-search' command.
+
+ Then:
+ - The correct client method is called, depending on whether this is the first call or a polling retry.
+ """
+ from CarbonBlackEnterpriseEDR import process_search_command_with_polling
+ execute_command = mocker.patch.object(CLIENT, func_to_be_called)
+ process_search_command_with_polling(args, CLIENT)
+ assert execute_command.called is True
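
The two `*_func_called` tests above only assert which Client method gets invoked on the first call versus on a polling retry. The sketch below illustrates the dispatch logic those parametrized cases imply; the helper names are hypothetical, the argument plumbing is simplified, and the real commands in CarbonBlackEnterpriseEDR.py additionally schedule the polling retries and build the command results.

```python
# Illustrative sketch only -- inferred from the parametrized test data above,
# not copied from CarbonBlackEnterpriseEDR.py.

def process_search_dispatch(args, client):
    # Hypothetical helper: mirrors what test_alert_process_search_command_func_called asserts.
    if 'job_id' not in args:
        # First invocation: no search job exists yet, so create one.
        return client.create_search_process_request(**args)
    # Polling invocation: a job_id is present, so fetch that job's results
    # (the exact signature of the client method may differ).
    return client.get_search_process_request(args['job_id'])


def alert_workflow_update_dispatch(args, client):
    # Hypothetical helper: mirrors what test_alert_workflow_update_command_func_called asserts.
    if 'request_id' not in args:
        # First invocation: submit the workflow update job and receive a request_id.
        return client.alert_workflow_update_request(**args)
    # Polling invocation: check the job's status; the command keeps polling until it is COMPLETED.
    return client.alert_workflow_update_get_request(args['request_id'])
```
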
diff --git a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/README.md b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/README.md
index 358fb3aae2a6..faf8e1a011b9 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/README.md
+++ b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/README.md
@@ -74,9 +74,15 @@ RBAC Permissions Required - org.alerts.dismiss: EXECUTE
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| alert_id | The ID of the alert to update. Get the ID from list_alerts command. | Required |
-| state | Workflow state to update. | Optional |
+| status | Workflow status to update. | Optional |
| comment | Comment to include with the operation. | Optional |
-| remediation_state | Description of the changes done in the workflow state. | Optional |
+| closure_reason | The closure reason. | Optional |
+| determination | Value judgement of whether the alert(s) represent a true or false positive. | Optional |
+| end | The upper bound of the time range. Requires start and must be a timestamp after start. | Optional |
+| start | The lower bound of the time range. Requires end and must be a timestamp before end. | Optional |
+| time_range | Relative time range for the request. Should not be provided if using 'start' and 'end' arguments. | Optional |
+
+
##### Context Output
@@ -92,7 +98,7 @@ RBAC Permissions Required - org.alerts.dismiss: EXECUTE
##### Command Example
-```!cb-eedr-alert-workflow-update alert_id=A28C720DCBCD66333A624893AB1E0FE9 state=open```
+```!cb-eedr-alert-workflow-update alert_id=A28C720DCBCD66333A624893AB1E0FE9 status=open```
##### Context Example
```
@@ -102,7 +108,7 @@ RBAC Permissions Required - org.alerts.dismiss: EXECUTE
"ChangedBy": "ATL5Y9DR4B",
"AlertID": "A28C720DCBCD66333A624893AB1E0FE9",
"LastUpdateTime": "2020-05-26T13:33:12.890Z",
- "State": "OPEN",
+ "Status": "OPEN",
"Remediation": null
}
}
@@ -486,7 +492,6 @@ RBAC Permissions Required - org.alerts: READ
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| group_results | Whether to group results. Can be "true" or "false". The default is "true". | Optional |
| minimum_severity | Alert minimum severity. | Optional |
| device_os_version | Device OS version. Supports comma-separated values. | Optional |
| policy_id | The policy ID. Supports comma-separated values. | Optional |
@@ -499,8 +504,6 @@ RBAC Permissions Required - org.alerts: READ
| policy_name | Policy name. Supports comma-separated values. | Optional |
| reputation | Alert reputation. Supports comma-separated values. | Optional |
| alert_type | Alert type. Supports comma-separated values. | Optional |
-| alert_category | Alert category. Supports comma-separated values. | Optional |
-| workflow | Alert workflow. Supports comma-separated values. | Optional |
| device_name | Device name. Supports comma-separated values. | Optional |
| process_name | Process name. Supports comma-separated values. | Optional |
| sort_field | Field by which to sort the results. Can be "first_event_time", "last_event_time", "severity", or "target_value". | Optional |
@@ -552,7 +555,6 @@ RBAC Permissions Required - org.alerts: READ
| CarbonBlackEEDR.Alert.notes_present | Boolean | Whether notes are present. |
| CarbonBlackEEDR.Alert.run_state | String | Alert run state. |
| CarbonBlackEEDR.Alert.severity | Number | Alert severity. |
-| CarbonBlackEEDR.Alert.category | String | Alert category. |
| CarbonBlackEEDR.Alert.threat_cause_vector | String | Threat cause vector. |
| CarbonBlackEEDR.Alert.device_username | String | Device username. |
| CarbonBlackEEDR.Alert.workflow.changed_by | String | Alert workflow - changed by. |
@@ -1706,7 +1708,7 @@ RBAC Permissions Required - RBAC Permissions Required: READ
### 33. cb-eedr-process-search
---
-Creates a process search job.
+Creates a process search job and returns the results if the 'polling' argument is true.
##### Required Permissions
RBAC Permissions Required - org.search.events: CREATE
@@ -1733,7 +1735,33 @@ RBAC Permissions Required - org.search.events: CREATE
| --- | --- | --- |
| CarbonBlackEEDR.SearchProcess.job_id | String | The ID of the job found by the search. |
| CarbonBlackEEDR.SearchProcess.status | String | The status of the job found by the search. |
-
+| CarbonBlackEEDR.SearchProcess.results.device_id | Number | The device ID that is guaranteed to be unique within each PSC environment. |
+| CarbonBlackEEDR.SearchProcess.results.process_username | String | The user names related to the process. |
+| CarbonBlackEEDR.SearchProcess.results.backend_timestamp | Date | A date/time field formatted as an ISO-8601 string based on the UTC timezone. For example, device_timestamp:2018-03-14T21:06:45.183Z. |
+| CarbonBlackEEDR.SearchProcess.results.childproc_count | Number | The cumulative count of child-process creations since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.crossproc_count | Number | The cumulative count of cross-process events since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.device_group_id | Number | The ID of the sensor group where the device belongs. |
+| CarbonBlackEEDR.SearchProcess.results.device_name | String | The name of the device. |
+| CarbonBlackEEDR.SearchProcess.results.device_policy_id | Number | The ID of the policy applied to the device. |
+| CarbonBlackEEDR.SearchProcess.results.device_timestamp | Date | The time displayed on the sensor based on the sensor’s clock. The time is an ISO-8601 formatted time string based on the UTC timezone. |
+| CarbonBlackEEDR.SearchProcess.results.enriched | Boolean | True if the process document came from the CBD data stream. |
+| CarbonBlackEEDR.SearchProcess.results.enriched_event_type | String | The CBD enriched event type. |
+| CarbonBlackEEDR.SearchProcess.results.event_type | String | The CBD event type \(valid only for events coming through analytics\). Possible values are: CREATE_PROCESS, DATA_ACCESS, FILE_CREATE, INJECT_CODE, NETWORK, POLICY_ACTION, REGISTRY_ACCESS, and SYSTEM_API_CALL. |
+| CarbonBlackEEDR.SearchProcess.results.filemod_count | Number | The cumulative count of file modifications since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.ingress_time | Date | Unknown |
+| CarbonBlackEEDR.SearchProcess.results.legacy | Boolean | True if the process document came from the legacy data stream \(deprecated, use enriched\). |
+| CarbonBlackEEDR.SearchProcess.results.modload_count | Number | The cumulative count of module loads since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.netconn_count | Number | The cumulative count of network connections since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.org_id | String | The globally unique organization key. This will most likely be the PSC organization ID \+ PSC environment ID or some other unique token used across environments. |
+| CarbonBlackEEDR.SearchProcess.results.parent_guid | String | The process GUID of the parent process. |
+| CarbonBlackEEDR.SearchProcess.results.parent_pid | Number | The PID of the parent process. |
+| CarbonBlackEEDR.SearchProcess.results.process_guid | String | Unique ID of the Solr document. Appears as process_guid \+ server-side timestamp in epoch ms \(1/1/1970 based\). |
+| CarbonBlackEEDR.SearchProcess.results.process_hash | String | The MD5 and SHA256 hashes of the process’s main module in a multi-valued field. |
+| CarbonBlackEEDR.SearchProcess.results.process_name | String | The tokenized file path of the process’s main module. |
+| CarbonBlackEEDR.SearchProcess.results.process_pid | Number | The PID of a process. Can be multi-valued in case of exec/fork on Linux/OSX. |
+| CarbonBlackEEDR.SearchProcess.results.process_username | String | User names related to the process. |
+| CarbonBlackEEDR.SearchProcess.results.regmod_count | Number | The cumulative count of registry modifications since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.scriptload_count | Number | The cumulative count of loaded scripts since process tracking started. |
#### Command Example
```!cb-eedr-process-search process_name="vmtoolsd.exe" limit=10```
@@ -2801,4 +2829,145 @@ Output a list of tags for the provided threat ID.
| **Path** | **Type** | **Description** |
| --- | --- | --- |
| CarbonBlackEEDR.Threat.ThreatID | unknown | Threat ID. |
-| CarbonBlackEEDR.Threat.Tags | unknown | Threat tags. |
\ No newline at end of file
+| CarbonBlackEEDR.Threat.Tags | unknown | Threat tags. |
+### cb-eedr-list-alerts
+
+***
+Returns a list of alerts.
+
+#### Base Command
+
+`cb-eedr-list-alerts`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| minimum_severity | Alert minimum severity (in the range 1-10). | Optional |
+| device_os_version | Device OS version. Supports comma-separated values. | Optional |
+| policy_id | The policy ID. Supports comma-separated values. | Optional |
+| alert_tag | Alert tags. Supports comma-separated values. | Optional |
+| alert_id | Alert ID. Supports comma-separated values. | Optional |
+| device_username | Device username. Supports comma-separated values. | Optional |
+| device_id | Device ID. Supports comma-separated values. | Optional |
+| device_os | Device OS. Supports comma-separated values. | Optional |
+| process_sha256 | Process SHA256. Supports comma-separated values. | Optional |
+| policy_name | Policy name. Supports comma-separated values. | Optional |
+| reputation | Alert reputation. Supports comma-separated values. | Optional |
+| alert_type | Alert type. Supports comma-separated values. | Optional |
+| device_name | A comma-separated list of device names. Examples- "C:\\Users\\example_user\\Example" or "/home/example_user/Example". | Optional |
+| process_name | A comma-separated list of process names. Examples- "C:\\Users\\example_user\\Example" or "/home/example_user/Example". | Optional |
+| sort_field | Field by which to sort the results. Possible values are: first_event_timestamp, last_event_timestamp. Default is first_event_timestamp. | Optional |
+| sort_order | How to order the results. Can be "ASC" (ascending) or "DESC" (descending). Possible values are: ASC, DESC. Default is DESC. | Optional |
+| limit | The maximum number of results to return. Default is 10. | Optional |
+| start_time | Alert start time. | Optional |
+| end_time | Alert end time. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CarbonBlackEEDR.Alert.threat_id | String | Threat ID. |
+| CarbonBlackEEDR.Alert.first_event_time | Date | First event time. |
+| CarbonBlackEEDR.Alert.target_value | String | Alert target value. |
+| CarbonBlackEEDR.Alert.reason | String | Alert reason. |
+| CarbonBlackEEDR.Alert.org_key | String | Organization key. |
+| CarbonBlackEEDR.Alert.device_id | String | Device ID. |
+| CarbonBlackEEDR.Alert.report_id | String | Report ID. |
+| CarbonBlackEEDR.Alert.watchlists.id | String | Watchlist ID. |
+| CarbonBlackEEDR.Alert.watchlists.name | String | Watchlist name. |
+| CarbonBlackEEDR.Alert.device_os_version | String | Device OS version. |
+| CarbonBlackEEDR.Alert.threat_cause_threat_category | String | Threat cause threat category. |
+| CarbonBlackEEDR.Alert.policy_id | String | Policy ID. |
+| CarbonBlackEEDR.Alert.threat_indicators.process_name | String | Threat indicator - process name. |
+| CarbonBlackEEDR.Alert.threat_indicators.sha256 | String | Indicator SHA256 hash. |
+| CarbonBlackEEDR.Alert.threat_cause_actor_sha256 | String | Threat cause actor SHA256. |
+| CarbonBlackEEDR.Alert.device_os | String | Device OS. |
+| CarbonBlackEEDR.Alert.document_guid | String | Document GUID. |
+| CarbonBlackEEDR.Alert.create_time | Date | Alert create time. |
+| CarbonBlackEEDR.Alert.threat_cause_actor_name | String | Threat cause actor name. |
+| CarbonBlackEEDR.Alert.ioc_hit | String | IOC hit. |
+| CarbonBlackEEDR.Alert.threat_cause_reputation | String | Threat cause reputation. |
+| CarbonBlackEEDR.Alert.legacy_alert_id | String | Legacy alert ID. |
+| CarbonBlackEEDR.Alert.device_name | String | Device name. |
+| CarbonBlackEEDR.Alert.report_name | String | Report name. |
+| CarbonBlackEEDR.Alert.policy_name | String | Policy name. |
+| CarbonBlackEEDR.Alert.ioc_field | String | IOC field. |
+| CarbonBlackEEDR.Alert.tags | String | Alert tags. |
+| CarbonBlackEEDR.Alert.process_guid | String | Process GUID. |
+| CarbonBlackEEDR.Alert.threat_cause_actor_md5 | String | Threat cause actor MD5 hash. |
+| CarbonBlackEEDR.Alert.last_update_time | Date | Alert last updated time. |
+| CarbonBlackEEDR.Alert.type | String | Alert type. |
+| CarbonBlackEEDR.Alert.id | String | Alert ID. |
+| CarbonBlackEEDR.Alert.process_name | String | Process name. |
+| CarbonBlackEEDR.Alert.last_event_time | Date | Alert last event time. |
+| CarbonBlackEEDR.Alert.ioc_id | String | IOC ID. |
+| CarbonBlackEEDR.Alert.notes_present | Boolean | Whether notes are present. |
+| CarbonBlackEEDR.Alert.run_state | String | Alert run state. |
+| CarbonBlackEEDR.Alert.severity | Number | Alert severity. |
+| CarbonBlackEEDR.Alert.threat_cause_vector | String | Threat cause vector. |
+| CarbonBlackEEDR.Alert.device_username | String | Device username. |
+| CarbonBlackEEDR.Alert.workflow.changed_by | String | Alert workflow - changed by. |
+| CarbonBlackEEDR.Alert.workflow.comment | String | Alert workflow - comment. |
+| CarbonBlackEEDR.Alert.workflow.last_update_time | Date | Alert workflow - last updated time. |
+| CarbonBlackEEDR.Alert.workflow.remediation | String | Alert workflow - remediation. |
+| CarbonBlackEEDR.Alert.workflow.state | String | Alert workflow state. |
+### cb-eedr-process-search
+
+***
+Creates a process search job and returns the results if the 'polling' argument is true.
+
+#### Base Command
+
+`cb-eedr-process-search`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| process_name | The process name to search. | Optional |
+| process_hash | The process hash to search. | Optional |
+| event_id | The event ID to search. | Optional |
+| limit | The maximum number of rows to return. Default is 20. | Optional |
+| query | A free-style query. For example, "process_name:svchost.exe". | Optional |
+| start_time | First appearance time range (<number> <time unit>, e.g., 1 hour, 30 minutes). Default is 1 day ago. | Optional |
+| end_time | Last appearance time range (<number> <time unit>, e.g., 1 hour, 30 minutes). Default is current time. | Optional |
+| start | Index of first records to fetch. Default is 0. | Optional |
+| job_id | Job ID to retrieve. | Optional |
+| polling | Whether to run the command with polling. Possible values are: true, false. Default is True. | Optional |
+| interval_in_seconds | The time in seconds to wait between polling. Default is 60. | Optional |
+| time_out | The timeout duration in seconds for polling retries. Default is 600. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CarbonBlackEEDR.SearchProcess.job_id | String | The ID of the job found by the search. |
+| CarbonBlackEEDR.SearchProcess.status | String | The status of the job found by the search. |
+| CarbonBlackEEDR.SearchProcess.results.device_id | Number | The device ID that is guaranteed to be unique within each PSC environment. |
+| CarbonBlackEEDR.SearchProcess.results.process_username | String | The user names related to the process. |
+| CarbonBlackEEDR.SearchProcess.results.backend_timestamp | Date | A date/time field formatted as an ISO-8601 string based on the UTC timezone. For example, device_timestamp:2018-03-14T21:06:45.183Z. |
+| CarbonBlackEEDR.SearchProcess.results.childproc_count | Number | The cumulative count of child-process creations since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.crossproc_count | Number | The cumulative count of cross-process events since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.device_group_id | Number | The ID of the sensor group where the device belongs. |
+| CarbonBlackEEDR.SearchProcess.results.device_name | String | The name of the device. |
+| CarbonBlackEEDR.SearchProcess.results.device_policy_id | Number | The ID of the policy applied to the device. |
+| CarbonBlackEEDR.SearchProcess.results.device_timestamp | Date | The time displayed on the sensor based on the sensor’s clock. The time is an ISO-8601 formatted time string based on the UTC timezone. |
+| CarbonBlackEEDR.SearchProcess.results.enriched | Boolean | True if the process document came from the CBD data stream. |
+| CarbonBlackEEDR.SearchProcess.results.enriched_event_type | String | The CBD enriched event type. |
+| CarbonBlackEEDR.SearchProcess.results.event_type | String | The CBD event type \(valid only for events coming through analytics\). Possible values are: CREATE_PROCESS, DATA_ACCESS, FILE_CREATE, INJECT_CODE, NETWORK, POLICY_ACTION, REGISTRY_ACCESS, and SYSTEM_API_CALL. |
+| CarbonBlackEEDR.SearchProcess.results.filemod_count | Number | The cumulative count of file modifications since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.ingress_time | Date | Unknown |
+| CarbonBlackEEDR.SearchProcess.results.legacy | Boolean | True if the process document came from the legacy data stream \(deprecated, use enriched\). |
+| CarbonBlackEEDR.SearchProcess.results.modload_count | Number | The cumulative count of module loads since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.netconn_count | Number | The cumulative count of network connections since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.org_id | String | The globally unique organization key. This will most likely be the PSC organization ID \+ PSC environment ID or some other unique token used across environments. |
+| CarbonBlackEEDR.SearchProcess.results.parent_guid | String | The process GUID of the parent process. |
+| CarbonBlackEEDR.SearchProcess.results.parent_pid | Number | The PID of the parent process. |
+| CarbonBlackEEDR.SearchProcess.results.process_guid | String | Unique ID of the Solr document. Appears as process_guid \+ server-side timestamp in epoch ms \(1/1/1970 based\). |
+| CarbonBlackEEDR.SearchProcess.results.process_hash | String | The MD5 and SHA-256 hashes of the process’s main module in a multi-valued field. |
+| CarbonBlackEEDR.SearchProcess.results.process_name | String | The tokenized file path of the process’s main module. |
+| CarbonBlackEEDR.SearchProcess.results.process_pid | Number | The PID of a process. Can be multi-valued in case of exec/fork on Linux/OSX. |
+| CarbonBlackEEDR.SearchProcess.results.process_username | String | User names related to the process. |
+| CarbonBlackEEDR.SearchProcess.results.regmod_count | Number | The cumulative count of registry modifications since process tracking started. |
+| CarbonBlackEEDR.SearchProcess.results.scriptload_count | Number | The cumulative count of loaded scripts since process tracking started. |
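
For quick reference, a minimal polling invocation composed only of the arguments documented in the Input table above (the values are illustrative and mirror the documented defaults):

```
!cb-eedr-process-search process_name="vmtoolsd.exe" polling=true interval_in_seconds=60 time_out=600
```
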
diff --git a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/command_examples.txt b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/command_examples.txt
index aca3761cc5aa..e1931ce22f90 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/command_examples.txt
+++ b/Packs/CarbonBlackEnterpriseEDR/Integrations/CarbonBlackEnterpriseEDR/command_examples.txt
@@ -5,3 +5,4 @@
!cb-eedr-add-alert-notes alert_id=1bc5dff7-e2fb-f336-7997-277b142c9ec1 notes="xsoar alert notes"
!cb-eedr-add-threat-tags threat_id=fb6a305cd33e6b99b3010d3005f65943 tags=CSIRC-77777
!cb-eedr-get-threat-tags threat_id=fb6a305cd33e6b99b3010d3005f65943
+!cb-eedr-list-alerts
diff --git a/Packs/CarbonBlackEnterpriseEDR/Playbooks/Carbon_Black_EDR_Search_Process.yml b/Packs/CarbonBlackEnterpriseEDR/Playbooks/Carbon_Black_EDR_Search_Process.yml
index a419016d7bf7..8f67d0ff611a 100644
--- a/Packs/CarbonBlackEnterpriseEDR/Playbooks/Carbon_Black_EDR_Search_Process.yml
+++ b/Packs/CarbonBlackEnterpriseEDR/Playbooks/Carbon_Black_EDR_Search_Process.yml
@@ -1,9 +1,10 @@
id: Carbon Black EDR Search Process
version: -1
name: Carbon Black EDR Search Process
+deprecated: true
starttaskid: "0"
description: |-
- Use this playbook to search processes in Carbon Black Enterprise EDR.
+ Deprecated. Use the 'cb-eedr-process-search' command instead.
This playbook implements polling by continuously running the `cb-eedr-process-search-results` command
until the operation completes.
tasks:
@@ -61,6 +62,8 @@ tasks:
simple: ${inputs.process_name}
query:
simple: ${inputs.query}
+ polling:
+ simple: "false"
separatecontext: false
view: |-
{
diff --git a/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.json b/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.json
new file mode 100644
index 000000000000..366deb53092f
--- /dev/null
+++ b/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "***Note:*** Breaking Change - The 'cb-eedr-list-alerts' command no longer supports the 'alert_category', 'workflow', and 'group_results' fields. ***Note:*** Breaking Change - The ***cb-eedr-alert-workflow-update*** command no longer supports free text in *remediation_state* field, it is predefined."
+}
\ No newline at end of file
diff --git a/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.md b/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.md
new file mode 100644
index 000000000000..776b5fba67bf
--- /dev/null
+++ b/Packs/CarbonBlackEnterpriseEDR/ReleaseNotes/1_1_34.md
@@ -0,0 +1,16 @@
+
+#### Integrations
+
+##### Carbon Black Enterprise EDR
+
+- Updated the integration to match the new API version (v7) of Carbon Black.
 Note: Breaking Change - The ***cb-eedr-list-alerts*** command no longer supports the *alert_category*, *workflow*, and *group_results* arguments.
 Note: Breaking Change - The ***cb-eedr-alert-workflow-update*** command no longer supports free text in the *remediation_state* argument; its values are now predefined.
+- Updated the Docker image to demisto/python3:3.10.14.101217.
+
+#### Playbooks
+
+##### Carbon Black EDR Search Process
+
+Deprecated. Use the ***cb-eedr-process-search*** command instead.
+
diff --git a/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_Test.yml b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_Test.yml
index 699f72f39477..c7bb590f37cc 100644
--- a/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_Test.yml
+++ b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_Test.yml
@@ -604,7 +604,6 @@ tasks:
'#none#':
- "20"
scriptarguments:
- alert_category: {}
alert_id: {}
alert_tag: {}
alert_type: {}
@@ -616,7 +615,6 @@ tasks:
device_os_version: {}
device_username: {}
end_time: {}
- group_results: {}
limit:
simple: "5"
minimum_severity: {}
@@ -626,10 +624,9 @@ tasks:
process_sha256: {}
reputation: {}
sort_field:
- simple: first_event_time
+ simple: first_event_timestamp
sort_order: {}
start_time: {}
- workflow: {}
separatecontext: false
view: |-
{
diff --git a/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_alert-workflow-update_Test.yml b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_alert-workflow-update_Test.yml
new file mode 100644
index 000000000000..1bf5ce558e7b
--- /dev/null
+++ b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_alert-workflow-update_Test.yml
@@ -0,0 +1,149 @@
+id: Carbon Black Enterprise EDR alert-workflow-update Test
+version: -1
+name: Carbon Black Enterprise EDR alert-workflow-update Test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: f589cf6f-cb71-483f-8f2e-c654039f1f5b
+ type: start
+ task:
+ id: f589cf6f-cb71-483f-8f2e-c654039f1f5b
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: be2d3bb1-dc2f-44a8-8828-8e504362ed1e
+ type: regular
+ task:
+ id: be2d3bb1-dc2f-44a8-8828-8e504362ed1e
+ version: -1
+ name: Run cb-eedr-alert-workflow-update command
+ description: Updates the workflow of a single alert.
+ script: Carbon Black Enterprise EDR|||cb-eedr-alert-workflow-update
+ type: regular
+ iscommand: true
+ brand: Carbon Black Enterprise EDR
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ alert_id:
+ simple: "2d8edf70-45dc-729a-a5f9-e1303667ac8d\t"
+ comment:
+ simple: alert's status has changed
+ status:
+ simple: OPEN
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 220
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 9e760e62-2349-4917-8260-dd09656bdd7e
+ type: title
+ task:
+ id: 9e760e62-2349-4917-8260-dd09656bdd7e
+ version: -1
+ name: Done
+ tags:
+ - N/A
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 980,
+ "y": 390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 405,
+ "width": 380,
+ "x": 980,
+ "y": 50
+ }
+ }
+ }
+inputs:
+- key: alert_id
+ value:
+ simple: "2d8edf70-45dc-729a-a5f9-e1303667ac8d\t"
+ required: true
+ description: The alert ID.
+ playbookInputQuery:
+- key: status
+ value:
+ simple: OPEN
+ required: false
+ description: ""
+ playbookInputQuery:
+- key: comment
+ value:
+ simple: status has changed
+ required: false
+ description: Comment to add to the alert.
+ playbookInputQuery:
+inputSections:
+- inputs:
+ - alert_id
+ - status
+ - comment
+ name: General (Inputs group)
+ description: Generic group for inputs
+outputSections:
+- outputs: []
+ name: General (Outputs group)
+ description: Generic group for outputs
+outputs: []
+quiet: true
+fromversion: 6.10.0
+description: Test playbook for the 'cb-eedr-alert-workflow-update' command.
diff --git a/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_process_search_with_polling_test.yml b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_process_search_with_polling_test.yml
new file mode 100644
index 000000000000..f087a0b8f351
--- /dev/null
+++ b/Packs/CarbonBlackEnterpriseEDR/TestPlaybooks/Carbon_Black_Enterprise_EDR_process_search_with_polling_test.yml
@@ -0,0 +1,115 @@
+id: Carbon Black Enterprise EDR process search with polling test
+version: -1
+name: Carbon Black Enterprise EDR process search with polling test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 237493a4-d661-494b-8909-42db4c7cc387
+ type: start
+ task:
+ id: 237493a4-d661-494b-8909-42db4c7cc387
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 10
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 5719c8fb-127c-411f-8967-e2e515066d71
+ type: regular
+ task:
+ id: 5719c8fb-127c-411f-8967-e2e515066d71
+ version: -1
+ name: run cb-eedr-process-search command
+ description: Creates a process search job.
+ script: Carbon Black Enterprise EDR|||cb-eedr-process-search
+ type: regular
+ iscommand: true
+ brand: Carbon Black Enterprise EDR
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ process_name:
+ simple: chrome.exe
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 1e384fe6-42de-48f7-80cc-f7fc6bfb007f
+ type: title
+ task:
+ id: 1e384fe6-42de-48f7-80cc-f7fc6bfb007f
+ version: -1
+ name: Done
+ tags:
+ - N/A
+ type: title
+ iscommand: false
+ brand: ""
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 405,
+ "width": 380,
+ "x": 650,
+ "y": 10
+ }
+ }
+ }
+inputs: []
+outputs: []
+quiet: true
+fromversion: 6.10.0
+description: "Test playbook for testing 'cb-eedr-process-search' command with polling: true"
diff --git a/Packs/CarbonBlackEnterpriseEDR/pack_metadata.json b/Packs/CarbonBlackEnterpriseEDR/pack_metadata.json
index ecd53d42a864..438a03fa3ed3 100644
--- a/Packs/CarbonBlackEnterpriseEDR/pack_metadata.json
+++ b/Packs/CarbonBlackEnterpriseEDR/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Carbon Black Cloud Enterprise EDR",
"description": "Advanced threat hunting and incident response solution.",
"support": "xsoar",
- "currentVersion": "1.1.33",
+ "currentVersion": "1.1.34",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -17,4 +17,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/CarbonBlackProtect/.pack-ignore b/Packs/CarbonBlackProtect/.pack-ignore
index 0780739fd8ee..de08adc051db 100644
--- a/Packs/CarbonBlackProtect/.pack-ignore
+++ b/Packs/CarbonBlackProtect/.pack-ignore
@@ -5,7 +5,7 @@ ignore=BA101,PB107
ignore=BA101
[file:CarbonBlackProtect.yml]
-ignore=IN126,IN124
+ignore=IN126
[file:README.md]
ignore=RM104
diff --git a/Packs/CarbonBlackProtect/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml b/Packs/CarbonBlackProtect/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml
index dc5e8f6e549d..3c911c2aa45e 100644
--- a/Packs/CarbonBlackProtect/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml
+++ b/Packs/CarbonBlackProtect/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml
@@ -76,21 +76,21 @@ name: CarbonBlackProtectionV2
script:
commands:
- arguments:
- - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field. Example: group=osShortName'
+ - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field. Example: group=osShortName.'
name: group
- description: '(Int) Is maximum number of results to retrieve. If not specified: First 1000 results will be returned. If set to -1: Only result count will be returned, without actual results. Offset parameter is ignored in this case. If set to 0: All results will be returned. Offset parameter is ignored in this case. Note that some result sets could be very large, resulting in query timeout. Therefore, unless you know that query will not return more than 1000 results, it is recommended to retrieve data in chunks using offset and limit.'
name: limit
- - description: (Int) Offset in data set
+ - description: (Int) Offset in data set.
name: offset
- description: 'A condition contains three parts: name, operator, and value. Name is any valid field in the object that is being queried. Operator (: LIKE, ! NOT LIKE, < Less than, > Greater than, + logical AND, - logical OR, | separating values) is any of valid operators (see below). All operators consist of a single character. Value is compared with operator and depends on field type. See more: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
name: query
- description: 'Sorting is optional and can be defined with a single attribute: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order (if omitted) is ASC. xyz is field name from the result set.'
name: sort
- - description: Name of the file under which this unique hash was first seen
+ - description: Name of the file under which this unique hash was first seen.
name: fileName
- - description: Type of the file
+ - description: Type of the file.
name: fileType
- - description: Id of computer where this file was first seen. You can get this by executing cbp-computer-search command
+ - description: Id of computer where this file was first seen. You can get this by executing cbp-computer-search command.
name: computerId
- auto: PREDEFINED
description: |-
@@ -98,7 +98,7 @@ script:
-1=Unknown
0=Clean
50=Potential risk
- 100=Malicious
+ 100=Malicious.
name: threat
predefined:
- Unknown
@@ -112,7 +112,7 @@ script:
2=Approved
3=Banned
4=Approved by Policy
- 5=Banned by Policy
+ 5=Banned by Policy.
name: fileState
predefined:
- Unapproved
@@ -120,7 +120,7 @@ script:
- Banned
- Approved by Policy
- Banned by Polic
- - description: Hash of the file
+ - description: Hash of the file.
name: hash
description: 'Search for file catalogs. See more: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filecatalog'
name: cbp-fileCatalog-search
@@ -157,17 +157,17 @@ script:
name: query
- description: (Int) Offset in data set.
name: offset
- - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName.'
name: group
- description: 'Sorting is optional and can be defined with a single attribute: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order (if omitted) is ascending (ASC). xyz is field name from the result set.'
name: sort
- description: Maximum number of results to retrieve (Int). If not specified, the first 1000 results will be returned. If set to "-1", only the result count will be returned, without actual results, and the Offset parameter is ignored. If set to "0", all results will be returned, and the Offset parameter is ignored. Some result sets might be very large, resulting in query timeout. Therefore, unless you know that query will not return more than 1000 results, it is recommended to retrieve data in chunks using offset and limit.
name: limit
- - description: Computer name
+ - description: Computer name.
name: name
- - description: Last known IP address of this computer
+ - description: Last known IP address of this computer.
name: ipAddress
- - description: MAC address of adapter used to connect to the CB Protection Server
+ - description: MAC address of adapter used to connect to the CB Protection Server.
name: macAddress
- description: CSV list of fields to limit the fields returned from the console.
name: fields
@@ -185,7 +185,7 @@ script:
description: (Int) Unique computer ID.
name: id
required: true
- - description: (String) Computer name can be changed only if computer is a template
+ - description: (String) Computer name can be changed only if computer is a template.
name: name
- description: (String) Custom computer tag.
name: computerTag
@@ -309,13 +309,13 @@ script:
name: limit
- description: (Int) Offset in the data set.
name: offset
- - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName.'
name: group
- description: 'Sorting is optional and can be defined with a single attribute, where xyz is the field name from the result set: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
- - description: Id of computer associated with this fileInstance
+ - description: Id of computer associated with this fileInstance.
name: computerId
- - description: Name of the file on the agent
+ - description: Name of the file on the agent.
name: fileName
description: 'Search for file instances. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#fileinstance'
name: cbp-fileInstance-search
@@ -342,7 +342,7 @@ script:
name: limit
- description: (Int) Offset in the data set.
name: offset
- - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field, for example: group=osShortName.'
name: group
- description: 'Sorting is optional and can be defined with a single attribute, where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
@@ -356,7 +356,7 @@ script:
4 = Policy Enforcement
5 = Discovery
6 = General Management
- 8 = Internal Events
+ 8 = Internal Events.
name: type
predefined:
- Server Management
@@ -367,11 +367,11 @@ script:
- Discovery
- General Management
- Internal Events
- - description: Id of computer associated with this event. You can get this by executing cbp-computer-search command
+ - description: Id of computer associated with this event. You can get this by executing cbp-computer-search command.
name: computerId
- - description: IP address associated with this event
+ - description: IP address associated with this event.
name: ipAddress
- - description: Name of the file associated with this event
+ - description: Name of the file associated with this event.
name: fileName
- auto: PREDEFINED
description: |-
@@ -381,7 +381,7 @@ script:
4 = Warning
5 = Notice
6 = Info
- 7 = Debug
+ 7 = Debug.
name: severity
predefined:
- Critical
@@ -390,9 +390,9 @@ script:
- Notice
- Info
- Debug
- - description: User name associated with this event
+ - description: User name associated with this event.
name: userName
- - description: Id of fileCatalog entry associated with this fileRule. Can be null if file hasn’t been seen on any endpoints yet. You can get this by executing cbp-fileCatalog-search
+ - description: Id of fileCatalog entry associated with this fileRule. Can be null if file hasn’t been seen on any endpoints yet. You can get this by executing cbp-fileCatalog-search.
name: fileCatalogId
description: 'Search for events. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#event'
name: cbp-event-search
@@ -482,7 +482,7 @@ script:
name: limit
- description: (Int) Offset in the data set.
name: offset
- - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field. Example: group=osShortName'
+ - description: 'Grouping is optional and can be defined with a single attribute: &group=xyz. There can be only one grouping field. Example: group=osShortName.'
name: group
- description: 'Sorting is optional and can be defined with a single attribute, where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
@@ -548,23 +548,23 @@ script:
name: group
- description: 'Sorting is optional and can be defined with a single attribute where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
- - description: Id of fileCatalog entry associated with this fileRule. Can be null if file hasn’t been seen on any endpoints yet. You can get this by executing cbp-fileCatalog-search
+ - description: Id of fileCatalog entry associated with this fileRule. Can be null if file hasn’t been seen on any endpoints yet. You can get this by executing cbp-fileCatalog-search.
name: fileCatalogId
- - description: Name of this rule
+ - description: Name of this rule.
name: name
- auto: PREDEFINED
description: |-
File state for this rule. Can be one of:
1=Unapproved
2=Approved
- 3=Banned
+ 3=Banned.
name: fileState
predefined:
- Unapproved
- Approved
- Banned
- auto: PREDEFINED
- description: "Mechanism that created this rule. Can be one of: \n1 = Manual\n2 = Trusted Directory\n3 = Reputation\n4 = Imported\n5 = External (API)\n6 = Event Rule\n7 = Application Template\n8 = Unified Management"
+ description: "Mechanism that created this rule. Can be one of: \n1 = Manual\n2 = Trusted Directory\n3 = Reputation\n4 = Imported\n5 = External (API)\n6 = Event Rule\n7 = Application Template\n8 = Unified Management."
name: sourceType
predefined:
- Manual
@@ -575,9 +575,9 @@ script:
- Event Rule
- Application Template
- Unified Management
- - description: Hash associated with this rule. Note that hash will be available only if rule was created through md5 or sha-1 hash. If rule was created through fileCatalogId or sha-256 hash that exists in the catalog, this field will be empty
+ - description: Hash associated with this rule. Note that hash will be available only if rule was created through md5 or sha-1 hash. If rule was created through fileCatalogId or sha-256 hash that exists in the catalog, this field will be empty.
name: hash
- - description: File name associated with this rule. Note that file name will be available only if rule was created through file name. If rule was created through fileCatalogId or hash, this field will be empty
+ - description: File name associated with this rule. Note that file name will be available only if rule was created through file name. If rule was created through fileCatalogId or hash, this field will be empty.
name: fileName
description: 'Search for file rules. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
name: cbp-fileRule-search
@@ -640,7 +640,7 @@ script:
type: String
- arguments:
- default: true
- description: (Int) Unique id of this fileRule
+ description: (Int) Unique id of this fileRule.
name: id
required: true
description: 'Deletes the file rule. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
@@ -663,7 +663,7 @@ script:
30=Medium (Prompt Unapproved)
40=Low (Monitor Unapproved)
60=None (Visibility)
- 80=None (Disabled)
+ 80=None (Disabled).
name: enforcementLevel
predefined:
- High (Block Unapproved)
@@ -678,7 +678,7 @@ script:
30=Medium (Prompt Unapproved)
40=Low (Monitor Unapproved)
60=None (Visibility)
- 80=None (Disabled)
+ 80=None (Disabled).
name: disconnectedEnforcementLevel
predefined:
- High (Block Unapproved)
@@ -771,7 +771,7 @@ script:
name: group
- description: 'Sorting is optional and can be defined with a single attribute where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
- - description: Subject name of leaf certificate for this publisher
+ - description: Subject name of leaf certificate for this publisher.
name: name
- auto: PREDEFINED
description: |-
@@ -779,7 +779,7 @@ script:
0=Not trusted (Unknown)
1=Low
2=Medium
- 3=High
+ 3=High.
name: publisherReputation
predefined:
- Not trusted (Unknown)
@@ -793,7 +793,7 @@ script:
2=Approved
3=Banned
4=Approved By Policy
- 5=Banned By Policy
+ 5=Banned By Policy.
name: publisherState
predefined:
- Unapproved
@@ -891,7 +891,7 @@ script:
description: The DBot score vendor.
type: string
- contextPath: DBotScore.Score
- description: The DBot score
+ description: The DBot score.
type: number
- arguments:
- description: (Int) ID of the fileCatalog entry for which analysis is requested. This value can be fetched via cbp-fileCatalog-search command.
@@ -957,12 +957,12 @@ script:
name: group
- description: 'Sorting is optional and can be defined with a single attribute: &sort=xyz [ASC|DESC], where xyz is the field name from the result set. There can be only one sorting field. Default sort order is ascending (ASC). '
name: sort
- - description: Id of fileCatalog entry associated with this analysis. You can get this by executing cbp-fileCatalog-search
+ - description: Id of fileCatalog entry associated with this analysis. You can get this by executing cbp-fileCatalog-search.
name: fileCatalogId
- - description: Id of connector associated with this analysis. You can get this by executing cbp-connector-search
+ - description: Id of connector associated with this analysis. You can get this by executing cbp-connector-search.
name: connectorId
- - description: |
- Name of the file where file exists on the endpoint
+ - description: |-
+ Name of the file where file exists on the endpoint.
name: fileName
- auto: PREDEFINED
description: |-
@@ -972,7 +972,7 @@ script:
2 = processed (file is processed but results are not available yet)
3 = analyzed (file is processed and results are available)
4 = error
- 5 = cancelled
+ 5 = cancelled.
name: analysisStatus
predefined:
- scheduled
@@ -987,7 +987,7 @@ script:
0 = Not yet available
1 = File is clean
2 = File is a potential threat
- 3 = File is malicious
+ 3 = File is malicious.
name: analysisResult
predefined:
- Not yet available
@@ -1142,11 +1142,11 @@ script:
name: group
- description: 'Sorting is optional and can be defined with a single attribute: &sort=xyz [ASC|DESC], where xyz is the field name from the result set. There can be only one sorting field. Default sort order is ascending (ASC).'
name: sort
- - description: Id of computer entry associated with this analysis. This can be fetched via cbp-computer-search
+ - description: Id of computer entry associated with this analysis. This can be fetched via cbp-computer-search.
name: computerId
- - description: Id of fileCatalog entry associated with this upload. This can be fetched via cbp-fileCatalog-search
+ - description: Id of fileCatalog entry associated with this upload. This can be fetched via cbp-fileCatalog-search.
name: fileCatalogId
- - description: Name of the file where file exists on the endpoint
+ - description: Name of the file where file exists on the endpoint.
name: fileName
- auto: PREDEFINED
description: |-
@@ -1157,7 +1157,7 @@ script:
3 = Completed
4 = Error
5 = Cancelled
- 6 = Deleted
+ 6 = Deleted.
name: uploadStatus
predefined:
- Queued
@@ -1272,7 +1272,7 @@ script:
type: String
- arguments:
- default: true
- description: ID of the approval request to update
+ description: ID of the approval request to update.
name: id
required: true
- auto: PREDEFINED
@@ -1285,7 +1285,7 @@ script:
3=Resolved - Rule Change
4=Resolved - Installer
5=Resolved - Updater
6=Resolved - Publisher
- 7=Resolved - Other
+ 7=Resolved - Other.
name: resolution
predefined:
- Rejected
@@ -1317,23 +1317,23 @@ script:
description: Comments added by the user that resolved the request.
type: String
- contextPath: CBP.ApprovalRequest.Resolution
- description: 'Resolution of request. Can be one of: 0=Not Resolved, 1=Rejected, 2=Resolved - Approved, 3=Resolved - Rule Change, 4=Resolved - Installer, 5=Resolved - Updater, 6=Resolved - Publisher, 7=Resolved - Other'
+ description: 'Resolution of request. Can be one of: 0=Not Resolved, 1=Rejected, 2=Resolved - Approved, 3=Resolved - Rule Change, 4=Resolved - Installer, 5=Resolved - Updater, 6=Resolved - Publisher, 7=Resolved - Other.'
type: Number
- contextPath: CBP.ApprovalRequest.Status
- description: 'Request status. Can be one of: 1=New, 2=Open, 3=Closed, 4=Escalated'
+ description: 'Request status. Can be one of: 1=New, 2=Open, 3=Closed, 4=Escalated.'
type: Number
- arguments:
- description: (String) Hash associated with this rule. This parameter is not required if fileCatalogId is supplied.
name: hash
- auto: PREDEFINED
- description: '(Int) File state for this rule. Can be one of: 1=Unapproved 2=Approved 3=Banned'
+ description: '(Int) File state for this rule. Can be one of: 1=Unapproved 2=Approved 3=Banned.'
name: fileState
predefined:
- '1'
- '2'
- '3'
required: true
- - description: (Int) Unique id of this fileRule
+ - description: (Int) Unique id of this fileRule.
name: id
- description: (Int) ID of the fileCatalog entry associated with this fileRule. Can be "0" if creating or modifying the rule based on the hash or file name.
name: fileCatalogId
@@ -1475,7 +1475,7 @@ script:
- contextPath: CBP.FileRule.ReportOnly
description: Is this rule "reporting only" or is it also "enforcing".
type: String
- dockerimage: demisto/python3:3.10.13.72123
+ dockerimage: demisto/python3:3.10.14.99865
subtype: python3
isfetch: true
script: ''
diff --git a/Packs/CarbonBlackProtect/ReleaseNotes/1_0_42.md b/Packs/CarbonBlackProtect/ReleaseNotes/1_0_42.md
new file mode 100644
index 000000000000..d6129a5ec2bb
--- /dev/null
+++ b/Packs/CarbonBlackProtect/ReleaseNotes/1_0_42.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### VMware Carbon Black App Control v2
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/CarbonBlackProtect/doc_files/Carbon_black_Protection_Rapid_IOC_Hunting.png b/Packs/CarbonBlackProtect/doc_files/Carbon_black_Protection_Rapid_IOC_Hunting.png
new file mode 100644
index 000000000000..0f034020884e
Binary files /dev/null and b/Packs/CarbonBlackProtect/doc_files/Carbon_black_Protection_Rapid_IOC_Hunting.png differ
diff --git a/Packs/CarbonBlackProtect/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Protection.png b/Packs/CarbonBlackProtect/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Protection.png
new file mode 100644
index 000000000000..e5ba4937e3a6
Binary files /dev/null and b/Packs/CarbonBlackProtect/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Protection.png differ
diff --git a/Packs/CarbonBlackProtect/pack_metadata.json b/Packs/CarbonBlackProtect/pack_metadata.json
index d098a79ec8ce..9a304530470f 100644
--- a/Packs/CarbonBlackProtect/pack_metadata.json
+++ b/Packs/CarbonBlackProtect/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Carbon Black Enterprise Protection",
"description": "Carbon Black Enterprise Protection is a next-generation endpoint threat prevention solution to deliver a portfolio of protection policies, real-time visibility across environments, and comprehensive compliance rule sets in a single platform.",
"support": "xsoar",
- "currentVersion": "1.0.41",
+ "currentVersion": "1.0.42",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_By_Hash_Carbon_Black_Enterprise_Response.png b/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_By_Hash_Carbon_Black_Enterprise_Response.png
new file mode 100644
index 000000000000..627e7e66bee9
Binary files /dev/null and b/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_By_Hash_Carbon_Black_Enterprise_Response.png differ
diff --git a/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_From_Path_Carbon_Black_Enterprise_Response.png b/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_From_Path_Carbon_Black_Enterprise_Response.png
new file mode 100644
index 000000000000..506889123b02
Binary files /dev/null and b/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_File_Sample_From_Path_Carbon_Black_Enterprise_Response.png differ
diff --git a/Packs/CaseManagement-Generic/ReleaseNotes/1_4_8.md b/Packs/CaseManagement-Generic/ReleaseNotes/1_4_8.md
new file mode 100644
index 000000000000..407aa7c451ad
--- /dev/null
+++ b/Packs/CaseManagement-Generic/ReleaseNotes/1_4_8.md
@@ -0,0 +1,27 @@
+
+#### Scripts
+
+##### TimersOnOwnerChange
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### AddUserToIncidentTeam
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CaseMgmtIncidentTypesDisplay
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CaseMgmtIncidentTypesByRole
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CaseMgmtAnalystTools
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CaseMgmtResponseProcess
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CaseMgmtDisplayLabels
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CompleteTaskOnTimerBreach
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/CaseManagement-Generic/Scripts/AddUserToIncidentTeam/AddUserToIncidentTeam.yml b/Packs/CaseManagement-Generic/Scripts/AddUserToIncidentTeam/AddUserToIncidentTeam.yml
index 052f6c88bf95..fa1807636542 100644
--- a/Packs/CaseManagement-Generic/Scripts/AddUserToIncidentTeam/AddUserToIncidentTeam.yml
+++ b/Packs/CaseManagement-Generic/Scripts/AddUserToIncidentTeam/AddUserToIncidentTeam.yml
@@ -8,8 +8,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: AddUserToIncidentTeam
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtAnalystTools/CaseMgmtAnalystTools.yml b/Packs/CaseManagement-Generic/Scripts/CaseMgmtAnalystTools/CaseMgmtAnalystTools.yml
index f1020519180f..2d9195f56407 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtAnalystTools/CaseMgmtAnalystTools.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtAnalystTools/CaseMgmtAnalystTools.yml
@@ -1,11 +1,11 @@
-comment: |-
- Dynamic display script to display a list of useful Analyst Tools on an Incident layout.
+comment: 'Dynamic display script to display a list of useful Analyst Tools on an Incident layout.
- Create an XSOAR list called "Case Management Analyst Tools", and add a markdown table to provide your own list.
+
+ Create an XSOAR list called "Case Management Analyst Tools", and add a markdown table to provide your own list.'
commonfields:
id: CaseMgmtAnalystTools
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CaseMgmtAnalystTools
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtDisplayLabels/CaseMgmtDisplayLabels.yml b/Packs/CaseManagement-Generic/Scripts/CaseMgmtDisplayLabels/CaseMgmtDisplayLabels.yml
index 94bd2b8a6282..669ddebd08e2 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtDisplayLabels/CaseMgmtDisplayLabels.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtDisplayLabels/CaseMgmtDisplayLabels.yml
@@ -2,7 +2,7 @@ comment: 'Dynamic section that will display the Labels for an Incident in a mark
commonfields:
id: CaseMgmtDisplayLabels
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CaseMgmtDisplayLabels
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.py b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.py
index 0f918fafcc8b..7fcde77bf89f 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.py
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.py
@@ -28,7 +28,7 @@
# for each role the user has, add their types if the role exists in the list
for role in roles:
- if role in role_list.keys():
+ if role in role_list:
allowedTypes.extend(role_list[role])
# remove duplicates
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.yml b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.yml
index 3ac577d64b85..66abf37089d7 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesByRole/CaseMgmtIncidentTypesByRole.yml
@@ -2,7 +2,7 @@ comment: "Restricts the Incident Types a user can create manually, based on thei
commonfields:
id: CaseMgmtIncidentTypesByRole
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CaseMgmtIncidentTypesByRole
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesDisplay/CaseMgmtIncidentTypesDisplay.yml b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesDisplay/CaseMgmtIncidentTypesDisplay.yml
index 6c26b9d6cf71..ea15c4b9d1e3 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesDisplay/CaseMgmtIncidentTypesDisplay.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtIncidentTypesDisplay/CaseMgmtIncidentTypesDisplay.yml
@@ -2,7 +2,7 @@ comment: "Restricts the Incident Types a user can create manually based on an XS
commonfields:
id: CaseMgmtIncidentTypesDisplay
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CaseMgmtIncidentTypesDisplay
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CaseMgmtResponseProcess/CaseMgmtResponseProcess.yml b/Packs/CaseManagement-Generic/Scripts/CaseMgmtResponseProcess/CaseMgmtResponseProcess.yml
index ed919a31029f..86139ccb2d46 100644
--- a/Packs/CaseManagement-Generic/Scripts/CaseMgmtResponseProcess/CaseMgmtResponseProcess.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CaseMgmtResponseProcess/CaseMgmtResponseProcess.yml
@@ -2,7 +2,7 @@ comment: Dynamic display script to display a response process on an Incident lay
commonfields:
id: CaseMgmtResponseProcess
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CaseMgmtResponseProcess
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/CompleteTaskOnTimerBreach/CompleteTaskOnTimerBreach.yml b/Packs/CaseManagement-Generic/Scripts/CompleteTaskOnTimerBreach/CompleteTaskOnTimerBreach.yml
index b052e5177088..bc00cdc80b97 100644
--- a/Packs/CaseManagement-Generic/Scripts/CompleteTaskOnTimerBreach/CompleteTaskOnTimerBreach.yml
+++ b/Packs/CaseManagement-Generic/Scripts/CompleteTaskOnTimerBreach/CompleteTaskOnTimerBreach.yml
@@ -4,8 +4,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CompleteTaskOnTimerBreach
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/Scripts/TimersOnOwnerChange/TimersOnOwnerChange.yml b/Packs/CaseManagement-Generic/Scripts/TimersOnOwnerChange/TimersOnOwnerChange.yml
index eb7e14edef9b..ffabe0017799 100644
--- a/Packs/CaseManagement-Generic/Scripts/TimersOnOwnerChange/TimersOnOwnerChange.yml
+++ b/Packs/CaseManagement-Generic/Scripts/TimersOnOwnerChange/TimersOnOwnerChange.yml
@@ -9,7 +9,7 @@ comment: |-
commonfields:
id: TimersOnOwnerChange
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: TimersOnOwnerChange
runas: DBotWeakRole
diff --git a/Packs/CaseManagement-Generic/pack_metadata.json b/Packs/CaseManagement-Generic/pack_metadata.json
index e1034e0372b2..d84248042850 100644
--- a/Packs/CaseManagement-Generic/pack_metadata.json
+++ b/Packs/CaseManagement-Generic/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CaseManagement-Generic",
"description": "Case Management - Generic\n\nBuilt by the Cortex Customer Success Team to provide quick deployment of Case Management with XSOAR",
"support": "community",
- "currentVersion": "1.4.7",
+ "currentVersion": "1.4.8",
"author": "Cortex XSOAR Customer Success",
"url": "",
"email": "",
diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2.yml b/Packs/Censys/Integrations/CensysV2/CensysV2.yml
index 14e8d84c483a..1a0d8e7e6de7 100644
--- a/Packs/Censys/Integrations/CensysV2/CensysV2.yml
+++ b/Packs/Censys/Integrations/CensysV2/CensysV2.yml
@@ -29,7 +29,8 @@ configuration:
name: premium_access
type: 8
required: false
- section: Connect
+ section: Collect
+ advanced: true
additionalinfo: |-
Censys API provides reputation data exclusively to paid subscribers.
When set to True, the integration will use labels to determine the IP score.
@@ -60,6 +61,7 @@ configuration:
Labels to classify IP as Malicious.
Input can be an array or comma-separated values.
section: Collect
+ advanced: true
- display: IP Suspicious labels
name: suspicious_labels
type: 16
@@ -87,18 +89,21 @@ configuration:
Labels to classify IP as Suspicious.
Input can be an array or comma-separated values.
section: Collect
+ advanced: true
- display: Malicious labels threshold
name: malicious_labels_threshold
type: 0
required: false
additionalinfo: Determines the minimum number of labels returned that are classified as malicious for IP.
section: Collect
+ advanced: true
- display: Suspicious labels threshold
name: suspicious_labels_threshold
type: 0
required: false
additionalinfo: Determines the minimum number of labels returned that are classified as suspicious for IP.
section: Collect
+ advanced: true
- display: Source Reliability
name: integration_reliability
defaultvalue: C - Fairly reliable
@@ -112,7 +117,7 @@ configuration:
- E - Unreliable
- F - Reliability cannot be judged
additionalinfo: Reliability of the source providing the intelligence data.
- section: collect
+ section: Connect
description: Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the internet. Driven by internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, and certificates are configured and deployed.
display: Censys v2
name: CensysV2
diff --git a/Packs/Censys/ReleaseNotes/2_0_30.md b/Packs/Censys/ReleaseNotes/2_0_30.md
new file mode 100644
index 000000000000..7982fd1ff2ca
--- /dev/null
+++ b/Packs/Censys/ReleaseNotes/2_0_30.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Censys v2
+
+- Documentation and metadata improvements.
diff --git a/Packs/Censys/pack_metadata.json b/Packs/Censys/pack_metadata.json
index 2b44b209fb72..36093ec01b28 100644
--- a/Packs/Censys/pack_metadata.json
+++ b/Packs/Censys/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Censys",
"description": "Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the Internet. Driven by Internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, websites, and certificates are configured and deployed.",
"support": "xsoar",
- "currentVersion": "2.0.29",
+ "currentVersion": "2.0.30",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.py b/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.py
index 60f87be4bb6a..fd9529d5ebab 100644
--- a/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.py
+++ b/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.py
@@ -51,10 +51,9 @@ def authenticate_oauth(self):
bearer_token = integration_context.get('bearer_token')
valid_until = integration_context.get('valid_until')
time_now = int(time.time())
- if bearer_token and valid_until:
- if time_now < valid_until:
- # Bearer Token is still valid - did not expire yet
- return bearer_token
+ if bearer_token and valid_until and time_now < valid_until:
+ # Bearer Token is still valid - did not expire yet
+ return bearer_token
response = self.get_token_request()
bearer_token = response.get('access_token')
t = time.time()
@@ -230,10 +229,7 @@ def fetch_set_details(client: Client, set_details_list):
centrify_setdetails_response = client.request_set_details(url_suffix=urlSuffix, data=payload)
centrify_setdetails_response = centrify_setdetails_response.get('Result').get('Results')
for set_item in centrify_setdetails_response:
- if 'Description' not in set_item['Row']:
- set_description = ""
- else:
- set_description = set_item['Row']['Description']
+ set_description = set_item['Row'].get('Description', '')
set_details_list.append({'SetName': set_item['Row']['Name'], 'SetID': set_item['Row']['ID'],
'SetDescription': set_description})
return set_details_list
@@ -419,7 +415,7 @@ def fetch_secrets(args: dict, client: Client):
else:
folder_id = ""
secret_ids_list = fetch_secretids_folder(client, folder_id, secret_ids_list, True)
- secret_list = list()
+ secret_list = []
for secret_id in secret_ids_list:
secret_list.append(fetch_secret(client, secret_id, secret_name, True))
secret_list = list(filter(None, secret_list))
@@ -492,8 +488,8 @@ def create_vault_secret(args: dict, client: Client):
folder_name = args.get('holderName')
folder_id = fetch_secret_folder_id(client, folder_name)
else:
- setId_list = list()
- set_name_list = list()
+ setId_list = []
+ set_name_list = []
if ';' in str(args.get('holderName')):
set_name_list = str(args.get('holderName')).split(';')
for set_item in set_name_list:
@@ -681,7 +677,7 @@ def delete_vault_secret(args: dict, client: Client):
def delete_vault_secretid(args: dict, client: Client):
try:
secret_id = args.get('secretId')
- delete_secret_id_list = list()
+ delete_secret_id_list = []
delete_secret_id_list.append(fetch_secret(client, secret_id, None, None))
delete_secret(client, secret_id)
if delete_secret_id_list:
diff --git a/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.yml b/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.yml
index 19ca593d5c90..038295bda2d9 100644
--- a/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.yml
+++ b/Packs/CentrifyVault/Integrations/CentrifyVault/CentrifyVault.yml
@@ -227,15 +227,15 @@ script:
description: '"Yes" if you want to delete all the secrets having same name in all the subfolders recursively. "No" if you want do not want to delete the secret in the subfolders.'
name: recursiveDelete
predefined:
- - "Yes"
- - "No"
+ - 'Yes'
+ - 'No'
required: true
- auto: PREDEFINED
description: '"Yes" if you want to delete the secret having the provided secretname as a part of the Secret. "No" if you want to delete the secret with the exact name match. Ex: Demisto* will delete all secrets like Demisto_1, Demisto_pwd, Demisto. '
name: matchPartOfSecret
predefined:
- - "Yes"
- - "No"
+ - 'Yes'
+ - 'No'
required: true
description: 'Delete Secret from the Centrify Vault. Please note: Enabling "recursiveDelete" to "Yes" will delete all secrets if there multiple secrets with same name in subfolders.'
name: centrify-delete-secret
@@ -273,7 +273,7 @@ script:
required: true
description: Delete set from the Centrify Vault
name: centrify-delete-set
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: ''
subtype: python3
diff --git a/Packs/CentrifyVault/ReleaseNotes/1_0_8.md b/Packs/CentrifyVault/ReleaseNotes/1_0_8.md
new file mode 100644
index 000000000000..8e5259e808c9
--- /dev/null
+++ b/Packs/CentrifyVault/ReleaseNotes/1_0_8.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Centrify Vault
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/CentrifyVault/pack_metadata.json b/Packs/CentrifyVault/pack_metadata.json
index edb67bc0f8ec..663b599081da 100644
--- a/Packs/CentrifyVault/pack_metadata.json
+++ b/Packs/CentrifyVault/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Centrify Vault",
"description": "Centrify Vault integration to create/fetch/delete secrets/folders/sets.",
"support": "community",
- "currentVersion": "1.0.7",
+ "currentVersion": "1.0.8",
"author": "prashasthbaliga",
"url": "",
"email": "",
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
index 36dcc14ba689..928a4824ef4c 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
@@ -11,10 +11,18 @@ configuration:
options:
- https://smart-api-production-1-us.avanan.net
- https://smart-api-production-1-eu.avanan.net
+ - https://smart-api-production-5-ap.avanan.net
- https://smart-api-production-1-ca.avanan.net
- - https://cloudinfra-gw.portal.checkpoint.com
+ - https://smart-api-production-1-euw2.avanan.net
+ - https://smart-api-production-1-mec1.avanan.net
+ - https://smart-api-production-1-aps1.avanan.net
- https://cloudinfra-gw-us.portal.checkpoint.com
+ - https://cloudinfra-gw.portal.checkpoint.com
+ - https://cloudinfra-gw.ca.portal.checkpoint.com
- https://cloudinfra-gw.ap.portal.checkpoint.com
+ - https://cloudinfra-gw.uk.portal.checkpoint.com
+ - https://cloudinfra-gw.me.portal.checkpoint.com
+ - https://cloudinfra-gw.in.portal.checkpoint.com
required: true
- section: Collect
display: Fetch incidents
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
index 5c5f4288f7e9..4efbed4ebf99 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
@@ -2,15 +2,23 @@
To set up a Check Point HEC instance, please provide the SMART API url based on your portal's region:
-* US: https://smart-api-production-1-us.avanan.net
-* EU: https://smart-api-production-1-eu.avanan.net
-* CA: https://smart-api-production-1-ca.avanan.net
+* USA: https://smart-api-production-1-us.avanan.net
+* Europe: https://smart-api-production-1-eu.avanan.net
+* Australia: https://smart-api-production-5-ap.avanan.net
+* Canada: https://smart-api-production-1-ca.avanan.net
+* United Kingdom (UK): https://smart-api-production-1-euw2.avanan.net
+* United Arab Emirates (UAE): https://smart-api-production-1-mec1.avanan.net
+* India: https://smart-api-production-1-aps1.avanan.net
You can also use Check Point Infinity API Credentials instead of the SMART API. To do so, please provide one of the following urls based on your region:
-* EU: https://cloudinfra-gw.portal.checkpoint.com
-* US: https://cloudinfra-gw-us.portal.checkpoint.com
-* AU: https://cloudinfra-gw.ap.portal.checkpoint.com
+* USA: https://cloudinfra-gw-us.portal.checkpoint.com
+* Europe: https://cloudinfra-gw.portal.checkpoint.com
+* Australia: https://cloudinfra-gw.ap.portal.checkpoint.com
+* Canada: https://cloudinfra-gw.ca.portal.checkpoint.com
+* United Kingdom (UK): https://cloudinfra-gw.uk.portal.checkpoint.com
+* United Arab Emirates (UAE): https://cloudinfra-gw.me.portal.checkpoint.com
+* India: https://cloudinfra-gw.in.portal.checkpoint.com
If you have several portals in different regions, you will need to use an instance per region
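+
+For illustration only, a minimal Python sketch (not part of the integration; the region keys are assumptions based on the list above) of picking the SMART API URL for a portal's region:
+
+```python
+SMART_API_URLS = {
+    "USA": "https://smart-api-production-1-us.avanan.net",
+    "Europe": "https://smart-api-production-1-eu.avanan.net",
+    "Australia": "https://smart-api-production-5-ap.avanan.net",
+    "Canada": "https://smart-api-production-1-ca.avanan.net",
+    "United Kingdom (UK)": "https://smart-api-production-1-euw2.avanan.net",
+    "United Arab Emirates (UAE)": "https://smart-api-production-1-mec1.avanan.net",
+    "India": "https://smart-api-production-1-aps1.avanan.net",
+}
+
+# Configure one instance per region, using the matching URL.
+print(SMART_API_URLS["Europe"])
+```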
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
index 476ad138e534..97611ae7cdc0 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
@@ -1,5 +1,5 @@
The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc.
-This integration was integrated and tested with version 1.1.2 of CheckPointHEC
+This integration was integrated and tested with version 1.1.3 of CheckPointHEC
## Configure Check Point Harmony Email and Collaboration (HEC) on Cortex XSOAR
diff --git a/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md b/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md
new file mode 100644
index 000000000000..3606865a1994
--- /dev/null
+++ b/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Check Point Harmony Email and Collaboration (HEC)
+
+- Updated the URL options in the Connect section of the integration configuration to support additional regions.
+
diff --git a/Packs/CheckPointHEC/pack_metadata.json b/Packs/CheckPointHEC/pack_metadata.json
index 28887ac66bb1..04166aaee2f3 100644
--- a/Packs/CheckPointHEC/pack_metadata.json
+++ b/Packs/CheckPointHEC/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Check Point Harmony Email and Collaboration (HEC)",
"description": "The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc.",
"support": "partner",
- "currentVersion": "1.1.2",
+ "currentVersion": "1.1.3",
"author": "Check Point Harmony Email & Collaboration (HEC)",
"url": "https://supportcenter.checkpoint.com/",
"email": "EmailSecurity_Support@checkpoint.com",
diff --git a/Packs/CheckPointHarmonyEndpoint/.pack-ignore b/Packs/CheckPointHarmonyEndpoint/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CheckPointHarmonyEndpoint/.secrets-ignore b/Packs/CheckPointHarmonyEndpoint/.secrets-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.py b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.py
new file mode 100644
index 000000000000..5fd248ff79d9
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.py
@@ -0,0 +1,2796 @@
+import dataclasses
+import http
+from typing import Tuple
+
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+@dataclasses.dataclass
+class ScheduleCommandMetadata:
+ """Schedule commands metadata."""
+
+ outputs_prefix: str
+ message: str = ""
+ headers: list[str] = dataclasses.field(default_factory=list)
+
+ def format_message(self, id):
+ self.message = self.message.format(id=id)
+
+
+REGISTRY_VALUE_TYPE_MAP = {
+ "DWORD (REG_DWORD)": "DWORD",
+ "STRING (REG_GZ)": "STRING",
+}
+
+MIN_PAGE_NUM = 1
+MAX_PAGE_SIZE = 50
+MIN_PAGE_SIZE = 1
+MAX_LIMIT = 50
+MIN_LIMIT = 1
+DEFAULT_HEADERS = [
+ "machine_id",
+ "machine_name",
+ "operation_status",
+ "operation_response_output",
+ "operation_response_status",
+]
+
+DEFAULT_FILTER_TYPE = "Contains"
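+# Appended to the per-command messages defined in SCHEDULED_COMMANDS_MAPPER below.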
+DEFAULT_SUFFIX_MESSAGE = "was added to the push operation list successfully."
+
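+# Maps argument-style filter names (e.g. "computer_ids") to the corresponding API column names (e.g. "computerId").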
+COLUMN_NAMES_MAPPER = {
+ "computer_ids": "computerId",
+ "computer_names": "computerName",
+ "computer_ips": "computerIP",
+ "computer_types": "computerType",
+ "computer_deployment_statuses": "computerDeploymentStatus",
+}
+COLUMN_NAMES = [
+ "emonJsonDataColumns",
+ "computerId",
+ "computerName",
+ "computerDeploymentStatus",
+ "computerIP",
+ "computerClientVersion",
+ "osName",
+ "osVersion",
+ "daInstalled",
+ "computerDeployTime",
+ "computerDeployTimeFrom",
+ "computerDeployTimeTo",
+ "computerLastErrorCode",
+ "computerLastErrorDescription",
+ "computerLastConnection",
+ "computerLastConnectionFrom",
+ "computerLastConnectionTo",
+ "computerSyncedonTo",
+ "computerSyncedon",
+ "computerSyncedonFrom",
+ "syncedOn",
+ "computerLastLoggedInUser",
+ "computerLastLoggedInPrebootUser",
+ "computerFdeStatus",
+ "computerFdeVersion",
+ "computerFdeLastRecoveryDate",
+ "computerFdeLastRecoveryDateFrom",
+ "computerFdeLastRecoveryDateTo",
+ "computerFdeWilWolStatus",
+ "computerFdeTpmId",
+ "computerFdeTpmStatus",
+ "computerFdeTpmVersion",
+ "computerFdeWilWolStatusUpdatedOn",
+ "computerFdeWilWolStatusUpdatedOnFrom",
+ "computerFdeWilWolStatusUpdatedOnTo",
+ "computerFdeProgress",
+ "computerFdeProgressFrom",
+ "computerFdeProgressTo",
+ "computerType",
+ "endpointType",
+ "amUpdatedOn",
+ "amUpdatedOnFrom",
+ "amUpdatedOnTo",
+ "amStatus",
+ "isolated",
+ "isDeleted",
+ "complianceStatus",
+ "amUpdatedIntervalStatus",
+ "isInDomain",
+ "domainName",
+ "o_x",
+ "devices_emon_status_data_selector",
+ "deleted_devices_emon_status_data_selector",
+ "computerAmDatVersion",
+ "computerAmDatDate",
+ "computerAmDatDateFrom",
+ "computerAmDatDateTo",
+ "computerAmLicExpirationDate",
+ "computerAmProviderBrandReport",
+ "computerAmLicExpirationDateFrom",
+ "computerAmLicExpirationDateTo",
+ "computerAmTotalInfected",
+ "computerAmTotalInfectedFrom",
+ "computerAmTotalInfectedTo",
+ "computerAmInfections",
+ "computerSdPackageName",
+ "computerSdPolicyName",
+ "computerSdPolicyVersion",
+ "computerAbState",
+ "computerAbStatusBotNames",
+ "computerAmScannedon",
+ "computerAmScannedonFrom",
+ "computerAmScannedonTo",
+ "computerAmTotalQuarantined",
+ "computerAmTotalQuarantinedFrom",
+ "computerAmTotalQuarantinedTo",
+ "computerLastContactedPolicyServerIp",
+ "computerLastContactedPolicyServerName",
+ "computerSdPackageVersion",
+ "computerComplianceViolationIds",
+ "computerSmartCardStatus",
+ "fdeRemoteUnlockOperation",
+ "fdeRemoteUnlockStatus",
+ "computerCanonicalName",
+ "stoppedBlades",
+ "enforcedModifiedOn",
+ "enforcedPolicyMalware20",
+ "enforcedPolicyTe130",
+ "enforcedPolicyEfr120",
+ "enforcedPolicyAntibot100",
+ "enforcedPolicyMe30",
+ "enforcedPolicyFdeDevice35",
+ "enforcedPolicyFdeUser36",
+ "enforcedPolicyFw10",
+ "enforcedPolicyCompliance60",
+ "enforcedPolicyApplicationControl22",
+ "enforcedPolicySaAccessZones11",
+ "enforcedPolicyCommonClientSettings51",
+ "enforcedPolicyDocSecPolicy91",
+ "enforcedVersionPolicyMalware20",
+ "enforcedVersionPolicyTe130",
+ "enforcedVersionPolicyEfr120",
+ "enforcedVersionPolicyAntibot100",
+ "enforcedVersionPolicyMe30",
+ "enforcedVersionPolicyFdeDevice35",
+ "enforcedVersionPolicyFdeUser36",
+ "enforcedVersionPolicyFw10",
+ "enforcedVersionPolicyCompliance60",
+ "enforcedVersionPolicyApplicationControl22",
+ "enforcedVersionPolicySaAccessZones11",
+ "enforcedVersionPolicyCommonClientSettings51",
+ "enforcedVersionPolicyDocSecPolicy91",
+ "enforcedNamePolicyMalware20",
+ "enforcedNamePolicyTe130",
+ "enforcedNamePolicyEfr120",
+ "enforcedNamePolicyAntibot100",
+ "enforcedNamePolicyMe30",
+ "enforcedNamePolicyFdeDevice35",
+ "enforcedNamePolicyFdeUser36",
+ "enforcedNamePolicyFw10",
+ "enforcedNamePolicyCompliance60",
+ "enforcedNamePolicyApplicationControl22",
+ "enforcedNamePolicySaAccessZones11",
+ "enforcedNamePolicyCommonClientSettings51",
+ "enforcedNamePolicyDocSecPolicy91",
+ "deployedModifiedOn",
+ "deployedPolicyMalware20",
+ "deployedPolicyTe130",
+ "deployedPolicyEfr120",
+ "deployedPolicyAntibot100",
+ "deployedPolicyMe30",
+ "deployedPolicyFdeDevice35",
+ "deployedPolicyFdeUser36",
+ "deployedPolicyFw10",
+ "deployedPolicyCompliance60",
+ "deployedPolicyApplicationControl22",
+ "deployedPolicySaAccessZones11",
+ "deployedPolicyCommonClientSettings51",
+ "deployedPolicyDocSecPolicy91",
+ "deployedVersionPolicyMalware20",
+ "deployedVersionPolicyTe130",
+ "deployedVersionPolicyEfr120",
+ "deployedVersionPolicyAntibot100",
+ "deployedVersionPolicyMe30",
+ "deployedVersionPolicyFdeDevice35",
+ "deployedVersionPolicyFdeUser36",
+ "deployedVersionPolicyFw10",
+ "deployedVersionPolicyCompliance60",
+ "deployedVersionPolicyApplicationControl22",
+ "deployedVersionPolicySaAccessZones11",
+ "deployedVersionPolicyCommonClientSettings51",
+ "deployedVersionPolicyDocSecPolicy91",
+ "deployedNamePolicyMalware20",
+ "deployedNamePolicyTe130",
+ "deployedNamePolicyEfr120",
+ "deployedNamePolicyAntibot100",
+ "deployedNamePolicyMe30",
+ "deployedNamePolicyFdeDevice35",
+ "deployedNamePolicyFdeUser36",
+ "deployedNamePolicyFw10",
+ "deployedNamePolicyCompliance60",
+ "deployedNamePolicyApplicationControl22",
+ "deployedNamePolicySaAccessZones11",
+ "deployedNamePolicyCommonClientSettings51",
+ "deployedNamePolicyDocSecPolicy91",
+ "computerCpuLoadCategory",
+ "computerTotalCpuLoadCategory",
+ "computerCpuRank",
+ "computerTotalCpuRank",
+ "computerGroups",
+ "computerOrUsers",
+ "computerInactiveCapabilities",
+ "filterAndThoseComputers",
+ "filterAndThoseComputersOrUsers",
+ "filterComplianceStatus",
+ "computerFreeSearch",
+ "computerEnforcedInstalledPolicyName",
+ "computerEnforcedInstalledPolicyVersion",
+ "computerStoppedBlades",
+ "Is_Device_In_Group",
+ "global",
+ "permission",
+]
+FILTER_TYPES = [
+ "Contains",
+ "StartsWith",
+ "EndsWith",
+ "Exact",
+ "Grater",
+ "Smaller",
+ "BitOr",
+ "BitAnd",
+ "IsNull",
+ "NotNull",
+ "Not",
+ "JsonbExact",
+ "JsonbContainsAnd",
+ "JsonbContainsOr",
+ "NestedJsonbContainsAnd",
+ "NestedJsonbContainsOr",
+ "NestedJsonbExactAnd",
+ "NestedJsonbExactOr",
+ "NestedJsonbDateRange",
+ "ArrayContains",
+ "Between",
+]
+SCHEDULED_COMMANDS_MAPPER = {
+ "harmony-ep-policy-rule-install": ScheduleCommandMetadata(
+ outputs_prefix="PolicyRuleInstall", message="Policies have been installed successfully."
+ ),
+ "harmony-ep-policy-rule-modifications-get": ScheduleCommandMetadata(
+ outputs_prefix="Rule",
+ message="Rule {id} modification:",
+ headers=["id", "name", "family", "connectionState", "lastModifiedBy", "job_id"],
+ ),
+ "harmony-ep-policy-rule-metadata-list": ScheduleCommandMetadata(
+ outputs_prefix="Rule", message="Rule metadata list:"
+ ),
+ "harmony-ep-push-operation-status-list": ScheduleCommandMetadata(
+ outputs_prefix="PushOperation",
+ message="Push operations status list:",
+ headers=["id", "comment", "type", "createdOn", "overallStatus"],
+ ),
+ "harmony-ep-push-operation-get": ScheduleCommandMetadata(
+ outputs_prefix="PushOperation",
+ message="Push operations:",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-push-operation-abort": ScheduleCommandMetadata(
+ outputs_prefix="PushOperationAbort",
+ message=f"Remediation operation abort {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-remediation-computer-isolate": ScheduleCommandMetadata(
+ outputs_prefix="ComputerIsolate.PushOperation",
+ message=f"Remediation isolate {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-remediation-computer-deisolate": ScheduleCommandMetadata(
+ outputs_prefix="ComputerDeisolate.PushOperation",
+ message=f"Remediation de-isolate {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-anti-malware-scan": ScheduleCommandMetadata(
+ outputs_prefix="AntiMalwareScan.PushOperation",
+ message=f"Anti-Malware scan {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-anti-malware-update": ScheduleCommandMetadata(
+ outputs_prefix="AntiMalwareUpdate.PushOperation",
+ message=f"Anti-Malware Signature Database update {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-anti-malware-restore": ScheduleCommandMetadata(
+ outputs_prefix="AntiMalwareRestore.PushOperation",
+ message=f"File restore {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-forensics-indicator-analyze": ScheduleCommandMetadata(
+ outputs_prefix="IndicatorAnalyze.PushOperation",
+ message=f"IOC analyze {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-forensics-file-quarantine": ScheduleCommandMetadata(
+ outputs_prefix="FileQuarantine.PushOperation",
+ message=f"File quarantine {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-forensics-file-restore": ScheduleCommandMetadata(
+ outputs_prefix="FileRestore.PushOperation",
+ message=f"File restore {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-computer-list": ScheduleCommandMetadata(
+ outputs_prefix="Computer",
+ message="Computer list:",
+ headers=[
+ "id",
+ "name",
+ "ip",
+ "type",
+ "groups",
+ "user_name",
+ "client_version",
+ ],
+ ),
+ "harmony-ep-agent-computer-restart": ScheduleCommandMetadata(
+ outputs_prefix="ComputerReset.PushOperation",
+        message=f"Computer restart {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-computer-shutdown": ScheduleCommandMetadata(
+ outputs_prefix="ComputerShutdown.PushOperation",
+        message=f"Computer shutdown {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-computer-repair": ScheduleCommandMetadata(
+ outputs_prefix="ComputerRepair.PushOperation",
+        message=f"Computer repair {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-process-information-get": ScheduleCommandMetadata(
+ outputs_prefix="ProcessInformation.PushOperation",
+        message=f"Process information fetch {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-process-terminate": ScheduleCommandMetadata(
+ outputs_prefix="ProcessTerminate.PushOperation",
+        message=f"Process terminate {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-registry-key-add": ScheduleCommandMetadata(
+ outputs_prefix="RegistryKeyAdd.PushOperation",
+        message=f"Registry key addition {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-registry-key-delete": ScheduleCommandMetadata(
+ outputs_prefix="RegistryKeyDelete.PushOperation",
+        message=f"Registry key delete {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-file-copy": ScheduleCommandMetadata(
+ outputs_prefix="FileCopy.PushOperation",
+        message=f"File copy {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-file-move": ScheduleCommandMetadata(
+ outputs_prefix="FileMove",
+        message=f"File move {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-file-delete": ScheduleCommandMetadata(
+ outputs_prefix="FileDelete.PushOperation",
+        message=f"File delete {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-vpn-site-add": ScheduleCommandMetadata(
+ outputs_prefix="VPNsiteConfigurationAdd.PushOperation",
+        message=f"VPN site configuration addition {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+ "harmony-ep-agent-vpn-site-remove": ScheduleCommandMetadata(
+ outputs_prefix="VPNsiteConfigurationRemove.PushOperation",
+        message=f"VPN site configuration remove {DEFAULT_SUFFIX_MESSAGE}",
+ headers=DEFAULT_HEADERS,
+ ),
+}
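+# Illustrative usage (assumption, not executed here): a command name resolves to its
+# ScheduleCommandMetadata, whose message template can be filled with a concrete id, e.g.
+#   metadata = SCHEDULED_COMMANDS_MAPPER["harmony-ep-policy-rule-modifications-get"]
+#   metadata.format_message(id="17")  # metadata.message becomes "Rule 17 modification:"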
+
+
+class Client(BaseClient):
+ URL_PREFIX = "app/endpoint-web-mgmt/harmony/endpoint/api/v1"
+
+ def __init__(
+ self,
+ base_url: str,
+ client_id: str,
+ client_secret: str,
+ verify_certificate: bool,
+ proxy: bool,
+ ):
+ self.token: str
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.base_url = base_url
+
+ super().__init__(
+ base_url=base_url,
+ verify=verify_certificate,
+ proxy=proxy,
+ headers={},
+ )
+
+ def get_token(self):
+ """Get temporary authentication token from CheckPoint.
+ This token will expire 30 minutes from its generation time."""
+ self._headers = {}
+ self._session.cookies.clear()
+
+ response = self._http_request(
+ method="POST",
+ url_suffix="auth/external",
+ json_data={"clientId": self.client_id, "accessKey": self.client_secret},
+ )
+ try:
+ self.token = response["data"]["token"]
+        except (KeyError, TypeError) as exc:
+ raise DemistoException(f"Authentication failed: token not found. {exc}")
+
+ def login(self):
+ """Login to Harmony with the generated temporary token and get new token for HarmonyEP."""
+ self._session.cookies.clear()
+ self._headers["Authorization"] = f"Bearer {self.token}"
+ self._base_url = urljoin(self.base_url, self.URL_PREFIX)
+
+ try:
+ response = self._http_request(
+ method="POST",
+ url_suffix="/session/login/cloud",
+ )
+ self._headers["x-mgmt-api-token"] = response["apiToken"]
+
+ except DemistoException as exc:
+ if (
+ exc.res is not None
+ and exc.res.status_code == http.HTTPStatus.BAD_REQUEST
+ ):
+ raise DemistoException(
+ f"Authentication failed: cookie not found. {exc}"
+                )
+            raise
+
+ def job_status_get(self, job_id: str) -> dict[str, Any]:
+ """Get job status and data by ID.
+
+ Args:
+ job_id (str): The job ID.
+
+ Returns:
+ dict[str,Any]: API response.
+ """
+
+ return self._http_request(
+ method="GET",
+ url_suffix=f"/jobs/{job_id}",
+ )
+
+ def ioc_list(
+ self,
+ page: int,
+ page_size: int,
+        ioc_filter: str | None = None,
+        field: str | None = None,
+        sort_direction: str | None = None,
+ ) -> dict[str, Any]:
+ """Fetch IOCs list.
+
+ Args:
+            page (int): Index of the page to return.
+            page_size (int): Size of the page to return.
+            ioc_filter (str, optional): The indicator value or comment to search for. Defaults to None.
+            field (str, optional): The Indicator of Compromise field to sort by. Defaults to None.
+            sort_direction (str, optional): The direction in which to sort the results. Defaults to None.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "off"
+ data = remove_empty_elements(
+ {
+ "filter": ioc_filter,
+ "page": page,
+ "size": page_size,
+ "sort": [{"field": field, "direction": sort_direction}],
+ }
+ )
+ return self._http_request(method="POST", url_suffix="/ioc/get", json_data=data)
+
+ def ioc_update(
+ self,
+ ioc_type: str,
+ value: str,
+ comment: str,
+ ioc_id: str,
+ ) -> dict[str, Any]:
+ """Update IOC by ID.
+
+ Args:
+ ioc_type (str): The IOC type to update.
+ value (str): The IOC value to update.
+ comment (str): The IOC comment to update.
+ ioc_id (str): The ID of the IOC to update.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "off"
+ return self._http_request(
+ method="PUT",
+ url_suffix="/ioc/edit",
+ json_data=[
+ {"comment": comment, "id": ioc_id, "type": ioc_type, "value": value}
+ ],
+ )
+
+ def ioc_create(
+ self,
+ ioc_type: str | None = None,
+ value: str | None = None,
+ comment: str | None = None,
+ ) -> dict[str, Any]:
+ """Create an IOC.
+
+ Args:
+ ioc_type (str): The IOC type.
+ value (str): The IOC value.
+ comment (str): The IOC comment.
+
+ Returns:
+ dict[str,Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "off"
+
+ return self._http_request(
+ method="POST",
+ url_suffix="/ioc/create",
+ json_data=[{"comment": comment, "type": ioc_type, "value": value}],
+ )
+
+ def ioc_delete(
+ self,
+ delete_all: bool,
+ ioc_ids: str | None,
+ ) -> dict[str, Any]:
+ """Delete IOCs by IDs or delete all IOCs.
+
+ Args:
+            delete_all (bool): Whether to delete all IOCs.
+            ioc_ids (str): The IOC IDs to delete. Ignored when delete_all is True.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "off"
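+        # Either delete every stored IOC via /ioc/delete/all, or only the given IDs via the "ids" query parameter.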
+ url = "/ioc/delete/all" if delete_all else f"/ioc/delete?ids={ioc_ids}"
+ return self._http_request(
+ method="DELETE",
+ url_suffix=url,
+ )
+
+ def rule_assignments_get(
+ self,
+ rule_id: str,
+ ) -> dict[str, Any]:
+ """Gets all entities directly assigned to the given rule.
+
+ Args:
+ rule_id (str): The rule ID.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+
+ self._headers["x-mgmt-run-as-job"] = "off"
+
+ return self._http_request(
+ "GET",
+ f"/policy/{rule_id}/assignments",
+ )
+
+ def rule_assignments_add(
+ self, rule_id: str, entities_ids: list[str]
+ ) -> dict[str, Any]:
+ """Assigns the specified entities to the given rule.
+ Specified IDs that are already assigned to the rule are ignored.
+
+ Args:
+ rule_id (str): The ID of the rule to add assignments to.
+ entities_ids (list[str]): The entities IDs to assign.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+
+ self._headers["x-mgmt-run-as-job"] = "off"
+
+ return self._http_request(
+ "PUT",
+ f"/policy/{rule_id}/assignments/add",
+ json_data=entities_ids,
+ )
+
+ def rule_assignments_remove(
+ self, rule_id: str, entities_ids: list[str]
+ ) -> dict[str, Any]:
+ """Removes the specified entities from the given rule's assignments.
+ Specified IDs that are not assigned to the rule are ignored.
+
+ Args:
+ rule_id (str): The ID of the rule to remove assignments from.
+ entities_ids (list[str]): The entities IDs to remove.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+
+ self._headers["x-mgmt-run-as-job"] = "off"
+
+ return self._http_request(
+ "PUT",
+ f"/policy/{rule_id}/assignments/remove",
+ json_data=entities_ids,
+ )
+
+ def rule_policy_install(self) -> dict[str, Any]:
+ """Installs all policies. If a rule ID is specified,
+ only the policies associated with that rule will be installed.
+
+ Args:
+ rule_id (str, optional): The ID of the rule. Defaults to None.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/policy/install",
+ )
+
+ def rule_modifications_get(
+ self,
+ rule_id: str,
+ ) -> dict[str, Any]:
+ """Gets information on modifications to a given rule
+ (modifications are the addition or removal of assignments on a rule since it was last installed).
+
+ Args:
+ rule_id (str): The rule ID.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+ return self._http_request(
+ "GET",
+ f"/policy/{rule_id}/modifications",
+ )
+
+ def rule_metadata_list(
+ self,
+ rule_id: str | None = None,
+ rule_family: str | None = None,
+ connection_state: str | None = None,
+ ) -> list[dict[str, Any]]:
+ """Gets the metadata of all rules or the given rule's metadata
+        (Metadata refers to all information relating to the rule except its actual settings).
+
+ Args:
+ rule_id (str): The rule ID.
+ rule_family (str): An optional filter.
+ Used to filter the results to only the selected capability family (e.g. only 'Threat Prevention').
+ connection_state (str): An optional filter. Used to filter the results to only
+ the selected Connection State (e.g. only rules pertaining to policies for 'Connected' clients).
+
+ Returns:
+ list[dict[str,Any]]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "off"
+ params = {"ruleFamily": rule_family, "connectionState": connection_state}
+
+ return self._http_request(
+ "GET",
+ (f"/policy/{rule_id}/metadata" if rule_id else "/policy/metadata"),
+ params=params,
+ )
+
+ def push_operation_status_list(
+ self, remediation_operation_id: str | None
+ ) -> dict[str, Any]:
+ """Gets the current statuses of all remediation operations or if a specific ID is specified,
+        retrieves the current status of the given remediation operation.
+
+ Args:
+ remediation_operation_id (str): Remediation operations ID.
+
+ Returns:
+ Dict[str,Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "GET",
+ (
+ f"/remediation/{remediation_operation_id}/status"
+ if remediation_operation_id
+ else "/remediation/status"
+ ),
+ )
+
+ def push_operation_get(
+ self,
+ remediation_operation_id: str,
+ filter_text: str | None = None,
+ new_page: int | None = None,
+ new_page_size: int | None = None,
+ ) -> dict[str, Any]:
+ """Gets the results of a given Remediation Operation. Remediation Operations may produce results
+        such as a Forensics Report or yield status updates such as an Anti-Malware scan progress.
+
+        Args:
+            remediation_operation_id (str): Remediation operation ID.
+            filter_text (str, optional): Free-text filter applied to the results. Defaults to None.
+            new_page (int, optional): Results page offset. Defaults to None.
+            new_page_size (int, optional): Results page size. Defaults to None.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ f"/remediation/{remediation_operation_id}/results/slim",
+ json_data=remove_empty_elements(
+ {
+ "filters": {"freeText": filter_text},
+ "paging": {"pageSize": new_page_size, "offset": new_page},
+ }
+ ),
+ )
+
+ def push_operation_abort(self, remediation_operation_id: str) -> dict[str, Any]:
+ """Aborts the given Remediation Operation.
+ Aborting an operation prevents it from being sent to further Harmony Endpoint Clients.
+ Clients that have already received the operation are not affected.
+
+ Args:
+ remediation_operation_id (str): Remediation operation ID.
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ f"/remediation/{remediation_operation_id}/abort",
+ )
+
+ def anti_malware_scan(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Performs an Anti-Malware scan on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+ return self._http_request(
+ "POST",
+ "/remediation/anti-malware/scan",
+ json_data=request_body,
+ )
+
+ def anti_malware_update(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Updates the Anti-Malware Signature Database on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+ return self._http_request(
+ "POST",
+ "/remediation/anti-malware/update",
+ json_data=request_body,
+ )
+
+ def anti_malware_restore(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Restores a file that was previously quarantined by the Harmony Endpoint Client's Anti-Malware capability.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/anti-malware/restore",
+ json_data=request_body,
+ )
+
+ def indicator_analyze(
+ self,
+ indicator_type: str,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Collects forensics data whenever a computer that matches the given query
+ accesses or executes the given IP, URL, file name, MD5, or path.
+
+        Args:
+            indicator_type (str): The indicator type (IP, URL, file name, MD5, or path).
+            request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+
+ self._headers["x-mgmt-run-as-job"] = "on"
+ return self._http_request(
+ "POST",
+ f"/remediation/forensics/analyze-by-indicator/{indicator_type.lower()}",
+ json_data=request_body,
+ )
+
+ def file_quarantine(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Quarantines files given by path or MD5 or detections relating to a forensic incident.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/forensics/file/quarantine",
+ json_data=request_body,
+ )
+
+ def file_restore(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Restores previously quarantined files given by path or MD5 or detections relating to a forensic incident.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/forensics/file/restore",
+ json_data=request_body,
+ )
+
+ def remediation_computer_isolate(
+ self, request_body: dict[str, Any]
+ ) -> dict[str, Any]:
+ """Isolates the computers matching the given query. Isolation is the act of denying all
+ network access from a given computer.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/isolate",
+ json_data=request_body,
+ )
+
+ def remediation_computer_deisolate(
+ self, request_body: dict[str, Any]
+ ) -> dict[str, Any]:
+ """De-Isolates the computers matching the given query. De-isolating a computer restores
+ its access to network resources.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/de-isolate",
+ json_data=request_body,
+ )
+
+ def computer_restart(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Restarts computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/reset-computer",
+ json_data=request_body,
+ )
+
+ def computer_shutdown(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Shuts-down computers match the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/shutdown-computer",
+ json_data=request_body,
+ )
+
+ def computer_repair(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Repairs the Harmony Endpoint Client installation on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/repair-computer",
+ json_data=request_body,
+ )
+
+ def computer_list(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Gets a list of computers matching the given filters.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/asset-management/computers/filtered",
+ json_data=request_body,
+ )
+
+ def process_information_get(self, request_body: dict[str, Any]) -> dict[str, Any]:
+ """Collects information about processes on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/process/information",
+ json_data=request_body,
+ )
+
+ def process_terminate(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Terminates the given process on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/process/terminate",
+ json_data=request_body,
+ )
+
+ def agent_registry_key_add(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Adds a given registry key and/or value to the registry of computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/registry/key/add",
+ json_data=request_body,
+ )
+
+ def agent_registry_key_delete(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Removes the given registry key or value to the registry of computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+ return self._http_request(
+ "POST",
+ "/remediation/agent/registry/key/delete",
+ json_data=request_body,
+ )
+
+ def agent_file_copy(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Copies the given file from the given source to the given destination on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/file/copy",
+ json_data=request_body,
+ )
+
+ def agent_file_move(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Moves the given file from the given source to the given destination on computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/file/move",
+ json_data=request_body,
+ )
+
+ def agent_file_delete(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Deletes the given file from the given source on computers matching the given query.
+ This operation is risky! Use with caution as it allows you to change Harmony Endpoint protected
+ files or registry entries that are in use by your operating system.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/file/delete",
+ json_data=request_body,
+ )
+
+ def agent_vpn_site_add(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Adds the given VPN Site's configuration to computers matching the given query.
+ Adding a VPN Site allows Harmony Endpoint Clients to connect to it.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/vpn/site/add",
+ json_data=request_body,
+ )
+
+ def agent_vpn_site_remove(
+ self,
+ request_body: dict[str, Any],
+ ) -> dict[str, Any]:
+ """Removes the given VPN Site's configuration to computers matching the given query.
+
+ Args:
+ request_body (dict[str, Any]): The request body for the API request (query computers).
+
+ Returns:
+ dict[str, Any]: API response.
+ """
+ self._headers["x-mgmt-run-as-job"] = "on"
+
+ return self._http_request(
+ "POST",
+ "/remediation/agent/vpn/site/remove",
+ json_data=request_body,
+ )
+
+
+def job_status_get_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Get job status and data by ID.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ response = client.job_status_get(job_id=args.get("job_id", ""))
+
+ return CommandResults(
+ outputs_prefix="HarmonyEP.Job",
+ outputs_key_field="id",
+ outputs=response,
+ raw_response=response,
+ )
+
+
+def ioc_list_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Fetch IOCs list.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ new_page, new_page_size, pagination_message = get_pagination_args(args)
+
+ response = client.ioc_list(
+ page=new_page,
+ page_size=new_page_size,
+ ioc_filter=args.get("filter"),
+ field=args.get("field"),
+ sort_direction=args.get("sort_direction"),
+ )
+
+ for ioc in response["content"]:
+ ioc["modifiedOn"] = convert_unix_to_date_string(ioc["modifiedOn"])
+
+ readable_output = tableToMarkdown(
+ name="IOC List:",
+ metadata=pagination_message,
+ t=response["content"],
+ headers=["id", "type", "value", "comment", "modifiedOn"],
+ headerTransform=string_to_table_header,
+ )
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix="HarmonyEP.IOC",
+ outputs_key_field="id",
+ outputs=response["content"],
+ raw_response=response,
+ )
+
+
+def ioc_update_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Update IOC by ID.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ ioc_id = args.get("ioc_id", "")
+
+ response = client.ioc_update(
+ ioc_type=args.get("type", ""),
+ value=args.get("value", ""),
+ comment=args.get("comment", ""),
+ ioc_id=ioc_id,
+ )
+ readable_output = tableToMarkdown(
+ name=f"IOC {ioc_id} was updated successfully.",
+ t=response,
+ headers=["id", "type", "value", "comment", "modifiedOn"],
+ headerTransform=string_to_table_header,
+ )
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix="HarmonyEP.IOC",
+ outputs_key_field="id",
+ outputs=response,
+ raw_response=response,
+ )
+
+
+def ioc_create_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Create new IOC.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ client.ioc_create(
+ ioc_type=args.get("type"),
+ value=args.get("value"),
+ comment=args.get("comment"),
+ )
+ return CommandResults(readable_output="IOC was created successfully.")
+
+
+def ioc_delete_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Delete IOCs by IDs or delete all IOCs.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ ioc_ids = args.get("ids")
+ delete_all = argToBoolean(args.get("delete_all"))
+
+ client.ioc_delete(ioc_ids=ioc_ids, delete_all=delete_all)
+
+ return CommandResults(
+ readable_output=(
+ "All IOCs were deleted successfully."
+ if delete_all
+ else f"IOCs {ioc_ids} was deleted successfully."
+ )
+ )
+
+
+def rule_assignments_get_command(
+ args: dict[str, Any], client: Client
+) -> CommandResults:
+ """Gets all entities directly assigned to the given rule.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ rule_id = args.get("rule_id", "")
+
+ response = client.rule_assignments_get(
+ rule_id=rule_id,
+ )
+ output = {"id": rule_id, "assignments": response}
+ readable_output = tableToMarkdown(
+ name=f"Rule {rule_id} assignments:",
+ t=response,
+ headers=["id", "name", "type"],
+ headerTransform=string_to_table_header,
+ )
+
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix="HarmonyEP.Rule",
+ outputs_key_field="id",
+ outputs=output,
+ raw_response=response,
+ )
+
+
+def rule_assignments_add_command(
+ args: dict[str, Any], client: Client
+) -> CommandResults:
+ """Assigns the specified entities to the given rule. Specified IDs that are already assigned to the rule are ignored.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ rule_id = args.get("rule_id", "")
+ entities_ids = argToList(args.get("entities_ids"))
+
+ client.rule_assignments_add(rule_id=rule_id, entities_ids=entities_ids)
+ return CommandResults(
+ readable_output=f"Entities {entities_ids} were assigned to rule {rule_id} successfully."
+ )
+
+
+def rule_assignments_remove_command(
+ args: dict[str, Any], client: Client
+) -> CommandResults:
+ """Removes the specified entities from the given rule's assignments.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+ rule_id = args.get("rule_id", "")
+ entities_ids = argToList(args.get("entities_ids"))
+
+ client.rule_assignments_remove(rule_id=rule_id, entities_ids=entities_ids)
+ return CommandResults(
+ readable_output=f"Entities {entities_ids} were removed from rule {rule_id} successfully."
+ )
+
+
+@polling_function(
+ name="harmony-ep-policy-rule-install",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Policy installation request is executing",
+ requires_polling_arg=False,
+)
+def rule_policy_install_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Installs all policies. If a rule ID is specified, only the policies associated with that rule will be installed.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ response = client.rule_policy_install()
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-policy-rule-install")
+
+
+@polling_function(
+ name="harmony-ep-policy-rule-modifications-get",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Fetch rule modifications request is executing",
+ requires_polling_arg=False,
+)
+def rule_modifications_get_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Gets information on modifications to a given rule (modifications are the addition or
+ removal of assignments on a rule since it was last installed).
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ rule_id = args.get("rule_id", "")
+ SCHEDULED_COMMANDS_MAPPER[
+ "harmony-ep-policy-rule-modifications-get"
+ ].format_message(rule_id)
+
+ if not args.get("job_id"):
+ response = client.rule_modifications_get(rule_id=rule_id)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-policy-rule-modifications-get")
+
+
+def rule_metadata_list_command(args: dict[str, Any], client: Client) -> CommandResults:
+ """Gets the metadata of all rules or the given rule's metadata
+ (Metadata refers to all information relating to the rule except its actual settings).
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ CommandResults: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ rule_id = args.get("rule_id")
+ rule_family = args.get("rule_family")
+ connection_state = args.get("connection_state")
+ limit = arg_to_number(args.get("limit"))
+ all_results = argToBoolean(args.get("all_results"))
+
+ response = client.rule_metadata_list(rule_id, rule_family, connection_state)
+
+ if not rule_id and not all_results:
+ response = response[:limit]
+
+ readable_output = tableToMarkdown(
+ name="Rule metadata List:" if not rule_id else f"Rule {rule_id} metadata:",
+ metadata=f"Showing {len(response)} items." if not rule_id else None,
+ t=response,
+ headers=[
+ "id",
+ "name",
+ "family",
+ "comment",
+ "orientation",
+ "connectionState",
+ "assignments",
+ ],
+ headerTransform=string_to_table_header,
+ )
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix="HarmonyEP.Rule",
+ outputs_key_field="id",
+ outputs=response,
+ raw_response=response,
+ )
+
+
+@polling_function(
+ name="harmony-ep-push-operation-status-list",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Fetch remediation status list request is executing",
+ requires_polling_arg=False,
+)
+def push_operation_status_list_command(
+ args: dict[str, Any], client: Client
+) -> PollResult:
+ """Gets the current statuses of all remediation operations or if a specific ID is specified,
+ retrieve the current status of the given remediation operation.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ if not args.get("job_id"):
+ remediation_operation_id = args.get("remediation_operation_id")
+ response = client.push_operation_status_list(
+ remediation_operation_id=remediation_operation_id
+ )
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-push-operation-status-list")
+
+
+@polling_function(
+ name="harmony-ep-push-operation-get",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Fetch remediation request is executing",
+ requires_polling_arg=False,
+)
+def push_operation_get_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Gets the results of a given Remediation Operation.
+ Remediation Operations may produce results such as a Forensics Report or yield status
+ updates such as Anti-Malware scan progress.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ if not args.get("job_id"):
+ new_page, new_page_size, _ = get_pagination_args(args)
+
+ response = client.push_operation_get(
+ remediation_operation_id=args.get("remediation_operation_id", ""),
+ filter_text=args.get("filter_text"),
+ new_page=new_page,
+ new_page_size=new_page_size,
+ )
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-push-operation-get")
+
+
+@polling_function(
+ name="harmony-ep-push-operation-abort",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Remediation operation abort request is executing",
+ requires_polling_arg=False,
+)
+def push_operation_abort_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Aborts the given Remediation Operation.
+ Aborting an operation prevents it from being sent to further Harmony Endpoint Clients.
+ Clients that have already received the operation are not affected.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ remediation_operation_id = args.get("remediation_operation_id", "")
+ SCHEDULED_COMMANDS_MAPPER["harmony-ep-push-operation-abort"].message = (
+ f"Remediation operation {remediation_operation_id} was aborted successfully."
+ )
+
+ response = client.push_operation_abort(
+ remediation_operation_id=remediation_operation_id
+ )
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-push-operation-abort")
+
+
+@polling_function(
+ name="harmony-ep-anti-malware-scan",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Anti malware scan request is executing",
+ requires_polling_arg=False,
+)
+def anti_malware_scan_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Performs an Anti-Malware scan on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ response = client.anti_malware_scan(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-anti-malware-scan")
+
+
+@polling_function(
+ name="harmony-ep-anti-malware-update",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Anti malware update request is executing",
+ requires_polling_arg=False,
+)
+def anti_malware_update_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Updates the Anti-Malware Signature Database on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "updateFromEpServer": arg_to_bool(args.get("update_from_ep_server")),
+ "updateFromCpServer": arg_to_bool(args.get("update_from_cp_server")),
+ }
+
+ response = client.anti_malware_update(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-anti-malware-update")
+
+
+@polling_function(
+ name="harmony-ep-anti-malware-restore",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Anti malware restore request is executing",
+ requires_polling_arg=False,
+)
+def anti_malware_restore_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Restores a file that was previously quarantined by the Harmony Endpoint Client's Anti-Malware capability.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {"files": argToList(args.get("files"))}
+
+ response = client.anti_malware_restore(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-anti-malware-restore")
+
+
+@polling_function(
+ name="harmony-ep-forensics-indicator-analyze",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Indicator analyze request is executing",
+ requires_polling_arg=False,
+)
+def indicator_analyze_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Collects forensics data whenever a computer that matches the given query accesses
+ or executes the given IP, URL, file name, MD5, or path.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "generateActivityLogs": arg_to_bool(args.get("generate_activity_logs")),
+ "indicator": args.get("indicator_value"),
+ }
+
+ response = client.indicator_analyze(
+ indicator_type=args.get("indicator_type", ""),
+ request_body=request_body,
+ )
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-forensics-indicator-analyze")
+
+
+@polling_function(
+ name="harmony-ep-forensics-file-quarantine",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="File quarantine request is executing",
+ requires_polling_arg=False,
+)
+def file_quarantine_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Quarantines files given by path or MD5 or detections relating to a forensic incident.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "items": {
+ "type": args.get("file_type"),
+ "value": args.get("file_value"),
+ }
+ }
+
+ response = client.file_quarantine(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-forensics-file-quarantine")
+
+
+@polling_function(
+ name="harmony-ep-forensics-file-restore",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="File restore request is executing",
+ requires_polling_arg=False,
+)
+def file_restore_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Restores previously quarantined files given by path or MD5 or detections relating to a forensic incident.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "items": {
+ "type": args.get("file_type"),
+ "value": args.get("file_value"),
+ }
+ }
+
+ response = client.file_restore(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-forensics-file-restore")
+
+
+@polling_function(
+ name="harmony-ep-remediation-computer-isolate",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer isolate request is executing",
+ requires_polling_arg=False,
+)
+def remediation_computer_isolate_command(
+ args: dict[str, Any], client: Client
+) -> PollResult:
+ """Isolates the computers matching the given query. Isolation is the act of denying all network access from a given computer.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ response = client.remediation_computer_isolate(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-remediation-computer-isolate")
+
+
+@polling_function(
+ name="harmony-ep-remediation-computer-deisolate",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer de-isolate request is executing",
+ requires_polling_arg=False,
+)
+def remediation_computer_deisolate_command(
+ args: dict[str, Any], client: Client
+) -> PollResult:
+ """De-Isolates the computers matching the given query. De-isolating a computer restores its access to network resources.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ response = client.remediation_computer_deisolate(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-remediation-computer-deisolate")
+
+
+@polling_function(
+ name="harmony-ep-agent-computer-restart",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer restart request is executing",
+ requires_polling_arg=False,
+)
+def computer_restart_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Restarts computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "forceAppsShutdown": args.get("force_apps_shutdown"),
+ }
+ response = client.computer_restart(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-computer-restart")
+
+
+@polling_function(
+ name="harmony-ep-agent-computer-shutdown",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer shutdown request is executing",
+ requires_polling_arg=False,
+)
+def computer_shutdown_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Shuts-down computers match the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "forceAppsShutdown": args.get("force_apps_shutdown"),
+ }
+ response = client.computer_shutdown(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-computer-shutdown")
+
+
+@polling_function(
+ name="harmony-ep-agent-computer-repair",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer repair request is executing",
+ requires_polling_arg=False,
+)
+def computer_repair_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Repairs the Harmony Endpoint Client installation on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ response = client.computer_repair(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-computer-repair")
+
+
+@polling_function(
+ name="harmony-ep-computer-list",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Computer list fetch request is executing",
+ requires_polling_arg=False,
+)
+def computer_list_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Gets a list of computers matching the given filters.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ new_page, new_page_size, _ = get_pagination_args(args)
+ request_body = {
+ "filters": extract_query_filter(args),
+ "paging": {"pageSize": new_page_size, "offset": new_page},
+ }
+
+ response = client.computer_list(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-computer-list")
+
+
+@polling_function(
+ name="harmony-ep-agent-process-information-get",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Process information fetch request is executing",
+ requires_polling_arg=False,
+)
+def process_information_get_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Collects information about processes on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "processName": args.get("process_name"),
+ "additionalFields": argToList(args.get("additional_fields")),
+ }
+ response = client.process_information_get(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-process-information-get")
+
+
+@polling_function(
+ name="harmony-ep-agent-process-terminate",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Process terminate request is executing",
+ requires_polling_arg=False,
+)
+def process_terminate_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Terminates the given process on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "name": args.get("name"),
+ "pid": arg_to_number(args.get("pid")),
+ "terminateAllInstances": arg_to_bool(args.get("terminate_all_instances")),
+ }
+ response = client.process_terminate(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-process-terminate")
+
+
+@polling_function(
+ name="harmony-ep-agent-registry-key-add",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Registry key add request is executing",
+ requires_polling_arg=False,
+)
+def agent_registry_key_add_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Adds a given registry key and/or value to the registry of computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "hive": args.get("hive"),
+ "key": args.get("key"),
+ "valueName": args.get("value_name"),
+ "valueType": REGISTRY_VALUE_TYPE_MAP[args.get("value_type", "")],
+ "valueData": args.get("value_data"),
+ "isRedirected": arg_to_bool(args.get("is_redirected")),
+ }
+ response = client.agent_registry_key_add(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-registry-key-add")
+
+
+@polling_function(
+ name="harmony-ep-agent-registry-key-delete",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Registry key remove request is executing",
+ requires_polling_arg=False,
+)
+def agent_registry_key_delete_command(
+ args: dict[str, Any], client: Client
+) -> PollResult:
+ """Removes the given registry key or value to the registry of computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "hive": args.get("hive"),
+ "key": args.get("key"),
+ "valueName": args.get("value_name"),
+ "isRedirected": arg_to_bool(args.get("is_redirected")),
+ }
+ response = client.agent_registry_key_delete(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-registry-key-delete")
+
+
+@polling_function(
+ name="harmony-ep-agent-file-copy",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="File copy request is executing",
+ requires_polling_arg=False,
+)
+def agent_file_copy_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Copies the given file from the given source to the given destination on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "sourceAbsolutePath": args.get("destination_absolute_path"),
+ "destinationAbsolutePath": args.get("source_absolute_path"),
+ }
+ response = client.agent_file_copy(remove_empty_elements(request_body))
+
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-file-copy")
+
+
+@polling_function(
+ name="harmony-ep-agent-file-move",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="File move request is executing",
+ requires_polling_arg=False,
+)
+def agent_file_move_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Moves the given file from the given source to the given destination on computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "sourceAbsolutePath": args.get("destination_absolute_path"),
+ "destinationAbsolutePath": args.get("source_absolute_path"),
+ }
+ response = client.agent_file_move(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-file-move")
+
+
+@polling_function(
+ name="harmony-ep-agent-file-delete",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="File delete request is executing",
+ requires_polling_arg=False,
+)
+def agent_file_delete_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Deletes the given file from the given source on computers matching the given query.
+ This operation is risky! Use with caution as it allows you to change Harmony Endpoint protected
+ files or registry entries that are in use by your operating system.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "targetAbsolutePath": args.get("target_absolute_path"),
+ }
+ response = client.agent_file_delete(request_body)
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-file-delete")
+
+
+@polling_function(
+ name="harmony-ep-agent-vpn-site-add",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Add VPN Site's configuration request is executing",
+ requires_polling_arg=False,
+)
+def agent_vpn_site_add_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Adds the given VPN Site's configuration to computers matching the given query.
+ Adding a VPN Site allows Harmony Endpoint Clients to connect to it.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "remoteAccessGatewayName": args.get("remote_access_gateway_name"),
+ "fingerprint": args.get("fingerprint"),
+ "authentication": {"method": args.get("authentication_method")},
+ "host": args.get("host"),
+ "displayName": args.get("display_name"),
+ }
+ response = client.agent_vpn_site_add(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-vpn-site-add")
+
+
+@polling_function(
+ name="harmony-ep-agent-vpn-site-remove",
+ interval=arg_to_number(demisto.args().get("interval", 30)),
+ timeout=arg_to_number(demisto.args().get("timeout", 600)),
+ poll_message="Remove VPN Site's configuration request is executing",
+ requires_polling_arg=False,
+)
+def agent_vpn_site_remove_command(args: dict[str, Any], client: Client) -> PollResult:
+ """Removes the given VPN Site's configuration to computers matching the given query.
+
+ Args:
+ client (Client): Harmony API client.
+ args (dict): Command arguments from XSOAR.
+
+ Returns:
+ PollResult: outputs, readable outputs and raw response for XSOAR.
+ """
+ if not args.get("job_id"):
+ request_body = build_request_body(args)
+ request_body["operationParameters"] |= {
+ "displayName": args.get("display_name"),
+ }
+ response = client.agent_vpn_site_remove(remove_empty_elements(request_body))
+ args["job_id"] = response.get("jobId")
+
+ return schedule_command(args, client, "harmony-ep-agent-vpn-site-remove")
+
+
+def test_module(client: Client) -> str:
+ """
+ Tests API connectivity and authentication by fetching a single IOC.
+
+ Args:
+ client: Client object.
+
+ Returns:
+ str: 'ok' if the test passed, otherwise an appropriate error message.
+ """
+ try:
+ client.ioc_list(0, 1)
+ except DemistoException as exc:
+ if exc.res is not None and (
+ exc.res.status_code == http.HTTPStatus.UNAUTHORIZED
+ or exc.res.status_code == http.HTTPStatus.FORBIDDEN
+ ):
+ return "Authorization Error: Invalid URL or credentials."
+ raise exc
+
+ return "ok"
+
+
+# Helper Commands #
+
+
+def schedule_command(
+ args: dict[str, Any], client: Client, command_name: str
+) -> PollResult:
+ """Build scheduled command in case:
+ - Job state is not 'DONE'
+ - Job state is 'DONE' but the API response data is a remediation operation ID.
+
+ Args:
+ client (Client): Harmony Endpoint API client.
+ args (dict[str, Any]): Command arguments from XSOAR.
+ command_name (str): The name of the command being scheduled.
+
+ Returns:
+ PollResult: Command, args, timeout and interval for CommandResults.
+ """
+ if last_job_id := dict_safe_get(get_integration_context(), ["job_id"]):
+ args["job_id"] = last_job_id
+
+ command_results: CommandResults = job_status_get_command(args, client)
+ sample_state = dict_safe_get(command_results.raw_response, ["status"])
+
+ if sample_state == "DONE":
+ command_data = dict_safe_get(command_results.raw_response, ["data"])
+
+ # Check if data is a remediation operation ID
+ # If so, fetch another job ID for the push operation data
+ if isinstance(command_data, str):
+ response = client.push_operation_get(command_data)
+
+ # Save new schedule arguments in integration context
+ # (because the second run uses new arguments and/or values)
+ set_integration_context(
+ {"job_id": response["jobId"], "remediation_operation_id": command_data}
+ )
+ return PollResult(
+ response=command_results,
+ continue_to_poll=True,
+ args_for_next_run=args,
+ )
+ else:
+ updated_command_readable_output, updated_command_response = (
+ prepare_command_output_and_readable_output(
+ command_data=command_data,
+ command_name=command_name,
+ job_id=args["job_id"],
+ )
+ )
+
+ command_results.readable_output = get_readable_output(
+ command_name=command_name,
+ updated_command_data=updated_command_readable_output,
+ args=args,
+ )
+
+ clear_integration_context()
+
+ return PollResult(
+ response=update_command_results(
+ command_name=command_name,
+ updated_command_response=updated_command_response,
+ command_results=command_results,
+ ),
+ continue_to_poll=False,
+ )
+
+ if sample_state == "FAILED":
+ clear_integration_context()
+ # If the job did not succeed, raise the error
+ raise DemistoException(
+ f"Executing {args['job_id']} for Harmony Endpoint failed. Error: {command_results.raw_response}"
+ )
+
+ return PollResult(
+ response=command_results,
+ continue_to_poll=True,
+ args_for_next_run=args,
+ )
+
+
+def update_command_results(
+ command_name: str,
+ updated_command_response: dict | list,
+ command_results: CommandResults,
+) -> CommandResults:
+ """Update the command results for schedule commands.
+
+ Args:
+ command_name (str): The command name.
+ updated_command_response (dict | list): The updated command response.
+ command_results (CommandResults): The existing command results.
+
+ Returns:
+ CommandResults: The updated command results.
+ """
+ command_results.raw_response = updated_command_response
+ command_results.outputs = updated_command_response
+ command_results.outputs_key_field = "job_id"
+ command_results.outputs_prefix = (
+ f"HarmonyEP.{SCHEDULED_COMMANDS_MAPPER[command_name].outputs_prefix}"
+ )
+
+ return command_results
+
+
+def get_readable_output(
+ command_name: str,
+ updated_command_data: list | dict[str, Any] | None,
+ args: dict[str, Any],
+) -> Any:
+ """Get readable output for schedule command.
+
+ Args:
+ command_name (str): The command name.
+ updated_command_data (list | dict[str, Any]): The updated command data.
+ args (dict[str, Any]): Command arguments.
+
+ Returns:
+ Any: tableToMarkdown object.
+ """
+ _, page_size, pagination_message = get_pagination_args(args)
+ if page_size:
+ SCHEDULED_COMMANDS_MAPPER[command_name].message += f"\n\n{pagination_message}"
+
+ return tableToMarkdown(
+ name=SCHEDULED_COMMANDS_MAPPER[command_name].message,
+ t=remove_empty_elements(updated_command_data),
+ headers=SCHEDULED_COMMANDS_MAPPER[command_name].headers,
+ headerTransform=string_to_table_header,
+ removeNull=True,
+ )
+
+
+def prepare_command_output_and_readable_output(
+ command_data: dict[str, Any],
+ command_name: str,
+ job_id: str,
+) -> tuple[dict | list | None, dict | list]:
+ """Prepare the command output and readable output according the API response type.
+
+ Args:
+ command_data (dict[str, Any]): The command data.
+ command_name (str): The command name.
+ job_id (str): The job ID.
+
+ Returns:
+ tuple: The command output and readable output for the command results.
+ """
+ SCHEDULED_COMMANDS_MAPPER[command_name].message += f"\nJob ID: {job_id}"
+
+ # Check if the API response is empty
+ if not command_data:
+ return None, {"job_id": job_id}
+
+ # Check if the API returned computer data
+ if computer_list := dict_safe_get(command_data, ["computers"]):
+ return prepare_computer_list_output_and_readable_output(
+ computers_data=computer_list,
+ job_id=job_id,
+ )
+
+ # Check if the API returned a push operation list
+ if SCHEDULED_COMMANDS_MAPPER[command_name].headers == DEFAULT_HEADERS:
+ return prepare_push_operation_output_and_readable_output(
+ command_data=dict_safe_get(
+ dict_object=command_data,
+ keys=["data"],
+ default_return_value=command_data,
+ ),
+ job_id=job_id,
+ )
+
+ if isinstance(command_data, list):
+ for data in command_data:
+ data["job_id"] = job_id
+
+ else:
+ command_data["job_id"] = job_id
+
+ return command_data, command_data
+
+
+def prepare_computer_list_output_and_readable_output(
+ computers_data: list[dict[str, Any]], job_id: str
+) -> tuple[list, dict[str, Any]]:
+ """Prepare the computer list command output and readable output.
+
+ Args:
+ computers_data (list[dict[str, Any]]): The computer list data.
+ job_id (str): The job ID.
+
+ Returns:
+ tuple[list, dict[str, Any]]: The command output and readable output.
+ """
+ updated_response = []
+ for computer in computers_data:
+ updated_response.append(
+ {
+ "id": computer.get("computerId"),
+ "name": computer.get("computerName"),
+ "ip": computer.get("computerIP"),
+ "type": computer.get("computerType"),
+ "deployment_status": computer.get("computerDeploymentStatus"),
+ "client_version": computer.get("computerClientVersion"),
+ "groups": computer.get("computerGroups"),
+ "user_name": computer.get("computerUserName"),
+ "domain_name": computer.get("domainName"),
+ "isolation_status": computer.get("isolationStatus"),
+ "last_logged_in_user": computer.get("computerLastLoggedInUser"),
+ "os_name": computer.get("osName"),
+ "os_version": computer.get("osVersion"),
+ }
+ )
+ return updated_response, {"job_id": job_id, "Computer": updated_response}
+
+
+def prepare_push_operation_output_and_readable_output(
+ command_data: list[dict[str, Any]],
+ job_id: str,
+) -> tuple[list | dict[str, Any], list | dict[str, Any]]:
+ """Update the API response data for the readable output when the API response is push operation data.
+
+ Args:
+ command_data (list[dict[str, Any]]): The API response (push operation data).
+ job_id (str): The job ID.
+
+ Returns:
+ tuple[list | dict[str, Any], list | dict[str, Any]]: The updated readable output and command data.
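+
+ A sketch of a single readable-output row (values are illustrative):
+ {"machine_id": "123", "machine_name": "host-01", "operation_status": "DONE",
+ "operation_response_status": "SUCCEEDED", "operation_response_output": "..."}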
+ """
+ updated_command_readable_output = []
+
+ for data in command_data:
+ updated_command_readable_output.append(
+ {
+ "machine_id": dict_safe_get(data, ["machine", "id"]),
+ "machine_name": dict_safe_get(data, ["machine", "name"]),
+ "operation_status": dict_safe_get(data, ["operation", "status"]),
+ "operation_response_status": dict_safe_get(
+ data, ["operation", "response", "status"]
+ ),
+ "operation_response_output": dict_safe_get(
+ data, ["operation", "response", "output"]
+ ),
+ }
+ )
+ data["operation"] |= {
+ "id": dict_safe_get(get_integration_context(), ["remediation_operation_id"])
+ }
+ data["job_id"] = job_id
+
+ return updated_command_readable_output, command_data
+
+
+def validate_pagination_arguments(
+ page: int | None = None,
+ page_size: int | None = None,
+ limit: int | None = None,
+):
+ """Validate pagination arguments according to their default.
+
+ Args:
+ page (int, optional): Page number of paginated results.
+ page_size (int, optional): Number of items per page.
+ limit (int, optional): The maximum number of records to retrieve.
+
+ Raises:
+ ValueError: Appropriate error message.
+ """
+ if page_size and (page_size < MIN_PAGE_SIZE or page_size > MAX_PAGE_SIZE):
+ raise ValueError(
+ f"page_size argument must be greater than {MIN_PAGE_SIZE} and smaller than {MAX_PAGE_SIZE}."
+ )
+ if page and page < MIN_PAGE_NUM:
+ raise ValueError(f"page argument must be greater than {MIN_PAGE_NUM - 1}.")
+ if limit and limit <= MIN_LIMIT:
+ raise ValueError(f"limit argument must be greater than {MIN_LIMIT}.")
+
+
+def get_pagination_args(args: dict[str, Any]) -> tuple:
+ """Return the correct limit and offset for the API
+ based on the user arguments page, page_size and limit.
+
+ Args:
+ args (dict[str, Any]): Command arguments from XSOAR.
+
+ Returns:
+ tuple: new_page, new_page_size, pagination_message.
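+
+ Example (illustrative values):
+ get_pagination_args({"page": "2", "page_size": "10"})
+ # -> (1, 10, "Showing page 2.\nCurrent page size: 10.")
+ get_pagination_args({"limit": "50"})
+ # -> (0, 50, "Showing page 1.\nCurrent page size: 50.")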
+ """
+ page = arg_to_number(args.get("page"))
+ page_size = arg_to_number(args.get("page_size"))
+ limit = arg_to_number(args.get("limit"))
+
+ validate_pagination_arguments(page, page_size, limit)
+
+ new_page = 0
+ new_page_size = limit
+
+ if page and page_size:
+ new_page_size = page_size
+ new_page = page - 1
+
+ pagination_message = (
+ f"Showing page {new_page+1}.\nCurrent page size: {new_page_size}."
+ )
+
+ return new_page, new_page_size, pagination_message
+
+
+def validate_filter_arguments(column_name: str | None = None, filter_type: str | None = None):
+ """Validate filter arguments values are allowed.
+
+ Args:
+ column_name (str, optional): The column name to filter by. Defaults to None.
+ filter_type (str, optional): The filter operator. Defaults to None.
+
+ Raises:
+ ValueError: Raise error in case column_name or filter_type values are not allowed.
+ """
+ if column_name and column_name not in COLUMN_NAMES:
+ raise ValueError(
+ f"'column_name' must be one of the followings: {COLUMN_NAMES}."
+ )
+
+ if filter_type and filter_type not in FILTER_TYPES:
+ raise ValueError(
+ f"'filter_type' must be one of the followings: {FILTER_TYPES}."
+ )
+
+
+def extract_query_filter(args: dict[str, Any]) -> list[dict[str, Any]]:
+ """Extract query filters from the specified arguments.
+
+ Args:
+ args (dict[str, Any]): Command arguments from XSOAR.
+
+ Returns:
+ list[dict[str, Any]]: The updated query filter according to Harmony requirements.
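+
+ A minimal illustration of the expected 'filter' format (the column name and filter type
+ below are hypothetical; they must appear in COLUMN_NAMES and FILTER_TYPES respectively):
+ args = {"filter": "computerName Contains 'host-01'"}
+ extract_query_filter(args)
+ # -> [{"columnName": "computerName", "filterValues": ["host-01"], "filterType": "Contains"}]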
+ """
+ query_filter = []
+
+ if filter_by_query := args.get("filter"):
+ queries = argToList(filter_by_query, "' , ")
+
+ for query in queries:
+ query_parts = query.split(" ")
+
+ if len(query_parts) != 3:
+ raise ValueError(
+ "'filter' must be in the following format: 'column_name filter_type filter_value'."
+ )
+
+ column_name = query_parts[0]
+ filter_type = query_parts[1]
+ filter_values = query_parts[2].replace("'", "")
+
+ validate_filter_arguments(column_name, filter_type)
+
+ query_filter.append(
+ {
+ "columnName": column_name,
+ "filterValues": argToList(filter_values),
+ "filterType": filter_type,
+ }
+ )
+ for key, value in args.items():
+ if key in COLUMN_NAMES_MAPPER:
+ query_filter.append(
+ {
+ "columnName": COLUMN_NAMES_MAPPER[key],
+ "filterValues": argToList(value),
+ "filterType": DEFAULT_FILTER_TYPE,
+ }
+ )
+
+ if computer_last_connection := args.get("computer_last_connection"):
+ computer_last_connection_times = argToList(computer_last_connection)
+
+ if len(computer_last_connection_times) != 2:
+ raise ValueError(
+ "'computer_last_connection' must be in the following format: 'YYYY-MM-DD HH:MM, YYYY-MM-DD HH:MM'."
+ )
+
+ query_filter += [
+ {
+ "columnName": "computerLastConnection",
+ "filterValues": computer_last_connection_times[0],
+ "filterType": "Grater",
+ },
+ {
+ "columnName": "computerLastConnection",
+ "filterValues": computer_last_connection_times[1],
+ "filterType": "Smaller",
+ },
+ ]
+
+ if not query_filter:
+ raise DemistoException(
+ """At least one of the following query arguments are required: computer_ids, computer_names, computer_ips,
+ computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter."""
+ )
+
+ return query_filter
+
+
+def build_request_body(args: dict[str, Any]) -> dict[str, Any]:
+ """Build a query for Harmony API.
+
+ Args:
+ args (dict[str, Any]): Command arguments from XSOAR.
+
+ Returns:
+ dict[str, Any]: The query for Harmony API.
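+
+ A rough sketch of the produced body (the 'computer_ids' argument is illustrative; empty
+ values are stripped by remove_empty_elements):
+ build_request_body({"computer_ids": "1,2", "comment": "scan", "limit": "10"})
+ # -> {"comment": "scan",
+ #     "targets": {"query": {"filter": [...], "paging": {"pageSize": 10, "offset": 0}}}}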
+ """
+
+ new_page, new_page_size, _ = get_pagination_args(args)
+
+ computers_to_include = []
+ if computers_ids_to_include := args.get("computers_ids_to_include"):
+ for computer_id in argToList(computers_ids_to_include):
+ computers_to_include.append({"id": computer_id})
+
+ return remove_empty_elements(
+ {
+ "comment": args.get("comment"),
+ "timing": {
+ "expirationSeconds": args.get("expiration_seconds"),
+ "schedulingDateTime": args.get("scheduling_date_time"),
+ },
+ "targets": {
+ "query": {
+ "filter": extract_query_filter(args),
+ "paging": {"pageSize": new_page_size, "offset": new_page},
+ },
+ "exclude": {
+ "groupsIds": argToList(args.get("groups_ids_to_exclude")),
+ "computerIds": argToList(args.get("computers_ids_to_exclude")),
+ },
+ "include": {"computers": computers_to_include},
+ },
+ "operationParameters": {
+ "informUser": arg_to_bool(args.get("inform_user")),
+ "allowPostpone": arg_to_bool(args.get("allow_postpone")),
+ },
+ }
+ )
+
+
+def arg_to_bool(arg: str | None = None) -> bool | None:
+ """Convert a string to a boolean if the value is not None.
+
+ Args:
+ arg (str, optional): The argument value. Defaults to None.
+
+ Returns:
+ bool | None: The converted value or None.
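+
+ Example:
+ arg_to_bool("true")  # -> True
+ arg_to_bool(None)  # -> None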
+ """
+ return argToBoolean(arg) if arg else None
+
+
+def clear_integration_context() -> None:
+ """Reset integration context."""
+ set_integration_context({"job_id": None, "remediation_operation_id": None})
+
+
+def convert_unix_to_date_string(unix_timestamp: int) -> str:
+ """Convert unix timestamp to date string.
+
+ Args:
+ unix_timestamp (int): The Unix timestamp in milliseconds.
+
+ Returns:
+ str: Datetime string.
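+
+ Example:
+ convert_unix_to_date_string(1700000000000)  # -> "2023-11-14T22:13:20.000Z"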
+ """
+ timestamp_in_seconds = unix_timestamp / 1000
+ date_time = datetime.fromtimestamp(timestamp_in_seconds, tz=timezone.utc)
+ return date_time.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+
+
+def main() -> None:
+
+ params: dict[str, Any] = demisto.params()
+ args: dict[str, Any] = demisto.args()
+ base_url = params.get("base_url", "")
+ client_id = dict_safe_get(params, ["credentials", "identifier"])
+ secret_key = dict_safe_get(params, ["credentials", "password"])
+
+ verify_certificate: bool = not params.get("insecure", False)
+ proxy = params.get("proxy", False)
+
+ command = demisto.command()
+ demisto.debug(f"Command being called is {command}")
+
+ try:
+
+ client: Client = Client(
+ base_url=base_url,
+ client_id=client_id,
+ client_secret=secret_key,
+ verify_certificate=verify_certificate,
+ proxy=proxy,
+ )
+ client.get_token()
+ client.login()
+
+ commands = {
+ "harmony-ep-job-status-get": job_status_get_command,
+ "harmony-ep-ioc-list": ioc_list_command,
+ "harmony-ep-ioc-update": ioc_update_command,
+ "harmony-ep-ioc-create": ioc_create_command,
+ "harmony-ep-ioc-delete": ioc_delete_command,
+ "harmony-ep-policy-rule-assignments-get": rule_assignments_get_command,
+ "harmony-ep-policy-rule-assignments-add": rule_assignments_add_command,
+ "harmony-ep-policy-rule-assignments-remove": rule_assignments_remove_command,
+ "harmony-ep-policy-rule-install": rule_policy_install_command,
+ "harmony-ep-policy-rule-modifications-get": rule_modifications_get_command,
+ "harmony-ep-policy-rule-metadata-list": rule_metadata_list_command,
+ "harmony-ep-push-operation-status-list": push_operation_status_list_command,
+ "harmony-ep-push-operation-get": push_operation_get_command,
+ "harmony-ep-push-operation-abort": push_operation_abort_command,
+ "harmony-ep-anti-malware-scan": anti_malware_scan_command,
+ "harmony-ep-anti-malware-update": anti_malware_update_command,
+ "harmony-ep-anti-malware-restore": anti_malware_restore_command,
+ "harmony-ep-forensics-indicator-analyze": indicator_analyze_command,
+ "harmony-ep-forensics-file-quarantine": file_quarantine_command,
+ "harmony-ep-forensics-file-restore": file_restore_command,
+ "harmony-ep-remediation-computer-isolate": remediation_computer_isolate_command,
+ "harmony-ep-remediation-computer-deisolate": remediation_computer_deisolate_command,
+ "harmony-ep-computer-list": computer_list_command,
+ "harmony-ep-agent-computer-restart": computer_restart_command,
+ "harmony-ep-agent-computer-shutdown": computer_shutdown_command,
+ "harmony-ep-agent-computer-repair": computer_repair_command,
+ "harmony-ep-agent-process-information-get": process_information_get_command,
+ "harmony-ep-agent-process-terminate": process_terminate_command,
+ "harmony-ep-agent-registry-key-add": agent_registry_key_add_command,
+ "harmony-ep-agent-registry-key-delete": agent_registry_key_delete_command,
+ "harmony-ep-agent-file-copy": agent_file_copy_command,
+ "harmony-ep-agent-file-move": agent_file_move_command,
+ "harmony-ep-agent-file-delete": agent_file_delete_command,
+ "harmony-ep-agent-vpn-site-add": agent_vpn_site_add_command,
+ "harmony-ep-agent-vpn-site-remove": agent_vpn_site_remove_command,
+ }
+
+ if command == "test-module":
+ return_results(test_module(client))
+ elif command in commands:
+ return_results(commands[command](args, client))
+ else:
+ raise NotImplementedError(f"{command} command is not implemented.")
+
+ except Exception as e:
+ clear_integration_context()
+ return_error(str(e))
+
+
+if __name__ in ["__main__", "builtin", "builtins"]:
+ main()
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.yml b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.yml
new file mode 100644
index 000000000000..8d840cfe6ced
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint.yml
@@ -0,0 +1,3215 @@
+category: Endpoint
+commonfields:
+ id: CheckPointHarmonyEndpoint
+ version: -1
+configuration:
+- name: base_url
+ display: Base URL
+ required: true
+ type: 0
+ defaultvalue: https://cloudinfra-gw.portal.checkpoint.com
+- name: credentials
+ display: Client ID
+ required: true
+ type: 9
+ displaypassword: Secret Key
+- name: insecure
+ display: Trust any certificate (not secure)
+ required: false
+ type: 8
+- name: proxy
+ display: Use system proxy settings
+ required: false
+ type: 8
+ defaultvalue: 'false'
+description: Check Point Harmony Endpoint provides a complete endpoint security solution built to protect organizations and the remote workforce from today's complex threat landscape.
+display: Check Point Harmony Endpoint
+name: CheckPointHarmonyEndpoint
+script:
+ commands:
+ - name: harmony-ep-job-status-get
+ description: Retrieves the status and result (if any) of a given asynchronous operation. A job is a way to monitor the progress of an asynchronous operation while avoiding issues that may manifest during long synchronous waits.
+ arguments:
+ - name: job_id
+ description: The ID of the operation to query the status of. The job ID is returned by most of the commands in this integration and can be found in the context path.
+ required: true
+ default: true
+ outputs:
+ - contextPath: HarmonyEP.Job.data
+ description: The job data.
+ type: String
+ - contextPath: HarmonyEP.Job.status
+ description: The job status.
+ type: String
+ - name: harmony-ep-ioc-list
+ description: Gets a list of all Indicators of Compromise. Use the filter parameters to fetch specific IOCs.
+ arguments:
+ - name: filter
+ description: The indicator value or comment to search for. The filter is case-insensitive. For example, the filter 'efg' will match IOCs 'abcdEFG', 'efGGG', and 'yEfG'.
+ - name: field
+ description: The Indicator of Compromise field to search by.
+ defaultValue: iocValue
+ auto: PREDEFINED
+ predefined:
+ - iocValue
+ - iocComment
+ - name: sort_direction
+ description: The way to sort the results.
+ defaultValue: DESC
+ auto: PREDEFINED
+ predefined:
+ - ASC
+ - DESC
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ outputs:
+ - contextPath: HarmonyEP.IOC.comment
+ description: The IOC comment.
+ type: String
+ - contextPath: HarmonyEP.IOC.modifiedOn
+ description: The time the IOC was modified.
+ type: Number
+ - contextPath: HarmonyEP.IOC.value
+ description: The IOC value.
+ type: String
+ - contextPath: HarmonyEP.IOC.type
+ description: The IOC type.
+ type: String
+ - contextPath: HarmonyEP.IOC.id
+ description: The IOC ID.
+ type: String
+ - name: harmony-ep-ioc-update
+ description: Updates the given Indicators of Compromise with the given parameters.
+ arguments:
+ - name: ioc_id
+ description: The ID of the IOC to update. Use harmony-ep-ioc-list command to get all IOC IDs.
+ required: true
+ default: true
+ - name: comment
+ description: The IOC comment to update.
+ required: true
+ - name: value
+ description: The IOC value to update.
+ required: true
+ - name: type
+ description: The IOC type to update.
+ auto: PREDEFINED
+ predefined:
+ - Domain
+ - IP
+ - URL
+ - MD5
+ - SHA1
+ required: true
+ outputs:
+ - contextPath: HarmonyEP.IOC.comment
+ description: The IOC comment.
+ type: String
+ - contextPath: HarmonyEP.IOC.modifiedOn
+ description: The time the IOC was modified.
+ type: Number
+ - contextPath: HarmonyEP.IOC.value
+ description: The IOC value.
+ type: String
+ - contextPath: HarmonyEP.IOC.type
+ description: The IOC type.
+ type: String
+ - contextPath: HarmonyEP.IOC.id
+ description: The IOC ID.
+ type: String
+ - name: harmony-ep-ioc-create
+ description: Creates new Indicators of Compromise using the given parameters.
+ arguments:
+ - name: comment
+ description: The IOC comment.
+ required: true
+ - name: value
+ description: The IOC value. For example, 8.8.8.8 for IP or example.com for Domain.
+ required: true
+ - name: type
+ description: The IOC type.
+ auto: PREDEFINED
+ predefined:
+ - Domain
+ - IP
+ - URL
+ - MD5
+ - SHA1
+ required: true
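+  # Illustrative usage (values are hypothetical):
+  #   !harmony-ep-ioc-create type=Domain value="example.com" comment="Added from XSOAR"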
+ - name: harmony-ep-ioc-delete
+ description: Deletes the given Indicators of Compromise by their ID.
+ arguments:
+ - name: ids
+      description: A comma-separated list of IOC IDs to delete. Use the harmony-ep-ioc-list command to get all IOC IDs.
+ isArray: true
+ - name: delete_all
+ description: Whether to delete all IOCs. This action permanently deletes all Indicators of Compromise and cannot be undone.
+ defaultValue: 'false'
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
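+  # Illustrative usage (IDs are hypothetical); note that delete_all=true permanently deletes all IOCs:
+  #   !harmony-ep-ioc-delete ids="101,102,103"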
+ - name: harmony-ep-policy-rule-assignments-get
+ description: Gets all entities directly assigned to the given rule.
+ arguments:
+ - name: rule_id
+      description: The ID of the rule to get the assignments for. Use the harmony-ep-policy-rule-metadata-list command to get all rule IDs.
+ required: true
+ outputs:
+ - contextPath: HarmonyEP.Rule.Assignments.type
+ description: The rule assignment type.
+ type: String
+ - contextPath: HarmonyEP.Rule.Assignments.name
+ description: The rule assignment name.
+ type: String
+ - contextPath: HarmonyEP.Rule.Assignments.id
+ description: The rule assignment ID.
+ type: String
+ - name: harmony-ep-policy-rule-assignments-add
+ description: Assigns the specified entities to the given rule. Specified IDs that are already assigned to the rule are ignored.
+ arguments:
+ - name: rule_id
+      description: The ID of the rule to add assignments to. Use the harmony-ep-policy-rule-metadata-list command to get all rule IDs.
+ required: true
+ - name: entities_ids
+ description: The entity IDs to assign.
+ required: true
+ isArray: true
+ - name: harmony-ep-policy-rule-assignments-remove
+ description: Removes the specified entities from the given rule's assignments. Specified IDs that are not assigned to the rule are ignored.
+ arguments:
+ - name: rule_id
+      description: The ID of the rule to remove assignments from. Use the harmony-ep-policy-rule-metadata-list command to get all rule IDs.
+ required: true
+ default: true
+ - name: entities_ids
+ description: The entity IDs to remove.
+ required: true
+ isArray: true
+ - name: harmony-ep-policy-rule-install
+ description: Installs all policies.
+ arguments:
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.PolicyRuleInstall.job_id
+ description: The job ID of the policy installation.
+ type: String
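+  # Illustrative usage: the command schedules the installation and polls its job until it finishes:
+  #   !harmony-ep-policy-rule-install interval=30 timeout=600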
+ - name: harmony-ep-policy-rule-modifications-get
+    description: Gets information on modifications to a given rule. (Modifications are the addition or removal of assignments on a rule since it was last installed.)
+ arguments:
+ - name: rule_id
+      description: The ID of the rule to get the modifications of. Use the harmony-ep-policy-rule-metadata-list command to get all rule IDs.
+ required: true
+ default: true
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.Rule.job_id
+      description: The job ID of the operation.
+ type: String
+ - contextPath: HarmonyEP.Rule.order
+ description: Rule order.
+ type: Number
+ - contextPath: HarmonyEP.Rule.isDefaultRule
+ description: Whether or not the rule is the default.
+ type: Boolean
+ - contextPath: HarmonyEP.Rule.family
+ description: A family in the rule-base (legacy and unified).
+ type: String
+ - contextPath: HarmonyEP.Rule.connectionState
+ description: Rule connection state.
+ type: String
+ - contextPath: HarmonyEP.Rule.comment
+ description: Rule comment.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.type
+ description: Rule assignments type.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.name
+ description: Rule assignments name.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.id
+ description: Rule assignments ID.
+ type: String
+ - contextPath: HarmonyEP.Rule.name
+ description: Rule name.
+ type: String
+ - contextPath: HarmonyEP.Rule.id
+ description: Rule ID.
+ type: String
+ - contextPath: HarmonyEP.Rule.orientation
+ description: Rule policy orientation.
+ type: String
+ - name: harmony-ep-policy-rule-metadata-list
+    description: Gets the metadata of all rules or of a given rule. (Metadata refers to all information relating to the rule except its actual settings.)
+ arguments:
+ - name: rule_id
+      description: The ID of the rule to get the metadata for.
+ - name: rule_family
+ description: An optional 'Rule Family' filter. Used to filter the results to only the selected rule family (e.g., only 'Threat Prevention').
+ auto: PREDEFINED
+ predefined:
+ - General Settings
+ - Threat Prevention
+ - Data Protection
+ - OneCheck
+ - Deployment
+ - Remote Access VPN
+ - Capsule Docs
+ - Access
+ - Agent Settings
+ - name: connection_state
+ description: An optional 'Connection State' filter. Used to filter the results to only the selected Connection State (e.g., only rules pertaining to policies for connected clients).
+ auto: PREDEFINED
+ predefined:
+ - CONNECTED
+ - DISCONNECTED
+ - RESTRICTED
+ - name: limit
+      description: The maximum number of rules to return.
+ defaultValue: '50'
+ - name: all_results
+ description: Whether to return all of the results or not.
+ auto: PREDEFINED
+ defaultValue: 'false'
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: HarmonyEP.Rule.order
+ description: Rule order.
+ type: Number
+ - contextPath: HarmonyEP.Rule.isDefaultRule
+ description: Whether or not the rule is the default.
+ type: Boolean
+ - contextPath: HarmonyEP.Rule.family
+ description: A family in the rule-base (legacy and unified).
+ type: String
+ - contextPath: HarmonyEP.Rule.connectionState
+ description: Rule connection state.
+ type: String
+ - contextPath: HarmonyEP.Rule.comment
+ description: Rule comment.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.type
+ description: Rule assignments type.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.name
+ description: Rule assignments name.
+ type: String
+ - contextPath: HarmonyEP.Rule.assignments.id
+ description: Rule assignments ID.
+ type: String
+ - contextPath: HarmonyEP.Rule.name
+ description: Rule name.
+ type: String
+ - contextPath: HarmonyEP.Rule.id
+ description: Rule ID.
+ type: String
+ - contextPath: HarmonyEP.Rule.orientation
+ description: Rule policy orientation.
+ type: String
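+  # Illustrative usage: list metadata for Threat Prevention rules that apply to connected clients:
+  #   !harmony-ep-policy-rule-metadata-list rule_family="Threat Prevention" connection_state=CONNECTED limit=50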
+ - name: harmony-ep-push-operation-status-list
+ polling: true
+    description: Gets the current statuses of all remediation operations, or, if a specific ID is specified, the current status of the given remediation operation.
+ arguments:
+ - name: remediation_operation_id
+      description: The remediation operation ID.
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ outputs:
+ - contextPath: HarmonyEP.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.adminName
+ description: The name of the administrator who initiated the operation.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.aborted
+      description: Indicates whether the operation was aborted by an administrator.
+ type: Boolean
+ - contextPath: HarmonyEP.PushOperation.remainingTimeoutSeconds
+ description: The amount of time, in seconds, the operation will remain active. When elapsed, no more entities will be affected.
+ type: Number
+ - contextPath: HarmonyEP.PushOperation.createdOn
+ description: The date and time the operation was created.
+ type: Date
+ - contextPath: HarmonyEP.PushOperation.type
+ description: Remediation operation type.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.comment
+ description: A comment that was provided during the operation's creation.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.id
+ description: The operation's ID.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.overallStatus
+ description: Remediation operation status.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.numberOfAffectedEntities
+ description: The total number of entities affected by the operation.
+ type: Number
+ - name: harmony-ep-push-operation-get
+    description: Gets the results of a given Remediation Operation. Remediation Operations may produce results such as a Forensics Report or yield status updates such as anti-malware scan progress.
+ arguments:
+ - name: remediation_operation_id
+      description: Remediation operation ID. Use the harmony-ep-push-operation-status-list command to get all remediation operation IDs.
+ required: true
+ default: true
+ - name: filter_text
+ description: Optional free text search in any of the potential response fields excluding "id". Can be used to search for specific results, devices or IPs, for example.
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-push-operation-abort
+ description: Aborts the given remediation operation. Aborting an operation prevents it from being sent to further Harmony Endpoint Clients. Clients that have already received the operation are not affected.
+ arguments:
+ - name: remediation_operation_id
+      description: Remediation operation ID. Use the harmony-ep-push-operation-status-list command to get all remediation operation IDs.
+ required: true
+ default: true
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.PushOperationAbort.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - name: harmony-ep-anti-malware-scan
+ description: 'Performs an anti-malware scan on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. '
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareScan.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
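+  # Illustrative usage (values are hypothetical): target computers either by explicit IDs or by a filter query:
+  #   !harmony-ep-anti-malware-scan computer_ids="1,2" comment="Scan triggered from XSOAR"
+  #   !harmony-ep-anti-malware-scan filter="computerIP Exact '1.1.1.1'"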
+ - name: harmony-ep-anti-malware-update
+ description: 'Updates the anti-malware Signature Database on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. '
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: update_from_ep_server
+ description: Determines whether to update from the EP server.
+ auto: PREDEFINED
+ defaultValue: 'false'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: update_from_cp_server
+ description: Determines whether to update from the CP server.
+ auto: PREDEFINED
+ defaultValue: 'false'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareUpdate.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-anti-malware-restore
+ description: "Restores a file that was previously quarantined by the Harmony Endpoint Client's anti-malware capability. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: files
+ description: A list of file paths to restore.
+ required: true
+ isArray: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.AntiMalwareRestore.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-forensics-indicator-analyze
+ description: "Collects forensics data whenever a computer that matches the given query accesses or executes the given IP, URL, filename, MD5 or path. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: indicator_type
+      description: The indicator type to analyze.
+ auto: PREDEFINED
+ predefined:
+ - IP
+ - URL
+ - File
+ - MD5
+ - Path
+ required: true
+ - name: indicator_value
+      description: A URL, IP, Path, File, or MD5 that, when accessed or executed, will trigger a forensics report.
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: generate_activity_logs
+ description: Determines whether to generate detailed activity logs.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.IndicatorAnalyze.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-forensics-file-quarantine
+ description: "Quarantines files given by path or MD5 or detections relating to a forensic incident. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: file_type
+ description: The forensics quarantine item type.
+ auto: PREDEFINED
+ predefined:
+ - PATH
+ - INCIDENT_ID
+ - MD5
+ required: true
+ - name: file_value
+ description: The forensics quarantine item value.
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.FileQuarantine.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
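+  # Illustrative usage (hash and computer name are hypothetical): quarantine a file by its MD5 on one computer:
+  #   !harmony-ep-forensics-file-quarantine file_type=MD5 file_value="d41d8cd98f00b204e9800998ecf8427e" computer_names="LAPTOP-01"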
+ - name: harmony-ep-forensics-file-restore
+ description: "Restores previously quarantined files given by path or MD5 or detections relating to a forensic incident. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: file_type
+ description: The forensics quarantine item type.
+ auto: PREDEFINED
+ predefined:
+ - PATH
+ - INCIDENT_ID
+ - MD5
+ required: true
+ - name: file_value
+ description: The forensics quarantine item value.
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.FileRestore.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.FileRestore.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-remediation-computer-isolate
+ description: "Isolates the computers matching the given query. Isolation is the act of denying all network access from a given computer. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ComputerIsolate.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
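+  # Illustrative usage (computer name is hypothetical): isolate a single endpoint and leave an audit comment:
+  #   !harmony-ep-remediation-computer-isolate computer_names="LAPTOP-01" comment="Isolated during incident response"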
+ - name: harmony-ep-remediation-computer-deisolate
+ description: "De-Isolates the computers matching the given query. De-isolating a computer restores its access to network resources. Affects only isolated computers. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+      description: Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+      required: false
+      isArray: true
+    - name: filter
+      description: "A comma-separated list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will match computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.status
+      description: The state of the push operation with regard to a specific device.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ComputerDeisolate.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-agent-computer-restart
+ description: "Restarts computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: force_apps_shutdown
+ description: Determines whether to force running applications to shut down.
+ auto: PREDEFINED
+ defaultValue: 'false'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ComputerRestart.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
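+ # Illustrative war-room invocation (editor's sketch; the ID and comment values are hypothetical, not from the source):
+ #   !harmony-ep-agent-computer-restart computer_ids="1,2" force_apps_shutdown=false comment="Post-patch restart"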
+ - name: harmony-ep-agent-computer-shutdown
+ description: "Shuts-down computers match the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: force_apps_shutdown
+ description: Determines whether to force running applications to shut down.
+ auto: PREDEFINED
+ defaultValue: 'false'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ComputerShutdown.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
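+ # Illustrative war-room invocation (editor's sketch; the computer name and comment are hypothetical, not from the source):
+ #   !harmony-ep-agent-computer-shutdown computer_names="LAB-PC-01" inform_user=true comment="Decommission"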
+ - name: harmony-ep-agent-computer-repair
+ description: "Repairs the Harmony Endpoint Client installation on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ComputerRepair.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
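+ # Illustrative war-room invocation (editor's sketch; the IP and comment values are hypothetical, not from the source):
+ #   !harmony-ep-agent-computer-repair computer_ips="10.0.0.5" allow_postpone=false comment="Repair corrupted client"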
+ - name: harmony-ep-computer-list
+ description: "Gets a list of computers matching the given filters. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.Computer.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.Computer.CapabilitiesInstalled
+ description: A list of all installed capabilities.
+ type: String
+ - contextPath: HarmonyEP.Computer.InstalledAndRunning
+ description: A list of installed and running capabilities.
+ type: String
+ - contextPath: HarmonyEP.Computer.ClientVersion
+ description: The computer client version.
+ type: String
+ - contextPath: HarmonyEP.Computer.DeployTime
+ description: The computer deploy time.
+ type: String
+ - contextPath: HarmonyEP.Computer.Groups
+ description: The computer groups.
+ type: String
+ - contextPath: HarmonyEP.Computer.type
+ description: The computer type.
+ type: String
+ - contextPath: HarmonyEP.Computer.userName
+ description: The computer user name.
+ type: String
+ - contextPath: HarmonyEP.Computer.domainName
+ description: The computer domain name.
+ type: String
+ - contextPath: HarmonyEP.Computer.isolationStatus
+ description: The computer isolation status.
+ type: String
+ - contextPath: HarmonyEP.Computer.LastLoggedInUser
+ description: The computer's last logged-in user.
+ type: String
+ - contextPath: HarmonyEP.Computer.osName
+ description: The computer operating system name.
+ type: String
+ - contextPath: HarmonyEP.Computer.osVersion
+ description: The computer operating system version.
+ type: String
+ - contextPath: HarmonyEP.Computer.ip
+ description: The computer IP address.
+ type: String
+ - contextPath: HarmonyEP.Computer.DeploymentStatus
+ description: The computer deployment status.
+ type: String
+ - contextPath: HarmonyEP.Computer.name
+ description: The computer name.
+ type: String
+ - contextPath: HarmonyEP.Computer.id
+ description: The computer's unique ID.
+ type: String
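+ # Illustrative war-room invocation (editor's sketch; the filter value is hypothetical, not from the source):
+ #   !harmony-ep-computer-list filter="computerIP Exact '10.0.0.5'" limit=10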
+ - name: harmony-ep-agent-process-information-get
+ description: "Collects information about processes on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: process_name
+ description: The name of the process to collect information on. If not provided, all running processes will be collected.
+ - name: additional_fields
+ description: Additional process properties to collect. If not provided, only the process's name and ID will be collected.
+ auto: PREDEFINED
+ predefined:
+ - SI
+ - Handles
+ - VM
+ - WS
+ - PM
+ - NPM
+ - Path
+ - CPU
+ - ExitCode
+ - ExitTime
+ - Handle
+ - HandleCount
+ - HasExited
+ - Id
+ - MachineName
+ - MainModule
+ - MainWindowHandle
+ - MainWindowTitle
+ - MaxWorkingSet
+ - MinWorkingSet
+ - Modules
+ - NonpagedSystemMemorySize
+ - NonpagedSystemMemorySize64
+ - PagedMemorySize
+ - PagedMemorySize64
+ - PagedSystemMemorySize
+ - PagedSystemMemorySize64
+ - PeakPagedMemorySize
+ - PeakPagedMemorySize64
+ - PeakVirtualMemorySize
+ - PeakVirtualMemorySize64
+ - PeakWorkingSet
+ - PeakWorkingSet64
+ - PriorityBoostEnabled
+ - PriorityClass
+ - PrivateMemorySize
+ - PrivateMemorySize64
+ - PrivilegedProcessorTime
+ - ProcessName
+ - ProcessorAffinity
+ - Responding
+ - SafeHandle
+ - SessionId
+ - StandardError
+ - StandardInput
+ - StandardOutput
+ - StartInfo
+ - StartTime
+ - SynchronizingObject
+ - Threads
+ - TotalProcessorTime
+ - UserProcessorTime
+ - VirtualMemorySize
+ - VirtualMemorySize64
+ - WorkingSet
+ - WorkingSet64
+ isArray: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ProcessInformation.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
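+ # Illustrative war-room invocation (editor's sketch; the process name, fields, and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-process-information-get process_name="chrome.exe" additional_fields="Path,CPU" computer_ids="1"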
+ - name: harmony-ep-agent-process-terminate
+ description: "Terminates the given process on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: terminate_all_instances
+ description: Indicates whether to terminate all processes matching the given name. If set to true while a non-zero PID is given, only a single process with the given name AND PID may be matched. If set to false or not provided, only the first matching process will be terminated.
+ defaultValue: 'false'
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: name
+ description: The name of the process to terminate.
+ required: true
+ default: true
+ - name: pid
+ description: The ID (PID) of the process to terminate. When used in conjunction with the name field, the PID must match the named process. If both name and PID are provided but the process matching the PID does not match the provided name, the operation will be ignored by the agent. If set to 0 or not provided, the agent will seek to terminate the process or processes as indicated by the name field.
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.ProcessTerminate.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
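+ # Illustrative war-room invocation (editor's sketch; the process name and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-process-terminate name="notepad.exe" terminate_all_instances=true computer_ids="1"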
+ - name: harmony-ep-agent-registry-key-add
+ description: "Adds a given registry key and/or value to the registry of computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: is_redirected
+ description: Determines if the key should reside under WOW6432Node. Keys intended for 64-bit versions of Windows may target 32-bit versions by setting this value to 'true', thus specifying that the registry key/value be added under the WOW6432Node.
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: value_data
+ description: The value to be added to the specified registry key.
+ required: true
+ - name: value_type
+ description: A registry value's type.
+ auto: PREDEFINED
+ predefined:
+ - DWORD (REG_DWORD)
+ - STRING (REG_SZ)
+ required: true
+ - name: value_name
+ description: The name of the value to be added to the specified registry key.
+ required: true
+ - name: key
+ description: The full path of the key to create or add a value to. For example, 'SOFTWARE\Node.js\Components'.
+ required: true
+ - name: hive
+ description: Defines known Windows Registry Hives. For more information, see https://docs.microsoft.com/en-us/windows/win32/sysinfo/predefined-keys.
+ auto: PREDEFINED
+ predefined:
+ - HKEY_CURRENT_USER
+ - HKEY_LOCAL_MACHINE
+ - HKEY_CLASSES_ROOT
+ - HKEY_USERS
+ - HKEY_CURRENT_CONFIG
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyAdd.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
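+ # Illustrative war-room invocation (editor's sketch; the key, value, and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-registry-key-add hive="HKEY_LOCAL_MACHINE" key="SOFTWARE\Example\Settings" value_name="Enabled" value_type="DWORD (REG_DWORD)" value_data="1" computer_ids="1"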
+ - name: harmony-ep-agent-registry-key-delete
+ description: "Removes the given registry key or value to the registry of computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: is_redirected
+ description: Determines if the key should be removed from under WOW6432Node. Keys intended for 64bit versions of Windows may target 32bit versions by setting this value to 'true', thus specifying that the registry key/value be removed under the WOW6432Node.
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: value_name
+ description: The value to remove from the key. If not provided, the entire key will be deleted.
+ - name: key
+ description: The full path of the key to delete or remove a value from. For example, 'SOFTWARE\Node.js\Components'.
+ required: true
+ - name: hive
+ description: Defines known Windows Registry Hives. For more information, see https://docs.microsoft.com/en-us/windows/win32/sysinfo/predefined-keys.
+ auto: PREDEFINED
+ predefined:
+ - HKEY_CURRENT_USER
+ - HKEY_LOCAL_MACHINE
+ - HKEY_CLASSES_ROOT
+ - HKEY_USERS
+ - HKEY_CURRENT_CONFIG
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.RegistryKeyDelete.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
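+ # Illustrative war-room invocation (editor's sketch; the key and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-registry-key-delete hive="HKEY_LOCAL_MACHINE" key="SOFTWARE\Example\Settings" value_name="Enabled" computer_ids="1"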
+ - name: harmony-ep-agent-file-copy
+ description: "Copies the given file from the given source to the given destination on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: destination_absolute_path
+ description: The absolute, full destination path. The provided path must include the target file's name (e.g., c:\backup\backup1.txt).
+ required: true
+ - name: source_absolute_path
+ description: The absolute, full source path (e.g., c:\backup\backup1.txt).
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.FileCopy.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.FileCopy.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
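+ # Illustrative war-room invocation (editor's sketch; the paths and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-file-copy source_absolute_path="c:\logs\app.log" destination_absolute_path="c:\backup\app.log" computer_ids="1"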
+ - name: harmony-ep-agent-file-move
+ description: "Moves the given file from the given source to the given destination on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: destination_absolute_path
+ description: The absolute, full destination path. The provided path must include the target file's name (e.g., c:\backup\backup1.txt).
+ required: true
+ - name: source_absolute_path
+ description: The absolute, full source path (e.g., c:\backup\backup1.txt).
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: The computer's last-connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.FileMove.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.status
+ description: The state of the push operation with regard to the specific device.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.FileMove.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
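+ # Illustrative war-room invocation (editor's sketch; the paths and ID are hypothetical, not from the source):
+ #   !harmony-ep-agent-file-move source_absolute_path="c:\temp\report.csv" destination_absolute_path="c:\archive\report.csv" computer_ids="1"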
+ - name: harmony-ep-agent-file-delete
+ description: "Deletes the given file from the given source on computers matching the given query. This operation is risky! Use with caution as it allows you to change Harmony Endpoint protected files or registry entries that are in use by your operating system. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: target_absolute_path
+ description: The absolute, full path of the file to remove.
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.FileDelete.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.status
+ description: The state of the push operation for the specific device.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.FileDelete.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-agent-vpn-site-add
+ description: " Adds the given VPN site's configuration to computers matching the given query. Adding a VPN site allows Harmony Endpoint Clients to connect to it. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: remote_access_gateway_name
+ description: The remote gateway's name.
+ required: true
+ - name: fingerprint
+ description: The remote gateway's certificate fingerprint. Fingerprints are used to verify the authenticity of the gateway.
+ required: true
+ - name: authentication_method
+ description: Authentication methods used in conjunction with VPN site standard login.
+ auto: PREDEFINED
+ predefined:
+ - CERTIFICATE
+ - P12_CERTIFICATE
+ - USERNAME_PASSWORD
+ - SECURID_KEY_FOB
+ - SECURID_PIN_PAD
+ - SOFTID
+ - CHALLENGE_RESPONSE
+ required: true
+ - name: display_name
+ description: The VPN site's display name.
+ - name: host
+ description: The target site's host name or IP address.
+ required: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.status
+ description: The state of the push operation for the specific device.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ - name: harmony-ep-agent-vpn-site-remove
+ description: "Removes the given VPN site's configuration to computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter. "
+ arguments:
+ - name: display_name
+ description: The display name of the VPN site to remove. If a display name was not provided during the site's creation, the host name/IP should be used instead.
+ required: true
+ default: true
+ - name: comment
+ description: Operation comment.
+ - name: scheduling_date_time
+ description: Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”.
+ - name: expiration_seconds
+ description: The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1.
+ - name: computer_ids
+ description: A comma-separated list of computer IDs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_names
+ description: A comma-separated list of computer names to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_ips
+ description: A comma-separated list of computer IPs to include in the operation.
+ required: false
+ isArray: true
+ - name: computer_types
+ description: A comma-separated list of computer types to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Desktop
+ - Laptop
+ - N/A
+ - Domain Controller
+ - Server
+ - name: computer_deployment_statuses
+ description: A comma-separated list of computer deployment statuses to include in the operation.
+ required: false
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - Retrying
+ - Error
+ - Scheduled
+ - Downloading
+ - Deploying
+ - Completed
+ - Failed
+ - Uninstalling
+ - Not Scheduled
+ - Not Installed
+ - N/A
+ - name: computer_last_connection
+ description: Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00".
+ required: false
+ isArray: true
+ - name: filter
+ description: "A comma-separated list of list of search filters according to the following template: \"column_name operator 'values_list' \". For example, the query \"computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1' \" will refer to computers contains '1', '2', '3', and '4' in their ID and that their IP is '1.1.1.1'. For more optional 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more optional 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType ."
+ required: false
+ - name: groups_ids_to_exclude
+ description: A comma-separated list of group IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_exclude
+ description: A comma-separated list of computer IDs to exclude from the operation.
+ isArray: true
+ - name: computers_ids_to_include
+ description: A comma-separated list of computer IDs to include in the operation.
+ isArray: true
+ - name: inform_user
+ description: Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: allow_postpone
+ description: Determines whether to allow the user to postpone the operation.
+ auto: PREDEFINED
+ defaultValue: 'true'
+ predefined:
+ - 'true'
+ - 'false'
+ - name: page
+ description: 'Page number of paginated results. Minimum value: 1.'
+ - name: page_size
+ description: The number of items per page.
+ - name: limit
+ description: The maximum number of records to retrieve.
+ defaultValue: '50'
+ - name: interval
+ description: The interval between each poll in seconds. Minimum value is `10`.
+ defaultValue: '30'
+ - name: timeout
+ description: The timeout for the polling in seconds.
+ defaultValue: '600'
+ - name: job_id
+ description: The job ID to fetch data for. Hidden argument.
+ hidden: true
+ polling: true
+ outputs:
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.job_id
+ description: The job ID of the remediation operation.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.id
+ description: The remediation operation ID.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.status
+ description: The state of the push operation for the specific device.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.response.status
+ description: Push operation response status.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.response.output
+ description: Push operation response output.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.ipAddress
+ description: The client device's IPv4 address.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.name
+ description: The client device's name.
+ type: String
+ - contextPath: HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.id
+ description: The client device's unique ID.
+ type: String
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.99865
+ isfetch: false
+ script: ''
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_description.md b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_description.md
new file mode 100644
index 000000000000..e86b45016a3c
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_description.md
@@ -0,0 +1,12 @@
+To generate a Client ID and Secret Key from Check Point Harmony Endpoint:
+
+1. Log in to Harmony.
+2. Click the settings icon.
+3. Click **API Keys** to go to the API keys page.
+4. Click **New**.
+5. On the *Create new API key* pop up, define the following:
+ - For *Service* select **Endpoint**.
+ - Choose the required expiration.
+ - For *Roles*, select **Admin**.
+6. Click **Create** and save the *Client ID* and *Secret Key* values.
+
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_image.png b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_image.png
new file mode 100644
index 000000000000..0e16005d5c78
Binary files /dev/null and b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_image.png differ
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_test.py b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_test.py
new file mode 100644
index 000000000000..cdf613d07e78
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/CheckPointHarmonyEndpoint_test.py
@@ -0,0 +1,1058 @@
+import json
+import os
+import unittest.mock
+from typing import Any, Callable
+
+import CommonServerPython
+import pytest
+
+import CheckPointHarmonyEndpoint
+
+TEST_DATA = "test_data"
+BASE_URL = "https://www.example.com/"
+API_URL = CommonServerPython.urljoin(
+ BASE_URL, "app/endpoint-web-mgmt/harmony/endpoint/api/v1"
+)
+
+
+def load_mock_response(file_name: str) -> dict[str, Any] | list[dict[str, Any]]:
+ """Load mock file that simulates an API response.
+
+ Args:
+ file_name (str): Name of the mock response JSON file to return.
+ Returns:
+ dict[str, Any] | list[dict[str, Any]]: Mock file content.
+ """
+ file_path = os.path.join(TEST_DATA, file_name)
+
+ with open(file_path, mode="r", encoding="utf-8") as mock_file:
+ return json.loads(mock_file.read())
+
+
+@pytest.fixture()
+def mock_client() -> CheckPointHarmonyEndpoint.Client:
+ """
+ Establish a mock connection to the client with a client ID and client secret.
+
+ Returns:
+ Client: Mock connection to client.
+ """
+ return CheckPointHarmonyEndpoint.Client(
+ base_url=API_URL,
+ client_id="test",
+ client_secret="test",
+ verify_certificate=False,
+ proxy=False,
+ )
+
+
+@pytest.mark.parametrize(
+ "command_args, endpoint, response_file",
+ [
+ (
+ {"job_id": "123"},
+ "jobs/123",
+ "job_status.json",
+ ),
+ ],
+)
+def test_job_status_get_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_args: dict[str, Any],
+ endpoint: str,
+ response_file: str,
+):
+ """
+ Scenario:
+ - Test retrieving job status.
+
+ Given:
+ - Arguments for retrieving job status.
+
+ When:
+ - Executing job_status_get_command function.
+
+ Then:
+ - Ensure that the CommandResults outputs is correct.
+ - Ensure that the CommandResults raw_response is correct.
+ - Ensure that the CommandResults outputs_prefix is correct.
+ - Ensure that the CommandResults outputs_key_field is correct.
+ """
+ mock_response = load_mock_response(response_file)
+ requests_mock.get(
+ url=f"{API_URL}/{endpoint}",
+ json=mock_response,
+ )
+
+ command_results = CheckPointHarmonyEndpoint.job_status_get_command(
+ command_args, mock_client
+ )
+
+ assert command_results.raw_response == mock_response
+ assert command_results.outputs == mock_response
+ assert command_results.outputs_prefix == "HarmonyEP.Job"
+ assert command_results.outputs_key_field == "id"
+
+
+@pytest.mark.parametrize(
+ "command_args, endpoint, response_file",
+ [
+ (
+ {
+ "filter": "com",
+ "sort_field": "iocValue",
+ "sort_direction": "ASC",
+ },
+ "ioc/get",
+ "ioc_list.json",
+ ),
+ ],
+)
+def test_ioc_list_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_args: dict[str, Any],
+ endpoint: str,
+ response_file: str,
+):
+ """
+ Scenario:
+ - Test listing IOCs.
+
+ Given:
+ - Arguments for listing IOCs.
+
+ When:
+ - Executing ioc_list_command function.
+
+ Then:
+ - Ensure that the CommandResults outputs is correct.
+ - Ensure that the CommandResults raw_response is correct.
+ - Ensure that the CommandResults outputs_prefix is correct.
+ - Ensure that the CommandResults outputs_key_field is correct.
+ """
+ mock_response = load_mock_response(response_file)
+ requests_mock.post(
+ url=f"{API_URL}/{endpoint}",
+ json=mock_response,
+ )
+
+ command_results = CheckPointHarmonyEndpoint.ioc_list_command(
+ command_args, mock_client
+ )
+ mock_response["content"][0]["modifiedOn"] = (
+ CheckPointHarmonyEndpoint.convert_unix_to_date_string(
+ mock_response["content"][0]["modifiedOn"]
+ )
+ )
+
+ assert command_results.raw_response == mock_response
+ assert command_results.outputs == mock_response["content"]
+ assert command_results.outputs_prefix == "HarmonyEP.IOC"
+ assert command_results.outputs_key_field == "id"
+
+
+@pytest.mark.parametrize(
+ "command_args, response_file",
+ [
+ (
+ {
+ "type": "Domain",
+ "value": "tal.com",
+ "comment": "Tal Domain Test",
+ "ioc_id": "1108",
+ },
+ "ioc_update.json",
+ ),
+ ],
+)
+def test_ioc_update_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_args: dict[str, Any],
+ response_file: str,
+):
+ """
+ Scenario:
+ - Test updating an IOC.
+
+ Given:
+ - Arguments for updating an IOC.
+
+ When:
+ - Executing ioc_update_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ - Ensure that the CommandResults raw_response is correct.
+ - Ensure that the CommandResults outputs_prefix is correct.
+ - Ensure that the CommandResults outputs_key_field is correct.
+ """
+ mock_response = load_mock_response(response_file)
+ requests_mock.put(
+ url=f"{API_URL}/ioc/edit",
+ json=mock_response,
+ )
+
+ command_results = CheckPointHarmonyEndpoint.ioc_update_command(
+ command_args, mock_client
+ )
+
+ assert command_results.raw_response == mock_response
+ assert command_results.outputs == mock_response
+ assert command_results.outputs_prefix == "HarmonyEP.IOC"
+ assert command_results.outputs_key_field == "id"
+
+
+def test_ioc_create_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+):
+ """
+ Scenario:
+ - Test creating an IOC.
+
+ Given:
+ - Arguments for creating an IOC.
+
+ When:
+ - Executing ioc_create_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ """
+ requests_mock.post(
+ url=f"{API_URL}/ioc/create",
+ json="",
+ )
+
+ command_results = CheckPointHarmonyEndpoint.ioc_create_command(
+ {"type": "Domain", "value": "example.com", "comment": "Suspicious domain"},
+ mock_client,
+ )
+
+ assert command_results.readable_output == "IOC was created successfully."
+
+
+@pytest.mark.parametrize(
+ "command_args, endpoint, response_file, readable_output",
+ [
+ (
+ {"ids": [1, 2, 3], "delete_all": False},
+ "ioc/delete?ids=%5B1,%202,%203%5D",
+ "ioc_delete.json",
+ "IOCs [1, 2, 3] was deleted successfully.",
+ ),
+ (
+ {"ids": None, "delete_all": True},
+ "ioc/delete/all",
+ "ioc_delete.json",
+ "All IOCs were deleted successfully.",
+ ),
+ ],
+)
+def test_ioc_delete_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_args: dict[str, Any],
+ endpoint: str,
+ response_file: str,
+ readable_output: str,
+):
+ """
+ Scenario:
+ - Test deleting an IOC.
+
+ Given:
+ - Arguments for deleting an IOC.
+
+ When:
+ - Executing ioc_delete_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ """
+ mock_response = load_mock_response(response_file)
+ requests_mock.delete(
+ url=f"{API_URL}/{endpoint}",
+ json=mock_response,
+ )
+
+ command_results = CheckPointHarmonyEndpoint.ioc_delete_command(
+ command_args, mock_client
+ )
+
+ assert command_results.readable_output == readable_output
+
+
+def test_rule_assignments_get_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+):
+ """
+ Scenario:
+ - Test getting rule assignments.
+
+ Given:
+ - Arguments for getting rule assignments.
+
+ When:
+ - Executing rule_assignments_get_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ """
+ mock_response = load_mock_response("rule_assignments.json")
+ requests_mock.get(
+ url=f"{API_URL}/policy/1/assignments",
+ json=mock_response,
+ )
+ output = {"id": 1, "assignments": mock_response}
+ command_results = CheckPointHarmonyEndpoint.rule_assignments_get_command(
+ {"rule_id": 1}, mock_client
+ )
+
+ assert command_results.outputs_prefix == "HarmonyEP.Rule"
+ assert command_results.outputs_key_field == "id"
+ assert command_results.raw_response == mock_response
+ assert command_results.outputs == output
+
+
+def test_rule_assignments_add_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+):
+ """
+ Scenario:
+ - Test adding rule assignments.
+
+ Given:
+ - Arguments for adding rule assignments.
+
+ When:
+ - Executing rule_assignments_add_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ """
+
+ requests_mock.put(
+ url=f"{API_URL}/policy/1/assignments/add",
+ json="",
+ )
+
+ command_results = CheckPointHarmonyEndpoint.rule_assignments_add_command(
+ {"rule_id": 1, "entities_ids": ["3", "4"]}, mock_client
+ )
+
+ assert (
+ command_results.readable_output
+ == "Entities ['3', '4'] were assigned to rule 1 successfully."
+ )
+
+
+def test_rule_assignments_remove_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+):
+ """
+ Scenario:
+ - Test removing rule assignments.
+
+ Given:
+ - Arguments for removing rule assignments.
+
+ When:
+ - Executing rule_assignments_remove_command function.
+
+ Then:
+ - Ensure that the CommandResults readable_output is correct.
+ """
+
+ requests_mock.put(
+ url=f"{API_URL}/policy/1/assignments/remove",
+ json="",
+ )
+
+ command_results = CheckPointHarmonyEndpoint.rule_assignments_remove_command(
+ {"rule_id": 1, "entities_ids": ["3", "4"]}, mock_client
+ )
+
+ assert (
+ command_results.readable_output
+ == "Entities ['3', '4'] were removed from rule 1 successfully."
+ )
+
+
+@pytest.mark.parametrize(
+ "command_args, endpoint, response_file",
+ [
+ (
+ {"limit": 2, "all_results": False},
+ "policy/metadata",
+ "rule_metadata_list.json",
+ ),
+ (
+ {"rule_id": 1, "all_results": True},
+ "policy/1/metadata",
+ "rule_metadata_get.json",
+ ),
+ ],
+)
+def test_rule_metadata_list_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_args: dict[str, Any],
+ endpoint: str,
+ response_file: str,
+):
+ """
+ Scenario:
+ - Test the rule_metadata_list_command function.
+
+ Given:
+ - Arguments for the command.
+
+ When:
+ - Executing the rule_metadata_list_command function.
+
+ Then:
+ - Ensure that the CommandResults are as expected.
+ """
+
+ mock_response: dict[str, Any] | list[dict[str, Any]] = load_mock_response(
+ response_file
+ )
+ requests_mock.get(
+ url=f"{API_URL}/{endpoint}",
+ json=mock_response,
+ )
+ command_results = CheckPointHarmonyEndpoint.rule_metadata_list_command(
+ command_args, mock_client
+ )
+ mock_response = (
+ mock_response[: command_args["limit"]]
+ if "limit" in command_args
+ else mock_response
+ )
+
+ assert command_results.raw_response == mock_response
+ assert command_results.outputs == mock_response
+ assert command_results.outputs_prefix == "HarmonyEP.Rule"
+ assert command_results.outputs_key_field == "id"
+
+
+@pytest.mark.parametrize(
+ "args,command_name,integration_context,response_file,expected_integration_context,expected_poll_result",
+ [
+ # Mock success first run
+ (
+ {"job_id": "3"},
+ "harmony-ep-push-operation-status-list",
+ {},
+ "push_operation_status_list.json",
+ {"job_id": None, "remediation_operation_id": None},
+ CommonServerPython.PollResult(
+ response=CommonServerPython.CommandResults(
+ outputs=load_mock_response("push_operation_status_list.json"),
+ outputs_prefix="HarmonyEP.PushOperation",
+ outputs_key_field="job_id",
+ raw_response=load_mock_response("push_operation_status_list.json"),
+ ),
+ continue_to_poll=False,
+ args_for_next_run=None,
+ ),
+ ),
+ # Mock continue to poll
+ (
+ {"job_id": "3"},
+ "harmony-ep-push-operation-status-list",
+ {},
+ "push_operation_status_in_progress.json",
+ None,
+ CommonServerPython.PollResult(
+ response=CommonServerPython.CommandResults(
+ outputs=load_mock_response(
+ "push_operation_status_in_progress.json"
+ ),
+ outputs_prefix="HarmonyEP.Job",
+ outputs_key_field="id",
+ raw_response=load_mock_response(
+ "push_operation_status_in_progress.json"
+ ),
+ ),
+ continue_to_poll=True,
+ args_for_next_run={"job_id": "3"},
+ ),
+ ),
+ # Mock success second run
+ (
+ {"job_id": "3"},
+ "harmony-ep-push-operation-status-list",
+ {"job_id": "3"},
+ "push_operation_status_list.json",
+ {"job_id": None, "remediation_operation_id": None},
+ CommonServerPython.PollResult(
+ response=CommonServerPython.CommandResults(
+ outputs=load_mock_response("push_operation_status_list.json"),
+ outputs_prefix="HarmonyEP.PushOperation",
+ outputs_key_field="job_id",
+ raw_response=load_mock_response("push_operation_status_list.json"),
+ ),
+ continue_to_poll=False,
+ args_for_next_run=None,
+ ),
+ ),
+ # Mock success first run with push operation data
+ (
+ {"job_id": "3"},
+ "harmony-ep-anti-malware-scan",
+ {"job_id": "3", "remediation_operation_id": None},
+ "push_operation_remediation_data.json",
+ {"job_id": "new1", "remediation_operation_id": "222"},
+ CommonServerPython.PollResult(
+ response=CommonServerPython.CommandResults(
+ outputs=load_mock_response("push_operation_remediation_data.json"),
+ outputs_prefix="HarmonyEP.Job",
+ outputs_key_field="id",
+ raw_response=load_mock_response(
+ "push_operation_remediation_data.json"
+ ),
+ ),
+ continue_to_poll=True,
+ args_for_next_run={"job_id": "3"},
+ ),
+ ),
+ # Mock success second run with push operation data
+ (
+ {"job_id": "3"},
+ "harmony-ep-anti-malware-scan",
+ {"job_id": "3", "remediation_operation_id": "222"},
+ "job_status.json",
+ {"job_id": None, "remediation_operation_id": None},
+ CommonServerPython.PollResult(
+ response=CommonServerPython.CommandResults(
+ outputs=load_mock_response("job_status.json"),
+ outputs_prefix="HarmonyEP.AntiMalwareScan.PushOperation",
+ outputs_key_field="job_id",
+ raw_response=load_mock_response("job_status.json"),
+ ),
+ continue_to_poll=False,
+ args_for_next_run=None,
+ ),
+ ),
+ ],
+)
+def test_schedule_command(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ args: dict[str, Any],
+ command_name: str,
+ integration_context: dict[str, Any],
+ response_file: str,
+ expected_integration_context: dict[str, Any],
+ expected_poll_result: CommonServerPython.PollResult,
+):
+ """Test the schedule_command function.
+
+ Args:
+ requests_mock (pytest_mock.plugin.MockerFixture): Mocked requests.
+ mock_client (CheckPointHarmonyEndpoint.Client): Mocked client.
+ args (dict[str, Any]): The arguments to pass to the function.
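+ command_name (str): The name of the command being scheduled.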
+ integration_context (dict[str, Any]): The integration context to patch.
+ response_file (str): The file names for the mocked responses.
+ expected_integration_context (dict[str, Any]): The expected integration context.
+ expected_poll_result (CommonServerPython.PollResult): The expected poll result.
+ """
+ requests_mock.get(
+ f"{API_URL}/jobs/3",
+ json=load_mock_response(response_file),
+ )
+
+ if command_name == "harmony-ep-anti-malware-scan":
+ requests_mock.post(
+ f"{API_URL}/remediation/222/results/slim",
+ json={"jobId": "new1"},
+ )
+
+ with (
+ unittest.mock.patch(
+ "CheckPointHarmonyEndpoint.get_integration_context",
+ return_value=integration_context,
+ ),
+ unittest.mock.patch(
+ "CheckPointHarmonyEndpoint.set_integration_context"
+ ) as mock_set_integration_context,
+ ):
+ poll_result: CommonServerPython.PollResult = (
+ CheckPointHarmonyEndpoint.schedule_command(
+ client=mock_client,
+ args=args,
+ command_name=command_name,
+ )
+ )
+
+ if expected_integration_context:
+ mock_set_integration_context.assert_called_once_with(
+ expected_integration_context
+ )
+
+ assert poll_result.continue_to_poll == expected_poll_result.continue_to_poll
+ assert poll_result.args_for_next_run == expected_poll_result.args_for_next_run
+ assert (
+ poll_result.response.outputs_prefix
+ == expected_poll_result.response.outputs_prefix
+ )
+ assert (
+ poll_result.response.outputs_key_field
+ == expected_poll_result.response.outputs_key_field
+ )
+
+
+@pytest.mark.parametrize(
+ "command_name,request_method,request_function,command_args,endpoint",
+ [
+ (
+ "harmony-ep-policy-rule-install",
+ "POST",
+ CheckPointHarmonyEndpoint.rule_policy_install_command,
+ {"job_id": None},
+ "policy/install",
+ ),
+ (
+ "harmony-ep-policy-rule-modifications-get",
+ "GET",
+ CheckPointHarmonyEndpoint.rule_modifications_get_command,
+ {"rule_id": "1994", "job_id": None},
+ "policy/1994/modifications",
+ ),
+ (
+ "harmony-ep-push-operation-status-list",
+ "GET",
+ CheckPointHarmonyEndpoint.push_operation_status_list_command,
+ {"remediation_operation_id": "11081994", "job_id": None},
+ "remediation/11081994/status",
+ ),
+ (
+ "harmony-ep-push-operation-status-list",
+ "GET",
+ CheckPointHarmonyEndpoint.push_operation_status_list_command,
+ {"all_results": True, "remediation_operation_id": None, "job_id": None},
+ "remediation/status",
+ ),
+ (
+ "harmony-ep-push-operation-get",
+ "POST",
+ CheckPointHarmonyEndpoint.push_operation_get_command,
+ {
+ "remediation_operation_id": "11081994",
+ "filter_text": None,
+ "job_id": None,
+ },
+ "remediation/11081994/results/slim",
+ ),
+ (
+ "harmony-ep-push-operation-abort",
+ "POST",
+ CheckPointHarmonyEndpoint.push_operation_abort_command,
+ {"remediation_operation_id": "11081994", "job_id": None},
+ "remediation/11081994/abort",
+ ),
+ (
+ "harmony-ep-anti-malware-scan",
+ "POST",
+ CheckPointHarmonyEndpoint.anti_malware_scan_command,
+ {
+ "comment": "test",
+ "computer_ids": ["3"],
+ "groups_ids_to_exclude": ["a"],
+ "computers_ids_to_include": ["1"],
+ "computers_ids_to_exclude": ["2"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/anti-malware/scan",
+ ),
+ (
+ "harmony-ep-anti-malware-update",
+ "POST",
+ CheckPointHarmonyEndpoint.anti_malware_update_command,
+ {
+ "comment": "test",
+ "computer_ids": ["3"],
+ "groups_ids_to_exclude": ["a"],
+ "computers_ids_to_include": ["1"],
+ "computers_ids_to_exclude": ["2"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/anti-malware/update",
+ ),
+ (
+ "harmony-ep-anti-malware-restore",
+ "POST",
+ CheckPointHarmonyEndpoint.anti_malware_restore_command,
+ {
+ "comment": "test",
+ "computer_ids": ["3"],
+ "groups_ids_to_exclude": ["a"],
+ "computers_ids_to_include": ["1"],
+ "computers_ids_to_exclude": ["2"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/anti-malware/restore",
+ ),
+ (
+ "harmony-ep-forensics-indicator-analyze",
+ "POST",
+ CheckPointHarmonyEndpoint.indicator_analyze_command,
+ {
+ "indicator_type": "IP",
+ "indicator_value": "1.1.1.1",
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/forensics/analyze-by-indicator/ip",
+ ),
+ (
+ "harmony-ep-forensics-file-quarantine",
+ "POST",
+ CheckPointHarmonyEndpoint.file_quarantine_command,
+ {
+ "file_type": "PATH",
+ "file_value": "file_name",
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/forensics/file/quarantine",
+ ),
+ (
+ "harmony-ep-forensics-file-restore",
+ "POST",
+ CheckPointHarmonyEndpoint.file_restore_command,
+ {
+ "file_type": "PATH",
+ "file_value": "file_name",
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/forensics/file/restore",
+ ),
+ (
+ "harmony-ep-remediation-computer-isolate",
+ "POST",
+ CheckPointHarmonyEndpoint.remediation_computer_isolate_command,
+ {
+ "file_type": "PATH",
+ "file_value": "file_name",
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/isolate",
+ ),
+ (
+ "harmony-ep-remediation-computer-deisolate",
+ "POST",
+ CheckPointHarmonyEndpoint.remediation_computer_deisolate_command,
+ {
+ "file_type": "PATH",
+ "file_value": "file_name",
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/de-isolate",
+ ),
+ (
+ "harmony-ep-agent-computer-restart",
+ "POST",
+ CheckPointHarmonyEndpoint.computer_restart_command,
+ {
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "force_apps_shutdown": False,
+ "job_id": None,
+ },
+ "remediation/agent/reset-computer",
+ ),
+ (
+ "harmony-ep-agent-computer-repair",
+ "POST",
+ CheckPointHarmonyEndpoint.computer_repair_command,
+ {
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/repair-computer",
+ ),
+ (
+ "harmony-ep-agent-computer-shutdown",
+ "POST",
+ CheckPointHarmonyEndpoint.computer_shutdown_command,
+ {
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "force_apps_shutdown": False,
+ "job_id": None,
+ },
+ "remediation/agent/shutdown-computer",
+ ),
+ (
+ "harmony-ep-computer-list",
+ "POST",
+ CheckPointHarmonyEndpoint.computer_list_command,
+ {
+ "computer_ids": ["3"],
+ "job_id": None,
+ },
+ "asset-management/computers/filtered",
+ ),
+ (
+ "harmony-ep-agent-process-information-get",
+ "POST",
+ CheckPointHarmonyEndpoint.process_information_get_command,
+ {
+ "computer_ids": ["3"],
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/process/information",
+ ),
+ (
+ "harmony-ep-agent-process-terminate",
+ "POST",
+ CheckPointHarmonyEndpoint.process_terminate_command,
+ {
+ "computer_ids": ["3"],
+ "name": "test",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/process/terminate",
+ ),
+ (
+ "harmony-ep-agent-registry-key-add",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_registry_key_add_command,
+ {
+ "computer_ids": ["3"],
+ "hive": "hive",
+ "key": "key",
+ "value_name": "value_name",
+ "value_type": "STRING (REG_GZ)",
+ "value_data": "value_data",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/registry/key/add",
+ ),
+ (
+ "harmony-ep-agent-registry-key-delete",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_registry_key_delete_command,
+ {
+ "computer_ids": ["3"],
+ "hive": "hive",
+ "key": "key",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/registry/key/delete",
+ ),
+ (
+ "harmony-ep-agent-file-copy",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_file_copy_command,
+ {
+ "computer_ids": ["3"],
+ "destination_absolute_path": "destination_absolute_path",
+ "source_absolute_path": "source_absolute_path",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/file/copy",
+ ),
+ (
+ "harmony-ep-agent-file-move",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_file_move_command,
+ {
+ "computer_ids": ["3"],
+ "destination_absolute_path": "destination_absolute_path",
+ "source_absolute_path": "source_absolute_path",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/file/move",
+ ),
+ (
+ "harmony-ep-agent-file-delete",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_file_delete_command,
+ {
+ "computer_ids": ["3"],
+ "target_absolute_path": "target_absolute_path",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/file/delete",
+ ),
+ (
+ "harmony-ep-agent-vpn-site-add",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_vpn_site_add_command,
+ {
+ "computer_ids": ["3"],
+ "remote_access_gateway_name": "remote_access_gateway_name",
+ "fingerprint": "fingerprint",
+ "host": "host",
+ "authentication_method": "authentication_method",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/vpn/site/add",
+ ),
+ (
+ "harmony-ep-agent-vpn-site-remove",
+ "POST",
+ CheckPointHarmonyEndpoint.agent_vpn_site_remove_command,
+ {
+ "computer_ids": ["3"],
+ "display_name": "display_name",
+ "inform_user": True,
+ "allow_postpone": True,
+ "job_id": None,
+ },
+ "remediation/agent/vpn/site/remove",
+ ),
+ ],
+)
+def test_all_schedule_commands(
+ requests_mock,
+ mock_client: CheckPointHarmonyEndpoint.Client,
+ command_name: str,
+ request_method: str,
+ request_function: Callable,
+ command_args: dict[str, Any],
+ endpoint: str,
+):
+ """
+ Scenario:
+ - Test each command function that schedules a polling operation.
+
+ Given:
+ - Arguments for the command.
+
+ When:
+ - Executing the given command function.
+
+ Then:
+ - Ensure that the schedule_command is called with the appropriate arguments.
+ """
+ requests_mock.request(
+ request_method,
+ f"{API_URL}/{endpoint}",
+ json={"jobId": "tg1108"},
+ )
+
+ with (
+ unittest.mock.patch(
+ "CheckPointHarmonyEndpoint.schedule_command"
+ ) as mock_schedule_command,
+ unittest.mock.patch("demistomock.command", return_value=command_name),
+ ):
+ request_function(command_args, mock_client)
+ mock_schedule_command.assert_called_once_with(
+ command_args, mock_client, command_name
+ )
+
+
+# test helper commands
+
+
+@pytest.mark.parametrize(
+ "page_size, page, limit", [(-1, 0, 10), (5, -1, 5), (5, 5, -1)]
+)
+def test_validate_pagination_arguments(page_size, page, limit):
+ """
+ Given:
+ - invalid values of page_size, page and limit
+
+ When:
+ - executing validate_pagination_arguments function
+
+ Then:
+ - Ensure that ValueError is raised
+ """
+
+ with pytest.raises(ValueError):
+ CheckPointHarmonyEndpoint.validate_pagination_arguments(
+ page=page, page_size=page_size, limit=limit
+ )
+
+
+@pytest.mark.parametrize(
+ "args,expected",
+ [
+ ({"limit": "10"}, (0, 10, "Showing page 1.\nCurrent page size: 10.")),
+ (
+ {"page": "2", "page_size": "5"},
+ (1, 5, "Showing page 2.\nCurrent page size: 5."),
+ ),
+ (
+ {"page": "3", "page_size": "5", "limit": "15"},
+ (2, 5, "Showing page 3.\nCurrent page size: 5."),
+ ),
+ ],
+)
+def test_get_pagination_args(args: dict[str, str], expected):
+ """Test get_pagination_args function.
+
+ Args:
+ args (dict[str, str]): Pagination arguments.
+ expected (tuple): Updated pagination arguments and pagination message.
+ """
+ with unittest.mock.patch(
+ "CommonServerPython.arg_to_number",
+ side_effect=lambda x: int(x) if x is not None else None,
+ ):
+ with unittest.mock.patch(
+ "CheckPointHarmonyEndpoint.validate_pagination_arguments"
+ ) as mock_validate:
+ assert CheckPointHarmonyEndpoint.get_pagination_args(args) == expected
+ mock_validate.assert_called()
+
+
+def test_validate_filter_arguments():
+ """Test validate_filter_arguments function and ensure that ValueError is raised."""
+ with pytest.raises(ValueError) as exc_info:
+ CheckPointHarmonyEndpoint.validate_filter_arguments(
+ column_name="invalid_name", filter_type="equals"
+ )
+ assert "'column_name' must be one of the followings" in str(exc_info.value)
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/README.md b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/README.md
new file mode 100644
index 000000000000..0326adeaf88a
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/README.md
@@ -0,0 +1,2608 @@
+Check Point Harmony Endpoint provides a complete endpoint security solution built to protect organizations and the remote workforce from today's complex threat landscape.
+This integration was integrated and tested with version 1 of CheckPointHarmonyEndpoint.
+
+## Configure Check Point Harmony Endpoint on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Check Point Harmony Endpoint.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Required** |
+ | --- | --- |
+ | Base URL | True |
+ | Client ID | True |
+ | Secret Key | True |
+ | Trust any certificate (not secure) | False |
+ | Use system proxy settings | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
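+If you need to verify the Client ID and Secret Key outside of Cortex XSOAR, a token exchange of the following shape can be used. This is a minimal, hedged sketch: the `/auth/external` path, the `clientId`/`accessKey` field names, and the response shape are assumptions based on the Check Point Infinity Portal external API and should be verified against Check Point's API documentation.
+
+```python
+# Hypothetical credential check; the endpoint path, field names, and response shape are assumptions.
+import requests
+
+BASE_URL = "https://cloudinfra-gw.portal.checkpoint.com"  # adjust to your Infinity Portal region
+
+
+def get_token(client_id: str, secret_key: str) -> str:
+    """Exchange the API key pair for a short-lived bearer token (assumed flow)."""
+    response = requests.post(
+        f"{BASE_URL}/auth/external",
+        json={"clientId": client_id, "accessKey": secret_key},
+        timeout=60,
+    )
+    response.raise_for_status()
+    # The token location inside the response body is an assumption.
+    return response.json().get("data", {}).get("token", "")
+```
+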
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### harmony-ep-job-status-get
+
+***
+Retrieves the status and result (if any) of a given asynchronous operation. A job is a way to monitor the progress of an asynchronous operation while avoiding issues that may manifest during long synchronous waits.
+
+#### Base Command
+
+`harmony-ep-job-status-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| job_id | The ID of the operation to query the status of. A job ID is returned by most of the commands in this integration and can be found in the context output. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.Job.data | String | The job data. |
+| HarmonyEP.Job.status | String | The job status. |
+
+#### Command example
+```!harmony-ep-job-status-get job_id=23```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "Job": {
+ "data": {
+ "data": [
+ {
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ },
+ {
+ "machine": {
+ "id": "2",
+ "name": "DESKTOP-2"
+ },
+ "operation": {
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ],
+ "metadata": {
+ "count": 2,
+ "from": 0,
+ "to": 100
+ }
+ },
+ "status": "DONE",
+ "statusCode": 200,
+ "statusType": 2
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Results
+>|data|status|statusCode|statusType|
+>|---|---|---|---|
+>| data: {'machine': {'id': '1', 'name': 'DESKTOP-1'}, 'operation': {'response': None, 'status': 'DA_NOT_INSTALLED'}},
+{'machine': {'id': '2', 'name': 'DESKTOP-2'}, 'operation': {'response': None, 'status': 'DA_NOT_INSTALLED'}}
+metadata: {"from": 0, "to": 100, "count": 2} | DONE | 200 | 2 |
+
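+Most commands in this integration return a `job_id` in their context output, so a custom automation can poll this command until the job reports `DONE`. The snippet below is a minimal, hedged sketch of such a loop; the helper name and the `max_attempts`/`interval_seconds` values are illustrative and not part of the integration, which already provides built-in polling via the `interval` and `timeout` arguments.
+
+```python
+import time
+
+import demistomock as demisto  # injected as `demisto` in the XSOAR script runtime
+
+
+def wait_for_harmony_job(job_id, max_attempts=20, interval_seconds=30):
+    """Poll harmony-ep-job-status-get until the job reports DONE (illustrative values)."""
+    for _ in range(max_attempts):
+        entries = demisto.executeCommand("harmony-ep-job-status-get", {"job_id": job_id})
+        if demisto.get(entries[0], "Contents.status") == "DONE":
+            return entries[0]["Contents"]
+        time.sleep(interval_seconds)
+    raise RuntimeError("Job {} did not complete in time".format(job_id))
+```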
+
+### harmony-ep-ioc-list
+
+***
+Gets a list of all Indicators of Compromise. Use the filter parameters to fetch specific IOCs.
+
+#### Base Command
+
+`harmony-ep-ioc-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| filter | The indicator value or comment to search for. The filter is case-insensitive. For example, the filter 'efg' will match the IOCs 'abcdEFG', 'efGGG', and 'yEfG'. | Optional |
+| field | The Indicator of Compromise field to search by. Possible values are: iocValue, iocComment. Default is iocValue. | Optional |
+| sort_direction | The way to sort the results. Possible values are: ASC, DESC. Default is DESC. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.IOC.comment | String | The IOC comment. |
+| HarmonyEP.IOC.modifiedOn | Number | The time the IOC was modified. |
+| HarmonyEP.IOC.value | String | The IOC value. |
+| HarmonyEP.IOC.type | String | The IOC type. |
+| HarmonyEP.IOC.id | String | The IOC ID. |
+
+#### Command example
+```!harmony-ep-ioc-list```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "IOC": [
+ {
+ "comment": "test",
+ "id": "3",
+ "modifiedOn": "2024-04-03T09:15:04.182Z",
+ "type": "Domain",
+ "value": "test2.com"
+ },
+ {
+ "comment": "comment",
+ "id": "4",
+ "modifiedOn": "2024-05-20T13:14:28.290Z",
+ "type": "Domain",
+ "value": "test1.com"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### IOC List:
+>Showing page 1.
+>Current page size: 50.
+>|Id|Type|Value|Comment|Modifiedon|
+>|---|---|---|---|---|
+>| 3 | Domain | test2.com | test | 2024-04-03T09:15:04.182Z |
+>| 4 | Domain | test1.com | comment | 2024-05-20T13:14:28.290Z |
+
+
+### harmony-ep-ioc-update
+
+***
+Updates the given Indicators of Compromise with the given parameters.
+
+#### Base Command
+
+`harmony-ep-ioc-update`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ioc_id | The ID of the IOC to update. Use harmony-ep-ioc-list command to get all IOC IDs. | Required |
+| comment | The IOC comment to update. | Required |
+| value | The IOC value to update. | Required |
+| type | The IOC type to update. Possible values are: Domain, IP, URL, MD5, SHA1. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.IOC.comment | String | The IOC comment. |
+| HarmonyEP.IOC.modifiedOn | Number | The time the IOC was modified. |
+| HarmonyEP.IOC.value | String | The IOC value. |
+| HarmonyEP.IOC.type | String | The IOC type. |
+| HarmonyEP.IOC.id | String | The IOC ID. |
+
+#### Command example
+```!harmony-ep-ioc-update ioc_id=8 comment=test value=8.8.8.8 type=IP```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "IOC": {
+ "comment": "test",
+ "id": "8",
+ "modifiedOn": "2024-06-24T06:44:49.214Z",
+ "type": "IP",
+ "value": "8.8.8.8"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### IOC 8 was updated successfully.
+>|Id|Type|Value|Comment|Modifiedon|
+>|---|---|---|---|---|
+>| 8 | IP | 8.8.8.8 | test | 2024-06-24T06:44:49.214Z |
+
+
+### harmony-ep-ioc-create
+
+***
+Creates new Indicators of Compromise using the given parameters.
+
+#### Base Command
+
+`harmony-ep-ioc-create`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | The IOC comment. | Required |
+| value | The IOC value. For example, 8.8.8.8 for IP or example.com for Domain. | Required |
+| type | The IOC type. Possible values are: Domain, IP, URL, MD5, SHA1. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+#### Command example
+```!harmony-ep-ioc-create comment=test value=1.1.1.2 type=IP```
+#### Human Readable Output
+
+>IOC was created successfully.
+
+### harmony-ep-ioc-delete
+
+***
+Deletes the given Indicators of Compromise by their ID.
+
+#### Base Command
+
+`harmony-ep-ioc-delete`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ids | A comma-separated list of IOC IDs to delete. Use harmony-ep-ioc-list command to get all IOC IDs. | Optional |
+| delete_all | Whether to delete all IOCs. This action permanently deletes all Indicators of Compromise and cannot be undone. Possible values are: true, false. Default is false. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+#### Command example
+```!harmony-ep-ioc-delete ids=7```
+#### Human Readable Output
+
+>IOCs 7 was deleted successfully.
+
+### harmony-ep-policy-rule-assignments-get
+
+***
+Gets all entities directly assigned to the given rule.
+
+#### Base Command
+
+`harmony-ep-policy-rule-assignments-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_id | The ID of the rule to get the assignments. Use harmony-ep-rule-metadata-list command to get all rule IDs. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.Rule.Assignments.type | String | The rule assignment type. |
+| HarmonyEP.Rule.Assignments.name | String | The rule assignment name. |
+| HarmonyEP.Rule.Assignments.id | String | The rule assignment ID. |
+
+#### Command example
+```!harmony-ep-policy-rule-assignments-get rule_id=1a2b```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "Rule": {
+ "assignments": [
+ {
+ "id": "456",
+ "name": "ChromeOsLaptops",
+ "type": "VIRTUAL_GROUP"
+ }
+ ],
+ "id": "1a2b"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Rule 1a2b assignments:
+>|Id|Name|Type|
+>|---|---|---|
+>| 456 | ChromeOsLaptops | VIRTUAL_GROUP |
+
+
+### harmony-ep-policy-rule-assignments-add
+
+***
+Assigns the specified entities to the given rule. Specified IDs that are already assigned to the rule are ignored.
+
+#### Base Command
+
+`harmony-ep-policy-rule-assignments-add`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_id | The ID of the rule to add assignments to. Use harmony-ep-rule-metadata-list command to get all rule IDs. | Required |
+| entities_ids | The entity IDs to assign. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+#### Command example
+```!harmony-ep-policy-rule-assignments-add rule_id=1a2b entities_ids=000```
+#### Human Readable Output
+
+>Entities ['000'] were assigned to rule 1a2b successfully.
+
+### harmony-ep-policy-rule-assignments-remove
+
+***
+Removes the specified entities from the given rule's assignments. Specified IDs that are not assigned to the rule are ignored.
+
+#### Base Command
+
+`harmony-ep-policy-rule-assignments-remove`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_id | The ID of the rule to remove assignments from. Use harmony-ep-rule-metadata-list command to get all rule IDs. | Required |
+| entities_ids | The entity IDs to remove. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+#### Command example
+```!harmony-ep-policy-rule-assignments-remove rule_id=1a2b entities_ids=000```
+#### Human Readable Output
+
+>Entities ['000'] were removed from rule 1a2b successfully.
+
+### harmony-ep-policy-rule-install
+
+***
+Installs all policies.
+
+#### Base Command
+
+`harmony-ep-policy-rule-install`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.PolicyRuleInstall.job_id | String | The job ID of the policy installation. |
+
+#### Command example
+```!harmony-ep-policy-rule-install job_id=976```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "PolicyRuleInstall": {
+ "job_id": "976"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Policy was installed successfully.
+>Job ID: 976
+>**No entries.**
+
+
+### harmony-ep-policy-rule-modifications-get
+
+***
+Gets information on modifications to a given rule. (Modifications are the addition or removal of assignments on a rule since it was last installed.)
+
+#### Base Command
+
+`harmony-ep-policy-rule-modifications-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_id | The ID of the rule to get the modifications of. Use harmony-ep-rule-metadata-list command to get all rule IDs. | Required |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.Rule.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.Rule.order | Number | Rule order. |
+| HarmonyEP.Rule.isDefaultRule | Boolean | Whether or not the rule is the default. |
+| HarmonyEP.Rule.family | String | A family in the rule-base \(legacy and unified\). |
+| HarmonyEP.Rule.connectionState | String | Rule connection state. |
+| HarmonyEP.Rule.comment | String | Rule comment. |
+| HarmonyEP.Rule.assignments.type | String | Rule assignments type. |
+| HarmonyEP.Rule.assignments.name | String | Rule assignments name. |
+| HarmonyEP.Rule.assignments.id | String | Rule assignments ID. |
+| HarmonyEP.Rule.name | String | Rule name. |
+| HarmonyEP.Rule.id | String | Rule ID. |
+| HarmonyEP.Rule.orientation | String | Rule policy orientation. |
+
+#### Command example
+```!harmony-ep-policy-rule-modifications-get rule_id=1a2b job_id=999```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "Rule": {
+ "connectionState": "CONNECTED",
+ "family": "Access",
+ "id": "1a2b",
+ "job_id": "999",
+ "lastModifiedBy": "talg",
+ "lastModifiedOn": {
+ "iso-8601": "2024-06-24T09:04:43.000Z",
+ "posix": 1719219883000
+ },
+ "modified": {
+ "assignments": {
+ "modified": false
+ },
+ "order": {
+ "modified": false
+ },
+ "settings": {
+ "modified": true
+ }
+ },
+ "name": "New Rule 1"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Rule 1a2b modification:
+>Job ID: 999
+>|Id|Name|Family|Connectionstate|Lastmodifiedby|Job Id|
+>|---|---|---|---|---|---|
+>| 1a2b | New Rule 1 | Access | CONNECTED | talg | 999 |
+
+
+### harmony-ep-policy-rule-metadata-list
+
+***
+Gets the metadata of all rules, or the given rule's metadata. (Metadata refers to all information relating to the rule except its actual settings.)
+
+#### Base Command
+
+`harmony-ep-policy-rule-metadata-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_id | The ID of the rule to get the metadata of. | Optional |
+| rule_family | An optional 'Rule Family' filter. Used to filter the results to only the selected rule family (e.g., only 'Threat Prevention'). Possible values are: General Settings, Threat Prevention, Data Protection, OneCheck, Deployment, Remote Access VPN, Capsule Docs, Access, Agent Settings. | Optional |
+| connection_state | An optional 'Connection State' filter. Used to filter the results to only the selected Connection State (e.g., only rules pertaining to policies for connected clients). Possible values are: CONNECTED, DISCONNECTED, RESTRICTED. | Optional |
+| limit | The maximum number of rules to return. Default is 50. | Optional |
+| all_results | Whether to return all of the results or not. Possible values are: true, false. Default is false. | Optional |
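+
+As an illustration of combining the optional filters above (the values are placeholders, not taken from a real tenant):
+
+```!harmony-ep-policy-rule-metadata-list rule_family="Threat Prevention" connection_state=CONNECTED limit=10```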
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.Rule.order | Number | Rule order. |
+| HarmonyEP.Rule.isDefaultRule | Boolean | Whether or not the rule is the default. |
+| HarmonyEP.Rule.family | String | A family in the rule-base \(legacy and unified\). |
+| HarmonyEP.Rule.connectionState | String | Rule connection state. |
+| HarmonyEP.Rule.comment | String | Rule comment. |
+| HarmonyEP.Rule.assignments.type | String | Rule assignments type. |
+| HarmonyEP.Rule.assignments.name | String | Rule assignments name. |
+| HarmonyEP.Rule.assignments.id | String | Rule assignments ID. |
+| HarmonyEP.Rule.name | String | Rule name. |
+| HarmonyEP.Rule.id | String | Rule ID. |
+| HarmonyEP.Rule.orientation | String | Rule policy orientation. |
+
+#### Command example
+```!harmony-ep-policy-rule-metadata-list rule_id=1a2b```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "Rule": {
+ "assignments": [
+ {
+ "id": "000",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ },
+ {
+ "id": "456",
+ "name": "ChromeOsLaptops",
+ "type": "VIRTUAL_GROUP"
+ }
+ ],
+ "comment": "",
+ "connectionState": "CONNECTED",
+ "family": "Threat Prevention",
+ "id": "1a2b",
+ "isDefaultRule": true,
+ "name": "TalTest",
+ "order": 2,
+ "orientation": "DEVICE"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Rule 1a2b metadata:
+>|Id|Name|Family|Comment|Orientation|Connectionstate|Assignments|
+>|---|---|---|---|---|---|---|
+>| 1a2b | TalTest | Threat Prevention | | DEVICE | CONNECTED | {'id': '000', 'name': 'Entire Organization', 'type': 'ORGANIZATION_ROOT'},
+{'id': '456', 'name': 'ChromeOsLaptops', 'type': 'VIRTUAL_GROUP'} |
+
+
+### harmony-ep-push-operation-status-list
+
+***
+Gets the current statuses of all remediation operations, or, if a specific ID is specified, retrieves the current status of the given remediation operation.
+
+#### Base Command
+
+`harmony-ep-push-operation-status-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| remediation_operation_id | Remediation operations ID. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.PushOperation.adminName | String | The name of the administrator who initiated the operation. |
+| HarmonyEP.PushOperation.aborted | Boolean | Indicates whether the operation was aborted by an administrator. |
+| HarmonyEP.PushOperation.remainingTimeoutSeconds | Number | The amount of time, in seconds, the operation will remain active. When elapsed, no more entities will be affected. |
+| HarmonyEP.PushOperation.createdOn | Date | The date and time the operation was created. |
+| HarmonyEP.PushOperation.type | String | Remediation operation type. |
+| HarmonyEP.PushOperation.comment | String | A comment that was provided during the operation's creation. |
+| HarmonyEP.PushOperation.id | String | The operation's ID. |
+| HarmonyEP.PushOperation.overallStatus | String | Remediation operation status. |
+| HarmonyEP.PushOperation.numberOfAffectedEntities | Number | The total number of entities affected by the operation. |
+
+#### Command example
+```!harmony-ep-push-operation-status-list remediation_operation_id=4d```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "PushOperation": {
+ "aborted": true,
+ "adminName": "talg",
+ "createdOn": "2024-06-20T10:58:19.407Z",
+ "id": "d45",
+ "job_id": "3",
+ "numberOfAffectedEntities": 6,
+ "operationParameters": {
+ "allowPostpone": false,
+ "informUser": true,
+ "originalTimeoutSeconds": 86400,
+ "schedulingType": "IMMEDIATE"
+ },
+ "overallStatus": "ABORTED",
+ "remainingTimeoutSeconds": 0,
+ "type": "AM_SCAN"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Push operations status list:
+>Job ID: 3
+>|Id|Type|Createdon|Overallstatus|
+>|---|---|---|---|
+>| d45 | AM_SCAN | 2024-06-20T10:58:19.407Z | ABORTED |
+
+
+### harmony-ep-push-operation-get
+
+***
+Gets the results of a given Remediation Operation. Remediation Operations may produce results such as a Forensics Report, or yield status updates such as anti-malware scan progress.
+
+#### Base Command
+
+`harmony-ep-push-operation-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| remediation_operation_id | Remediation operation ID. Use the harmony-ep-push-operation-status-list command to get all remediation operation IDs. | Required |
+| filter_text | Optional free text search in any of the potential response fields excluding "id". Can be used to search for specific results, devices or IPs, for example. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
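+
+For illustration only, a hypothetical paginated query that narrows the results with a free-text search (the operation ID and search value are placeholders):
+
+```!harmony-ep-push-operation-get remediation_operation_id=4d filter_text=DESKTOP page=1 page_size=20```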
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-push-operation-get remediation_operation_id=4d```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "PushOperation": [
+ {
+ "job_id": "6",
+ "machine": {
+ "id": "5s",
+ "name": "DESKTOP-M4OAKII"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Push operations:
+>Job ID: 6
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 5s | DESKTOP-M4OAKII | DA_NOT_INSTALLED |
+
+
+### harmony-ep-push-operation-abort
+
+***
+Aborts the given remediation operation. Aborting an operation prevents it from being sent to further Harmony Endpoint Clients. Clients that have already received the operation are not affected.
+
+#### Base Command
+
+`harmony-ep-push-operation-abort`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| remediation_operation_id | Remediation operation ID. Use the harmony-ep-push-operation-status-list command to get all remediation operation IDs. | Required |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.PushOperationAbort.job_id | String | The job ID of the remediation operation. |
+
+#### Command example
+```!harmony-ep-push-operation-abort remediation_operation_id=93 job_id=976```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "PushOperationAbort": {
+ "job_id": "976"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Remediation operation abort was added to the push operation list successfully.
+>Job ID: 976
+>**No entries.**
+
+
+### harmony-ep-anti-malware-scan
+
+***
+Performs an anti-malware scan on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-anti-malware-scan`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. See the example below this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
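+
+As a sketch of the `filter` template described above, reusing the column names and operators from that description (all values are placeholders):
+
+```!harmony-ep-anti-malware-scan filter="computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" comment="Scan suspicious hosts"```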
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.AntiMalwareScan.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.AntiMalwareScan.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.AntiMalwareScan.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.AntiMalwareScan.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.AntiMalwareScan.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.AntiMalwareScan.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.AntiMalwareScan.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.AntiMalwareScan.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-anti-malware-scan computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "AntiMalwareScan": {
+ "PushOperation": [
+ {
+ "job_id": "13",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Anti-Malware scan was added to the push operation list successfully.
+>Job ID: 13
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-anti-malware-update
+
+***
+Updates the anti-malware Signature Database on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-anti-malware-update`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| update_from_ep_server | Determines whether to update from the EP server. Possible values are: true, false. Default is false. | Optional |
+| update_from_cp_server | Determines whether to update from the CP server. Possible values are: true, false. Default is false. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
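+
+For illustration only, a hypothetical update that pulls signatures from the Check Point (CP) server instead of the EP server (the computer ID is a placeholder):
+
+```!harmony-ep-anti-malware-update computer_ids=1 update_from_cp_server=true update_from_ep_server=false```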
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.AntiMalwareUpdate.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-anti-malware-update computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "AntiMalwareUpdate": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Anti-Malware Signature Database update was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-anti-malware-restore
+
+***
+Restores a file that was previously quarantined by the Harmony Endpoint Client's anti-malware capability. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-anti-malware-restore`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| files | A list of file paths to restore. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.AntiMalwareRestore.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.AntiMalwareRestore.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-anti-malware-restore files=test computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "AntiMalwareRestore": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File restore was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-forensics-indicator-analyze
+
+***
+Collects forensics data whenever a computer that matches the given query accesses or executes the given IP, URL, filename, MD5 or path. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-forensics-indicator-analyze`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| indicator_type | The indicator type to analyze. Possible values are: IP, URL, File, MD5, Path. | Required |
+| indicator_value | A URL, IP, Path, File, or MD5 that, when accessed or executed, will trigger a forensics report. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| generate_activity_logs | Determines whether to generate detailed activity logs. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
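+
+For illustration only, a hypothetical invocation that watches for a file hash rather than an IP (the MD5 and computer name are placeholders):
+
+```!harmony-ep-forensics-indicator-analyze indicator_type=MD5 indicator_value=d41d8cd98f00b204e9800998ecf8427e computer_names=DESKTOP-1```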
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.IndicatorAnalyze.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.IndicatorAnalyze.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-forensics-indicator-analyze indicator_type=IP indicator_value=8.8.8.8 computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "IndicatorAnalyze": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### IOC analyze was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-forensics-file-quarantine
+
+***
+Quarantines files given by path or MD5, or detections relating to a forensic incident. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-forensics-file-quarantine`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file_type | The forensics quarantine item type. Possible values are: PATH, INCIDENT_ID, MD5. | Required |
+| file_value | The forensics quarantine item value. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.FileQuarantine.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.FileQuarantine.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.FileQuarantine.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.FileQuarantine.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.FileQuarantine.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.FileQuarantine.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.FileQuarantine.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.FileQuarantine.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-forensics-file-quarantine file_type=PATH file_value=test computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "FileQuarantine": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File quarantine was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-forensics-file-restore
+
+***
+Restores previously quarantined files given by path or MD5, or detections relating to a forensic incident. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-forensics-file-restore`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file_type | The forensics quarantine item type. Possible values are: PATH, INCIDENT_ID, MD5. | Required |
+| file_value | The forensics quarantine item value. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.FileRestore.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.FileRestore.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.FileRestore.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.FileRestore.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.FileRestore.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.FileRestore.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.FileRestore.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.FileRestore.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-forensics-file-restore file_type=PATH file_value=test computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "FileRestore": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File restore was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-remediation-computer-isolate
+
+***
+Isolates the computers matching the given query. Isolation is the act of denying all network access from a given computer. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-remediation-computer-isolate`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ComputerIsolate.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ComputerIsolate.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ComputerIsolate.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ComputerIsolate.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ComputerIsolate.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ComputerIsolate.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ComputerIsolate.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ComputerIsolate.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-remediation-computer-isolate computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ComputerIsolate": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Remediation isolate was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-remediation-computer-deisolate
+
+***
+De-isolates the computers matching the given query. De-isolating a computer restores its access to network resources. Affects only isolated computers. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-remediation-computer-deisolate`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ComputerDeisolate.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ComputerDeisolate.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ComputerDeisolate.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ComputerDeisolate.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ComputerDeisolate.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ComputerDeisolate.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ComputerDeisolate.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ComputerDeisolate.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-remediation-computer-deisolate computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ComputerDeisolate": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Remediation de-isolate was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-computer-restart
+
+***
+Restarts computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-computer-restart`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00”. | Optional |
+| filter | A comma-separated list of search filters, each following the template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| force_apps_shutdown | Determines whether to force applications shutdown. Possible values are: true, false. Default is false. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
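+For illustration only, the scheduling and targeting arguments above could be combined as follows; the date, expiration, and comment values are hypothetical and not taken from a recorded run:
+
+```!harmony-ep-agent-computer-restart computer_ids=1 scheduling_date_time="2024-04-12 03:59" expiration_seconds=3600 comment="Scheduled maintenance restart"```
+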
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ComputerRestart.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ComputerRestart.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ComputerRestart.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ComputerRestart.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ComputerRestart.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ComputerRestart.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ComputerRestart.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ComputerRestart.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-computer-restart computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ComputerReset": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Computer reset restore was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-computer-shutdown
+
+***
+Shuts down computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-computer-shutdown`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| force_apps_shutdown | Determines whether to force applications shutdown. Possible values are: true, false. Default is false. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
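+As a sketch of how the UserCheck-related arguments above interact, a shutdown that notifies the user, blocks postponing, and forces applications to close might look like this (the computer name is a placeholder):
+
+```!harmony-ep-agent-computer-shutdown computer_names=DESKTOP-1 inform_user=true allow_postpone=false force_apps_shutdown=true```
+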
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ComputerShutdown.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ComputerShutdown.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ComputerShutdown.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ComputerShutdown.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ComputerShutdown.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ComputerShutdown.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ComputerShutdown.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ComputerShutdown.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-computer-shutdown computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ComputerShutdown": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Computer shutdown was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-computer-repair
+
+***
+Repairs the Harmony Endpoint Client installation on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-computer-repair`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ComputerRepair.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ComputerRepair.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ComputerRepair.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ComputerRepair.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ComputerRepair.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ComputerRepair.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ComputerRepair.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ComputerRepair.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-computer-repair computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ComputerRepair": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Computer repair was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-computer-list
+
+***
+Gets a list of computers matching the given filters. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-computer-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
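+To illustrate the filter template documented above, a query that combines a Contains filter with an Exact filter and paginates the results might look like this (the IDs and IP are placeholders):
+
+```!harmony-ep-computer-list filter="computerId Contains '1,2,3' , computerIP Exact '1.1.1.1'" page=1 page_size=10 limit=10```
+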
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.Computer.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.Computer.CapabilitiesInstalled | String | A list of all installed capabilities. |
+| HarmonyEP.Computer.InstalledAndRunning | String | A list of installed and running capabilities. |
+| HarmonyEP.Computer.ClientVersion | String | The computer client version. |
+| HarmonyEP.Computer.DeployTime | String | The computer deploy time. |
+| HarmonyEP.Computer.Groups | String | The computer groups. |
+| HarmonyEP.Computer.type | String | The computer type. |
+| HarmonyEP.Computer.userName | String | The computer user name. |
+| HarmonyEP.Computer.domainName | String | The computer domain name. |
+| HarmonyEP.Computer.isolationStatus | String | The computer isolation status. |
+| HarmonyEP.Computer.LastLoggedInUser | String | The computer last login user. |
+| HarmonyEP.Computer.osName | String | The computer operating system name. |
+| HarmonyEP.Computer.osVersion | String | The computer operating system version. |
+| HarmonyEP.Computer.ip | String | The computer IP address. |
+| HarmonyEP.Computer.DeploymentStatus | String | The computer deployment status. |
+| HarmonyEP.Computer.name | String | The computer name. |
+| HarmonyEP.Computer.id | String | The computer's unique ID. |
+
+#### Command example
+```!harmony-ep-computer-list computer_ids=1 job_id=845```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "Computer": {
+ "Computer": [
+ {
+ "client_version": "87.62.2002",
+ "deployment_status": "Completed",
+ "domain_name": ".WORKGROUP",
+ "groups": [
+ {
+ "id": "666",
+ "name": "Desktops"
+ },
+ {
+ "id": "222",
+ "name": "WinDesktops"
+ }
+ ],
+ "id": "888",
+ "ip": "1.1.1.1",
+ "isolation_status": "Not Isolated",
+ "last_logged_in_user": "ntlocal",
+ "name": "DESKTOP-E7V07D5",
+ "os_name": "Microsoft Windows 10 Pro",
+ "os_version": "10.0-19045-SP0.0-SMP",
+ "type": "Desktop",
+ "user_name": "ntlocal"
+ }
+ ],
+ "job_id": "845"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Computer list:
+>Job ID: 845
+>
+>Showing page 1.
+>Current page size: 50.
+>|Id|Name|Ip|Type|Groups|User Name|Client Version|
+>|---|---|---|---|---|---|---|
+>| 888 | DESKTOP-E7V07D5 | 1.1.1.1 | Desktop | {'id': '666', 'name': 'Desktops'}, {'id': '222', 'name': 'WinDesktops'} | ntlocal | 87.62.2002 |
+
+
+### harmony-ep-agent-process-information-get
+
+***
+Collects information about processes on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-process-information-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| process_name | The name of the process to collect information on. If not provided, all running processes will be collected. | Optional |
+| additional_fields | Additional process properties to collect. If not provided, only the process's name and ID will be collected. Possible values are: SI, Handles, VM, WS, PM, NPM, Path, CPU, ExitCode, ExitTime, Handle, HandleCount, HasExited, Id, MachineName, MainModule, MainWindowHandle, MainWindowTitle, MaxWorkingSet, MinWorkingSet, Modules, NonpagedSystemMemorySize, NonpagedSystemMemorySize64, PagedMemorySize, PagedMemorySize64, PagedSystemMemorySize, PagedSystemMemorySize64, PeakPagedMemorySize, PeakPagedMemorySize64, PeakVirtualMemorySize, PeakVirtualMemorySize64, PeakWorkingSet, PeakWorkingSet64, PriorityBoostEnabled, PriorityClass, PrivateMemorySize, PrivateMemorySize64, PrivilegedProcessorTime, ProcessName, ProcessorAffinity, Responding, SafeHandle, SessionId, StandardError, StandardInput, StandardOutput, StartInfo, StartTime, SynchronizingObject, Threads, TotalProcessorTime, UserProcessorTime, VirtualMemorySize, VirtualMemorySize64, WorkingSet, WorkingSet64. | Optional |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
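+As an illustrative variant of the example below, the collection can be narrowed to a single process and a handful of extra properties with process_name and additional_fields (the process name is a placeholder):
+
+```!harmony-ep-agent-process-information-get computer_ids=1 process_name=notepad.exe additional_fields=Path,CPU,WorkingSet```
+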
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ProcessInformation.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ProcessInformation.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ProcessInformation.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ProcessInformation.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ProcessInformation.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ProcessInformation.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ProcessInformation.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ProcessInformation.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-process-information-get computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ProcessInformation": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Process information fetch was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-process-terminate
+
+***
+Terminates the given process on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-process-terminate`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| terminate_all_instances | Indicates whether to terminate all processes matching the given name. If set to true while a non-zero PID is given, only a single process with the given name AND PID may be matched. If set to false or not provided, only the first matching process is terminated. Possible values are: true, false. Default is false. | Optional |
+| name | The name of the process to terminate. | Required |
+| pid | The ID (PID) of the process to terminate. When used in conjunction with the name field, the PID must match the named process. If both name and PID are provided but the process matching the PID does not match the provided name, the operation will be ignored by the agent. If set to 0 or not provided, the agent will seek to terminate the process or processes as indicated by the name field. | Optional |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
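+For reference, terminating every instance of a process by name (rather than a single PID) could be expressed as follows; the process name is a placeholder:
+
+```!harmony-ep-agent-process-terminate computer_ids=1 name=notepad.exe terminate_all_instances=true```
+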
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.ProcessTerminate.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.ProcessTerminate.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.ProcessTerminate.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.ProcessTerminate.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.ProcessTerminate.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.ProcessTerminate.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.ProcessTerminate.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.ProcessTerminate.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-process-terminate name=test computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "ProcessTerminate": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Process terminate was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-registry-key-add
+
+***
+Adds a given registry key and/or value to the registry of computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-registry-key-add`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| is_redirected | Determines whether the key should reside under WOW6432Node. Keys intended for 64-bit versions of Windows may target 32-bit versions by setting this value to 'true', thus specifying that the registry key/value be added under the WOW6432Node. Possible values are: true, false. | Optional |
+| value_data | The actual value to be added to the specified registry key. | Required |
+| value_type | A registry value's type. Possible values are: DWORD (REG_DWORD), STRING (REG_GZ). | Required |
+| value_name | The name of the value to be added to the specified registry key. | Required |
+| key | The full path of the key to create or add a value to. For example, 'SOFTWARE\Node.js\Components'. | Required |
+| hive | Defines known Windows Registry Hives. For more information, see https://docs.microsoft.com/en-us/windows/win32/sysinfo/predefined-keys. Possible values are: HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, HKEY_CLASSES_ROOT, HKEY_USERS, HKEY_CURRENT_CONFIG. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
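+A hypothetical invocation that writes a string value under the key path used as the example above might look like this; the value name and data are placeholders, and the value_type string follows the possible values listed in the table:
+
+```!harmony-ep-agent-registry-key-add computer_ids=1 hive=HKEY_LOCAL_MACHINE key="SOFTWARE\Node.js\Components" value_name=InstallNotes value_type="STRING (REG_GZ)" value_data="added by automation" is_redirected=false```
+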
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.RegistryKeyAdd.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.RegistryKeyAdd.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-registry-key-add value_data=test value_type="STRING (REG_GZ)" value_name=test key=test hive=HKEY_USERS computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "RegistryKeyAdd": {
+ "PushOperation": {
+ "job_id": "54",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": "88",
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Registry key add was added to the push operation list successfully.
+>Job ID: 54
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+### harmony-ep-agent-registry-key-delete
+
+***
+Removes the given registry key or value from the registry of computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-registry-key-delete`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| is_redirected | Determines whether the key should be removed from under WOW6432Node. Keys intended for 64-bit versions of Windows may target 32-bit versions by setting this value to 'true', thus specifying that the registry key/value be removed from under the WOW6432Node. Possible values are: true, false. | Optional |
+| value_name | The value to remove from the key. If not provided, the entire key will be deleted. | Optional |
+| key | The full path of the key to delete or remove a value from. For example, 'SOFTWARE\Node.js\Components'. | Required |
+| hive | Defines known Windows Registry Hives. For more information, see https://docs.microsoft.com/en-us/windows/win32/sysinfo/predefined-keys. Possible values are: HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, HKEY_CLASSES_ROOT, HKEY_USERS, HKEY_CURRENT_CONFIG. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
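+Because omitting value_name deletes the entire key, a hypothetical invocation that removes the example key path documented above in full would be:
+
+```!harmony-ep-agent-registry-key-delete computer_ids=1 hive=HKEY_LOCAL_MACHINE key="SOFTWARE\Node.js\Components"```
+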
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.RegistryKeyDelete.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.RegistryKeyDelete.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-registry-key-delete value_name='test' key='test' hive=HKEY_USERS computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "RegistryKeyDelete": {
+ "PushOperation": {
+ "job_id": "54",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": "88",
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Registry key delete was added to the push operation list successfully.
+>Job ID: 54
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+### harmony-ep-agent-file-copy
+
+***
+Copies the given file from the given source to the given destination on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-file-copy`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| destination_absolute_path | The absolute, full destination path. The provided path must include the target file's name (e.g., c:\backup\backup1.txt). | Required |
+| source_absolute_path | The absolute, full source path (e.g., c:\backup\backup1.txt). | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
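+Using the path format from the argument descriptions above, a hypothetical copy of a backup file to a second location might look like this:
+
+```!harmony-ep-agent-file-copy computer_ids=1 source_absolute_path="c:\backup\backup1.txt" destination_absolute_path="c:\backup\copies\backup1.txt"```
+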
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.FileCopy.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.FileCopy.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.FileCopy.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.FileCopy.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.FileCopy.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.FileCopy.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.FileCopy.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.FileCopy.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-file-copy destination_absolute_path='test.txt' source_absolute_path='test.txt' computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "FileCopy": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File copy was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-file-move
+
+***
+Moves the given file from the given source to the given destination on computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-file-move`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| destination_absolute_path | The absolute, full destination path. The provided path must include the target file's name (e.g., c:\backup\backup1.txt). | Required |
+| source_absolute_path | The absolute, full source path (e.g., c:\backup\backup1.txt). | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.FileMove.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.FileMove.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.FileMove.PushOperation.status | String | The state of the push operation with regard to the specific device. |
+| HarmonyEP.FileMove.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.FileMove.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.FileMove.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.FileMove.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.FileMove.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-file-move destination_absolute_path='test.txt' source_absolute_path='test.txt' computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "FileMove": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File move was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-file-delete
+
+***
+Deletes the given file from the given source on computers matching the given query. This operation is risky! Use with caution as it allows you to change Harmony Endpoint protected files or registry entries that are in use by your operating system. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-file-delete`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| target_absolute_path | The absolute, full path of the file to remove. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection time range (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters according to the following template: "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" matches computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For the supported 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For the supported 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. An illustrative invocation combining several of these arguments follows this table. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
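+The `filter` argument can be used instead of, or in addition to, the computer ID/name/IP arguments. A minimal sketch that follows the filter template documented above (all values are purely illustrative):
+
+```!harmony-ep-agent-file-delete target_absolute_path='test.txt' filter="computerIP Exact '1.1.1.1'" comment="remove suspicious file"```
+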
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.FileDelete.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.FileDelete.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.FileDelete.PushOperation.status | String | The push operation status with regard to the specific device. |
+| HarmonyEP.FileDelete.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.FileDelete.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.FileDelete.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.FileDelete.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.FileDelete.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-file-delete target_absolute_path='test.txt' computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "FileDelete": {
+ "PushOperation": [
+ {
+ "job_id": "16",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": null,
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ ]
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### File delete was added to the push operation list successfully.
+>Job ID: 16
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-vpn-site-add
+
+***
+Adds the given VPN site's configuration to computers matching the given query. Adding a VPN site allows Harmony Endpoint clients to connect to it. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-vpn-site-add`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| remote_access_gateway_name | The remote gateway's name. | Required |
+| fingerprint | The remote gateway's certificate fingerprint. Fingerprints are used to verify the authenticity of the gateway. | Required |
+| authentication_method | Authentication methods used in conjunction with VPN site standard login. Possible values are: CERTIFICATE, P12_CERTIFICATE, USERNAME_PASSWORD, SECURID_KEY_FOB, SECURID_PIN_PAD, SOFTID, CHALLENGE_RESPONSE. | Required |
+| display_name | The VPN site's display name. | Optional |
+| host | The target site's host name or IP address. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters following the template "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" refers to computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.status | String | The push operation status with regard to the specific device. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.VPNsiteConfigurationAdd.PushOperation.machine.id | String | The client device's unique ID. |
+
+#### Command example
+```!harmony-ep-agent-vpn-site-add remote_access_gateway_name='test' fingerprint='test' authentication_method=CERTIFICATE host='test' computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "VPNsiteConfigurationAdd": {
+ "PushOperation": {
+ "job_id": "67",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": "23",
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### VPN site configuration add was added to the push operation list successfully.
+>Job ID: 67
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
+
+
+### harmony-ep-agent-vpn-site-remove
+
+***
+Removes the given VPN site's configuration from computers matching the given query. Note that you must specify at least one of the following filter arguments: computer_ids, computer_names, computer_ips, computer_group_names, computer_types, computer_deployment_status, computer_last_connection, or filter.
+
+#### Base Command
+
+`harmony-ep-agent-vpn-site-remove`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| display_name | The display name of the VPN site to remove. If a display name was not provided during the site's creation, the host name/IP should be used instead. | Required |
+| comment | Operation comment. | Optional |
+| scheduling_date_time | Start the operation on a given date and time. If not specified, defaults to 'Now' (i.e. immediate execution). For example, “2024-04-12 03:59”. | Optional |
+| expiration_seconds | The amount of time, in seconds, the operation will be valid for. When the specified time has elapsed, the operation will expire and will not be pushed to any more clients. If not specified, defaults to 86400 seconds (24 hours). Minimum value is 1. | Optional |
+| computer_ids | A comma-separated list of computer IDs to include in the operation. | Optional |
+| computer_names | A comma-separated list of computer names to include in the operation. | Optional |
+| computer_ips | A comma-separated list of computer IPs to include in the operation. | Optional |
+| computer_types | A comma-separated list of computer types to include in the operation. Possible values are: Desktop, Laptop, N/A, Domain Controller, Server. | Optional |
+| computer_deployment_statuses | A comma-separated list of computer deployment statuses to include in the operation. Possible values are: Retrying, Error, Scheduled, Downloading, Deploying, Completed, Failed, Uninstalling, Not Scheduled, Not Installed, N/A. | Optional |
+| computer_last_connection | Computer last connection range time (start time, end time) to include in the operation. For example, "2024-01-01 07:58, 2024-04-02 02:00". | Optional |
+| filter | A comma-separated list of search filters following the template "column_name operator 'values_list'". For example, the query "computerId Contains '1,2,3,4' , computerIP Exact '1.1.1.1'" refers to computers whose ID contains '1', '2', '3', and '4' and whose IP is '1.1.1.1'. For more 'column_name' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/ComputerColumnNames. For more 'operator' values, see https://app.swaggerhub.com/apis/Check-Point/web-mgmt-external-api-production/1.9.179#/FilterType. | Optional |
+| groups_ids_to_exclude | A comma-separated list of group IDs to exclude from the operation. | Optional |
+| computers_ids_to_exclude | A comma-separated list of computer IDs to exclude from the operation. | Optional |
+| computers_ids_to_include | A comma-separated list of computer IDs to include in the operation. | Optional |
+| inform_user | Determines whether to inform the user, via a UserCheck (popup) message, that the operation is taking place. Possible values are: true, false. Default is true. | Optional |
+| allow_postpone | Determines whether to allow the user to postpone the operation. Possible values are: true, false. Default is true. | Optional |
+| page | Page number of paginated results. Minimum value: 1. | Optional |
+| page_size | The number of items per page. | Optional |
+| limit | The maximum number of records to retrieve. Default is 50. | Optional |
+| interval | The interval between each poll in seconds. Minimum value is `10`. Default is 30. | Optional |
+| timeout | The timeout for the polling in seconds. Default is 600. | Optional |
+| job_id | The job ID to fetch data for. Hidden argument. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.job_id | String | The job ID of the remediation operation. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.id | String | The remediation operation ID. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.status | String | The push operation status with regard to the specific device. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.response.status | String | Push operation response status. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.response.output | String | Push operation response output. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.ipAddress | String | The client device's IPv4 address. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.name | String | The client device's name. |
+| HarmonyEP.VPNsiteConfigurationRemove.PushOperation.machine.id | String | The client device's unique ID. |
+
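+Once the push operation job completes, these outputs are available in the context and can be referenced from playbook tasks. An illustrative context-path reference (the pack's test playbook verifies similar paths with `isNotEmpty` checks):
+
+```${HarmonyEP.VPNsiteConfigurationRemove.PushOperation.job_id}```
+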
+#### Command example
+```!harmony-ep-agent-vpn-site-remove display_name='test' computer_ids=1```
+#### Context Example
+```json
+{
+ "HarmonyEP": {
+ "VPNsiteConfigurationRemove": {
+ "PushOperation": {
+ "job_id": "67",
+ "machine": {
+ "id": "1",
+ "name": "DESKTOP-1"
+ },
+ "operation": {
+ "id": "23",
+ "response": null,
+ "status": "DA_NOT_INSTALLED"
+ }
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### VPN site configuration remove was added to the push operation list successfully.
+>Job ID: 67
+>
+>Showing page 1.
+>Current page size: 50.
+>|Machine Id|Machine Name|Operation Status|
+>|---|---|---|
+>| 1 | DESKTOP-1 | DA_NOT_INSTALLED |
+
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_delete.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_delete.json
new file mode 100644
index 000000000000..3903c8aac87c
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_delete.json
@@ -0,0 +1,5 @@
+{
+ "statusType": 2,
+ "status": "DONE",
+ "statusCode": 204
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_list.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_list.json
new file mode 100644
index 000000000000..188971fbe6b0
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_list.json
@@ -0,0 +1,12 @@
+{
+ "content": [
+ {
+ "comment": "Tal Domain Test",
+ "id": "1108",
+ "modifiedOn": 1705242267719,
+ "type": "Domain",
+ "value": "tal.com"
+ }
+ ],
+ "total": 1
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_update.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_update.json
new file mode 100644
index 000000000000..fdcee69b5330
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/ioc_update.json
@@ -0,0 +1,16 @@
+{
+ "data": {
+ "content": [
+ {
+ "comment": "Tal Domain Test",
+ "id": "1108",
+ "modifiedOn": 1705242267719,
+ "type": "Domain",
+ "value": "tal.com"
+ }
+ ]
+ },
+ "status": "DONE",
+ "statusCode": 200,
+ "statusType": 2
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/job_status.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/job_status.json
new file mode 100644
index 000000000000..152e9e7e5757
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/job_status.json
@@ -0,0 +1,25 @@
+{
+ "statusType": 2,
+ "status": "DONE",
+ "statusCode": 200,
+ "data": {
+ "data": [
+ {
+ "machine": {
+ "id": "a1",
+ "name": "DESKTOP-MTEUD0M",
+ "ipAddress": "1.1.1.1"
+ },
+ "operation": {
+ "response": null,
+ "status": "TIMED_OUT"
+ }
+ }
+ ],
+ "metadata": {
+ "from": 0,
+ "to": 100,
+ "count": 1
+ }
+ }
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_remediation_data.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_remediation_data.json
new file mode 100644
index 000000000000..c174eec24d2a
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_remediation_data.json
@@ -0,0 +1,6 @@
+{
+ "statusType": 2,
+ "status": "DONE",
+ "statusCode": 201,
+ "data": "222"
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_in_progress.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_in_progress.json
new file mode 100644
index 000000000000..ce2164fbea18
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_in_progress.json
@@ -0,0 +1,5 @@
+{
+ "status": "IN_PROGRESS",
+ "statusCode": 0,
+ "statusType": 0
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_list.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_list.json
new file mode 100644
index 000000000000..abb3152efa27
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/push_operation_status_list.json
@@ -0,0 +1,43 @@
+{
+ "statusType": 2,
+ "status": "DONE",
+ "statusCode": 200,
+ "data": [
+ {
+ "numberOfAffectedEntities": 1,
+ "overallStatus": "COMPLETED_WITH_WARNINGS",
+ "operationParameters": {
+ "informUser": true,
+ "allowPostpone": true,
+ "schedulingType": "SCHEDULED",
+ "schedulingDateTime": "2024-02-06T07:58:30.000Z",
+ "generateActivityLogs": true,
+ "indicator": "8.8.8.8",
+ "originalTimeoutSeconds": 3600
+ },
+ "id": "b",
+ "comment": "Test",
+ "type": "EFR_EVENT_BY_URL",
+ "createdOn": "2024-03-05T11:37:05.211Z",
+ "remainingTimeoutSeconds": 0,
+ "aborted": false,
+ "adminName": "TAL"
+ },
+ {
+ "numberOfAffectedEntities": 1,
+ "overallStatus": "COMPLETED_WITH_WARNINGS",
+ "operationParameters": {
+ "informUser": true,
+ "allowPostpone": false,
+ "schedulingType": "IMMEDIATE",
+ "originalTimeoutSeconds": 86400
+ },
+ "id": "c",
+ "type": "AM_SCAN",
+ "createdOn": "2024-02-12T11:00:06.350Z",
+ "remainingTimeoutSeconds": 0,
+ "aborted": false,
+ "adminName": "TAL"
+ }
+ ]
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_assignments.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_assignments.json
new file mode 100644
index 000000000000..a0824f214753
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_assignments.json
@@ -0,0 +1,12 @@
+[
+ {
+ "id": "01",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ },
+ {
+ "id": "12",
+ "name": "Desktops",
+ "type": "VIRTUAL_GROUP"
+ }
+]
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_get.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_get.json
new file mode 100644
index 000000000000..12097336e36e
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_get.json
@@ -0,0 +1,22 @@
+{
+ "orientation": "DEVICE",
+ "id": "34",
+ "name": "New Rule 1",
+ "assignments": [
+ {
+ "id": "01",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ },
+ {
+ "id": "12",
+ "name": "Desktops",
+ "type": "VIRTUAL_GROUP"
+ }
+ ],
+ "comment": "",
+ "connectionState": "CONNECTED",
+ "family": "Access",
+ "isDefaultRule": true,
+ "order": 0
+}
diff --git a/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_list.json b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_list.json
new file mode 100644
index 000000000000..45e5b7baaf2e
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/Integrations/CheckPointHarmonyEndpoint/test_data/rule_metadata_list.json
@@ -0,0 +1,64 @@
+[
+
+ {
+ "orientation": "DEVICE",
+ "id": "14",
+ "name": "New Rule 1",
+ "assignments": [
+ {
+ "id": "a1",
+ "name": "ChromeOsDesktops",
+ "type": "VIRTUAL_GROUP"
+ },
+ {
+ "id": "b2",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ },
+ {
+ "id": "c3",
+ "name": "Desktops",
+ "type": "VIRTUAL_GROUP"
+ }
+ ],
+ "comment": "",
+ "connectionState": "CONNECTED",
+ "family": "Threat Prevention",
+ "isDefaultRule": false,
+ "order": 0
+ },
+ {
+ "orientation": "DEVICE",
+ "id": "01",
+ "name": "Default settings for the entire organization",
+ "assignments": [
+ {
+ "id": "001",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ }
+ ],
+ "comment": "Default settings for the entire organization",
+ "connectionState": "CONNECTED",
+ "family": "Data Protection",
+ "isDefaultRule": true,
+ "order": 0
+ },
+ {
+ "orientation": "USER",
+ "id": "02",
+ "name": "Default settings for the entire organization",
+ "assignments": [
+ {
+ "id": "002",
+ "name": "Entire Organization",
+ "type": "ORGANIZATION_ROOT"
+ }
+ ],
+ "comment": "Default settings for the entire organization",
+ "connectionState": "CONNECTED",
+ "family": "OneCheck",
+ "isDefaultRule": true,
+ "order": 0
+ }
+]
diff --git a/Packs/CheckPointHarmonyEndpoint/README.md b/Packs/CheckPointHarmonyEndpoint/README.md
new file mode 100644
index 000000000000..3d9707f6bde1
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/README.md
@@ -0,0 +1,7 @@
+Cortex XSOAR interfaces with Check Point Harmony Endpoint to help organizations protect their endpoints and streamline security and IT operations.
+
+# What does this pack do?
+
+- Views, creates, updates, and deletes IOCs from Harmony Endpoint.
+- Views, updates, and installs policy rules.
+- Views, creates, and aborts push operation items.
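+
+For example, IOC management can be driven directly from the War Room CLI. A minimal sketch (the domain value and comment are purely illustrative; the same arguments are exercised in the pack's test playbook):
+
+```!harmony-ep-ioc-create type=Domain value=example.com comment="Created from XSOAR"```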
diff --git a/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.json b/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.json
new file mode 100644
index 000000000000..38f86e38684b
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.json
@@ -0,0 +1,4 @@
+{
+    "breakingChanges": true,
+    "breakingChangesNotes": "Deprecated the rule_id argument from the ***harmony-ep-policy-rule-install*** command.\n"
+}
\ No newline at end of file
diff --git a/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.md b/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..072f2b933fcb
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/ReleaseNotes/1_0_1.md
@@ -0,0 +1,4 @@
+
+#### Integrations
+##### Check Point Harmony Endpoint
+- Deprecated the **rule_id** argument from the ***harmony-ep-policy-rule-install*** command.
diff --git a/Packs/CheckPointHarmonyEndpoint/TestPlaybooks/playbook-CheckPointHarmonyEndpoint_Test.yml b/Packs/CheckPointHarmonyEndpoint/TestPlaybooks/playbook-CheckPointHarmonyEndpoint_Test.yml
new file mode 100644
index 000000000000..d2639947d4d6
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/TestPlaybooks/playbook-CheckPointHarmonyEndpoint_Test.yml
@@ -0,0 +1,2922 @@
+id: CheckPointHarmonyEndpoint
+version: -1
+name: CheckPointHarmonyEndpoint
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 81cec45d-59f1-4d3f-8469-edab2193e6f1
+ type: start
+ task:
+ id: 81cec45d-59f1-4d3f-8469-edab2193e6f1
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: d0dad84d-05ca-4799-8762-d85e188d1a17
+ type: regular
+ task:
+ id: d0dad84d-05ca-4799-8762-d85e188d1a17
+ version: -1
+ name: DeleteContext
+ script: DeleteContext
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: d47be8da-db81-4265-88eb-bdf6b53a87ed
+ type: regular
+ task:
+ id: d47be8da-db81-4265-88eb-bdf6b53a87ed
+ version: -1
+ name: harmony-ep-job-status-get
+ script: HarmonyEndpoint|||harmony-ep-job-status-get
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ job_id:
+ simple: 561097ab-cec6-43c0-8d4f-fd5c891d5c7a
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 0dcbf24b-dddc-4bf2-83b7-d9fc4c6e19b5
+ type: condition
+ task:
+ id: 0dcbf24b-dddc-4bf2-83b7-d9fc4c6e19b5
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "4"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Job.data
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Job.status
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 0d28a01d-170b-411f-8f0a-2b25dde4a0cb
+ type: regular
+ task:
+ id: 0d28a01d-170b-411f-8f0a-2b25dde4a0cb
+ version: -1
+ name: harmony-ep-ioc-list
+ script: HarmonyEndpoint|||harmony-ep-ioc-list
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 1a9509dd-0b2b-48a6-890f-a992a0b616c3
+ type: condition
+ task:
+ id: 1a9509dd-0b2b-48a6-890f-a992a0b616c3
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "6"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.IOC.comment
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.IOC.modifiedOn
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.IOC.value
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.IOC.type
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.IOC.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: e39fa08d-72f6-4246-8fed-8f602ef41547
+ type: regular
+ task:
+ id: e39fa08d-72f6-4246-8fed-8f602ef41547
+ version: -1
+ name: harmony-ep-ioc-update
+ script: HarmonyEndpoint|||harmony-ep-ioc-update
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ comment:
+ simple: test
+ ioc_id:
+ simple: c6628b2c-c4a0-5c51-609d-e98028859165
+ type:
+ simple: IP
+ value:
+ simple: 1.1.1.1
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 1f7916de-4e53-4b24-860b-1deeed7e44cf
+ type: regular
+ task:
+ id: 1f7916de-4e53-4b24-860b-1deeed7e44cf
+ version: -1
+ name: harmony-ep-ioc-create
+ script: HarmonyEndpoint|||harmony-ep-ioc-create
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "9"
+ scriptarguments:
+ comment:
+ simple: test
+ type:
+ simple: Domain
+ value:
+ simple: talali.com
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: e7ff8ba6-a9c2-4e1a-84e9-2fb3c3ae671a
+ type: regular
+ task:
+ id: e7ff8ba6-a9c2-4e1a-84e9-2fb3c3ae671a
+ version: -1
+ name: harmony-ep-ioc-delete
+ script: HarmonyEndpoint|||harmony-ep-ioc-delete
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ delete_all:
+ simple: "false"
+ ids:
+ simple: "6"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: 48c42e17-62e2-47e5-87c7-51c8855c6e47
+ type: regular
+ task:
+ id: 48c42e17-62e2-47e5-87c7-51c8855c6e47
+ version: -1
+ name: harmony-ep-policy-rule-assignments-get
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-assignments-get
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ rule_id:
+ simple: 3604df6f-a13b-47df-ac1e-b3e2f822aa20@705b1672-7be5-47e8-9837-c849b9b16632@b1503d7b-48f5-45fe-bff5-f07f22faac44@df7fc627-5d6d-489e-94d3-15a1990db27e
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 8ef55e17-d78e-4f27-81f7-69fda6896cbe
+ type: condition
+ task:
+ id: 8ef55e17-d78e-4f27-81f7-69fda6896cbe
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "12"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.Assignments.type
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.Assignments.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.Assignments.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: 09b73116-cde2-4ca8-8cd6-fe7c014aeff4
+ type: regular
+ task:
+ id: 09b73116-cde2-4ca8-8cd6-fe7c014aeff4
+ version: -1
+ name: harmony-ep-policy-rule-assignments-add
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-assignments-add
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ entities_ids:
+ simple: 00000000-0000-0000-0000-000000000000
+ rule_id:
+ simple: 3604df6f-a13b-47df-ac1e-b3e2f822aa20@705b1672-7be5-47e8-9837-c849b9b16632@b1503d7b-48f5-45fe-bff5-f07f22faac44@df7fc627-5d6d-489e-94d3-15a1990db27e
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 4cabb560-a0e8-474d-8142-1e5ead85459b
+ type: regular
+ task:
+ id: 4cabb560-a0e8-474d-8142-1e5ead85459b
+ version: -1
+ name: harmony-ep-policy-rule-assignments-remove
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-assignments-remove
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "14"
+ scriptarguments:
+ entities_ids:
+ simple: 00000000-0000-0000-0000-000000000000
+ rule_id:
+ simple: 3604df6f-a13b-47df-ac1e-b3e2f822aa20@705b1672-7be5-47e8-9837-c849b9b16632@b1503d7b-48f5-45fe-bff5-f07f22faac44@df7fc627-5d6d-489e-94d3-15a1990db27e
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 1cf80022-e287-4026-86bd-1d1a8720228c
+ type: regular
+ task:
+ id: 1cf80022-e287-4026-86bd-1d1a8720228c
+ version: -1
+ name: harmony-ep-policy-rule-install
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-install
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "15"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "15":
+ id: "15"
+ taskid: e1b0631d-29e1-468e-8ad6-328cfceb78c6
+ type: condition
+ task:
+ id: e1b0631d-29e1-468e-8ad6-328cfceb78c6
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "16"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PolicyRuleInstall.job_id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "16":
+ id: "16"
+ taskid: 076c4876-bfd7-42da-808b-a647dffd36ac
+ type: regular
+ task:
+ id: 076c4876-bfd7-42da-808b-a647dffd36ac
+ version: -1
+ name: harmony-ep-policy-rule-modifications-get
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-modifications-get
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "17"
+ scriptarguments:
+ rule_id:
+ simple: 3604df6f-a13b-47df-ac1e-b3e2f822aa20@705b1672-7be5-47e8-9837-c849b9b16632@b1503d7b-48f5-45fe-bff5-f07f22faac44@df7fc627-5d6d-489e-94d3-15a1990db27e
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: 3e205bf2-4289-4cb6-8905-4ddee071b1ec
+ type: condition
+ task:
+ id: 3e205bf2-4289-4cb6-8905-4ddee071b1ec
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "18"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.family
+ iscontext: true
+ right:
+ value: {}
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3390
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: 285f9043-9b84-4c02-8da9-4ba5e2ede998
+ type: regular
+ task:
+ id: 285f9043-9b84-4c02-8da9-4ba5e2ede998
+ version: -1
+ name: harmony-ep-policy-rule-metadata-list
+ script: HarmonyEndpoint|||harmony-ep-policy-rule-metadata-list
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "19"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: 9c36e0e4-6004-4740-84a9-e9b786cea70e
+ type: condition
+ task:
+ id: 9c36e0e4-6004-4740-84a9-e9b786cea70e
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "20"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.order
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.family
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.comment
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Rule.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: a61d1929-716f-4a31-81b1-c7bbf5512042
+ type: regular
+ task:
+ id: a61d1929-716f-4a31-81b1-c7bbf5512042
+ version: -1
+ name: harmony-ep-push-operation-status-list
+ script: HarmonyEndpoint|||harmony-ep-push-operation-status-list
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "21"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: 29a021a8-a3d9-4ab0-8220-d709f3deb581
+ type: condition
+ task:
+ id: 29a021a8-a3d9-4ab0-8220-d709f3deb581
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "22"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.adminName
+ iscontext: true
+ - - operator: isExists
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.aborted
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.remainingTimeoutSeconds
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.createdOn
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.type
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.comment
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: 77721ea6-633c-4454-800e-ce3c48134ba3
+ type: regular
+ task:
+ id: 77721ea6-633c-4454-800e-ce3c48134ba3
+ version: -1
+ name: harmony-ep-push-operation-get
+ script: HarmonyEndpoint|||harmony-ep-push-operation-get
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "23"
+ scriptarguments:
+ remediation_operation_id:
+ simple: 861597dc-e5a3-4b34-ad01-65defb5f4a70
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: bad20af5-b661-4517-8b01-55452fc2817d
+ type: condition
+ task:
+ id: bad20af5-b661-4517-8b01-55452fc2817d
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "24"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "24":
+ id: "24"
+ taskid: 5fe922f2-ca85-41da-8fc8-f5c42ec7e4ca
+ type: regular
+ task:
+ id: 5fe922f2-ca85-41da-8fc8-f5c42ec7e4ca
+ version: -1
+ name: harmony-ep-push-operation-abort
+ script: HarmonyEndpoint|||harmony-ep-push-operation-abort
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ remediation_operation_id:
+ simple: 861597dc-e5a3-4b34-ad01-65defb5f4a70
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "25":
+ id: "25"
+ taskid: 46cb0c41-3c2a-44c3-8253-819d9486264d
+ type: condition
+ task:
+ id: 46cb0c41-3c2a-44c3-8253-819d9486264d
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "26"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperationAbort.job_id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "26":
+ id: "26"
+ taskid: 47759c58-8ea5-4e4c-8f79-b515cd84e667
+ type: regular
+ task:
+ id: 47759c58-8ea5-4e4c-8f79-b515cd84e667
+ version: -1
+ name: harmony-ep-anti-malware-scan
+ script: HarmonyEndpoint|||harmony-ep-anti-malware-scan
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "27"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: 48520b82-3b94-40c3-8373-96d947697f32
+ type: condition
+ task:
+ id: 48520b82-3b94-40c3-8373-96d947697f32
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "28"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.AntiMalwareScan.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "28":
+ id: "28"
+ taskid: a3916fb5-644e-42ad-846f-3421d53a6723
+ type: regular
+ task:
+ id: a3916fb5-644e-42ad-846f-3421d53a6723
+ version: -1
+ name: harmony-ep-anti-malware-update
+ script: HarmonyEndpoint|||harmony-ep-anti-malware-update
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "29"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5590
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "29":
+ id: "29"
+ taskid: b6a8e707-ccbb-4500-8c3c-6b5b79078b84
+ type: condition
+ task:
+ id: b6a8e707-ccbb-4500-8c3c-6b5b79078b84
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "30"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.AntiMalwareUpdate.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "30":
+ id: "30"
+ taskid: 75310e70-88a1-4bc0-839c-e4345428dc0a
+ type: regular
+ task:
+ id: 75310e70-88a1-4bc0-839c-e4345428dc0a
+ version: -1
+ name: harmony-ep-anti-malware-restore
+ script: HarmonyEndpoint|||harmony-ep-anti-malware-restore
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "31"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ files:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 5990
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "31":
+ id: "31"
+ taskid: 56b9ced4-8f74-401b-839c-8c236b156076
+ type: condition
+ task:
+ id: 56b9ced4-8f74-401b-839c-8c236b156076
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "32"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileRestore.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 6200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "32":
+ id: "32"
+ taskid: 36c246ca-a03c-44f9-8c68-3ec1e9237d8f
+ type: regular
+ task:
+ id: 36c246ca-a03c-44f9-8c68-3ec1e9237d8f
+ version: -1
+ name: harmony-ep-forensics-indicator-analyze
+ script: HarmonyEndpoint|||harmony-ep-forensics-indicator-analyze
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "33"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ indicator_type:
+ simple: IP
+ indicator_value:
+ simple: 1.1.1.1
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 6400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "33":
+ id: "33"
+ taskid: 5993bee2-e9f7-49f8-80af-af82520b9cf5
+ type: condition
+ task:
+ id: 5993bee2-e9f7-49f8-80af-af82520b9cf5
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "34"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.IndicatorAnalyze.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 6600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: 97bde934-b0ae-4e3b-8bc7-f2ef5a8a18e5
+ type: regular
+ task:
+ id: 97bde934-b0ae-4e3b-8bc7-f2ef5a8a18e5
+ version: -1
+ name: harmony-ep-forensics-file-quarantine
+ script: HarmonyEndpoint|||harmony-ep-forensics-file-quarantine
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ file_type:
+ simple: PATH
+ file_value:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 6800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "35":
+ id: "35"
+ taskid: 744df3d1-38ed-44b2-80f5-6b8ec10574a1
+ type: condition
+ task:
+ id: 744df3d1-38ed-44b2-80f5-6b8ec10574a1
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "36"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileQuarantine.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 7000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "36":
+ id: "36"
+ taskid: 5e1cefbe-1eca-4fe7-82c7-1d21065d9249
+ type: regular
+ task:
+ id: 5e1cefbe-1eca-4fe7-82c7-1d21065d9249
+ version: -1
+ name: harmony-ep-forensics-file-restore
+ script: HarmonyEndpoint|||harmony-ep-forensics-file-restore
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "37"
+ scriptarguments:
+ file_type:
+ simple: PATH
+ file_value:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 7200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "37":
+ id: "37"
+ taskid: f9c28da5-2d52-4c47-86cd-9739b32c1850
+ type: condition
+ task:
+ id: f9c28da5-2d52-4c47-86cd-9739b32c1850
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "38"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileRestore.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 7400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "38":
+ id: "38"
+ taskid: 0f1d13c2-5ecd-4c41-8678-c6e04776337d
+ type: regular
+ task:
+ id: 0f1d13c2-5ecd-4c41-8678-c6e04776337d
+ version: -1
+ name: harmony-ep-remediation-computer-isolate
+ script: HarmonyEndpoint|||harmony-ep-remediation-computer-isolate
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "39"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 7580
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "39":
+ id: "39"
+ taskid: 07c6c3ca-aadf-4094-8d08-d56d79525bb0
+ type: condition
+ task:
+ id: 07c6c3ca-aadf-4094-8d08-d56d79525bb0
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "40"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ComputerIsolate.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 7800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "40":
+ id: "40"
+ taskid: ebdf94b9-f975-42e5-8ead-fdd4ffb7b459
+ type: regular
+ task:
+ id: ebdf94b9-f975-42e5-8ead-fdd4ffb7b459
+ version: -1
+ name: harmony-ep-remediation-computer-deisolate
+ script: HarmonyEndpoint|||harmony-ep-remediation-computer-deisolate
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "41"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 8000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "41":
+ id: "41"
+ taskid: 38287c8a-a64b-4efa-8e4f-607967bf99d0
+ type: condition
+ task:
+ id: 38287c8a-a64b-4efa-8e4f-607967bf99d0
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "42"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ComputerDeisolate.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 8200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "42":
+ id: "42"
+ taskid: 21a9a913-26c5-4907-8966-69713c615eeb
+ type: regular
+ task:
+ id: 21a9a913-26c5-4907-8966-69713c615eeb
+ version: -1
+ name: harmony-ep-agent-computer-restart
+ script: HarmonyEndpoint|||harmony-ep-agent-computer-restart
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "43"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 8400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "43":
+ id: "43"
+ taskid: 44243633-bbf9-4a0f-8793-feb2c41243b2
+ type: condition
+ task:
+ id: 44243633-bbf9-4a0f-8793-feb2c41243b2
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "44"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ComputerRestart.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 8590
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "44":
+ id: "44"
+ taskid: 470d500d-b096-4bda-857b-34ab397d1d77
+ type: regular
+ task:
+ id: 470d500d-b096-4bda-857b-34ab397d1d77
+ version: -1
+ name: harmony-ep-agent-computer-shutdown
+ script: HarmonyEndpoint|||harmony-ep-agent-computer-shutdown
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "45"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 8790
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "45":
+ id: "45"
+ taskid: 3251da13-81ff-445c-82d8-83ffd3becd3f
+ type: condition
+ task:
+ id: 3251da13-81ff-445c-82d8-83ffd3becd3f
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "46"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ComputerShutdown.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 9000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "46":
+ id: "46"
+ taskid: 3b87b86d-cf34-436c-8351-064c34b278fb
+ type: regular
+ task:
+ id: 3b87b86d-cf34-436c-8351-064c34b278fb
+ version: -1
+ name: harmony-ep-agent-computer-repair
+ script: HarmonyEndpoint|||harmony-ep-agent-computer-repair
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "47"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 9200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "47":
+ id: "47"
+ taskid: b5e9d5a8-a534-4c4e-8b86-6ca183c4735c
+ type: condition
+ task:
+ id: b5e9d5a8-a534-4c4e-8b86-6ca183c4735c
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "48"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ComputerRepair.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 9400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "48":
+ id: "48"
+ taskid: 97fa981c-309f-437a-89f2-dd63b5bef5e8
+ type: regular
+ task:
+ id: 97fa981c-309f-437a-89f2-dd63b5bef5e8
+ version: -1
+ name: harmony-ep-computer-list
+ script: HarmonyEndpoint|||harmony-ep-computer-list
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "49"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 9600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "49":
+ id: "49"
+ taskid: 523e1bb5-d242-401b-8351-fd698a53bff1
+ type: condition
+ task:
+ id: 523e1bb5-d242-401b-8351-fd698a53bff1
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "50"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Computer.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Computer.type
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Computer.ip
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Computer.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.Computer.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 9800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "50":
+ id: "50"
+ taskid: 2df09170-e54c-4cc4-8cf8-a06ca17a70ae
+ type: regular
+ task:
+ id: 2df09170-e54c-4cc4-8cf8-a06ca17a70ae
+ version: -1
+ name: harmony-ep-agent-process-information-get
+ script: HarmonyEndpoint|||harmony-ep-agent-process-information-get
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "51"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 10000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "51":
+ id: "51"
+ taskid: 13b1b151-8b77-4ee6-8816-a3600381d861
+ type: condition
+ task:
+ id: 13b1b151-8b77-4ee6-8816-a3600381d861
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "52"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ProcessInformation.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 10200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "52":
+ id: "52"
+ taskid: acb1602b-43ec-43eb-81ab-ddab3a2dfd91
+ type: regular
+ task:
+ id: acb1602b-43ec-43eb-81ab-ddab3a2dfd91
+ version: -1
+ name: harmony-ep-agent-process-terminate
+ script: HarmonyEndpoint|||harmony-ep-agent-process-terminate
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "53"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ name:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 10400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "53":
+ id: "53"
+ taskid: a650e273-d9ec-4c0f-8d75-3a9f6d52df89
+ type: condition
+ task:
+ id: a650e273-d9ec-4c0f-8d75-3a9f6d52df89
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "54"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.ProcessTerminate.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 10600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "54":
+ id: "54"
+ taskid: dd9ec49f-0393-47c5-8a0a-54507f7a6860
+ type: regular
+ task:
+ id: dd9ec49f-0393-47c5-8a0a-54507f7a6860
+ version: -1
+ name: harmony-ep-agent-registry-key-add
+ script: HarmonyEndpoint|||harmony-ep-agent-registry-key-add
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "55"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ hive:
+ simple: HKEY_USERS
+ key:
+ simple: test
+ value_data:
+ simple: test
+ value_name:
+ simple: test
+ value_type:
+ simple: STRING (REG_SZ)
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 10800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "55":
+ id: "55"
+ taskid: 5a00c20d-c2fa-4d37-8b9e-c14e3c821e9f
+ type: condition
+ task:
+ id: 5a00c20d-c2fa-4d37-8b9e-c14e3c821e9f
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "56"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.RegistryKeyAdd.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 11000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "56":
+ id: "56"
+ taskid: e6823eb3-7354-47e9-829a-cc961a0a394c
+ type: regular
+ task:
+ id: e6823eb3-7354-47e9-829a-cc961a0a394c
+ version: -1
+ name: harmony-ep-agent-registry-key-delete
+ script: HarmonyEndpoint|||harmony-ep-agent-registry-key-delete
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "57"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ hive:
+ simple: HKEY_USERS
+ key:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 11200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "57":
+ id: "57"
+ taskid: 2d31ec7e-00ca-460d-8f18-e58e60ed20c9
+ type: condition
+ task:
+ id: 2d31ec7e-00ca-460d-8f18-e58e60ed20c9
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "58"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.RegistryKeyDelete.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 11400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "58":
+ id: "58"
+ taskid: 6a410c99-af6e-43c4-891a-12a8c3700893
+ type: regular
+ task:
+ id: 6a410c99-af6e-43c4-891a-12a8c3700893
+ version: -1
+ name: harmony-ep-agent-file-copy
+ script: HarmonyEndpoint|||harmony-ep-agent-file-copy
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "59"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ destination_absolute_path:
+ simple: test
+ source_absolute_path:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 11600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "59":
+ id: "59"
+ taskid: 074e4ff2-41b9-4cf9-8fdf-1bae59415694
+ type: condition
+ task:
+ id: 074e4ff2-41b9-4cf9-8fdf-1bae59415694
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "60"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileCopy.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 11800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "60":
+ id: "60"
+ taskid: 51c85196-4676-4f20-80bf-9a9045ca6e9f
+ type: regular
+ task:
+ id: 51c85196-4676-4f20-80bf-9a9045ca6e9f
+ version: -1
+ name: harmony-ep-agent-file-move
+ script: HarmonyEndpoint|||harmony-ep-agent-file-move
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "61"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ destination_absolute_path:
+ simple: test
+ source_absolute_path:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 12000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "61":
+ id: "61"
+ taskid: 46078d68-efb6-41dd-8301-49c4667d7680
+ type: condition
+ task:
+ id: 46078d68-efb6-41dd-8301-49c4667d7680
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "62"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileMove.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 12200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "62":
+ id: "62"
+ taskid: f4830ec8-5a12-4e5a-8caf-0911f5201099
+ type: regular
+ task:
+ id: f4830ec8-5a12-4e5a-8caf-0911f5201099
+ version: -1
+ name: harmony-ep-agent-file-delete
+ script: HarmonyEndpoint|||harmony-ep-agent-file-delete
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "63"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ target_absolute_path:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 12400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "63":
+ id: "63"
+ taskid: b4aa1120-6f62-4b7f-8f5b-40dc28c0ce27
+ type: condition
+ task:
+ id: b4aa1120-6f62-4b7f-8f5b-40dc28c0ce27
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "64"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.FileDelete.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 12600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "64":
+ id: "64"
+ taskid: c39e5ca2-6b38-4a40-8f16-5e702f9e61f8
+ type: regular
+ task:
+ id: c39e5ca2-6b38-4a40-8f16-5e702f9e61f8
+ version: -1
+ name: harmony-ep-agent-vpn-site-add
+ script: HarmonyEndpoint|||harmony-ep-agent-vpn-site-add
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "65"
+ scriptarguments:
+ authentication_method:
+ simple: USERNAME_PASSWORD
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ fingerprint:
+ simple: test
+ host:
+ simple: test
+ remote_access_gateway_name:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 12800
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "65":
+ id: "65"
+ taskid: 8b882806-1427-4ddd-865a-59d73ea6242a
+ type: condition
+ task:
+ id: 8b882806-1427-4ddd-865a-59d73ea6242a
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "66"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.VPNsiteConfigurationAdd.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 13000
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "66":
+ id: "66"
+ taskid: aa8ec5fd-a886-4251-8fda-6426f6b305ee
+ type: regular
+ task:
+ id: aa8ec5fd-a886-4251-8fda-6426f6b305ee
+ version: -1
+ name: harmony-ep-agent-vpn-site-remove
+ script: HarmonyEndpoint|||harmony-ep-agent-vpn-site-remove
+ type: regular
+ iscommand: true
+ brand: HarmonyEndpoint
+ nexttasks:
+ '#none#':
+ - "67"
+ scriptarguments:
+ computer_ids:
+ simple: 1beeda40-2c49-4909-83fd-225e20e32a74
+ display_name:
+ simple: test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 13200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "67":
+ id: "67"
+ taskid: 0315b82e-9359-451a-803a-f96551efd99a
+ type: condition
+ task:
+ id: 0315b82e-9359-451a-803a-f96551efd99a
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "68"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.VPNsiteConfigurationRemove.job_id
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.ipAddress
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: HarmonyEP.PushOperation.machine.id
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 13400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "68":
+ id: "68"
+ taskid: b9295b12-8ffa-4e56-8c1f-770c33569523
+ type: title
+ task:
+ id: b9295b12-8ffa-4e56-8c1f-770c33569523
+ version: -1
+ name: Test Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 13600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 13615,
+ "width": 380,
+ "x": 50,
+ "y": 50
+ }
+ }
+ }
+inputs: []
+outputs: []
+fromversion: 6.10.0
+description: Test playbook for the Check Point Harmony Endpoint integration commands.
diff --git a/Packs/CheckPointHarmonyEndpoint/pack_metadata.json b/Packs/CheckPointHarmonyEndpoint/pack_metadata.json
new file mode 100644
index 000000000000..5fe52e9606cd
--- /dev/null
+++ b/Packs/CheckPointHarmonyEndpoint/pack_metadata.json
@@ -0,0 +1,22 @@
+{
+ "name": "Check Point Harmony Endpoint",
+ "description": "Check Point Harmony Endpoint provides a complete endpoint security solution built to protect organizations and the remote workforce from today's complex threat landscape.",
+ "support": "xsoar",
+ "currentVersion": "1.0.1",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "created": "",
+ "categories": [
+ "Data Enrichment & Threat Intelligence"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "githubUser": [],
+ "certification": "certified",
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
diff --git a/Packs/Cisco-umbrella/Integrations/Cisco-umbrella-investigate/Cisco-umbrella-investigate_test.py b/Packs/Cisco-umbrella/Integrations/Cisco-umbrella-investigate/Cisco-umbrella-investigate_test.py
index c9a706a831a6..b070cdaa3d96 100644
--- a/Packs/Cisco-umbrella/Integrations/Cisco-umbrella-investigate/Cisco-umbrella-investigate_test.py
+++ b/Packs/Cisco-umbrella/Integrations/Cisco-umbrella-investigate/Cisco-umbrella-investigate_test.py
@@ -27,7 +27,7 @@ def test_verify_threshold_suspicious_and_malicious_parameters(suspicious, malici
Then:
- Verify suspicious is bigger than malicious and both of them are in the range of -100 to 100
"""
- mock_result = mocker.patch('Cisco-umbrella-investigate.return_error')
+ mock_result = mocker.patch.object(Cisco_umbrella_investigate, 'return_error')
Cisco_umbrella_investigate.verify_threshold_params(suspicious, malicious)
if not mock_result.call_args:
diff --git a/Packs/CiscoASA/doc_files/CiscoASDM_timestamp.png b/Packs/CiscoASA/doc_files/CiscoASDM_timestamp.png
new file mode 100644
index 000000000000..c820a5629347
Binary files /dev/null and b/Packs/CiscoASA/doc_files/CiscoASDM_timestamp.png differ
diff --git a/Packs/CiscoCatalyst/.secrets-ignore b/Packs/CiscoCatalyst/.secrets-ignore
index e69de29bb2d1..2a82b6215396 100644
--- a/Packs/CiscoCatalyst/.secrets-ignore
+++ b/Packs/CiscoCatalyst/.secrets-ignore
@@ -0,0 +1,4 @@
+https://www.cisco.com/c/en/us/td/docs/routers/access/wireless/software/guide/SysMsgLogging.html#wp1054470
+https://docs-cortex.paloaltonetworks.com/r/Cortex-Data-Model-Schema-Guide/XDM-Fields
+https://docs-cortex.paloaltonetworks.com/r/Cortex-XSIAM/Cortex-XSIAM-XQL-Language-Reference/json_extract_scalar
+10.1.2.3
\ No newline at end of file
diff --git a/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3.xif b/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3.xif
index 15f91f314cf1..7f36df01d725 100644
--- a/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3.xif
+++ b/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3.xif
@@ -1,46 +1,372 @@
[MODEL: dataset="cisco_catalyst_raw"]
-alter seq_number = arrayindex(regextract(_raw_log , "(\d+)\:\s+\**[A-Za-z]+\s+\d+\s\d+\:\d+\:\d+\.\d+\s*[A-Z]*\:\s%[A-Z]+"),0),
- facility = arrayindex(regextract(_raw_log, "%([\-\_\w]+[\-\_\w]*)\-\d+"),0),
- severity = arrayindex(regextract(_raw_log ,"%[\-\_\w]+[\-\_\w]*\-(\d+)\-"),0),
- event_type = arrayindex(regextract(_raw_log, "[\-\_\w]+[\-\_\w]*\-\d+\-([\w\-\_]+)"),0),
- message1 = arrayindex(regextract(_raw_log,"%[\-\_\w]+[\-\_\w]*\-\d+\-[A-Z\-\_]+\:\s(.*)"),0),
- message2 = arrayindex(regextract(_raw_log,"\<\d+\>\d+\:\s(.*)"),0),
- switch_hostname = arrayindex(regextract(_raw_log ,"\<\d+\>\d+\:\s([\w+\-\_]+)\:\s\d+\:"),0)
-| alter InterfaceName1 = arrayindex(regextract(_raw_log ,"Interface\s([^\/]+\/\d+\/*\d*)"),0),
- InterfaceName2 = arrayindex(regextract(_raw_log ,"interface\sname\s([^\/]+\/\d+\/\d+)"),0),
- InterfaceName3 = arrayindex(regextract(_raw_log , "Interface\s([A-Za-z\-\d]+)"),0),
- username1 = arrayindex(regextract(_raw_log ,"\[user\:\s([^\]]+)\]"),0),
- username2 = arrayindex(regextract(_raw_log ,"\:\sUser\s(\S+)\s"),0),
- username3 = arrayindex(regextract(_raw_log ,"\(\d+\.\d+\.\d+\.\d+\)\)\,\suser\s(\S+)"),0),
- sourceip1 = arrayindex(regextract(_raw_log , "list\s\d+\spermitted\s(\d+\.\d+\.\d+\.\d+)"),0),
- sourceip2 = arrayindex(regextract(_raw_log ,"\[Source\:\s(\d+\.\d+\.\d+\.\d+)\]"),0),
- sourceip3 = arrayindex(regextract(_raw_log ,"session\s\d+\((\d+\.\d+\.\d+\.\d+)\)"),0),
- sourceip4 = arrayindex(regextract(_raw_log ,"connection\sfrom\s(\d+\.\d+\.\d+\.\d+)"),0),
- sourceip5 = arrayindex(regextract(_raw_log ,"to\shost\s(\d+\.\d+\.\d+\.\d+)\s"),0),
- sourceip6 = arrayindex(regextract(_raw_log ,"address\s(\d+\.\d+\.\d+\.\d+)\son"),0),
- sourceip7 = arrayindex(regextract(_raw_log ,"\((\d+\.\d+\.\d+\.\d+)\)\)\,\suser"),0),
- sourceport1 = arrayindex(regextract(_raw_log ,"\[localport\:\s(\d+)\]"),0),
- sourceport2 = arrayindex(regextract(_raw_log ,"to\shost\s\d+\.\d+\.\d+\.\d+\sport\s(\d+)"),0),
- processname1 = arrayindex(regextract(_raw_log ,"by\sprocess\s([A-Za-z\s]+)Process"),0),
- processname2 = arrayindex(regextract(_raw_log ,"by\sprocess\s([A-Za-z\s]+)Policy\smanager"),0),
- vlan = arrayindex(regextract(_raw_log ,"address\s\d+\.\d+\.\d+\.\d+\son\sVlan(\d+)"),0),
- sourceipv6 = arrayindex(regextract(_raw_log ,"\,\ssourced\sby\s(\S+)"),0),
- pid = arrayindex(regextract(_raw_log ,"\[PID\:([^\,]+)\,"),0),
- change_state = arrayindex(regextract(_raw_log, "\,\s(changed\sstate\sto[a-zA-Z\s]+)"),0)
-| alter seq_number = replex(seq_number, "^0+","")
-| alter xdm.network.session_id = seq_number,
- xdm.event.type = event_type,
+/* ----------------------------------------------------------------------------------------------------
+ Cisco System Log Message General Format:
+ seq no:timestamp: %facility-severity-MNEMONIC:description
+ https://www.cisco.com/c/en/us/td/docs/routers/access/wireless/software/guide/SysMsgLogging.html#wp1054470
+ ---------------------------------------------------------------------------------------------------*/
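+// A minimal illustrative sample in this general format (hypothetical values, not taken from the vendor docs):
+//   000019: Aug 14 09:47:18.714 UTC: %SYS-5-CONFIG_I: Configured from console by vty2 (10.1.2.3)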
+filter _raw_log ~= "%\S+\-\d\-\S+:"
+| alter // extract out-of-the-box parsed fields
+ event_name = uppercase(parsed_fields -> mnemonic),
+ facility = uppercase(parsed_fields -> facility),
+ msg = parsed_fields -> description,
+ seq_number = parsed_fields -> sequence_number,
+ severity = parsed_fields -> severity
+| alter // extract dynamic fields from the syslog message description
+ change_state = arrayindex(regextract(msg, "\,\s(changed\sstate\sto[a-zA-Z\s]+)"), 0),
+ client_mac_address = coalesce(
+ arrayindex(regextract(msg, "from MAC address\s+([\da-fA-F]{6}\-[\da-fA-F]{6})"), 0),
+ arrayindex(regextract(msg, "client\s*\(((?:[\da-fA-F]{4}\.){2}[\da-fA-F]{4})\)"), 0),
+ arrayindex(regextract(msg, "(?:Host|Client MAC|sourced by)\s+((?:[\da-fA-F]{4}\.){2}[\da-fA-F]{4})"), 0)),
+ device_hostname = arrayindex(regextract(_raw_log ,"\<\d+\>\d+\:\s([\w+\-\_]+)\:\s\d+\:"), 0),
+ device_product_id = arrayindex(regextract(msg ,"\[PID\:([^\,]+)\,"), 0),
+ device_serial_number = arrayindex(regextract(msg , ",SN\:([\w\-]+)"), 0),
+ interface = coalesce(
+ arrayindex(regextract(msg ,"Interface\s([^\/]+\/\d+\/*\d*)"), 0),
+ arrayindex(regextract(msg ,"interface\sname\s([^\/]+\/\d+\/\d+)"), 0),
+ arrayindex(regextract(msg , "Interface\s([\w\-]+)"), 0),
+ arrayindex(regextract(msg , "on interface\s+(\S+)\s+by"), 0),
+ arrayindex(regextract(msg , "(?:on|port|Unblocking|Blocking)\s+(\w+\/\w+\/\w+)"), 0),
+ arrayindex(regextract(msg , "^(\w+\/\w+\/\w+):"), 0)),
+ outcome_reason = coalesce(
+ arrayindex(regextract(msg, "\sreason:\s*(.+?)\."), 0),
+ arrayindex(regextract(msg, "with reason\s*\(([^\)]+)\)"), 0),
+ arrayindex(regextract(msg, "\[Reason:\s*([^\]]+)\]"), 0)),
+ process_name = coalesce(
+ arrayindex(regextract(msg ,"by\sprocess\s(.+?)\s+(?:Process|Policy\smanager)"), 0),
+ arrayindex(regextract(msg ,"process\s*\=\s*([^\.]+)\."), 0)),
+ server_ipv4 = arrayindex(regextract(msg, "[Ss]erver(?:\s+not\s+found)?(?:\s+at)?\s+((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ server_name = arrayindex(regextract(msg, "Server\s+([\w\-]+)\s+is"), 0),
+ session_id = coalesce(
+ arrayindex(regextract(msg, "AuditSessionID (\w+)"), 0),
+ arrayindex(regextract(msg, "session (\w+)\("), 0)),
+ source_ipv4 = coalesce(
+ arrayindex(regextract(msg , "from\s+((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg , "from\s+host\s+((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg , "list\s\d+\spermitted\s((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg ,"\[Source\:\s((?:\d{1,3}\.){3}\d{1,3})\]"), 0),
+ arrayindex(regextract(msg ,"session\s\d+\(((?:\d{1,3}\.){3}\d{1,3})\)"), 0),
+ arrayindex(regextract(msg ,"connection\sfrom\s((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg ,"to\shost\s((?:\d{1,3}\.){3}\d{1,3})\s"), 0),
+ arrayindex(regextract(msg ,"address\s((?:\d{1,3}\.){3}\d{1,3})\son"), 0),
+ arrayindex(regextract(msg ,"neighbor\s((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg , "on\s+\w+\s*\(((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ arrayindex(regextract(msg ,"\(((?:\d{1,3}\.){3}\d{1,3})\)\)\,\suser"), 0)),
+ target_ipv4 = arrayindex(regextract(msg ,"to\shost\s((?:\d{1,3}\.){3}\d{1,3})\s"), 0),
+ target_port = coalesce(
+ arrayindex(regextract(msg ,"\[localport\:\s(\d+)\]"), 0),
+ arrayindex(regextract(msg ,"to\shost\s\d+\.\d+\.\d+\.\d+\sport\s(\d+)"), 0)),
+ username = coalesce(
+ arrayindex(regextract(msg ,"user\s+name\s*:\s*([\w\-]+)"), 0),
+ arrayindex(regextract(msg ,"User\s+\'([^\']+)\'\s+\w+"), 0),
+ arrayindex(regextract(msg ,"User\s+(\S+)\s+has"), 0),
+ arrayindex(regextract(msg ,"\[user\:\s([^\]]+)\]"), 0),
+ arrayindex(regextract(msg ,"\:\sUser\s(\S+)\s"), 0),
+ arrayindex(regextract(msg ,"Username:\s*(\S+)"), 0),
+ arrayindex(regextract(msg ,"by\s+(\S+)\s+on\s+"), 0),
+ arrayindex(regextract(msg ,"\(\d+\.\d+\.\d+\.\d+\)\)\,\suser\s(\S+)"), 0)),
+ vlan = arrayindex(regextract(msg, "(?:VID|vid|VLAN|vlan|Vlan|Vlan-id|VLAN-id)\s*:?\s*(\d+)"), 0)
+| alter // post-extraction formatting
+ client_mac_formatted = arraystring(regextract(client_mac_address, "[\da-fA-F]{2}"), ":"),
+ seq_number = replex(seq_number, "^0+", ""),
+ user_domain = arrayindex(regextract(username, "(.+)\\.+"), 0)
+| alter // XDM mappings
xdm.alert.severity = severity,
+ xdm.event.description = msg,
+ xdm.event.log_level = if(
+ severity = "0", XDM_CONST.LOG_LEVEL_EMERGENCY,
+ severity = "1", XDM_CONST.LOG_LEVEL_ALERT ,
+ severity = "2", XDM_CONST.LOG_LEVEL_CRITICAL,
+ severity = "3", XDM_CONST.LOG_LEVEL_ERROR,
+ severity = "4", XDM_CONST.LOG_LEVEL_WARNING,
+ severity = "5", XDM_CONST.LOG_LEVEL_NOTICE,
+ severity = "6", XDM_CONST.LOG_LEVEL_INFORMATIONAL,
+ severity = "7", XDM_CONST.LOG_LEVEL_DEBUG),
+ xdm.event.outcome = if(
+ msg ~= "(?i)ERROR|FAIL|REJECTED|UNABLE|ABORT|INCORRECT|PROBLEM|FAULT|DENY|DENIED|MISMATCH|COLLISIONS|CANNOT|UNREACHABLE|CORRUPTED|NOT FOUND|NOT SUCCEED|UNABLE TO PERMIT|NOT APPLIED|NOT MODIFIED|NOT REACHABLE|CANCELED BECAUSE|CAN\'T REACH", XDM_CONST.OUTCOME_FAILED,
+ event_name ~= "SUCCESS" or msg ~= "(?i)SUCCESS|SUCCEEDED", XDM_CONST.OUTCOME_SUCCESS),
+ xdm.event.outcome_reason = outcome_reason,
+ xdm.event.type = if( // rename vague event names to a meaningful name if necessary
+ event_name = "FAIL" and msg ~= "(?i)Authorization failed", "AUTHORIZATION_FAILED",
+ event_name = "FAIL" and msg ~= "(?i)Authentication failed", "AUTHENTICATION_FAILED",
+ event_name),
+ xdm.network.session_id = seq_number,
+ xdm.observer.action = change_state,
+ xdm.observer.name = device_hostname,
xdm.observer.type = facility,
- xdm.event.description = coalesce(message1,message2),
- xdm.observer.name = switch_hostname,
- xdm.target.interface = coalesce(InterfaceName1, InterfaceName2 , InterfaceName3),
- xdm.source.ipv4 = coalesce(sourceip1 , sourceip2 ,sourceip3, sourceip4, sourceip5,sourceip6,sourceip7),
- xdm.source.user.username = coalesce(username1 ,username2, username3),
- xdm.source.port = to_integer(coalesce(sourceport1,sourceport2 )),
- xdm.source.process.name = coalesce(processname1 ,processname2 ),
+ xdm.observer.unique_identifier = coalesce(device_serial_number, device_hostname),
+ xdm.session_context_id = session_id,
+ xdm.source.host.device_id = coalesce(device_serial_number, device_hostname),
+ xdm.source.host.device_model = device_product_id,
+ xdm.source.host.hardware_uuid = device_serial_number,
+ xdm.source.host.mac_addresses = if(client_mac_formatted != null, arraycreate(client_mac_formatted)),
+ xdm.source.ipv4 = coalesce(source_ipv4, _final_reporting_device_ip),
+ xdm.source.process.name = process_name,
+ xdm.source.user.domain = user_domain,
+ xdm.source.user.username = username,
xdm.source.vlan = to_integer(vlan),
- xdm.source.ipv6 = sourceipv6,
- xdm.source.process.identifier = pid,
- xdm.observer.action = change_state,
- xdm.event.outcome = if(event_type contains "SUCCESS", XDM_CONST.OUTCOME_SUCCESS, event_type contains "FAIL",XDM_CONST.OUTCOME_FAILED, null);
\ No newline at end of file
+ xdm.target.host.hostname = server_name,
+ xdm.target.interface = interface,
+ xdm.target.ipv4 = coalesce(server_ipv4, target_ipv4),
+ xdm.target.port = to_integer(target_port);
+
+
+/* ------------------------------------------------
+ Alternative Message Format:
+ origin_device_id event_id facility: description
+ -----------------------------------------------*/
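+// A minimal illustrative sample in this alternative format (device IP, event ID, and description are hypothetical):
+//   10.1.2.3 00076 ports: port 1/1/2 is now on-line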
+filter _raw_log !~= "%\S+\-\d\-\S+:"
+| alter // extract out-of-the-box parsed fields
+ device_ip = parsed_fields -> device_ip,
+ event_id = to_integer(parsed_fields -> event_id),
+ facility = uppercase(parsed_fields -> facility),
+ msg = parsed_fields -> description,
+ severity = parsed_fields -> severity
+| alter // Extract the varying dynamic fields from the message payload
+ access_mode = if(
+ event_id in (2400, 2401, 2402, 2403, 2404, 2405, 2411, 3210, 3216, 3217, 3218, 3356, 3357), arrayindex(regextract(msg, "^(\S+) client"), 0),
+ event_id in (3351, 3352, 3353, 3354, 3355), arrayindex(regextract(msg, "^(\S+)\s*\:"), 0),
+ event_id in (3211, 3212, 3213), arrayindex(regextract(msg, "received on port \S+ for (\S+) client"), 0),
+ event_id = 3350, arrayindex(regextract(msg, "^(\S+) unable to permit client"), 0),
+ event_id in (5212, 5213, 5722, 5724, 5797, 5798, 5799, 5800, 5801, 5802, 5803, 5804, 5805, 5806, 5814), arrayindex(regextract(msg, "to (\S+) client"), 0)),
+ cmd = if(
+ event_id in (992, 993), arrayindex(regextract(msg, "command (.+)\."), 0),
+ event_id in (3393, 3394), arrayindex(regextract(msg, "[Cc]ommand (.+) is"), 0)),
+ dhcp_server_ip = if(event_id = 5731, arrayindex(regextract(msg, "dhcp_server_ip ((?:\d{1,3}\.){3}\d{1,3})"), 0)),
+ gateway_ip = if(event_id = 5451, arrayindex(regextract(msg, "vlan \S+ \(?(\S+)\)?"), 0)),
+ gateway_name = if(event_id in (5595, 5596, 5597, 5598, 5599, 5600), arrayindex(regextract(msg, "gateway (\S+)"), 0)),
+ ip_mask = if(
+ event_id = 17, arrayindex(regextract(msg, "mask ((?:\d{1,3}\.){3}\d{1,3}\/\d{1,2})"), 0),
+ event_id in (25, 26), arrayindex(regextract(msg, "address ((?:\d{1,3}\.){3}\d{1,3}\/\d{1,2})"), 0),
+ event_id = 753, arrayindex(regextract(msg, "failure\:\s*((?:\d{1,3}\.){3}\d{1,3}\/\d{1,2})"), 0)),
+ mac_address = if(
+ event_id = 5, arrayindex(regextract(msg, "at ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (20, 21, 288, 334, 5681, 5682, 5683, 5684, 5731), arrayindex(regextract(msg, "(?:mac|MAC)[\s\-]+(?:address|ADDRESS) ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (845, 952, 958, 959, 960, 961, 2400, 2401, 2402, 2403, 2404, 2405, 2411, 3210, 3273, 3274, 3281, 3283, 3350, 3356, 3357, 3822, 4987), arrayindex(regextract(msg, "(?:mac|MAC)\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 2581, arrayindex(regextract(msg, "MAC address of ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (354, 858), arrayindex(regextract(msg, "address ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 348, arrayindex(regextract(msg, "error:\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 352, arrayindex(regextract(msg, "((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2}) moved from"), 0),
+ event_id = 353, arrayindex(regextract(msg, "Received Update Packet from ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 355, arrayindex(regextract(msg, "((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2}) MISMATCH"), 0),
+ event_id = 451, arrayindex(regextract(msg, "Upgrade ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 452, arrayindex(regextract(msg, "config with ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (453, 3282), arrayindex(regextract(msg, "switch ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (592, 594), arrayindex(regextract(msg, "\S+\s*:\s*(?:Move )?((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (613, 2209), arrayindex(regextract(msg, "MAC add for ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (5212, 5213, 5411, 5722, 5724), arrayindex(regextract(msg, "to ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (839, 3816), arrayindex(regextract(msg, "from \S+\s*:\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (870, 2690, 4548, 4556, 4565), arrayindex(regextract(msg, "from\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 915, arrayindex(regextract(msg, "source:\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (979, 4588, 4880), arrayindex(regextract(msg, "((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2}) on port"), 0),
+ event_id = 2431, arrayindex(regextract(msg, "multicast ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 714, 716, 718, 719, 720, 721, 722, 856, 2539, 2540, 2541, 3211, 3212, 3213, 3214, 3215, 3216, 3217, 3218, 3351, 3352, 3355, 3358, 4575, 5563, 5619, 5620, 5621, 5723, 5726, 5743, 5744, 5747, 5748, 5797, 5798, 5799, 5800, 5801, 5802, 5803, 5804, 5805, 5806, 5906, 5907, 5940, 5941), arrayindex(regextract(msg, "(?:client|Client):?\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (2720, 3354, 4590, 5385), arrayindex(regextract(msg, "for ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 3280, arrayindex(regextract(msg, "Member ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (4572, 4573, 4874), arrayindex(regextract(msg, "packet ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id = 4980, arrayindex(regextract(msg, "got ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (5120, 5121, 5143, 5144, 5145, 5146), arrayindex(regextract(msg, "device ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (5122, 5123), arrayindex(regextract(msg, "Aruba AP ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ event_id in (5406, 5407), arrayindex(regextract(msg, "user ((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0),
+ coalesce( // default fallback
+ arrayindex(regextract(msg, "from MAC address\s+([\da-fA-F]{6}\-[\da-fA-F]{6})"), 0),
+ arrayindex(regextract(msg, "client\s*\(((?:[\da-fA-F]{4}\.){2}[\da-fA-F]{4})\)"), 0),
+ arrayindex(regextract(msg, "(?:Host|Client MAC|sourced by)\s+((?:[\da-fA-F]{4}\.){2}[\da-fA-F]{4})"), 0))),
+ outcome_reason = if(
+ event_id in (5076, 5230, 5240, 5244, 5246, 5261), rtrim(arrayindex(regextract(msg, "[Ee]rror:?\s*(?:\=?\s*)?(.+)"), 0), "."),
+ event_id in (2204, 5254), arrayindex(regextract(msg, "error:?\s*(?:\=?\s*)?(.+) (?:occurred|on)"), 0),
+ event_id = 5682, arrayindex(regextract(msg, "reason (.+)\-\s*server"), 0),
+ event_id = 5222, arrayindex(regextract(msg, "server:\s*(.+)"), 0),
+ event_id in (5236, 5239, 5243, 5247, 5248, 5249, 5250, 5251, 5233, 5234), arrayindex(regextract(msg, "with error (?:\=\s*)(.+)"), 0),
+ event_id in (5241, 5242), arrayindex(regextract(msg, "string is (.+)"), 0),
+ event_id in (132, 134, 4187, 4188), arrayindex(regextract(msg, "msg:? (.+)"), 0),
+ event_id = 542, arrayindex(regextract(msg, "Skipped:\s*(.+)"), 0),
+ event_id in (2614, 2218), arrayindex(regextract(msg, "failed:\s*(.+)"), 0),
+ event_id = 609, arrayindex(regextract(msg, "from \S+ \(?(.+?)\)?\s*\-"), 0),
+ event_id = 2419, arrayindex(regextract(msg, "Received (.+) packet"), 0),
+ event_id in (5081, 5619), arrayindex(regextract(msg, " as (.+)\."), 0),
+ event_id in (5602, 5604, 5606, 5623, 5867, 5869), arrayindex(regextract(msg, "because of (.+)\."), 0),
+ event_id = 158, arrayindex(regextract(msg, "line \S+\s*(.+)"), 0),
+ event_id in (686, 687, 3032, 3033, 3841, 3842, 3843), msg,
+ event_id in (3380, 3381), arrayindex(regextract(msg, "ERR:\s*(.+)"), 0),
+ event_id in (648, 2007, 2413, 3370), arrayindex(regextract(msg, "failed:?\s*(.+)"), 0),
+ event_id = 4252, arrayindex(regextract(msg, "start:\s*(.+)"), 0),
+ event_id = 3258, arrayindex(regextract(msg, "failed due to \s*(.+)"), 0),
+ event_id = 655, arrayindex(regextract(msg, "VLAN\.\s*(.+)"), 0),
+ event_id = 669, arrayindex(regextract(msg, "caution (.+)"), 0),
+ event_id = 5620, arrayindex(regextract(msg, "role (.+)"), 0),
+ event_id in (3152, 3278, 3279, 4872, 5407, 5408), arrayindex(regextract(msg, "[Rr]eason:?\s*(.+)"), 0),
+ event_id = 63, arrayindex(regextract(msg, "down:\s*(.+)"), 0),
+ event_id = 273, arrayindex(regextract(msg, "\S+\s*\-\s*(.+)"), 0),
+ event_id = 4892, arrayindex(regextract(msg, "Got (.+)\."), 0),
+ event_id = 3006, arrayindex(regextract(msg, "reboot: (.+)"), 0),
+ event_id in (4942, 4943, 4944, 4945, 4946, 4947, 4948, 4949), arrayindex(regextract(msg, "failed\s*;\s*(.+)"), 0)),
+ policy_name = if(
+ event_id = 5816, arrayindex(regextract(msg, "list \'(.+?)\'"), 0),
+ event_id = 5817, arrayindex(regextract(msg, "class \'(.+?)\'"), 0),
+ event_id in (2655, 2659, 2661), arrayindex(regextract(msg, "apply \'(.+?)\'"), 0),
+ event_id in (2637, 2638, 2652, 2653, 2654, 2656, 3025, 5215, 5745, 5746), arrayindex(regextract(msg, "[Pp]olicy (?:is\s+)?\'(.+?)\'"), 0)),
+ interface_port_id = if(
+ event_id in (76, 77, 324, 725, 826, 827, 828, 840, 841, 844, 848, 849, 884, 885, 886, 887, 888, 889, 927, 972, 975, 2671, 3827, 3828, 4170, 4171, 4300, 4301, 4584, 4593, 4646, 4647, 5161, 5162, 5834), arrayindex(regextract(msg, "^[Pp]ort ([\w\/]+)"), 0),
+ event_id in (725, 826, 827, 848, 849, 885, 927, 2671, 3827, 3828, 4170, 4171, 4300, 4301, 4646, 4647, 5161, 5162), arrayindex(regextract(msg, "^Port ([\w\/]+) (?:with|released|changed|inhibited|blocked|unblocked|disabled|has|is|\-)"), 0),
+ event_id in (346, 347, 348, 350, 351, 353, 356, 447), arrayindex(regextract(msg, "^([\w\/]+)\s*[\-]+\s*(?:ACK|Received|detected|initialized|established|not established)"), 0),
+ event_id in (822, 823), arrayindex(regextract(msg, "^(?:Created|Deleted) ([\w\/]+)"), 0),
+ event_id in (883, 3381, 3815), arrayindex(regextract(msg, "Port ([\w\/]+) (?:has|communications|moved)"), 0),
+ event_id in (2640, 2641, 2642, 2643, 2644), arrayindex(regextract(msg, "src I\/F ([\w\/]+) (?:is|has)"), 0),
+ event_id in (4667, 4670), arrayindex(regextract(msg, "on Interface ([\w\/]+)"), 0),
+ event_id = 357, arrayindex(regextract(msg, "^([\w\/]+) load balance version"), 0),
+ event_id = 444, arrayindex(regextract(msg, "^([\w\/]+): received"), 0),
+ event_id = 453, arrayindex(regextract(msg, "detected on ([\w\/]+)"), 0),
+ event_id = 520, arrayindex(regextract(msg, "Ports ([\w\/]+):"), 0),
+ event_id = 592, arrayindex(regextract(msg, "Move \S+ to ([\w\/]+) denied"), 0),
+ event_id = 825, arrayindex(regextract(msg, "PORT-MAP (?:removed from|added to) ([\w\/]+)"), 0),
+ event_id = 2639, arrayindex(regextract(msg, "from ([\w\/]+) interface"), 0),
+ event_id = 4573, arrayindex(regextract(msg, "Invalid DHCPv6 packet \S+\.?\s*([\w\/]+)"), 0),
+ event_id = 4633, arrayindex(regextract(msg, "Mac-notify traps ([\w\/]+)"), 0),
+ event_id = 4986, arrayindex(regextract(msg, "enabled port ([\w\/]+)"), 0),
+ event_id = 5052, arrayindex(regextract(msg, "^Configuration conversion for \S+ ([\w\/]+)"), 0),
+ arrayindex(regextract(msg, "\s+[Pp]ort ([\w\/]+)"), 0)), // general fallback
+ src_ip = if(
+ event_id in (3, 3087, 5328), arrayindex(regextract(msg, "to (\S+)"), 0),
+ event_id in (11, 91, 97, 98, 99, 112, 236, 237, 419, 468, 469, 470, 609, 610, 611, 618, 619, 620, 621, 622, 636, 637, 639, 640, 754, 982, 983, 984, 985, 2205, 2206, 2207, 2210, 2211, 2219, 2220, 2419, 2420, 2421, 2422, 2425, 2667, 2684, 2686, 3310, 3311, 3318, 3345, 3362, 3363, 3387, 3390, 4215, 4216, 4241, 4242, 4244, 4245, 4281, 4542, 4558, 456, 4575, 4832, 4834, 4872, 4873, 5005, 5006, 5016, 5312, 5313, 5314, 5329, 5653), arrayindex(regextract(msg, "from (\S+)"), 0),
+ event_id in (12, 13, 3167), arrayindex(regextract(msg, "on (\S+)"), 0),
+ event_id in (16, 86, 748, 749, 2430, 2581, 2582, 3130, 4243, 4261, 4263, 4276, 4782, 4875, 5025, 5026, 5034, 5177, 5358), arrayindex(regextract(msg, "[Aa]ddress:? (\S+)"), 0),
+ event_id in (612, 2208, 5017), arrayindex(regextract(msg, "from rtr (\S+)"), 0),
+ event_id in (614, 623), arrayindex(regextract(msg, "flow (\S+)"), 0),
+ event_id in (617, 625, 626, 628, 694, 695, 696, 806, 917, 5013), arrayindex(regextract(msg, "IP (\S+)"), 0),
+ event_id in (755, 5012, 5014), arrayindex(regextract(msg, "flow g?\s*\S+\s*\-?\s*s?\s*(\S+)"), 0),
+ event_id in (850, 854, 3319, 3320, 4546, 4913, 5401), arrayindex(regextract(msg, "(?:Server|server) (\S+)"), 0),
+ event_id in (864, 4552), arrayindex(regextract(msg, "[Rr]eading (\S+)"), 0),
+ event_id in (979, 4877, 4880), arrayindex(regextract(msg, "for (\S+),?"), 0),
+ event_id in (981, 4589), arrayindex(regextract(msg, "Access denied (\S+)\s*\-\>"), 0),
+ event_id in (2214, 2215, 2221, 2416, 2418, 2423, 2639, 2645, 2646, 2647), arrayindex(regextract(msg, "addr:? (\S+)"), 0),
+ event_id in (2530, 2668), arrayindex(regextract(msg, "Client \'?(\S+)\'?"), 0),
+ event_id in (4781, 4783), arrayindex(regextract(msg, "interface (\S+)\.?"), 0),
+ event_id in (4961, 4962, 4966), arrayindex(regextract(msg, "device (\S+)"), 0),
+ event_id in (5020, 5021, 5024), arrayindex(regextract(msg, "Source IPv4 \S+\:\s*(\S+)"), 0),
+ event_id in (5059, 5060, 5061, 5062, 5063, 5233, 5310), arrayindex(regextract(msg, "^\s*(\S+)"), 0),
+ event_id in (5304, 5305, 5306, 5307, 5308, 5309, 5315, 5316, 5317, 5318), arrayindex(regextract(msg, "peers\s*(\S+)"), 0),
+ event_id in (5402, 5403), arrayindex(regextract(msg, "is (\S+)\.?"), 0),
+ event_id in (5534, 5535, 5537, 5538), arrayindex(regextract(msg, "source IP:\s*(\S+)\-?"), 0),
+ event_id = 5, arrayindex(regextract(msg, "^ARP:\s*(\S+)"), 0),
+ event_id = 17, arrayindex(regextract(msg, "mask (\S+)"), 0),
+ event_id = 8, arrayindex(regextract(msg, "Source:\s*(\S+)"), 0),
+ event_id = 805, arrayindex(regextract(msg, "block (\S+)"), 0),
+ event_id = 908, arrayindex(regextract(msg, "ipAddr:\s+(\S+)"), 0),
+ event_id = 911, arrayindex(regextract(msg, ",?\s*(\S+) port"), 0),
+ event_id = 2669, msg,
+ event_id = 4638, arrayindex(regextract(msg, "as (\S+)\.?"), 0),
+ event_id = 4878, arrayindex(regextract(msg, "binding (\S+)"), 0),
+ event_id = 5226, arrayindex(regextract(msg, "Activate (\S+)"), 0)),
+ tgt_ip = if(
+ event_id in (119, 411, 5181, 5182, 5183, 5184, 5845), arrayindex(regextract(msg, "server\s+(\S+)"), 0),
+ event_id in (120, 122, 413, 416), arrayindex(regextract(msg, "server at\s+(\S+)"), 0),
+ event_id in (747, 3340, 3341, 3342, 5226, 5649), arrayindex(regextract(msg, "to (\S+)"), 0),
+ event_id in (755, 5012, 5014), arrayindex(regextract(msg, "flow g?\s*(\S+)"), 0),
+ event_id in (866, 4554), arrayindex(regextract(msg, "[Ww]riting (\S+)"), 0),
+ event_id in (981, 4589), arrayindex(regextract(msg, "Access denied \S+\s*\-\>(\S+)"), 0),
+ event_id in (3876, 5809), arrayindex(regextract(msg, "address (\S+)"), 0),
+ event_id in (5020, 5021, 5024), arrayindex(regextract(msg, "Destination IPv4 \S+\:\s*(\S+)"), 0),
+ event_id in (5311, 5405, 5408, 5419, 5421, 5422), arrayindex(regextract(msg, "[Cc]ontroller (\S+)[,\.]?"), 0),
+ event_id in (5404, 5406), arrayindex(regextract(msg, "Controller \S+ (\S+)\.?"), 0),
+ event_id in (5534, 5535, 5537, 5538), arrayindex(regextract(msg, "destination IP:\s*(\S+)\-?"), 0),
+ event_id in (5685, 5686), arrayindex(regextract(msg, "\s+ip (\S+)"), 0),
+ event_id = 8, arrayindex(regextract(msg, "Target:\s*(\S+)"), 0),
+ event_id = 420, arrayindex(regextract(msg, "reach (\S+) server"), 0),
+ event_id = 315, arrayindex(regextract(msg, "target IP\s+(\S+)"), 0),
+ event_id = 317, arrayindex(regextract(msg, "Target IP address\s+(\S+)"), 0),
+ event_id = 630, arrayindex(regextract(msg, "group\s+(\S+)"), 0),
+ event_id = 631, arrayindex(regextract(msg, "to SUBSYSTEM\s+(\S+)"), 0),
+ event_id = 2631, arrayindex(regextract(msg, "at (\S+)\.?"), 0),
+ event_id = 4541, arrayindex(regextract(msg, "configure (\S+)"), 0),
+ event_id = 5451, arrayindex(regextract(msg, "^\s*(\S+)"), 0),
+ event_id = 5731, arrayindex(regextract(msg, "dhcp_server_ip (\S+)"), 0),
+ event_id = 5744, arrayindex(regextract(msg, "subscriber (\S+)"), 0),
+ event_id = 5812, arrayindex(regextract(msg, "Fqdn (\S+)"), 0)),
+ tgt_user = if(
+ event_id = 3368, arrayindex(regextract(_raw_log, "oldest user (\S+)") , 0),
+ event_id = 3386, arrayindex(regextract(_raw_log, "modified for user ([^\.]+)") , 0),
+ event_id = 4940, arrayindex(regextract(_raw_log, "^Password for user (\S+) (?:has )?expired") , 0),
+ event_id in (3391, 3392), arrayindex(regextract(_raw_log, "Local user (\S+) is") , 0)), // users added/removed to/from group
+ tgt_mac_address = if(event_id = 913, arrayindex(regextract(msg, "dest:\s*((?:[a-fA-F\d]{2}[\-\:]?){5}[a-fA-F\d]{2})"), 0)),
+ username = if(
+ event_id in(3362, 3363, 4241, 4242, 3369, 4942, 4943, 4944, 4945, 4946, 4947, 4948, 4949, 4951, 4952, 5005, 5006, 5007, 3387, 4930, 4938, 4939, 5826), arrayindex(regextract(msg, "^User \'?([\w\\]+)\'? (?:logged|logout|from|password|has logged|is logged|has been logged|denied access)"), 0),
+ event_id in (186, 468, 469, 470, 640, 2667, 2710, 2714, 2715, 2716, 2717, 3310, 3311, 3318, 3340, 3341, 3342, 3343, 3344, 3345, 3440, 3444, 4244, 4245, 4248, 5553, 5555, 5765, 5825, 5827, 5828, 5829), arrayindex(regextract(msg, "^User\s*\:?\s*\'?([\w\\]+)\'?\s*\:"), 0),
+ event_id in (419, 5008, 639, 992, 993, 3390, 4940, 4941), arrayindex(regextract(msg, "[Uu]ser \'?([\w\\]+)\'? (?:is (?:trying|logged|added|deleted)|command|logged|has|expired)"), 0),
+ event_id = 4950, arrayindex(regextract(msg, "History records cleared for \'?([\w\\]+)\'? user") , 0),
+ event_id = 236, arrayindex(regextract(msg, "community name or user name\s*[,\:]?\s*\'?([\w\\]+)\'?"), 0),
+ event_id = 5815, arrayindex(regextract(msg, "^The user name \'?([\w\\]+)\'?"), 0),
+ event_id = 5940, arrayindex(regextract(msg, "Authentication aborted for(?: client \S+)? user \'?([\w\\]+)\'?\."), 0),
+ event_id = 5681, arrayindex(regextract(msg, "userName \'?([\w\\]+)\'?\-?\s*authType"), 0),
+ event_id = 5749, arrayindex(regextract(msg, "username \'?([\w\\]+)\'? or password"), 0),
+ event_id = 4831, arrayindex(regextract(msg, "limit is reached for \'?([\w\\]+)\'?\."), 0),
+ event_id = 2668, arrayindex(regextract(msg, "for the User \'?([\w\\]+)\'?\."), 0),
+ event_id = 5409, arrayindex(regextract(msg, "for user (\S+):") , 0)),
+ user_role = if(
+ event_id in (5204, 5205, 5206, 5207, 5208, 5209, 5210, 5211, 5212, 5213, 5411, 5412, 5619, 5563, 5717, 5722, 5724, 5725, 5797, 5798, 5799, 5800, 5801, 5802, 5803, 5804, 5805, 5806, 5814), arrayindex(regextract(msg, "user[\s\-]+role \'?(.+?)\'? (?:to|has|with|is|\.)"), 0),
+ event_id in (3391, 3392, 3393, 3394), arrayindex(regextract(msg, "group \'?(.+?)\'?\s*\."), 0),
+ event_id in (5620, 5621), arrayindex(regextract(msg, "^\'?(.+?)\'? client"), 0),
+ event_id in (5715, 5716), arrayindex(regextract(msg, "^Initial role \'?(.+?)\'? is"), 0)),
+ vlan_id = if(
+ event_id in (1,2), arrayindex(regextract(msg, "^(\d+) virtual LAN"), 0),
+ event_id = 316, arrayindex(regextract(msg, "^VLAN ID out of range \(?(\d+)"), 0),
+ event_id = 964, arrayindex(regextract(msg, "^Failed to allocate memory for (\d+)"), 0),
+ event_id = 3815, arrayindex(regextract(msg, "^(\d+) \S+ Port \S+ moved"), 0),
+ event_id = 3824, arrayindex(regextract(msg, "^GVRP could not create (\d+) because"), 0),
+ event_id = 3826, arrayindex(regextract(msg, "^GVRP could not add port \S+ to (\d+)"), 0),
+ event_id = 3828, arrayindex(regextract(msg, "^Port \S+ unblocked on (\d+)"), 0),
+ event_id = 4588, arrayindex(regextract(msg, "^Unable to add binding for (\d+)"), 0),
+ arrayindex(regextract(msg, "(?:VID|vid|VLAN|vlan|Vlan|Vlan-id|VLAN-id)\s*:?\s*(\d+)"), 0)) // vlan default fallback
+| alter // Post Extraction Processing
+ formatted_mac_address = arraystring(regextract(mac_address, "[\da-fA-F]{2}"), ":"),
+ formatted_tgt_mac_address = arraystring(regextract(tgt_mac_address , "[\da-fA-F]{2}"), ":"),
+ gw_ipv4 = arrayindex(regextract(gateway_ip, "((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ gw_ipv6 = arrayindex(regextract(gateway_ip, "((?:[a-fA-F\d]{0,4}\:){1,7}[a-fA-F\d]{0,4})"), 0),
+ src_ipv4 = arrayindex(regextract(src_ip, "((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ src_ipv6 = arrayindex(regextract(src_ip, "((?:[a-fA-F\d]{0,4}\:){1,7}[a-fA-F\d]{0,4})"), 0),
+ tgt_ipv4 = arrayindex(regextract(tgt_ip, "((?:\d{1,3}\.){3}\d{1,3})"), 0),
+ tgt_ipv6 = arrayindex(regextract(tgt_ip, "((?:[a-fA-F\d]{0,4}\:){1,7}[a-fA-F\d]{0,4})"), 0),
+ tgt_user_domain = arrayindex(regextract(tgt_user, "(.+)\\.+"), 0),
+ user_domain = arrayindex(regextract(username, "(.+)\\.+"), 0)
+| alter // XDM Mappings
+ xdm.alert.severity = severity,
+ xdm.auth.auth_method = access_mode,
+ xdm.event.description = msg,
+ xdm.event.id = to_string(event_id),
+ xdm.event.log_level = if(
+ severity = "0", XDM_CONST.LOG_LEVEL_EMERGENCY,
+ severity = "1", XDM_CONST.LOG_LEVEL_ALERT ,
+ severity = "2", XDM_CONST.LOG_LEVEL_CRITICAL,
+ severity = "3", XDM_CONST.LOG_LEVEL_ERROR,
+ severity = "4", XDM_CONST.LOG_LEVEL_WARNING,
+ severity = "5", XDM_CONST.LOG_LEVEL_NOTICE,
+ severity = "6", XDM_CONST.LOG_LEVEL_INFORMATIONAL,
+ severity = "7", XDM_CONST.LOG_LEVEL_DEBUG),
+ xdm.event.outcome = if(
+ msg ~= "(?i)ERROR|FAIL|REJECTED|UNABLE|ABORT|INCORRECT|PROBLEM|FAULT|DENY|DENIED|MISMATCH|COLLISIONS|CANNOT|UNREACHABLE|CORRUPTED|NOT FOUND|NOT SUCCEED|UNABLE TO PERMIT|NOT APPLIED|NOT MODIFIED|NOT REACHABLE|CANCELED BECAUSE|CAN\'T REACH", XDM_CONST.OUTCOME_FAILED,
+ msg ~= "(?i)SUCCESS|SUCCEEDED", XDM_CONST.OUTCOME_SUCCESS),
+ xdm.event.outcome_reason = outcome_reason,
+ xdm.event.type = facility,
+ xdm.intermediate.host.hostname = gateway_name,
+ xdm.intermediate.ipv4 = device_ip,
+ xdm.intermediate.ipv6 = gw_ipv6,
+ xdm.intermediate.host.ipv4_addresses = arraycreate(device_ip, gw_ipv4),
+ xdm.intermediate.host.ipv6_addresses = arraycreate(gw_ipv6),
+ xdm.network.dhcp.siaddr = dhcp_server_ip,
+ xdm.network.rule = policy_name,
+ xdm.observer.type = facility,
+ xdm.observer.unique_identifier = device_ip,
+ xdm.source.host.mac_addresses = if(formatted_mac_address != null, arraycreate(formatted_mac_address)),
+ xdm.source.ipv4 = src_ipv4,
+ xdm.source.ipv6 = src_ipv6,
+ xdm.source.subnet = ip_mask,
+ xdm.source.user.domain = user_domain,
+ xdm.source.user.groups = if(user_role != null, arraycreate(user_role)),
+ xdm.source.user.username = username,
+ xdm.source.vlan = to_integer(vlan_id),
+ xdm.target.host.mac_addresses = if(formatted_tgt_mac_address != null, arraycreate(formatted_tgt_mac_address)),
+ xdm.target.process.command_line = cmd,
+ xdm.target.interface = to_string(interface_port_id),
+ xdm.target.ipv4 = tgt_ipv4,
+ xdm.target.ipv6 = tgt_ipv6,
+ xdm.target.user.domain = tgt_user_domain,
+ xdm.target.user.username = tgt_user;
\ No newline at end of file
diff --git a/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3_schema.json b/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3_schema.json
index 5a58f7df8dfb..cadecdeb64fe 100644
--- a/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3_schema.json
+++ b/Packs/CiscoCatalyst/ModelingRules/CiscoCatalyst_1_3/CiscoCatalyst_1_3_schema.json
@@ -1,8 +1,16 @@
{
- "cisco_catalyst_raw": {
- "_raw_log": {
- "type": "string",
- "is_array": false
- }
+ "cisco_catalyst_raw": {
+ "_raw_log": {
+ "type": "string",
+ "is_array": false
+ },
+ "parsed_fields": {
+ "type": "string",
+ "is_array": false
+ },
+ "_final_reporting_device_ip": {
+ "type": "string",
+ "is_array": false
}
- }
\ No newline at end of file
+ }
+}
\ No newline at end of file
diff --git a/Packs/CiscoCatalyst/ParsingRules/CiscoCatalyst/CiscoCatalyst.xif b/Packs/CiscoCatalyst/ParsingRules/CiscoCatalyst/CiscoCatalyst.xif
index 1f88b91107bd..dca68dea394b 100644
--- a/Packs/CiscoCatalyst/ParsingRules/CiscoCatalyst/CiscoCatalyst.xif
+++ b/Packs/CiscoCatalyst/ParsingRules/CiscoCatalyst/CiscoCatalyst.xif
@@ -1,22 +1,61 @@
-[INGEST:vendor="cisco", product="catalyst", target_dataset="cisco_catalyst_raw", no_hit = keep]
-filter _raw_log ~= "[A-Za-z]+\s+\d+\s+\d{4}\s+\d+\:\d+\:\d+\sUTC" or _raw_log ~= "[A-Za-z]+\s+\d+\s+\d+\:\d+\:\d+\.\d+\sUTC"
+/*------------------------------------
+ CONSTANTS - COMMON REGEX PATTERNS
+ -----------------------------------*/
+[CONST]
+CISCO_CATALYST_TIMESTAMP_FORMAT = "[A-Za-z]+\s+\d+\s+\d{4}\s+\d+\:\d+\:\d+\sUTC|[A-Za-z]+\s+\d+\s+\d+\:\d+\:\d+\.\d+\sUTC"; // supported format samples: May 16 2023 14:30:00 UTC or May 2 09:47:18.714 UTC
+CISCO_CATALYST_SYSTEM_LOG_MESSAGE_FORMAT = "%[\w\-]+\-\d\-\S+?:"; // %facility-severity-MNEMONIC:description (see https://www.cisco.com/c/en/us/td/docs/routers/access/wireless/software/guide/SysMsgLogging.html#wp1054470)
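+// illustrative sample matching both patterns (values taken from the pack docs, framing assumed): "<189>000123: May 16 2023 14:30:00 UTC: %SYS-5-CONFIG_I: Configured from console by vty2 (10.1.2.3)"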
+
+/*---------
+ RULES
+ --------*/
+[RULE: CISCO_CATALYST_PARSE_TIMESTAMP]
// supported format May 16 2023 14:30:00 UTC
-| alter tmp_time_string1 = arrayindex(regextract(_raw_log ,"(\w+\s\d+\s\d+\s\d+\:\d+\:\d+)\sUTC"),0)
-| alter tmp_time_with_year = parse_timestamp("%b %d %Y %H:%M:%S",tmp_time_string1 )
+alter tmp_time_string1 = arrayindex(regextract(_raw_log ,"(\w+\s\d+\s\d+\s\d+\:\d+\:\d+)\sUTC"), 0)
+| alter tmp_time_with_year = parse_timestamp("%b %d %Y %H:%M:%S", tmp_time_string1)
// supported format May 2 09:47:18.714 UTC
| alter
- tmp_time_without_year = arraystring(regextract( _raw_log, "(\w{3}\s+\d{1,2}\s+\d{2}\:\d{2}\:\d{2}\.\d+\s)UTC"), ""),
- tmp_year = to_string(format_timestamp("%Y",current_time()))
-| alter
- tmp_time_format2 = if(tmp_time_without_year != null and tmp_time_without_year != "", concat(tmp_year, " ", tmp_time_without_year), null)
-| alter
- tmp_time1_1 = parse_timestamp("%Y %b %e %H:%M:%E*S", tmp_time_format2 )
+ tmp_time_without_year = arraystring(regextract( _raw_log, "(\w{3}\s+\d{1,2}\s+\d{2}\:\d{2}\:\d{2}\.\d+\s)UTC"), ""),
+ tmp_year = to_string(format_timestamp("%Y", current_time()))
+| alter tmp_time_format2 = if(tmp_time_without_year != null and tmp_time_without_year != "", concat(tmp_year, " ", tmp_time_without_year), null)
+| alter tmp_time1_1 = parse_timestamp("%Y %b %e %H:%M:%E*S", tmp_time_format2 )
| alter tmp_timeDiff = timestamp_diff(tmp_time1_1, current_time(), "MILLISECOND")
// Check if the date is a future date
| alter tmp_year2 = if(tmp_timeDiff > 0, to_string(subtract(to_integer(tmp_year),1)),null)
// Create timestamp minus 1 year if the timestamp is a future one
| alter tmp_time1_2 = if(tmp_year2 != null, concat(tmp_year2, " ", tmp_time_without_year), null)
-| alter tmp_time1_2 = if(tmp_time1_2 != null, parse_timestamp("%Y %b %e %H:%M:%E*S", tmp_time1_2 ),null)
+| alter tmp_time1_2 = if(tmp_time1_2 != null, parse_timestamp("%Y %b %e %H:%M:%E*S", tmp_time1_2 ), null)
| alter tmp_time_without_year = coalesce(tmp_time1_2, tmp_time1_1)
| alter _time = coalesce(tmp_time_with_year, tmp_time_without_year)
-| fields -tmp_time_string1, tmp_time_with_year, tmp_time2 , tmp_time_concat2 , tmp_year, tmp_year2, tmp_time_without_year, tmp_time_format2, tmp_timeDiff, tmp_time1_1, tmp_time1_2;
\ No newline at end of file
+| fields -tmp_time_string1, tmp_time_with_year, tmp_time2, tmp_time_concat2, tmp_year, tmp_year2, tmp_time_without_year, tmp_time_format2, tmp_timeDiff, tmp_time1_1, tmp_time1_2;
+
+[RULE: CISCO_CATALYST_PARSE_STANDARD_LOG_MESSAGE_FORMAT]
+alter parsed_fields = regexcapture(_raw_log, "^\<\d+\>(?P<sequence_number>\d+):\s+.+?%(?P<facility>[\w\-]+)\-(?P<severity>\d)\-(?P<mnemonic>\S+?):\s*(?P<description>.+)");
+
+[RULE: CISCO_CATALYST_PARSE_NON_STANDARD_LOG_MESSAGE_FORMAT]
+alter parsed_fields = regexcapture(_raw_log, "^.+?(?P<reporting_device_ip>(?:\d{1,3}\.){3}\d{1,3})\s+(?P<event_id>\d+)\s+(?P<mnemonic>[\w\-\.]+):\s+(?P<description>.+)")
+| alter tmp_syslog_priority = to_integer(arrayindex(regextract(_raw_log, "^\<(\d{1,3})\>"), 0))
+| alter tmp_syslog_severity = to_string(subtract(tmp_syslog_priority, multiply(floor(divide(tmp_syslog_priority, 8)), 8)))
+| alter parsed_fields = object_merge(parsed_fields, object_create("severity", tmp_syslog_severity))
+| fields - tmp*;
+
+/*---------
+ INGEST
+ --------*/
+[INGEST:vendor="cisco", product="catalyst", target_dataset="cisco_catalyst_raw", no_hit = keep]
+// use case 1: log in standard format with a valid timestamp
+filter _raw_log ~= $CISCO_CATALYST_SYSTEM_LOG_MESSAGE_FORMAT and _raw_log ~= $CISCO_CATALYST_TIMESTAMP_FORMAT
+| call CISCO_CATALYST_PARSE_TIMESTAMP
+| call CISCO_CATALYST_PARSE_STANDARD_LOG_MESSAGE_FORMAT;
+
+// use case 2: log in standard format without a valid timestamp
+filter _raw_log ~= $CISCO_CATALYST_SYSTEM_LOG_MESSAGE_FORMAT and _raw_log !~= $CISCO_CATALYST_TIMESTAMP_FORMAT
+| call CISCO_CATALYST_PARSE_STANDARD_LOG_MESSAGE_FORMAT;
+
+// use case 3: log in non-standard format with a valid timestamp
+filter _raw_log !~= $CISCO_CATALYST_SYSTEM_LOG_MESSAGE_FORMAT and _raw_log ~= $CISCO_CATALYST_TIMESTAMP_FORMAT
+| call CISCO_CATALYST_PARSE_TIMESTAMP
+| call CISCO_CATALYST_PARSE_NON_STANDARD_LOG_MESSAGE_FORMAT;
+
+// use case 4: log in non-standard format without a valid timestamp
+filter _raw_log !~= $CISCO_CATALYST_SYSTEM_LOG_MESSAGE_FORMAT and _raw_log !~= $CISCO_CATALYST_TIMESTAMP_FORMAT
+| call CISCO_CATALYST_PARSE_NON_STANDARD_LOG_MESSAGE_FORMAT;
\ No newline at end of file
diff --git a/Packs/CiscoCatalyst/README.md b/Packs/CiscoCatalyst/README.md
index 7e563a66773c..9f4517381bca 100644
--- a/Packs/CiscoCatalyst/README.md
+++ b/Packs/CiscoCatalyst/README.md
@@ -2,9 +2,9 @@
# Cisco Catalyst
This pack includes Cortex XSIAM content.
-
-## Add timezone to the logs
-The only supported event time is an event time with the time zone.
+## Enabling Timestamps with a Time Zone on Log Messages
+Timestamp parsing is supported only for timestamps that include a time zone.
+Follow the steps below to enable timestamping of log messages with a UTC time zone:
1. Access the switch's command-line interface (CLI) using a terminal emulator or SSH.
2. Access privileged EXEC mode by entering the following command and providing the enable password:
@@ -34,16 +34,17 @@ write memory
## Collect Events from Vendor
In order to use the collector, use the [Broker VM](#broker-vm) option.
-
-### Broker VM
-To create or configure the Broker VM, use the information described [here](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Configure-the-Broker-VM).
+
+### Broker VM
+To create or configure the Broker VM, use the information described [here](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Configure-the-Broker-VM).
You can configure the specific vendor and product for this instance.
-1. Navigate to **Settings** > **Configuration** > **Data Broker** > **Broker VMs**.
-2. Go to the apps tab and add the **Syslog** app. If it already exists, click the **Syslog** app and then click **Configure**.
+1. Navigate to **Settings** → **Configuration** → **Data Broker** → **Broker VMs**.
+2. Go to the **APPS** column under the **Brokers** tab and add the **Syslog** app for the relevant broker instance. If the Syslog app already exists, hover over it and click **Configure**.
3. Click **Add New**.
-4. When configuring the Syslog Collector, set the following values:
- - vendor as vendor - cisco
- - product as product - catalyst
-
+4. When configuring the Syslog Collector, set the following parameters:
+ | Parameter | Value
+ | :--- | :---
+ | `Port` | Enter the syslog service port that the Cortex XSIAM Broker VM should listen on for receiving forwarded events from Cisco Catalyst devices.
+ | `Vendor` | Enter **cisco**.
+ | `Product` | Enter **catalyst**.
diff --git a/Packs/CiscoCatalyst/ReleaseNotes/1_0_6.md b/Packs/CiscoCatalyst/ReleaseNotes/1_0_6.md
new file mode 100644
index 000000000000..9e25fba9182d
--- /dev/null
+++ b/Packs/CiscoCatalyst/ReleaseNotes/1_0_6.md
@@ -0,0 +1,54 @@
+
+#### Modeling Rules
+
+##### Cisco Catalyst Modeling Rule
+
+- Improved implementation of the various extractions.
+- Added support for additional event log formats.
+- Added mapping for additional [XDM fields](https://docs-cortex.paloaltonetworks.com/r/Cortex-Data-Model-Schema-Guide/XDM-Fields), including the following:
+ - `xdm.event.id`.
+ - `xdm.event.log_level`.
+ - `xdm.event.outcome_reason`.
+ - `xdm.network.session_id`.
+ - `xdm.source.host.device_id`.
+ - `xdm.source.host.device_model`.
+ - `xdm.source.host.hardware_uuid`.
+ - `xdm.source.host.mac_addresses`.
+ - `xdm.observer.unique_identifier`.
+ - `xdm.session_context_id`.
+ - `xdm.target.host.hostname`.
+ - `xdm.target.ipv4`.
+ - `xdm.target.port`.
+- Deprecated mappings of the following [XDM fields](https://docs-cortex.paloaltonetworks.com/r/Cortex-Data-Model-Schema-Guide/XDM-Fields):
+ - `xdm.source.port`.
+ - `xdm.source.process.identifier`.
+
+#### Parsing Rules
+
+##### CiscoCatalyst Parsing Rule
+
+Added support for out-of-the-box parsing of common system log message format fields, which are available as JSON entries under the `parsed_fields` field, for example:
+ ```JSON
+ {
+ "parsed_fields": {
+ "sequence_number": "000123",
+ "facility": "SYS",
+ "severity": "5",
+ "mnemonic": "CONFIG_I",
+ "description": "Configured from console by vty2 (10.1.2.3)"
+ }
+ }
+ ```
+- Common fields include:
+ - `severity`.
+ - `facility`.
+ - `mnemonic`.
+ - `description`.
+ - `sequence_number`.
+ - `event_id`.
+- You can now access the out-of-the-box parsed fields listed above with the XQL [json_extract_scalar](https://docs-cortex.paloaltonetworks.com/r/Cortex-XSIAM/Cortex-XSIAM-XQL-Language-Reference/json_extract_scalar) function or its `->` shorthand. For example:
+ ```javascript
+ | alter
+ event_name = parsed_fields -> mnemonic,
+ event_payload = parsed_fields -> description
+ ```
\ No newline at end of file
diff --git a/Packs/CiscoCatalyst/pack_metadata.json b/Packs/CiscoCatalyst/pack_metadata.json
index d3325a1414f7..d71502072aeb 100644
--- a/Packs/CiscoCatalyst/pack_metadata.json
+++ b/Packs/CiscoCatalyst/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco Catalyst",
"description": "Cisco Catalyst switch",
"support": "xsoar",
- "currentVersion": "1.0.5",
+ "currentVersion": "1.0.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.py b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.py
index 26b08518ae3d..1aab355f4498 100644
--- a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.py
+++ b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.py
@@ -22,6 +22,17 @@
''' HELPER FUNCTIONS '''
+def remove_integration_context_for_user(user: str):
+ """
+ Remove integration context for a user
+ Args:
+ user: The user to remove the integration context for.
+ """
+ integration_context = get_integration_context()
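+ # Reset this user's stored data (e.g., previously saved tokens) so a new OAuth flow starts clean.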
+ integration_context[user] = {}
+ set_integration_context(integration_context)
+
+
def date_time_to_iso_format(date_time: datetime) -> str:
"""
Gets a datetime object and returns s string represents a datetime is ISO format.
@@ -113,7 +124,7 @@ def create_access_token(self, grant_type: str, code: str | None = None, refresh_
client_secret=self.client_secret,
redirect_uri=self.redirect_uri,
)
- return self._http_request(method='POST', url_suffix='access_token', headers=headers, params=params)
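+ # Pass the OAuth parameters in the form-encoded request body (data) rather than as URL query parameters.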
+ return self._http_request(method='POST', url_suffix='access_token', headers=headers, data=params)
def save_tokens_to_integration_context(self, result: dict):
"""
@@ -488,6 +499,7 @@ def main() -> None: # pragma: no cover
elif demisto.command() == 'cisco-webex-oauth-start':
client = admin_client if args.get('user') == 'admin' else compliance_officer_client
+ remove_integration_context_for_user(client.user)
result = oauth_start(client)
return_results(result)
diff --git a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.yml b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.yml
index 4ff6aa98fd04..b5eebeff0f51 100644
--- a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.yml
+++ b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector.yml
@@ -147,7 +147,7 @@ script:
name: since_datetime
description: Gets events from Cisco Webex.
name: cisco-webex-get-compliance-officer-events
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.14.98471
isfetchevents: true
runonce: false
script: ''
diff --git a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector_test.py b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector_test.py
index a8d0ac53be1b..419cdc5c93a1 100644
--- a/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector_test.py
+++ b/Packs/CiscoSpark/Integrations/CiscoWebexEventCollector/CiscoWebexEventCollector_test.py
@@ -66,6 +66,22 @@ def get_access_token(self):
""" TEST HELPER FUNCTION """
+def mock_set_integration_context(context: dict | None = None) -> dict | None:
+ return context
+
+
+def test_remove_integration_context_for_user(mocker):
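+ """
+ Given: An integration context containing data for 'test_user'.
+ When: Calling remove_integration_context_for_user with that user.
+ Then: Ensure the user's entry in the integration context is reset to an empty dict.
+ """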
+ import CiscoWebexEventCollector
+
+ mock_integration_context = {'test_user': {'context_key': 'context_value'}}
+ mocker.patch.object(CiscoWebexEventCollector, 'get_integration_context', return_value=mock_integration_context)
+ mock_context = mocker.patch('CiscoWebexEventCollector.set_integration_context', side_effect=mock_set_integration_context)
+
+ assert CiscoWebexEventCollector.get_integration_context() == mock_integration_context
+ CiscoWebexEventCollector.remove_integration_context_for_user('test_user')
+ assert mock_context.call_args.args[0] == {'test_user': {}}
+
+
@freeze_time("2023-12-20 13:40:00 UTC")
def test_create_last_run():
"""
@@ -193,8 +209,7 @@ def test_oauth_complete(client):
with requests_mock.Mocker() as m:
m.post(
- 'https://url.com/access_token?grant_type=authorization_code&code=123456&client_id=1&client_secret=1'
- '&redirect_uri=https%3A%2F%2Fredirect.com',
+ 'https://url.com/access_token',
json=mock_get_access_token()
)
results = oauth_complete(client, {'code': '123456'})
diff --git a/Packs/CiscoSpark/ReleaseNotes/1_0_7.md b/Packs/CiscoSpark/ReleaseNotes/1_0_7.md
new file mode 100644
index 000000000000..c2917e17bc92
--- /dev/null
+++ b/Packs/CiscoSpark/ReleaseNotes/1_0_7.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cisco Webex Event Collector
+- Updated the Docker image to: *demisto/python3:3.10.14.98471*.
+- Fixed an issue where the ***cisco-webex-oauth-complete*** command failed on a bad request.
diff --git a/Packs/CiscoSpark/pack_metadata.json b/Packs/CiscoSpark/pack_metadata.json
index 45ea48644d61..606ffd820749 100644
--- a/Packs/CiscoSpark/pack_metadata.json
+++ b/Packs/CiscoSpark/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco Webex Teams",
"description": "Send messages, create rooms and more, via the Cisco Webex Teams (Cisco Spark) API.",
"support": "xsoar",
- "currentVersion": "1.0.6",
+ "currentVersion": "1.0.7",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CiscoUmbrellaReporting/Author_image.png b/Packs/CiscoUmbrellaReporting/Author_image.png
deleted file mode 100644
index 3e152be3e0e9..000000000000
Binary files a/Packs/CiscoUmbrellaReporting/Author_image.png and /dev/null differ
diff --git a/Packs/CitrixADC/doc_files/citrixadc1.png b/Packs/CitrixADC/doc_files/citrixadc1.png
new file mode 100644
index 000000000000..4201ff9721b9
Binary files /dev/null and b/Packs/CitrixADC/doc_files/citrixadc1.png differ
diff --git a/Packs/CitrixADC/doc_files/citrixadc2.png b/Packs/CitrixADC/doc_files/citrixadc2.png
new file mode 100644
index 000000000000..4a9e318ac4f1
Binary files /dev/null and b/Packs/CitrixADC/doc_files/citrixadc2.png differ
diff --git a/Packs/CloudConvert/doc_files/100620498-2ad0cc00-3327-11eb-8959-3ec0726dbced.png b/Packs/CloudConvert/doc_files/100620498-2ad0cc00-3327-11eb-8959-3ec0726dbced.png
new file mode 100644
index 000000000000..d9aa85456f86
Binary files /dev/null and b/Packs/CloudConvert/doc_files/100620498-2ad0cc00-3327-11eb-8959-3ec0726dbced.png differ
diff --git a/Packs/CloudIDS/.pack-ignore b/Packs/CloudIDS/.pack-ignore
index e69de29bb2d1..5c290f30245f 100644
--- a/Packs/CloudIDS/.pack-ignore
+++ b/Packs/CloudIDS/.pack-ignore
@@ -0,0 +1,2 @@
+[file:README.md]
+ignore=RM108
\ No newline at end of file
diff --git a/Packs/CloudIDS/README.md b/Packs/CloudIDS/README.md
index ef38aaae55ac..d3a8b141fb09 100644
--- a/Packs/CloudIDS/README.md
+++ b/Packs/CloudIDS/README.md
@@ -1,16 +1,16 @@
-# CloudIDS
-Google Cloud IDS is a next-generation advanced intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks.
-
-## What does this pack do?
-
-### Playbook
-* `Cloud_IDS-IP_Blacklist-GCP_Firewall_Extract`: Gets the attacker's IP address from Cloud IDS through Google Pub/Sub.
- `Cloud_IDS-IP_Blacklist-GCP_Firewall_Append` will update the ip list so GCP automatically blocks the IP address.
-
-#### Flow Chart of Playbook
-* [Cloud_IDS-IP_Blacklist-GCP_Firewall](https://github.com/demisto/content/blob/423e13b69b375288d3ec2183bfbd4d2ee6fe018c/Packs/CloudIDS/Playbooks/Cloud_IDS-IP_Blacklist-GCP_Firewall_README.md)
-![Playbook Image](https://github.com/demisto/content/raw/423e13b69b375288d3ec2183bfbd4d2ee6fe018c/Packs/CloudIDS/doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Combine.png)
-![Playbook Image](https://github.com/demisto/content/raw/423e13b69b375288d3ec2183bfbd4d2ee6fe018c/Packs/CloudIDS/doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Extract.png)
-![Playbook Image](https://github.com/demisto/content/raw/423e13b69b375288d3ec2183bfbd4d2ee6fe018c/Packs/CloudIDS/doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Append.png)
-
-
+# CloudIDS
+Google Cloud IDS is a next-generation advanced intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks.
+
+## What does this pack do?
+
+### Playbook
+* `Cloud_IDS-IP_Blacklist-GCP_Firewall_Extract`: Gets the attacker's IP address from Cloud IDS through Google Pub/Sub.
+ `Cloud_IDS-IP_Blacklist-GCP_Firewall_Append` will update the IP list so GCP automatically blocks the IP address.
+
+#### Flow Chart of Playbook
+* [Cloud_IDS-IP_Blacklist-GCP_Firewall](https://github.com/demisto/content/blob/423e13b69b375288d3ec2183bfbd4d2ee6fe018c/Packs/CloudIDS/Playbooks/Cloud_IDS-IP_Blacklist-GCP_Firewall_README.md)
+![Playbook Image](doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Combine.png)
+![Playbook Image](doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Extract.png)
+![Playbook Image](doc_files/Cloud_IDS-IP_Blacklist-GCP_Firewall_Append.png)
+
+
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent.yml b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent.yml
new file mode 100644
index 000000000000..9d02eca62760
--- /dev/null
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent.yml
@@ -0,0 +1,653 @@
+id: Cortex XDR - Malicious Pod Response - Agent
+version: -1
+name: Cortex XDR - Malicious Pod Response - Agent
+description: "This playbook ensures a swift and effective response to malicious activities within Kubernetes environments, leveraging cloud-native tools to maintain cluster security and integrity.\n\nThe playbook is designed to handle agent-generated alerts due to malicious activities within Kubernetes (K8S) pods, such as mining activities, which require immediate action. The playbook also addresses scenarios where the malicious pod is killed, but the malicious K8S workload repeatedly creates new pods.\n\n### Key Features:\n\nAWS Function Integration: This utilizes an AWS Lambda function that can manage resources and facilitate rapid response actions within an Amazon EKS cluster without the need for third-party tools such as Kubectl.\n\nThe Lambda function can initiate the following response actions:\n\n - Pod Termination: The playbook includes steps to safely terminate the affected pod within the K8S environment.\n - Workload Suspension: If necessary, the playbook can be escalated to suspend the entire workload associated with the mining activity.\n\nOnce the Lambda function execution is completed, the playbook deletes all of the created objects to ensure undesirable usage.\n\n### Workflow:\n\n1. Alert Detection: The playbook begins with the monitoring agent detecting a mining alert within a Kubernetes pod.\n2. Alert Validation: Validates the alert to ensure it is not a false positive.\n3. Response Decision: \n - Pod Termination: If the mining activity is isolated to a single pod, the AWS Lambda function is invoked to terminate the affected pod within the K8S environment.\n - Workload Suspension: If the mining activity is widespread or poses a significant threat, the AWS Lambda function suspends the entire workload within the K8S environment.\n4. Cleanup: This action initiates the complete removal of all objects created for the Lambda execution for security and hardening purposes.\n\n### Required Integration\n\n#### AWS IAM (Identity and Access Management)\n- [AWS IAM API Documentation](https://docs.aws.amazon.com/IAM/latest/APIReference/Welcome.html)\n- [Cortex XSOAR AWS IAM Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSIAM/)\n\n#### AWS EC2 (Elastic Compute Cloud)\n- [AWS EC2 API Documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Welcome.html)\n- [Cortex XSOAR AWS EC2 Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSEC2/)\n\n#### AWS EKS (Elastic Kubernetes Service)\n- [AWS EKS API Documentation](https://docs.aws.amazon.com/eks/latest/APIReference/Welcome.html)\n- [Cortex XSOAR AWS EKS Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSEKS/)\n\n#### AWS Lambda\n- [AWS Lambda API Documentation](https://docs.aws.amazon.com/lambda/latest/dg/API_Reference.html)\n- [Cortex XSOAR AWS Lambda Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSLambda/)."
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 95df4c10-3e22-4cbc-8777-a23ddd0ac508
+ type: start
+ task:
+ id: 95df4c10-3e22-4cbc-8777-a23ddd0ac508
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "36"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": -140
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: d96cdfcf-465a-439f-8165-d35257dc9fdf
+ type: regular
+ task:
+ id: d96cdfcf-465a-439f-8165-d35257dc9fdf
+ version: -1
+ name: Initiate Pod Deletion
+ description: Invokes a Lambda function. Specify just a function name to invoke the latest version of the function. To invoke a published version, use the Qualifier parameter to specify a version or alias. If you use the RequestResponse (synchronous) invocation option, note that the function may be invoked multiple times if a timeout is reached. For functions with a long timeout, your client may be disconnected during synchronous invocation while it waits for a response. Use the "timeout" and "retries" arguments to control this behavior. If you use the Event (asynchronous) invocation option, the function will be invoked at least once in response to an event and the function must be idempotent to handle this.
+ script: '|||aws-lambda-invoke'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "22"
+ scriptarguments:
+ functionName:
+ simple: ClusterResponse
+ invocationType:
+ simple: RequestResponse
+ payload:
+ simple: ${ResponsePayload}
+ region:
+ simple: ${inputs.region}
+ retry-count:
+ simple: "10"
+ retry-interval:
+ simple: "15"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 630
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: ac7527ab-877c-4ae0-89a1-d300de959d08
+ type: title
+ task:
+ id: ac7527ab-877c-4ae0-89a1-d300de959d08
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 1320
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: 6611e8f6-3101-4a4b-8da5-6523338cda09
+ type: playbook
+ task:
+ id: 6611e8f6-3101-4a4b-8da5-6523338cda09
+ version: -1
+ name: Function Removal - AWS
+ description: |
+ This playbook automates the removal of an AWS Lambda function and its associated resources used for managing resources within an Amazon EKS cluster. It ensures all related roles, policies, and security configurations are properly detached and deleted.
+
+ **Resource Detachment and Deletion**
+
+ - **Get the Lambda Role**: Retrieve the IAM role associated with the Lambda function.
+ - **Detach Policy from Lambda Role**: Remove the policy attached to the Lambda role.
+ - **Delete IAM Role**: Delete the IAM role that was used for the Lambda function.
+ - **Delete Lambda Policy**: Remove the policy specifically created for the Lambda function.
+ - **Delete Security Group**: Delete the security group that was managing the Lambda function's traffic.
+
+ **Access Entry Check**
+
+ - **Check if Access Entry was Created**: Verify if the access entry for the EKS cluster was created.
+ - **If YES**: Proceed to delete additional resources.
+ - **If NO**: Skip the deletion of additional resources.
+
+ **Additional Resource Deletion**
+
+ - **Delete Kubernetes Layer**: Remove the Kubernetes layer that was used by the Lambda function.
+ - **Delete Lambda Function**: Delete the Lambda function itself, ensuring all related code and configurations are removed.
+
+ **Conclusion**
+
+ - **Final Cleanup**: Ensure all specified resources have been deleted successfully.
+ - **Completion**: Confirm that the removal process is complete, providing a clean environment free from the previously deployed Lambda function and its configurations.
+
+ This playbook provides a comprehensive, automated approach to removing an AWS Lambda function and its related resources, ensuring all configurations and dependencies are properly managed and deleted.
+ playbookName: Function Removal - AWS
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ LambdaFunctionName:
+ simple: ${AWS.Lambda.Functions.FunctionName}
+ LambdaLayerName:
+ complex:
+ root: AWS.Lambda.Layers
+ accessor: LayerArn
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: ':'
+ fields:
+ value:
+ simple: "7"
+ LambdaLayerVersion:
+ complex:
+ root: AWS.Lambda.Layers
+ accessor: LayerArn
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: ':'
+ fields:
+ value:
+ simple: "8"
+ LambdaRoleName:
+ simple: ${AWS.IAM.Roles.RoleName}
+ LambdaRolePolicyARN:
+ simple: ${AWS.IAM.Policies.Arn}
+ SecurityGroupID:
+ simple: ${AWS.EC2.SecurityGroups.GroupId}
+ region:
+ simple: ${inputs.region}
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 790
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "30":
+ id: "30"
+ taskid: 3fe9c288-89fe-433b-8121-127edc79ed96
+ type: regular
+ task:
+ id: 3fe9c288-89fe-433b-8121-127edc79ed96
+ version: -1
+ name: Prepare payload for ClusterResponse function
+ description: Set a value in context under the key you entered.
+ scriptName: Set
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ key:
+ simple: ResponsePayload
+ stringify:
+ simple: "true"
+ value:
+ simple: |-
+ {
+ "cluster_name": "${inputs.ClusterName}",
+ "pod_name": "${PaloAltoNetworksXDR.OriginalAlert.messageData.containers.[0].runtimeInfo.extra.pod_name}",
+ "pod_namespace": "${PaloAltoNetworksXDR.OriginalAlert.messageData.containers.[0].runtimeInfo.extra.pod_namespace}"
+ }
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 310
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "32":
+ id: "32"
+ taskid: 284a508c-1f66-43b0-81de-b73204f3570b
+ type: condition
+ task:
+ id: 284a508c-1f66-43b0-81de-b73204f3570b
+ version: -1
+ name: Was the remediation completed successfully?
+ description: Checks if the remediation (CloudResponse function using AWS Lambda) completed successfully.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "33"
+ "yes":
+ - "12"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: AWS.Lambda.InvokedFunctions.Payload
+ iscontext: true
+ right:
+ value:
+ simple: Workload Scale Updated
+ ignorecase: true
+ - operator: containsGeneral
+ left:
+ value:
+ simple: AWS.Lambda.InvokedFunctions.Payload
+ iscontext: true
+ right:
+ value:
+ simple: Pod Deleted
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 950
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "33":
+ id: "33"
+ taskid: f52a28ce-1249-491d-8075-00dc52126ace
+ type: collection
+ task:
+ id: f52a28ce-1249-491d-8075-00dc52126ace
+ version: -1
+ name: Complete the remediation manually
+ description: The analyst is prompted to acknowledge manual remediation is needed.
+ type: collection
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "12"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 1140
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ simple: ${alert.assigneduser}
+ subject:
+ simple: Action Required - Manual Remediation - Alert ID ${alert.id}
+ body:
+ simple: |-
+ A manual remediation is required for:
+
+ Alert ID: ${alert.id}
+ Alert Name: ${alert.name}
+ Alert Time: ${alert.occurred}
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: Acknowledge the manual remediation required by clicking 'Yes'.
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: singleSelect
+ options: []
+ optionsarg:
+ - simple: "Yes"
+ - simple: "No"
+ fieldassociated: ""
+ placeholder: ""
+ tooltip: ""
+ readonly: false
+ title: Action Required - Manual Remediation for Malicious Pod
+ description: A manual remediation is required for incident ${incident.id}
+ sender: Your SOC team
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: 85a100e3-e025-41f7-8b21-c575c36a47b2
+ type: playbook
+ task:
+ id: 85a100e3-e025-41f7-8b21-c575c36a47b2
+ version: -1
+ name: Function Deployment - AWS
+ description: |+
+ This playbook automates the deployment of an AWS Lambda function to manage resources within an Amazon EKS cluster. It ensures that all necessary configurations are created, updated, and verified.
+
+ **Setup**
+
+ - **Describe EKS Cluster**: Gather essential details of the EKS cluster.
+ - **Create IAM Role**: Set up a new IAM role for the Lambda function.
+ - **Create and Attach Policy**: Define and attach a policy to the IAM role to grant necessary permissions.
+
+ **Authentication Mode Check**
+
+ - **Verify Authentication Mode**: Ensure the current authentication mode allows API access.
+ - **If not**: Update the cluster authentication mode to permit API access.
+
+ **Access Entry Configuration**
+
+ - **Create Access Entry**: Establish a new access entry in the EKS cluster.
+ - **Associate Access Policy**: Link the access policy with the created access entry.
+ - **Update Access Entry**: Apply the latest configurations to the access entry.
+
+ **VPC and Security Group Setup**
+
+ - **Describe VPCs**: Identify the appropriate VPC for the Lambda function.
+ - **Create Security Group**: Define a security group to manage Lambda function traffic.
+ - **Set Ingress Rules**: Configure ingress rules for the security group.
+
+ **VPC Endpoint Creation**
+
+ - **Create VPC Endpoint for eks-auth**: Establish a VPC endpoint for EKS authentication.
+ - **Check for Errors**: Verify if there are any errors during the creation of the VPC endpoint.
+ - **If errors**: Handle and log them.
+ - **Verify VPC Endpoint Existence**: Ensure the VPC endpoint already exists.
+ - **If exists**: Proceed with the next steps.
+
+ **Lambda Function Deployment**
+
+ - **Download Kubernetes Library**: Fetch the necessary Kubernetes library.
+ - **Publish AWS Lambda Layer**: Publish a new layer version for the AWS Lambda function.
+ - **Create Lambda Code**: Develop the Lambda function code.
+ - **Zip Lambda Code**: Compress the Lambda function code for deployment.
+ - **Create AWS Lambda Function**: Deploy the Lambda function using the zipped code.
+
+ **Conclusion**
+
+ - **Final Verification**: Ensure all operations have been successfully completed.
+ - **Completion**: Confirm the deployment process is finished, ensuring robust management of EKS authentication through AWS Lambda.
+
+
+
+ This playbook provides a comprehensive, automated approach to deploying an AWS Lambda function for managing resources within an EKS cluster, efficiently handling all configurations and potential errors.
+
+ playbookName: Function Deployment - AWS
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "30"
+ scriptarguments:
+ ClusterName:
+ simple: ${inputs.ClusterName}
+ FunctionArchitecture:
+ simple: x86_64
+ FunctionCode:
+ simple: "import base64\nimport re\nimport boto3\nfrom botocore.signers import RequestSigner\nfrom kubernetes import client, config\n\n# Initialize EKS client\neks_client = boto3.client('eks')\ncore_v1_api = None\napps_v1_api = None\n\n# Initialize STS client\nSTS_TOKEN_EXPIRES_IN = 60\nsession = boto3.session.Session()\nsts = session.client('sts')\nservice_id = sts.meta.service_model.service_id\n\n\ndef get_cluster_info(cluster_name):\n \"Retrieve cluster endpoint and certificate\"\n cluster_response = eks_client.describe_cluster(name=cluster_name)\n cluster_endpoint = cluster_response['cluster']['endpoint']\n cluster_ca_data = cluster_response['cluster']['certificateAuthority']['data']\n cluster_info = {\n \"endpoint\" : cluster_endpoint,\n \"ca\" : cluster_ca_data\n }\n return cluster_info\n \ndef get_bearer_token(cluster_name):\n \"Create authentication token\"\n signer = RequestSigner(\n service_id,\n session.region_name,\n 'sts',\n \"v4\",\n session.get_credentials(),\n session.events\n )\n \n params = {\n 'method': 'GET',\n 'url': 'https://sts.{}.amazonaws.com/?Action=GetCallerIdentity&Version=2011-06-15'.format(session.region_name),\n 'body': {},\n 'headers': {\n 'x-k8s-aws-id': cluster_name\n },\n 'context': {}\n }\n \n signed_url = signer.generate_presigned_url(\n params,\n region_name=session.region_name,\n expires_in=STS_TOKEN_EXPIRES_IN,\n operation_name=''\n )\n \n base64_url = base64.urlsafe_b64encode(signed_url.encode('utf-8')).decode('utf-8')\n \n # remove any base64 encoding padding:\n return 'k8s-aws-v1.' + re.sub(r'=*', '', base64_url)\n\ndef get_workload_info(pod_name, pod_namespace):\n global core_v1_api\n workload_type = None\n workload_name = None\n pod_details = core_v1_api.read_namespaced_pod(pod_name, pod_namespace)\n \n if isinstance(pod_details.metadata.owner_references, list):\n for owner in pod_details.metadata.owner_references:\n if owner.controller == True:\n workload_type = owner.kind\n workload_name = owner.name\n # Is replicaset?\n if workload_type == 'ReplicaSet':\n replicaset = apps_v1_api.read_namespaced_replica_set(name=workload_name, namespace=pod_namespace)\n # Is deployment?\n if isinstance(replicaset.metadata.owner_references, list):\n for owner in replicaset.metadata.owner_references:\n if owner.controller == True:\n workload_type = owner.kind\n workload_name = owner.name\n \n return workload_type, workload_name\n\ndef delete_pod(pod_namespace, pod_name):\n global core_v1_api\n core_v1_api.delete_namespaced_pod(pod_name, pod_namespace)\n return\n\ndef delete_workload(workload_type, workload_name, workload_namespace):\n global apps_v1_api\n match workload_type:\n case \"ReplicaSet\":\n replicaset = apps_v1_api.read_namespaced_replica_set(workload_name, workload_namespace)\n body = {\"spec\":{\"replicas\":0}}\n apps_v1_api.patch_namespaced_replica_set_scale(workload_name, workload_namespace, body)\n return \"ReplicaSet Scale Updated\"\n case \"Deployment\":\n deployment = apps_v1_api.read_namespaced_deployment(workload_name, workload_namespace)\n body = {\"spec\":{\"replicas\":0}}\n apps_v1_api.patch_namespaced_deployment_scale(workload_name, workload_namespace, body)\n return \"Deployment Scale Updated\"\n case \"StatefulSet\":\n statefulset = apps_v1_api.read_namespaced_stateful_set(workload_name, workload_namespace)\n body = {\"spec\":{\"replicas\":0}}\n apps_v1_api.patch_namespaced_stateful_set_scale(workload_name, workload_namespace, body)\n return \"StatefulSet Scale Updated\"\n case \"DaemonSet\":\n daemonset = 
apps_v1_api.read_namespaced_daemon_set(workload_name, workload_namespace)\n body = {\"spec\":{\"template\":{\"spec\":{\"nodeSelector\":{\"non-existing\":\"true\"}}}}}\n apps_v1_api.patch_namespaced_daemon_set(workload_name, workload_namespace, body)\n return \"DaemonSet Scale Updated\"\n case _:\n return \"Could Not Identify Pod Type\"\n\ndef lambda_handler(event, context):\n \n cluster_name = event['cluster_name']\n pod_name = event['pod_name']\n pod_namespace = event['pod_namespace']\n \n # Kubeconfig Configuration\n cluster_info = get_cluster_info(cluster_name)\n token = get_bearer_token(cluster_name)\n kubeconfig = {\n 'apiVersion': 'v1',\n 'clusters': [{\n 'name': 'cluster1',\n \
+ \ 'cluster': {\n 'certificate-authority-data': cluster_info[\"ca\"],\n 'server': cluster_info[\"endpoint\"]\n }\n }],\n 'contexts': [{'name': 'context1', 'context': {'cluster': 'cluster1', \"user\": \"user1\"}}],\n 'current-context': 'context1',\n 'kind': 'config',\n 'preferences': {},\n 'users': [{'name': 'user1', \"user\" : {'token': token}}]\n }\n config.load_kube_config_from_dict(config_dict=kubeconfig)\n \n # Kubernetes Client Configuration\n global core_v1_api, apps_v1_api\n core_v1_api = client.CoreV1Api()\n apps_v1_api = client.AppsV1Api()\n \n # Extract Workload Information\n workload_type, workload_name = get_workload_info(pod_name, pod_namespace)\n \n # Delete Pods\n if workload_type is None or workload_name is None:\n delete_pod(pod_namespace, pod_name)\n return \"Pod Deleted\"\n else: \n print(\"Workload Type: \" + workload_type)\n print(\"Workload Name: \" + workload_name)\n delete_workload(workload_type, workload_name, pod_namespace)\n return \"Workload Scale Updated\"\n \n return"
+ FunctionHandler:
+ simple: lambda_function.lambda_handler
+ FunctionName:
+ simple: ClusterResponse
+ FunctionRuntime:
+ simple: python3.12
+ LambdaPolicyName:
+ simple: LambdaHandlerPolicy
+ LambdaRoleName:
+ simple: LambdaHandlerRole
+ LayerArchitecture:
+ simple: x86_64
+ LayerName:
+ simple: kubernetes
+ LayerRuntime:
+ simple: python3.12
+ LibraryName:
+ simple: kubernetes
+ SecurityGroupName:
+ simple: LambdaHandlerSG
+ TrustPolicyDocument:
+ simple: |-
+ {
+ "Version": "2012-10-17",
+ "Statement": {
+ "Effect": "Allow",
+ "Principal": {
+ "Service": "lambda.amazonaws.com"
+ },
+ "Action": "sts:AssumeRole"
+ }
+ }
+ region:
+ simple: ${inputs.region}
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 150
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "35":
+ id: "35"
+ taskid: 2f0a7656-b5e5-42b8-8ec4-bb778acf5957
+ type: playbook
+ task:
+ id: 2f0a7656-b5e5-42b8-8ec4-bb778acf5957
+ version: -1
+ name: GenericPolling
+ description: |-
+ Use this playbook as a sub-playbook to block execution of the master playbook until a remote action is complete.
+ This playbook implements polling by continuously running the command in Step \#2 until the operation completes.
+ The remote action should have the following structure:
+
+ 1. Initiate the operation.
+ 2. Poll to check if the operation completed.
+ 3. (optional) Get the results of the operation.
+ playbookName: GenericPolling
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ Ids:
+ simple: ClusterResponse
+ Interval:
+ simple: "1"
+ PollingCommandArgName:
+ simple: functionName
+ PollingCommandName:
+ simple: aws-lambda-get-function
+ Timeout:
+ simple: "10"
+ dt:
+ simple: AWS.Lambda.Functions.Configuration(val.State!=='Active').RevisionId
+ separatecontext: true
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": 470
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "36":
+ id: "36"
+ taskid: 5e7e9144-ac89-4c1e-8214-c50dc8ce6b88
+ type: regular
+ task:
+ id: 5e7e9144-ac89-4c1e-8214-c50dc8ce6b88
+ version: -1
+ name: Get alert extra data
+ description: Returns information about each alert ID.
+ script: '|||xdr-get-cloud-original-alerts'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "34"
+ scriptarguments:
+ alert_ids:
+ simple: ${inputs.AlertID}
+ filter_alert_fields:
+ simple: "false"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 310,
+ "y": -10
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "32_33_#default#": 0.4
+ },
+ "paper": {
+ "dimensions": {
+ "height": 1525,
+ "width": 380,
+ "x": 310,
+ "y": -140
+ }
+ }
+ }
+inputs:
+- key: ClusterName
+ value: {}
+ required: false
+ description: The name of the cluster.
+ playbookInputQuery:
+- key: region
+ value: {}
+ required: false
+ description: The resource region.
+ playbookInputQuery:
+- key: AlertID
+ value: {}
+ required: false
+ description: The alert ID.
+ playbookInputQuery:
+inputSections:
+- inputs:
+ - ClusterName
+ - region
+ - AlertID
+ name: General (Inputs group)
+ description: Generic group for inputs
+outputSections:
+- outputs: []
+ name: General (Outputs group)
+ description: Generic group for outputs
+outputs: []
+tests:
+- No tests (auto formatted)
+marketplaces: ["xsoar"]
+fromversion: 6.10.0
diff --git a/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent_README.md b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent_README.md
new file mode 100644
index 000000000000..1f6377dc0ed9
--- /dev/null
+++ b/Packs/CloudIncidentResponse/Playbooks/playbook-Cortex_XDR_-_Malicious_Pod_Response_-_Agent_README.md
@@ -0,0 +1,85 @@
+This playbook ensures a swift and effective response to malicious activities within Kubernetes environments, leveraging cloud-native tools to maintain cluster security and integrity.
+
+The playbook is designed to handle agent-generated alerts due to malicious activities within Kubernetes (K8S) pods, such as mining activities, which require immediate action. The playbook also addresses scenarios where the malicious pod is killed, but the malicious K8S workload repeatedly creates new pods.
+
+### Key Features:
+
+AWS Function Integration: The playbook utilizes an AWS Lambda function that can manage resources and facilitate rapid response actions within an Amazon EKS cluster, without the need for third-party tools such as kubectl.
+
+The Lambda function can initiate the following response actions:
+
+ - Pod Termination: The playbook includes steps to safely terminate the affected pod within the K8S environment.
+ - Workload Suspension: If necessary, the playbook can be escalated to suspend the entire workload associated with the mining activity.
+
+Once the Lambda function execution is completed, the playbook deletes all of the created objects to prevent undesirable usage.
+
+### Workflow:
+
+1. Alert Detection: The playbook begins with the monitoring agent detecting a mining alert within a Kubernetes pod.
+2. Alert Validation: Validates the alert to ensure it is not a false positive.
+3. Response Decision:
+ - Pod Termination: If the mining activity is isolated to a single pod, the AWS Lambda function is invoked to terminate the affected pod within the K8S environment.
+ - Workload Suspension: If the mining activity is widespread or poses a significant threat, the AWS Lambda function suspends the entire workload within the K8S environment (a minimal invocation sketch follows this list).
+4. Cleanup: This action initiates the complete removal of all objects created for the Lambda execution for security and hardening purposes.
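+
+For reference, the remediation step boils down to a synchronous invocation of the deployed `ClusterResponse` Lambda function with the cluster and pod details taken from the alert. Below is a minimal, hypothetical boto3 sketch of what the playbook's ***aws-lambda-invoke*** step effectively does; the payload keys and the returned status strings mirror the playbook's "Initiate Pod Deletion" task and its success condition, while the cluster, pod, namespace, and region values are placeholders.
+
+```python
+import json
+
+import boto3  # assumed available; the playbook performs this call via the aws-lambda-invoke command
+
+# Placeholder values - in the playbook these come from the alert extra data and the playbook inputs.
+payload = {
+    "cluster_name": "my-eks-cluster",
+    "pod_name": "cryptominer-pod-1234",
+    "pod_namespace": "default",
+}
+
+client = boto3.client("lambda", region_name="us-east-1")
+response = client.invoke(
+    FunctionName="ClusterResponse",
+    InvocationType="RequestResponse",  # synchronous, so the result can be inspected
+    Payload=json.dumps(payload),
+)
+
+# The function returns "Pod Deleted" for a standalone pod, or "Workload Scale Updated"
+# when the owning workload (Deployment/ReplicaSet/StatefulSet/DaemonSet) was scaled down.
+print(json.loads(response["Payload"].read()))
+```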
+
+### Required Integration
+
+#### AWS IAM (Identity and Access Management)
+- [AWS IAM API Documentation](https://docs.aws.amazon.com/IAM/latest/APIReference/Welcome.html)
+- [Cortex XSOAR AWS IAM Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSIAM/)
+
+#### AWS EC2 (Elastic Compute Cloud)
+- [AWS EC2 API Documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Welcome.html)
+- [Cortex XSOAR AWS EC2 Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSEC2/)
+
+#### AWS EKS (Elastic Kubernetes Service)
+- [AWS EKS API Documentation](https://docs.aws.amazon.com/eks/latest/APIReference/Welcome.html)
+- [Cortex XSOAR AWS EKS Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSEKS/)
+
+#### AWS Lambda
+- [AWS Lambda API Documentation](https://docs.aws.amazon.com/lambda/latest/dg/API_Reference.html)
+- [Cortex XSOAR AWS Lambda Integration](https://cortex.marketplace.pan.dev/marketplace/details/AWSLambda/).
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+* GenericPolling
+* Function Deployment - AWS
+* Function Removal - AWS
+
+### Integrations
+
+This playbook does not use any integrations.
+
+### Scripts
+
+* Set
+
+### Commands
+
+* aws-lambda-invoke
+* xdr-get-cloud-original-alerts
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| ClusterName | The name of the cluster. | | Optional |
+| region | The resource region. | | Optional |
+| AlertID | The alert ID. | | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex XDR - Malicious Pod Response - Agent](../doc_files/Cortex_XDR_-_Malicious_Pod_Response_-_Agent.png)
diff --git a/Packs/CloudIncidentResponse/ReleaseNotes/1_0_17.md b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_17.md
new file mode 100644
index 000000000000..3b710c26b73a
--- /dev/null
+++ b/Packs/CloudIncidentResponse/ReleaseNotes/1_0_17.md
@@ -0,0 +1,8 @@
+
+#### Playbooks
+
+##### New: Cortex XDR - Malicious Pod Response - Agent
+
+- New: This playbook ensures a swift and effective response to malicious activities within Kubernetes environments, leveraging cloud-native tools to maintain cluster security and integrity.
+
+<~XSOAR> (Available from Cortex XSOAR 6.10.0).</~XSOAR>
diff --git a/Packs/CloudIncidentResponse/doc_files/Cortex_XDR_-_Malicious_Pod_Response_-_Agent.png b/Packs/CloudIncidentResponse/doc_files/Cortex_XDR_-_Malicious_Pod_Response_-_Agent.png
new file mode 100644
index 000000000000..a385ca623be1
Binary files /dev/null and b/Packs/CloudIncidentResponse/doc_files/Cortex_XDR_-_Malicious_Pod_Response_-_Agent.png differ
diff --git a/Packs/CloudIncidentResponse/pack_metadata.json b/Packs/CloudIncidentResponse/pack_metadata.json
index 158bf363e2a6..5a0d2a824c54 100644
--- a/Packs/CloudIncidentResponse/pack_metadata.json
+++ b/Packs/CloudIncidentResponse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cloud Incident Response",
"description": "This content Pack helps you automate collection, investigation, and remediation of incidents related to cloud infrastructure activities in AWS, Azure, and GCP.",
"support": "xsoar",
- "currentVersion": "1.0.16",
+ "currentVersion": "1.0.17",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CloudShare/Integrations/CloudShare/CloudShare.yml b/Packs/CloudShare/Integrations/CloudShare/CloudShare.yml
index 0498145fd59b..673916eacfe4 100644
--- a/Packs/CloudShare/Integrations/CloudShare/CloudShare.yml
+++ b/Packs/CloudShare/Integrations/CloudShare/CloudShare.yml
@@ -39,17 +39,17 @@ script:
- name: owned
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns only environments owned by the user.
- defaultValue: "false"
+ defaultValue: 'false'
- name: visible
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns all environments visible to the user.
- defaultValue: "false"
+ defaultValue: 'false'
- name: ownerEmail
description: Optional. Filters results by the environment owner's email address, where {ownerEmail} is the environment owner's email address.
- name: classId
@@ -57,10 +57,10 @@ script:
- name: brief
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: 'Optional. Whether to return a less detailed or more detailed response. {brief_value} can be: true (default) - Returns less detail. false - Returns more detail.'
- defaultValue: "true"
+ defaultValue: 'true'
outputs:
- contextPath: CloudShare.Environments.projectId
description: Project ID
@@ -99,23 +99,23 @@ script:
auto: PREDEFINED
predefined:
- tru
- - "false"
+ - 'false'
description: Returns only projects in which the user is a project manager.
- defaultValue: "false"
+ defaultValue: 'false'
- name: WhereUserIsProjectMember
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns only projects in which the user is a project member.
- defaultValue: "false"
+ defaultValue: 'false'
- name: WhereUserCanCreateClass
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns only projects in which the user is allowed to create a class. The minimum user level allowed to create classes is set per project and can be changed by project manager users.
- defaultValue: "false"
+ defaultValue: 'false'
outputs:
- contextPath: CloudShare.Projects.name
description: Name
@@ -184,7 +184,7 @@ script:
- contextPath: CloudShare.Projects.isActive
description: Is active
type: boolean
- description: Retrieves a specified project’s details
+ description: "Retrieves a specified project's details"
- name: cloudshare-get-project-policies
arguments:
- name: projectId
@@ -214,10 +214,10 @@ script:
- name: defaultSnapshot
auto: PREDEFINED
predefined:
- - "true"
- - "false"
- description: ' If set to true – get the default snapshot for every blueprint. The returned JSON will contain a property ‘CreateFromVersions’, which is an array of one element – the default snapshot. If unspecified, default is false (don’t return the default snapshot).'
- defaultValue: "false"
+ - 'true'
+ - 'false'
+ description: " If set to true - get the default snapshot for every blueprint. The returned JSON will contain a property 'CreateFromVersions', which is an array of one element - the default snapshot. If unspecified, default is false (don't return the default snapshot)."
+ defaultValue: 'false'
outputs:
- contextPath: CloudShare.Projects.Blueprints.id
description: ID
@@ -895,7 +895,7 @@ script:
arguments:
- name: PolicyId
required: true
- description: The ID of the policy to apply to the class. The policy will control the runtime and storage time of each student’s environment and what will happen to the environment when it is inactive.
+ description: "The ID of the policy to apply to the class. The policy will control the runtime and storage time of each student's environment and what will happen to the environment when it is inactive."
- name: BlueprintId
required: true
description: The ID of the blueprint to base the class on. Students who attend the class will be served environments based on this blueprint.
@@ -919,19 +919,19 @@ script:
- name: UseCustomInvitationEmail
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Whether or not the student invitation email is customized.
- defaultValue: "false"
+ defaultValue: 'false'
- name: LimitEarlyAccess
required: true
auto: PREDEFINED
predefined:
- - "0"
- - "1"
- - "2"
+ - '0'
+ - '1'
+ - '2'
description: 'This option controls access by students and instructor to a lab before scheduled class time. Possible values: 0 - Allow lab access before class (default). 1 - Allow lab access before class for instructor only. 2 - No early access allowed.'
- defaultValue: "0"
+ defaultValue: '0'
- name: CustomInvitationEmailSubject
description: The subject line of the custom student invitation email. Used if UseCustomInvitationEmail is set to true.
- name: CustomInvitationEmailBody
@@ -939,10 +939,10 @@ script:
- name: PermitAccessToNonRegisteredStudent
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Whether to permit users to self register as students for the class.
- defaultValue: "false"
+ defaultValue: 'false'
- name: MaxStudents
description: 'Numeric. The maximum number of students allowed in the class (can be null). Maximum value: 60'
- name: address
@@ -952,33 +952,33 @@ script:
- name: enableSteps
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Enables the class to have multiple steps.
- defaultValue: "false"
+ defaultValue: 'false'
- name: StudentsCanSwitchStep
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Whether students can change steps independently. Applies when enableSteps is set to true.
- defaultValue: "false"
+ defaultValue: 'false'
- name: steps
description: A list of class steps (JSON dictionaries) for a multi-step class. Must be provided if enableSteps is set to true.
- name: selfPaced
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Creates a Self-Paced Class that allows a student to enter class at a time convenient to them.
- defaultValue: "false"
+ defaultValue: 'false'
- name: allowMultipleStudentLogin
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: When set to true, allows for more than one environment activation per student.
- defaultValue: "false"
+ defaultValue: 'false'
outputs:
- contextPath: CloudShare.Classes.id
description: ID
@@ -1028,18 +1028,18 @@ script:
- name: useCustomInvitationEmail
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Whether or not the student invitation email is customized.
- defaultValue: "false"
+ defaultValue: 'false'
- name: limitEarlyAccess
auto: PREDEFINED
predefined:
- - "0"
- - "1"
- - "2"
+ - '0'
+ - '1'
+ - '2'
description: 'This option controls access by students and instructor to a lab before scheduled class time. Possible values: 0 - Allow lab access before class (default). 1 - Allow lab access before class for instructor only. 2 - No early access allowed.'
- defaultValue: "0"
+ defaultValue: '0'
- name: customInvitationEmailSubject
description: The subject line of the custom student invitation email. Used if UseCustomInvitationEmail is set to true.
- name: customInvitationEmailBody
@@ -1047,10 +1047,10 @@ script:
- name: permitAccessToNonRegisteredStudent
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Whether to permit users to self register as students for the class.
- defaultValue: "false"
+ defaultValue: 'false'
- name: maxStudents
description: 'Numeric. The maximum number of students allowed in the class (can be null). Maximum value: 60'
- name: address
@@ -1060,17 +1060,17 @@ script:
- name: selfPaced
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: Creates a Self-Paced Class that allows a student to enter class at a time convenient to them.
- defaultValue: "false"
+ defaultValue: 'false'
- name: allowMultipleStudentLogin
auto: PREDEFINED
predefined:
- - "false"
- - "true"
+ - 'false'
+ - 'true'
description: When set to true, allows for more than one environment activation per student.
- defaultValue: "false"
+ defaultValue: 'false'
- name: classId
required: true
description: The ID of the class.
@@ -1226,10 +1226,10 @@ script:
- name: isFull
auto: PREDEFINED
predefined:
- - "false"
- - "true"
- description: Whether to return the details of the VMs in each student’s environment as well as other details.
- defaultValue: "false"
+ - 'false'
+ - 'true'
+ description: "Whether to return the details of the VMs in each student's environment as well as other details."
+ defaultValue: 'false'
outputs:
- contextPath: CloudShare.Students.status
description: Status
@@ -1301,7 +1301,7 @@ script:
- contextPath: CloudShare.Students.VMs.name
description: Name
type: string
- description: Retrieves information about a student in a class, including the student’s environment and VMs
+ description: "Retrieves information about a student in a class, including the student's environment and VMs"
- name: cloudshare-delete-student
arguments:
- name: classId
@@ -1318,13 +1318,13 @@ script:
description: The ID of the class.
- name: email
required: true
- description: The student’s email address
+ description: "The student's email address"
- name: firstName
required: true
- description: The student’s first name
+ description: "The student's first name"
- name: lastName
required: true
- description: The student’s last name
+ description: "The student's last name"
outputs:
- contextPath: CloudShare.Students.id
description: ID
@@ -1339,12 +1339,12 @@ script:
required: true
description: The ID of the student.
- name: email
- description: The student’s email address. This can be changed as long as the student did not yet log in.
+ description: "The student's email address. This can be changed as long as the student did not yet log in."
- name: firstName
- description: The student’s first name.
+ description: "The student's first name."
- name: lastName
- description: The student’s last name.
- description: Modifies a student’s registration details
+ description: "The student's last name."
+ description: "Modifies a student's registration details"
- name: cloudshare-get-regions
arguments: []
outputs:
@@ -1669,7 +1669,7 @@ script:
description: Specifies the start of the time range, where {starttime_value} is the start of the time range in the format ISO 8601. For example, "2017-01-01"
- name: endtime
required: true
- description: Specifies the end of the time range, where {endtime_value} is the end of the time range in the format ISO 8601. For example, “2017-02-01“.
+ description: "Specifies the end of the time range, where {endtime_value} is the end of the time range in the format ISO 8601. For example, '2017-02-01'."
- name: skip
description: 'Optional. Specifies to skip the first {skip_value} records, where {skip_value} is an integer (default: 0). Can be used iteratively in conjunction with take to view distinct sets of environment records.'
- name: take
@@ -1701,40 +1701,40 @@ script:
arguments:
- name: envId
required: true
- description: Specifies the environment, where {envId} is the environment’s ID.
+ description: "Specifies the environment, where {envId} is the environment's ID."
description: Extends the lifetime of an environment
- name: cloudshare-postpone-env-suspend
arguments:
- name: envId
required: true
- description: Specifies the environment, where {envId} is the environment’s ID.
- description: Postpones an environment’s suspended state Request Path
+ description: "Specifies the environment, where {envId} is the environment's ID."
+ description: "Postpones an environment's suspended state Request Path"
- name: cloudshare-resume-env
arguments:
- name: envId
required: true
- description: Specifies the environment, where {envId} is the environment’s ID.
+ description: "Specifies the environment, where {envId} is the environment's ID."
description: Resumes an environment that was previously suspended, returning it to active running state
- name: cloudshare-revert-env
arguments:
- name: envId
required: true
- description: Specifies the environment, where {envId} is the environment’s ID.
+ description: "Specifies the environment, where {envId} is the environment's ID."
- name: snapshotId
required: true
- description: Specifies the snapshot to which to revert the environment, where {snapshotId} is the snapshot’s ID.
+ description: "Specifies the snapshot to which to revert the environment, where {snapshotId} is the snapshot's ID."
description: Reverts an environment to a specified snapshot
- name: cloudshare-suspend-env
arguments:
- name: envId
required: true
- description: Specifies the environment, where {envId} is the environment’s ID.
+ description: "Specifies the environment, where {envId} is the environment's ID."
description: Suspends an environment
- name: cloudshare-get-env
arguments:
- name: envID
required: true
- description: The environment’s ID.
+ description: "The environment's ID."
- name: permission
auto: PREDEFINED
predefined:
@@ -1770,7 +1770,7 @@ script:
- contextPath: CloudShare.Environments.teamId
description: Team ID
type: string
- description: Retrieves properties of an environment and enables verification of the requesting user’s permissions to the environment
+ description: "Retrieves properties of an environment and enables verification of the requesting user's permissions to the environment"
- name: cloudshare-delete-env
arguments:
- name: envID
@@ -1993,10 +1993,10 @@ script:
arguments:
- name: vmId
required: true
- description: "The ID of the VM"
+ description: The ID of the VM
- name: path
required: true
- description: "Path in VM"
+ description: Path in VM
outputs:
- contextPath: CloudShare.VM.Execute.executionId
description: Execution ID
@@ -2026,7 +2026,7 @@ script:
- contextPath: CloudShare.VM.Modify.conflicts
description: Conflicts
type: string
- description: Adjusts a VM’s CPU count, disk size, and RAM
+ description: "Adjusts a VM's CPU count, disk size, and RAM"
- name: cloudshare-reboot-vm
arguments:
- name: VmID
@@ -2078,7 +2078,7 @@ script:
- contextPath: CloudShare.Folders.projectFolders.usagePercentage
description: Usage percentage
type: string
- description: Retrieves the user’s cloud folder and the user’s projects’ project folder(s)
+ description: "Retrieves the user's cloud folder and the user's projects' project folder(s)"
- name: cloudshare-get-env-cloud-folders
arguments:
- name: EnvId
@@ -2100,7 +2100,7 @@ script:
- contextPath: CloudShare.EnvFolders.token
description: Token
type: string
- description: Shows the cloud folder on all of the environment’s machines
+ description: "Shows the cloud folder on all of the environment's machines"
- name: cloudshare-generate-cloud-folder-password
arguments: []
outputs:
@@ -2110,20 +2110,20 @@ script:
- contextPath: CloudShare.FoldersPassword.newFtpUri
description: New FTP URI
type: string
- description: Generates a new FTP password for accessing the user’s cloud folders.
+ description: "Generates a new FTP password for accessing the user's cloud folders."
- name: cloudshare-unmount-env-folders
arguments:
- name: EnvId
required: true
description: The ID of the environment.
- description: Hides the cloud folder on all of the environment’s machines
+ description: "Hides the cloud folder on all of the environment's machines"
- name: cloudshare-get-templates
arguments:
- name: templateType
auto: PREDEFINED
predefined:
- - "0"
- - "1"
+ - '0'
+ - '1'
description: 'Filters the results by type of template. Possible values: 0 - Returns blueprints only. 1 - Returns VM templates only.'
- name: projectId
description: Filters the results to include only blueprints that belong to a specific project.
@@ -2404,7 +2404,7 @@ script:
type: number
- contextPath: CloudShare.Snapshots.regions
description: Regions
- description: Retrieves all snapshots contained in a specified environment’s blueprint. A blueprint can contain up to five snapshots, with newer snapshots displacing the oldest snapshots in the blueprint.
+ description: "Retrieves all snapshots contained in a specified environment's blueprint. A blueprint can contain up to five snapshots, with newer snapshots displacing the oldest snapshots in the blueprint."
- name: cloudshare-mark-default-snapshot
arguments:
- name: snapshotID
@@ -2428,10 +2428,10 @@ script:
- name: setAsDefault
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Defaults to true. If true, the new snapshot is marked as the default snapshot in the containing blueprint.
- defaultValue: "true"
+ defaultValue: 'true'
description: Takes a snapshot of an environment
- name: cloudshare-get-teams
arguments: []
@@ -2446,10 +2446,10 @@ script:
- name: cloudshare-invite-user-poc
arguments:
- name: policyId
- description: The ID of the environment policy to assign to the environment created for the end user as part of the POC. The policy will govern the life cycle of the end user’s environment.
+ description: "The ID of the environment policy to assign to the environment created for the end user as part of the POC. The policy will govern the life cycle of the end user's environment."
- name: blueprintId
required: true
- description: The ID of the blueprint based on which the end user’s environment will be created (this is the POC’s blueprint).
+ description: "The ID of the blueprint based on which the end user's environment will be created (this is the POC's blueprint)."
- name: OwningProjectMemberId
required: true
description: The ID of the project member user to whom the end user will be assigned
@@ -2461,22 +2461,22 @@ script:
description: The number of days to keep the invitation valid for
- name: email
required: true
- description: The recipient’s email. The invitation will be sent to the specified email.
+ description: "The recipient's email. The invitation will be sent to the specified email."
- name: firstName
required: true
- description: The recipient’s first name
+ description: "The recipient's first name"
- name: LastName
required: true
- description: The recipient’s last name
+ description: "The recipient's last name"
- name: regionId
- description: The region in which to create the POC’s environment.
+ description: "The region in which to create the POC's environment."
- name: InviteeCanSetEmail
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Indicates whether an end user can set email when accepting the invitation. Default is true.
- defaultValue: "true"
+ defaultValue: 'true'
- name: customEmailSubject
description: The subject of the email. The invitation will be sent with the specified custom email subject. This value will override the custom email subject in the UI.
- name: customEmailBody
@@ -2553,7 +2553,7 @@ script:
description: Blueprint name
type: string
description: Retrieves POC invitations sent.
- dockerimage: demisto/cloudshare:1.0.0.14120
+ dockerimage: demisto/cloudshare:1.0.0.73056
subtype: python3
beta: true
fromversion: 6.0.0
diff --git a/Packs/CloudShare/ReleaseNotes/1_0_3.md b/Packs/CloudShare/ReleaseNotes/1_0_3.md
new file mode 100644
index 000000000000..817687d95525
--- /dev/null
+++ b/Packs/CloudShare/ReleaseNotes/1_0_3.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CloudShare (Beta)
+
+- Updated the Docker image to: *demisto/cloudshare:1.0.0.73056*.
diff --git a/Packs/CloudShare/pack_metadata.json b/Packs/CloudShare/pack_metadata.json
index ff1f3e1dc25a..5149a5d35a23 100644
--- a/Packs/CloudShare/pack_metadata.json
+++ b/Packs/CloudShare/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CloudShare",
"description": "CloudShare integration",
"support": "community",
- "currentVersion": "1.0.2",
+ "currentVersion": "1.0.3",
"author": "Adam Burt",
"url": "",
"email": "",
diff --git a/Packs/Cofense-Intelligence/Integrations/example b/Packs/Cofense-Intelligence/Integrations/CofenseIntelligence/command_examples
similarity index 100%
rename from Packs/Cofense-Intelligence/Integrations/example
rename to Packs/Cofense-Intelligence/Integrations/CofenseIntelligence/command_examples
diff --git a/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md
index 6f9a57b6a790..887561cf25b5 100644
--- a/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md
+++ b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md
@@ -3,4 +3,4 @@
##### Cofense Intelligence v2
-Fixed an issue in `Base` pack (Version `1.33.52`) so now Cofense Intelligence V2 will correctly input email addresses into context under `Accounts.[x].Email` and not under `Email`.
+Fixed an issue in `Base` pack (Version `1.33.52`) so now Cofense Intelligence V2 will correctly input email addresses into context under `Account.Email` and not under `Email`.
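For orientation, the sketch below illustrates the context layout change described in this release note; the key names follow the note, while the address value is a placeholder, not data from the pack.

```python
# Hypothetical example of the context layout referenced in the release note.
# Before the fix: the address landed under a top-level "Email" key.
context_before = {"Email": "analyst@example.com"}

# After the fix: the address is nested under the Account object,
# so it is reachable at the Account.Email context path.
context_after = {"Account": {"Email": "analyst@example.com"}}
```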
diff --git a/Packs/CofenseTriage/Integrations/CofenseTriagev2/CofenseTriagev2.yml b/Packs/CofenseTriage/Integrations/CofenseTriagev2/CofenseTriagev2.yml
index 32c3042a789d..487a3acd967c 100644
--- a/Packs/CofenseTriage/Integrations/CofenseTriagev2/CofenseTriagev2.yml
+++ b/Packs/CofenseTriage/Integrations/CofenseTriagev2/CofenseTriagev2.yml
@@ -9,17 +9,17 @@ description: Use the Cofense Triage integration to ingest reported phishing indi
configuration:
- display: Server URL (e.g., https://192.168.0.1)
name: host
- defaultvalue: ""
+ defaultvalue: ''
type: 0
required: true
- display: User
name: user
- defaultvalue: ""
+ defaultvalue: ''
type: 0
required: true
- display: API Token
name: token
- defaultvalue: ""
+ defaultvalue: ''
type: 4
required: true
- display: Fetch incidents
@@ -30,7 +30,7 @@ configuration:
name: incidentType
type: 13
required: false
-- display: "Mailbox Location"
+- display: Mailbox Location
name: mailbox_location
defaultvalue: Processed_Reports
type: 15
@@ -46,27 +46,27 @@ configuration:
- display: Category ID to fetch
name: category_id
additionalinfo: Corresponds to category IDs defined in Triage. Eg, "1".
- defaultvalue: ""
+ defaultvalue: ''
type: 0
required: false
- display: Match Priority - the highest match priority based on rule hits for the report
name: match_priority
- defaultvalue: ""
+ defaultvalue: ''
type: 0
required: false
- display: 'Tags - CSV list of tags of processed reports by which to filter '
name: tags
- defaultvalue: ""
+ defaultvalue: ''
type: 0
required: false
- display: Maximum number of incidents to fetch each time
name: max_fetch
- defaultvalue: "30"
+ defaultvalue: '30'
type: 0
required: false
- display: Trust any certificate (not secure)
name: insecure
- defaultvalue: "false"
+ defaultvalue: 'false'
type: 8
required: false
- display: Use system proxy settings
@@ -97,12 +97,12 @@ script:
- name: max_matches
default: true
description: Maximum number of matches to fetch. Default is 30.
- defaultValue: "30"
+ defaultValue: '30'
- name: verbose
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns all fields of a report.
outputs:
- contextPath: Cofense.Report.ID
@@ -152,7 +152,7 @@ script:
type: number
- contextPath: Cofense.Report.Sha256
description: SHA256 hash of the file.
- description: 'Runs a query for reports.'
+ description: Runs a query for reports.
- name: cofense-search-inbox-reports
arguments:
- name: file_hash
@@ -172,12 +172,12 @@ script:
- name: max_matches
default: true
description: Maximum number of matches to fetch. Default is 30.
- defaultValue: "30"
+ defaultValue: '30'
- name: verbose
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Returns all fields of a report.
outputs:
- contextPath: Cofense.Report.ID
@@ -227,7 +227,7 @@ script:
type: number
- contextPath: Cofense.Report.Sha256
description: SHA256 hash of the file.
- description: 'Runs a query for reports from the `inbox` mailbox.'
+ description: Runs a query for reports from the `inbox` mailbox.
- name: cofense-get-attachment
arguments:
- name: attachment_id
@@ -352,11 +352,11 @@ script:
- name: set_white_bg
auto: PREDEFINED
predefined:
- - "True"
- - "False"
+ - 'True'
+ - 'False'
description: Change background to white.
- defaultValue: "False"
- description: 'Retrieves a report by the report ID number and displays as PNG.'
+ defaultValue: 'False'
+ description: Retrieves a report by the report ID number and displays as PNG.
- name: cofense-get-threat-indicators
arguments:
- name: type
@@ -377,9 +377,9 @@ script:
- Suspicious
- Benign
- name: start_date
- description: "designated start date tagged by analyst (format example: YYYY-MM-DD+HH:MM:SS). Default: 6 days ago."
+ description: 'designated start date tagged by analyst (format example: YYYY-MM-DD+HH:MM:SS). Default: 6 days ago.'
- name: end_date
- description: "designated end date from assignment (format example: YYYY-MM-DD+HH:MM:SS). Default: current date."
+ description: 'designated end date from assignment (format example: YYYY-MM-DD+HH:MM:SS). Default: current date.'
outputs:
- contextPath: Cofense.ThreatIndicators
description: Threat indicator output.
@@ -402,7 +402,7 @@ script:
description: Value of the threat indicator.
type: string
description: Threat Indicators that are designated by analysts as malicious, suspicious or benign.
- dockerimage: demisto/chromium:124.0.6367.93838
+ dockerimage: demisto/chromium:126.0.6478.102778
isfetch: true
tests:
- No test - partner integration (test pb exists - Cofense Triage Test)
diff --git a/Packs/CofenseTriage/ReleaseNotes/2_1_26.md b/Packs/CofenseTriage/ReleaseNotes/2_1_26.md
new file mode 100644
index 000000000000..295f9cf9359e
--- /dev/null
+++ b/Packs/CofenseTriage/ReleaseNotes/2_1_26.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cofense Triage v2
+
+- Updated the Docker image to: *demisto/chromium:126.0.6478.102778*.
diff --git a/Packs/CofenseTriage/pack_metadata.json b/Packs/CofenseTriage/pack_metadata.json
index 8c6776058999..e0d2ea873525 100644
--- a/Packs/CofenseTriage/pack_metadata.json
+++ b/Packs/CofenseTriage/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cofense Triage",
"description": "Cofense Triage allows users to fetch reports by using the fetch incidents capability. It also provides commands to get entities like reporters, rules, categories, and more.",
"support": "partner",
- "currentVersion": "2.1.25",
+ "currentVersion": "2.1.26",
"author": "Cofense",
"url": "https://cofense.com/contact-support/",
"email": "support@cofense.com",
diff --git a/Packs/CommonDashboards/.pack-ignore b/Packs/CommonDashboards/.pack-ignore
index fbc100121225..c207b3e3b658 100644
--- a/Packs/CommonDashboards/.pack-ignore
+++ b/Packs/CommonDashboards/.pack-ignore
@@ -6,3 +6,6 @@ ignore=BA101
[file:dashboard-SLA.json]
ignore=BA101
+
+[file:1_7_1.md]
+ignore=RN113,RN114
diff --git a/Packs/CommonDashboards/.secrets-ignore b/Packs/CommonDashboards/.secrets-ignore
index e69de29bb2d1..7ed50cb2e6ed 100644
--- a/Packs/CommonDashboards/.secrets-ignore
+++ b/Packs/CommonDashboards/.secrets-ignore
@@ -0,0 +1,2 @@
+https://cortex.marketplace.pan.dev/marketplace/
+https://cortex.marketplace.pan.dev
diff --git a/Packs/CommonDashboards/Dashboards/XSOAR_Automation_Insights.json b/Packs/CommonDashboards/Dashboards/XSOAR_Automation_Insights.json
index e8120bf8d8ff..bc5a5c28f0b6 100644
--- a/Packs/CommonDashboards/Dashboards/XSOAR_Automation_Insights.json
+++ b/Packs/CommonDashboards/Dashboards/XSOAR_Automation_Insights.json
@@ -834,6 +834,9 @@
},
"type": "above"
},
+ "keys": [
+ "sum|droppedCount"
+ ],
"valuesFormat": "abbreviated"
},
"category": ""
@@ -972,7 +975,7 @@
"prevName": "Use Case Adoption Coverage",
"dataType": "scripts",
"widgetType": "text",
- "query": "UseCaseAdoptionMetrics",
+ "query": "AdoptionMetrics",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
diff --git a/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json b/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
index 9894442ff791..f973c1c07cb7 100644
--- a/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
+++ b/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
@@ -14,101 +14,6 @@
"fromDateLicense": "0001-01-01T00:00:00Z",
"name": "API Execution Metrics",
"layout": [
- {
- "id": "e7377a00-e12b-11ee-8377-d592a71a5531",
- "forceRange": false,
- "x": 8,
- "y": 0,
- "i": "e7377a00-e12b-11ee-8377-d592a71a5531",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c06",
- "version": 3,
- "cacheVersn": 0,
- "modified": "2024-03-13T11:21:27.174372329Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for FortiSandbox2",
- "prevName": "API Call Results for FortiSandbox2",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and name:FortiSandboxv2",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
{
"id": "870b0d20-e0df-11ec-a7f6-39e12e3bcb7a",
"forceRange": false,
@@ -136,11 +41,11 @@
"commitMessage": "",
"shouldCommit": false,
"Cache": null,
- "name": "Successful vs. Rate Limited API Calls by Integration",
+ "name": "API Call Status by Integration",
"prevName": "Successful vs. Rate Limited API Calls by Integration",
"dataType": "metrics",
"widgetType": "bar",
- "query": "type:integration and metricType:",
+ "query": "type:integration and apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType: GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
@@ -184,6 +89,113 @@
]
],
"name": "Success"
+ },
+ "Auth Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "AuthError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Auth Error"
+ },
+ "Connection Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ConnectionError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Connection Error"
+ },
+ "General Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "GeneralError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "General Error"
+ },
+ "Other": {
+ "name": "Other"
+ },
+ "Proxy Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ProxyError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Proxy Error"
+ },
+ "Retry Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "RetryError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Retry Error"
+ },
+ "SSL Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "SSLError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "SSL Error"
+ },
+ "Service Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ServiceError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Service Error"
+ },
+ "Timeout Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "TimeoutError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Timeout Error"
}
}
],
@@ -194,34 +206,39 @@
"keys": [
"sum|totalAPICalls"
],
+ "valuesFormat": "abbreviated",
+ "hideLegend": false,
"legend": [
{
- "color": "#01A2EC",
- "name": "urlscan.io"
+ "color": "#F50057",
+ "name": "General Error"
},
{
"color": "#229D80",
- "name": "VirusTotal (API v3)"
+ "name": "Success"
},
{
- "color": "#F50057",
- "name": "Rasterize"
+ "color": "#FD5BDE",
+ "name": "Quota Error"
},
{
- "color": "#ce5050",
- "name": "Quota Error"
+ "color": "#FFC4C6",
+ "name": "Service Error"
},
{
- "color": "#4fa327",
- "name": "Success"
+ "color": "#FF8411",
+ "name": "Auth Error"
+ },
+ {
+ "color": "#01A2EC",
+ "name": "Other"
}
],
- "limitType": "top",
- "valuesFormat": "abbreviated"
+ "limitType": "top"
},
"category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
+ "sizeInBytes": 0,
+ "created": "0001-01-01T00:00:00Z"
},
"reflectDimensions": true
},
@@ -275,29 +292,42 @@
"keys": [
"sum|totalAPICalls"
],
- "valuesFormat": "abbreviated"
+ "valuesFormat": "abbreviated",
+ "colors": {
+ "isEnabled": false,
+ "items": {
+ "#1DB846": {
+ "value": 3
+ },
+ "#D13C3C": {
+ "value": 0
+ },
+ "#EF9700": {
+ "value": -1
+ }
+ },
+ "type": "above"
+ }
},
"category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
+ "sizeInBytes": 0,
+ "created": "0001-01-01T00:00:00Z"
},
"reflectDimensions": true
},
{
- "id": "7f0bd220-e0e0-11ec-a7f6-39e12e3bcb7a",
+ "id": "4603f290-e0e1-11ec-a7f6-39e12e3bcb7a",
"forceRange": false,
- "x": 8,
- "y": 3,
- "i": "7f0bd220-e0e0-11ec-a7f6-39e12e3bcb7a",
+ "x": 4,
+ "y": 0,
+ "i": "4603f290-e0e1-11ec-a7f6-39e12e3bcb7a",
"w": 4,
"h": 3,
"widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c07",
- "version": 4,
+ "id": "error-count-per-command-per-incident-type",
+ "version": 2,
"cacheVersn": 0,
- "sequenceNumber": 409396,
- "primaryTerm": 2,
- "modified": "2022-04-27T15:34:53.64268093Z",
+ "modified": "2022-05-31T12:57:46.243309208Z",
"packID": "",
"packName": "",
"itemVersion": "",
@@ -309,29 +339,33 @@
"definitionId": "",
"vcShouldIgnore": false,
"vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
+ "commitMessage": "Widget imported",
+ "shouldCommit": true,
"Cache": null,
- "name": "API Call Results for VirusTotal",
- "prevName": "API Call Results for VirusTotal",
+ "name": "API Execution Metrics For Enrichment Commands",
+ "prevName": "API Execution Metrics For Enrichment Commands",
"dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"VirusTotal (API v3)_instance_1\" or \"VirusTotal (API v3)_instance_1_copy\"",
+ "widgetType": "column",
+ "query": "type:integration and command:domain or command:url or command:ip or command:file",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
"toDate": "0001-01-01T00:00:00Z",
"period": {
"by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 0,
"field": ""
},
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
+ "keys": [
+ "sum|totalAPICalls"
+ ],
+ "valuesFormat": "regular",
"customGroupBy": [
null,
{
@@ -377,37 +411,67 @@
}
],
"groupBy": [
- "modified(h)",
+ "command",
"apiResponseType"
],
- "keys": [
- "sum|totalAPICalls"
- ],
"referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
+ "showGraphValues": true,
+ "tableColumns": [
+ {
+ "isDefault": true,
+ "key": "count",
+ "position": 0
+ },
+ {
+ "isDefault": true,
+ "key": "data",
+ "position": 1
+ },
+ {
+ "isDefault": true,
+ "key": "floatData",
+ "position": 2
+ },
+ {
+ "isDefault": true,
+ "key": "groups",
+ "position": 3
+ },
+ {
+ "isDefault": true,
+ "key": "name",
+ "position": 4
+ },
+ {
+ "isDefault": true,
+ "key": "color",
+ "position": 5
+ }
+ ],
+ "xAxisLabel": "Enrichment Command Name ",
+ "yAxisLabel": "Error count"
},
"category": "",
+ "sizeInBytes": 0,
"created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
+ "size": 5,
+ "description": "Errors by Incident Type per Command (top 5)"
},
"reflectDimensions": true
},
{
- "id": "4603f290-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "id": "5b389a70-21b8-11ef-86d3-29ed4c950ba8",
"forceRange": false,
- "x": 4,
+ "x": 8,
"y": 0,
- "i": "4603f290-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "i": "5b389a70-21b8-11ef-86d3-29ed4c950ba8",
"w": 4,
"h": 3,
"widget": {
- "id": "error-count-per-command-per-incident-type",
+ "id": "b42d7e6f-fe25-4963-8dc2-c2ca9cae8f1e",
"version": 2,
"cacheVersn": 0,
- "modified": "2022-05-31T12:57:46.243309208Z",
+ "modified": "2024-06-03T14:48:31.630201802Z",
"packID": "",
"packName": "",
"itemVersion": "",
@@ -419,17 +483,15 @@
"definitionId": "",
"vcShouldIgnore": false,
"vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "Widget imported",
- "shouldCommit": true,
- "size": 5,
+ "commitMessage": "",
+ "shouldCommit": false,
"Cache": null,
- "name": "API Execution Metrics For Enrichment Commands",
- "prevName": "API Execution Metrics For Enrichment Commands",
+ "name": "API Call Results for Email Category",
+ "prevName": "Email",
"dataType": "metrics",
- "widgetType": "column",
- "query": "type:integration and command:domain or command:url or command:ip or command:file",
+ "widgetType": "line",
+ "query": "type:integration and (apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType: GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError) and category:\"Email\"",
"isPredefined": false,
- "description": "Errors by Incident Type per Command (top 5)",
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
"toDate": "0001-01-01T00:00:00Z",
@@ -438,122 +500,41 @@
"byTo": "",
"byFrom": "days",
"toValue": null,
- "fromValue": 0,
+ "fromValue": 7,
"field": ""
},
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
"groupBy": [
- "command",
- "apiResponseType"
+ "modified(d)",
+ "name"
],
"keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "showGraphValues": true,
- "tableColumns": [
- {
- "isDefault": true,
- "key": "count",
- "position": 0
- },
- {
- "isDefault": true,
- "key": "data",
- "position": 1
- },
- {
- "isDefault": true,
- "key": "floatData",
- "position": 2
- },
- {
- "isDefault": true,
- "key": "groups",
- "position": 3
- },
- {
- "isDefault": true,
- "key": "name",
- "position": 4
- },
- {
- "isDefault": true,
- "key": "color",
- "position": 5
- }
+ "sum|executionCount"
],
- "valuesFormat": "regular",
- "xAxisLabel": "Enrichment Command name ",
- "yAxisLabel": "Error count"
+ "timeFrame": "days",
+ "valuesFormat": "abbreviated"
},
"category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
+ "sizeInBytes": 0,
+ "created": "2024-06-03T14:48:11.173825958Z"
},
"reflectDimensions": true
},
{
- "id": "63de01c0-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "id": "e6c4a200-21b8-11ef-86d3-29ed4c950ba8",
"forceRange": false,
"x": 8,
- "y": 6,
- "i": "63de01c0-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "y": 3,
+ "i": "e6c4a200-21b8-11ef-86d3-29ed4c950ba8",
"w": 4,
- "h": 3,
+ "h": 5,
"widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c07",
- "version": 5,
+ "id": "f99ff2ec-6e20-4b5e-8d59-664d6f80b3c0",
+ "version": 1,
"cacheVersn": 0,
- "sequenceNumber": 409396,
- "primaryTerm": 2,
- "modified": "2022-05-31T12:58:36.448784342Z",
+ "modified": "2024-06-03T14:52:28.706525616Z",
"packID": "",
"packName": "",
"itemVersion": "",
@@ -565,14 +546,14 @@
"definitionId": "",
"vcShouldIgnore": false,
"vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "Widget imported",
- "shouldCommit": true,
+ "commitMessage": "",
+ "shouldCommit": false,
"Cache": null,
- "name": "API Call Results for UrlScan",
- "prevName": "API Call Results for UrlScan",
+ "name": "API Call Metrics",
+ "prevName": "API Metrics",
"dataType": "metrics",
"widgetType": "line",
- "query": "type:integration and instance:\"urlscan.io_instance_1\"",
+ "query": "type:integration and (apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType: GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError) and -category:Email",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
@@ -582,903 +563,27 @@
"byTo": "",
"byFrom": "days",
"toValue": null,
- "fromValue": 0,
+ "fromValue": 7,
"field": ""
},
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Retry Timeout": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "RetryTimeout",
- "type": "string"
- }
- ]
- ],
- "name": "Retry Timeout"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
"groupBy": [
- "modified(h)",
- "apiResponseType"
+ "modified(d)",
+ "name"
],
"keys": [
"sum|totalAPICalls"
],
- "referenceLine": {
- "type": "max"
- },
- "timeFrame": "hours",
+ "showGraphValues": true,
"valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
+ "hideLegend": false,
+ "timeFrame": "days"
},
"category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "id": "8caf2770-583a-11ed-b0d6-31717adfe334",
- "forceRange": false,
- "x": 8,
- "y": 9,
- "i": "8caf2770-583a-11ed-b0d6-31717adfe334",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Microsoft Defender for Endpoint",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:01:15.237569307Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Microsoft Defender for Endpoint",
- "prevName": "API Call Results for Microsoft Defender for Endpoint",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"Microsoft Defender Advanced Threat Protection_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "id": "8d4f5f10-583a-11ed-b0d6-31717adfe334",
- "forceRange": false,
- "x": 4,
- "y": 8,
- "i": "8d4f5f10-583a-11ed-b0d6-31717adfe334",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Microsoft Graph Mail",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:05:03.146716149Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Microsoft Graph Mail",
- "prevName": "API Call Results for Microsoft Graph Mail",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"MicrosoftGraphMail_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "id": "e3006ae0-23cc-11ee-bc30-99ab8f4422d3",
- "forceRange": false,
- "x": 8,
- "y": 12,
- "i": "e3006ae0-23cc-11ee-bc30-99ab8f4422d3",
- "w": 4,
- "h": 2,
- "widget": {
- "id": "c63390c0-0c5e-4906-8b44-a748e6a639ee",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2023-07-16T11:10:20.740319Z",
- "packID": "JoeSecurity",
- "packName": "Joe Security",
- "itemVersion": "1.1.9",
- "fromServerVersion": "6.5.0",
- "toServerVersion": "",
- "propagationLabels": [],
- "packPropagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Joe Security",
- "prevName": "API Call Results for Joe Security",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"JoeSecurityV2_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "",
- "byFrom": "days",
- "toValue": null,
- "fromValue": 0,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "Auth Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "AuthError",
- "type": "string"
- }
- ]
- ],
- "name": "Auth Error"
- },
- "Connection Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "ConnectionError",
- "type": "string"
- }
- ]
- ],
- "name": "Connection Error"
- },
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(d)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {
- "type": "max"
- },
- "showOthers": false,
- "timeFrame": "days",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "forceRange": false,
- "h": 3,
- "i": "0a993750-ef32-11ed-a72f-dd3156f45ab2",
- "id": "0a993750-ef32-11ed-a72f-dd3156f45ab2",
- "reflectDimensions": true,
- "w": 4,
- "widget": {
- "Cache": null,
- "cacheVersn": 0,
- "category": "",
- "commitMessage": "",
- "dataType": "metrics",
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "fromDateLicense": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byFrom": "days",
- "byTo": "",
- "field": "",
- "fromValue": 0,
- "toValue": null
- },
- "toDate": "0001-01-01T00:00:00Z"
- },
- "definitionId": "",
- "fromServerVersion": "6.5.0",
- "id": "c63390c0-0c5e-4906-8b44-a748e6a639ea",
- "isPredefined": true,
- "itemVersion": "3.0.0",
- "modified": "2023-05-10T12:54:41.634649071Z",
- "name": "API Call Results for Rapid7 Threat Command",
- "packID": "IntSight",
- "packName": "Rapid7 - Threat Command (IntSights)",
- "packPropagationLabels": [
- "all"
- ],
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(d)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {
- "type": "max"
- },
- "showOthers": false,
- "timeFrame": "days",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "prevName": "API Call Results for Rapid7 Threat Command",
- "propagationLabels": [],
- "query": "type:integration and instance:\"rapid7_threat_command_instance_1\" or \"rapid7_threat_command_instance_1_copy\"",
- "shouldCommit": false,
- "toServerVersion": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "version": 3,
- "widgetType": "line",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "x": 0,
- "y": 8
- },
- {
- "id": "9e8acc20-8d3a-11ee-a736-df621984533b",
- "forceRange": false,
- "x": 4,
- "y": 11,
- "i": "9e8acc20-8d3a-11ee-a736-df621984533b",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Email Hippo",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:01:15.237569307Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Email Hippo",
- "prevName": "API Call Results for Email Hippo",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"Email Hippo_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- " Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": " Success"
- },
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "id": "c4d4f820-b9fd-11ee-9dd7-37fe5ed8a2cc",
- "forceRange": false,
- "x": 0,
- "y": 11,
- "i": "c4d4f820-b9fd-11ee-9dd7-37fe5ed8a2cc",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "0821903b-1099-4f3d-8c30-12345d8c5c07",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2024-01-14T16:57:28.451017133Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Autofocus",
- "prevName": "API Call Results for Autofocus",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and name:\"AutoFocus V2\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": "",
- "created": "0001-01-01T00:00:00Z",
- "sizeInBytes": 0
- },
- "reflectDimensions": true
- },
- {
- "id": "adcecad0-17a8-11ef-b89d-b728009392bd",
- "forceRange": false,
- "x": 0,
- "y": 14,
- "i": "adcecad0-17a8-11ef-b89d-b728009392bd",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "eff19897-4635-44c4-889c-f106373be355",
- "version": 5,
- "cacheVersn": 0,
- "modified": "2024-05-21T19:31:05.883188536Z",
- "created": "2024-05-21T10:22:20.546514077Z",
"sizeInBytes": 0,
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results For Gmail Single User",
- "prevName": "API Call Results For Gmail Single User",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and name:\"Gmail Single User\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "hours",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "ConnectionError": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "ConnectionError",
- "type": "string"
- }
- ]
- ],
- "name": "ConnectionError"
- },
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
+ "created": "2024-06-03T14:52:28.705817022Z"
},
"reflectDimensions": true
}
@@ -1486,4 +591,4 @@
"fromVersion": "6.8.0",
"description": "",
"isPredefined": true
-}
+}
\ No newline at end of file
diff --git a/Packs/CommonDashboards/Dashboards/dashboard-My_Threat_Landscape.json b/Packs/CommonDashboards/Dashboards/dashboard-My_Threat_Landscape.json
index c488a75741fa..c131a45d0fd8 100644
--- a/Packs/CommonDashboards/Dashboards/dashboard-My_Threat_Landscape.json
+++ b/Packs/CommonDashboards/Dashboards/dashboard-My_Threat_Landscape.json
@@ -121,7 +121,7 @@
"propagationLabels": [
"all"
],
- "query": "type:File and (verdict:Malicious or verdict:Suspicious) and incident.id:*",
+ "query": "type:File and (verdict:Malicious or verdict:Suspicious) and investigationIDs:*",
"shouldCommit": false,
"size": 10,
"sort": [
@@ -204,7 +204,7 @@
"propagationLabels": [
"all"
],
- "query": "type:\"Attack Pattern\" and (verdict:Malicious or verdict:Suspicious) and incident.id:*",
+ "query": "type:\"Attack Pattern\" and (verdict:Malicious or verdict:Suspicious) and investigationIDs:*",
"shouldCommit": false,
"size": 10,
"sort": [
@@ -268,7 +268,7 @@
"propagationLabels": [
"all"
],
- "query": "(verdict:Malicious or verdict:Suspicious) and incident.id:*",
+ "query": "(verdict:Malicious or verdict:Suspicious) and investigationIDs:*",
"shouldCommit": false,
"toServerVersion": "",
"vcShouldIgnore": false,
@@ -350,7 +350,7 @@
"propagationLabels": [
"all"
],
- "query": "incident.id:* and expirationStatus:active and (verdict:Malicious or verdict:Suspicious or verdict:Unknown)",
+ "query": "investigationIDs:* and expirationStatus:active and (verdict:Malicious or verdict:Suspicious or verdict:Unknown)",
"shouldCommit": false,
"size": 10,
"sort": [
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_6_0.md b/Packs/CommonDashboards/ReleaseNotes/1_6_0.md
new file mode 100644
index 000000000000..0cd1cd65b3fe
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_6_0.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### API Execution Metrics
+
+Added error types to the metrics in the **API Call Status By Integration** widget.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_6_1.md b/Packs/CommonDashboards/ReleaseNotes/1_6_1.md
new file mode 100644
index 000000000000..539e793bb5ce
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_6_1.md
@@ -0,0 +1,3 @@
+## Common Dashboards
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_0.md b/Packs/CommonDashboards/ReleaseNotes/1_7_0.md
new file mode 100644
index 000000000000..42c65a0d8807
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_0.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### API Execution Metrics
+
+Replaced integration-specific widgets showing API metrics with the **API Call Metrics** and **API Call Results for Email Category** widgets.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_1.md b/Packs/CommonDashboards/ReleaseNotes/1_7_1.md
new file mode 100644
index 000000000000..c15e4eea0feb
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_1.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### New: UseCaseAdoptionMetrics
+
+- The dashboard provides a high-level overview of Cortex XSOAR usage. It contains data points and metrics on enriched and observed threat intelligence, playbook and automation executions, and data sources being ingested into Cortex XSOAR.
+- Updated the Docker image to: *demisto/python3:3.10.14.97374*.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_2.md b/Packs/CommonDashboards/ReleaseNotes/1_7_2.md
new file mode 100644
index 000000000000..5ee4f4dc385f
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_2.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### My Threat Landscape
+
+- Updated the indicators query in the widgets to improve performance.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_3.md b/Packs/CommonDashboards/ReleaseNotes/1_7_3.md
new file mode 100644
index 000000000000..5ee4f4dc385f
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_3.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### My Threat Landscape
+
+- Updated the indicators query in the widgets to improve performance.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_4.md b/Packs/CommonDashboards/ReleaseNotes/1_7_4.md
new file mode 100644
index 000000000000..cdb139608d4f
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_4.md
@@ -0,0 +1,12 @@
+
+#### Dashboards
+
+##### XSOAR Automation Insights
+
+- Updated the "Adoption Metrics" widgets' script.
+
+#### Scripts
+
+##### AdoptionMetrics
+
+- Fixed the return argument.
diff --git a/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.py b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.py
new file mode 100644
index 000000000000..e42f9d22564f
--- /dev/null
+++ b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.py
@@ -0,0 +1,119 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+def check_phishing_incidents() -> bool | DemistoException:
+ """
+ Checks for phishing incidents.
+
+ Returns:
+ bool: True if phishing incidents are found, False otherwise.
+ """
+ try:
+ res = demisto.executeCommand("getIncidents", {"type": "Phishing", "size": 1})
+ return bool(res and res[0].get("Contents", {}).get("data"))
+ except DemistoException as e:
+ return DemistoException(str(e))
+
+
+def is_rapid_breach_response_installed() -> bool | DemistoException:
+ """
+ Checks if Rapid Breach Response is installed.
+
+ Returns:
+ bool: True if Rapid Breach Response is installed, False otherwise.
+ """
+ try:
+ res = demisto.executeCommand("core-api-get", {"uri": "/contentpacks/metadata/installed"})
+ if res:
+ installed_packs = res[0].get("Contents", {}).get("response")
+ return any(pack["name"] == "Rapid Breach Response" for pack in installed_packs)
+ return False
+ except DemistoException as e:
+ return DemistoException(str(e))
+
+
+def get_use_cases() -> Dict[str, Any]:
+ """
+ Retrieves use cases data based on modules in Demisto.
+
+ Returns:
+ dict: A dictionary containing use cases in production and at risk.
+ """
+ use_cases_in_production = set()
+ at_risk = []
+
+ phishing_incidents = check_phishing_incidents()
+ use_case_dict = {
+ 'Ransomware & Malware Coverage': 'https://cortex.marketplace.pan.dev/marketplace/?useCase=Malware',
+ 'Business Email Compromise Coverage': 'https://cortex.marketplace.pan.dev/marketplace/?useCase=Phishing',
+ 'Network Security': 'https://xsoar.pan.dev/marketplace/?category=Network%20Security',
+ 'Analytics & SIEM': 'https://cortex.marketplace.pan.dev/marketplace/?category=Analytics+%26+SIEM',
+ 'Data Enrichment & Threat Intelligence':
+ 'https://cortex.marketplace.pan.dev/marketplace/?category=Data+Enrichment+%26+Threat+Intelligence',
+ 'Vulnerability Management': 'https://xsoar.pan.dev/marketplace/?category=Vulnerability%20Management',
+ 'Case Management': 'https://cortex.marketplace.pan.dev/marketplace/?category=Case+Management',
+ 'Rapid Breach Response': 'https://cortex.marketplace.pan.dev/marketplace/details/MajorBreachesInvestigationandResponse/'
+ }
+
+ catagories = {
+ 'network security': 'Network Security',
+ 'analytics & siem': 'Analytics & SIEM',
+ 'data enrichment & threat intelligence': 'Data Enrichment & Threat Intelligence',
+ 'vulnerability management': 'Vulnerability Management',
+ 'case management': 'Case Management',
+ 'forensic & malware analysis': 'Ransomware & Malware Coverage',
+ 'endpoint': 'Ransomware & Malware Coverage'
+ }
+
+ for _, details in demisto.getModules().items():
+ category = details.get('category', '').lower()
+ brand = details.get('brand', '').lower()
+ state = details.get('state')
+ incident_types = details.get('incident_types', [])
+
+ if brand != 'builtin' and state == 'active' and category != 'utilities':
+ if category in ['email', 'messaging', 'messaging and conferencing'] and 'phishing' in incident_types:
+ if phishing_incidents:
+ use_cases_in_production.add('Business Email Compromise Coverage')
+ else:
+ at_risk.append(
+ '[Business Email Compromise Coverage](https://xsoar.pan.dev/marketplace/?category=Email%2C%20Messaging)')
+ elif category in catagories:
+ use_cases_in_production.add(catagories[category])
+
+ if is_rapid_breach_response_installed():
+ use_cases_in_production.add('Rapid Breach Response')
+
+ at_risk_dict = {}
+ for use_case, url in use_case_dict.items():
+ if use_case not in use_cases_in_production:
+ at_risk_dict[use_case] = url
+
+ return {
+ 'use_cases_in_production': use_cases_in_production,
+ 'at_risk': at_risk_dict
+ }
+
+
+def main():
+ use_cases_data = get_use_cases()
+
+ headers = ['Use Case Adoption & Coverage', 'Status']
+ t = []
+ for use_case in use_cases_data['use_cases_in_production']:
+ t.append({'Use Case Adoption & Coverage': use_case, 'Status': '✅'})
+
+ for use_case, _ in use_cases_data['at_risk'].items():
+ t.append({'Use Case Adoption & Coverage': use_case, 'Status': '❌'})
+ table = tableToMarkdown(name='Use Case Coverage', t=t, headers=headers)
+
+ return_results(table)
+ return table
+
+
+''' ENTRY POINT '''
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.yml b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.yml
new file mode 100644
index 000000000000..265f1c3dbcd4
--- /dev/null
+++ b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics.yml
@@ -0,0 +1,25 @@
+commonfields:
+ id: AdoptionMetrics
+ version: -1
+name: AdoptionMetrics
+script: ''
+type: python
+tags:
+- widget
+- dynamic-section
+comment: "The dashboard provide a high level overview on the usage of Cortex XSOAR. It contains data points and metrics on enriched and observed threat intelligence, playbook and automation executions, and data sources being ingested into Cortex XSOAR."
+enabled: true
+engineinfo: {}
+runas: DBotWeakRole
+runonce: false
+scripttarget: 0
+subtype: python3
+dockerimage: demisto/python3:3.10.14.97374
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
+
+
+
+
+
diff --git a/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics_test.py b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics_test.py
new file mode 100644
index 000000000000..5cb00c29ca10
--- /dev/null
+++ b/Packs/CommonDashboards/Scripts/AdoptionMetrics/AdoptionMetrics_test.py
@@ -0,0 +1,119 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+import AdoptionMetrics
+import pytest
+
+
+CASES_CHECK_PHISHING = [
+ ([], False), # incidents_not_exists
+ (["Phishing"], True) # phishing incident found
+]
+
+
+@pytest.mark.parametrize('data, expected_result', CASES_CHECK_PHISHING)
+def test_check_phishing_incidents(mocker, data, expected_result):
+ """
+ Given:
+ case1 = No incidents fetched from Demisto.
+ case2 = Phishing incident found in fetched incidents.
+
+ When:
+ - Checking for phishing incidents.
+
+ Then:
+ - Assert the result returned is as expected.
+ """
+ # Simulate no incidents fetched
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[{"Contents": {"data": data}}]
+ )
+ assert AdoptionMetrics.check_phishing_incidents() == expected_result
+
+
+CASES_RAPID_BREACH_RESPONSE = [
+ ([{"Contents": {"response": []}}], False), # No content packs are installed.
+ ([{"Contents": {"response": [{"name": "Rapid Breach Response"}]}}], True) # content packs are installed.
+]
+
+
+@pytest.mark.parametrize('return_value, expected_result', CASES_RAPID_BREACH_RESPONSE)
+def test_is_rapid_breach_response_installed(mocker, return_value, expected_result):
+ """
+ Given:
+ case1 = No content packs are installed.
+ case2 = content packs are installed.
+
+ When:
+ - Checking if Rapid Breach Response is installed.
+
+ Then:
+ - Assert the result returned is as expected
+ """
+ # Simulate no content packs installed
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=return_value
+ )
+
+ assert AdoptionMetrics.is_rapid_breach_response_installed() == expected_result
+
+
+def test_get_use_cases(mocker):
+ """
+ Given:
+ - Various modules with different categories and states.
+ - No phishing incidents fetched.
+ - Rapid Breach Response not installed.
+ When:
+ - Retrieving use cases data.
+ Then:
+ - Use cases in production and at risk are returned correctly.
+ """
+ from AdoptionMetrics import get_use_cases
+ # Simulate modules with different categories and states
+ mocker.patch.object(demisto, "getModules", return_value={
+ '1': {'category': 'case management', 'brand': 'Brand', 'state': 'active'},
+ '2': {'category': 'Email', 'brand': 'Brand', 'state': 'active', 'incident_types': ['Phishing']},
+ '3': {'category': 'network security', 'brand': 'Brand', 'state': 'active'},
+ '4': {'category': 'vulnerability management', 'brand': 'Brand', 'state': 'at_risk'}
+ })
+ link = 'https://cortex.marketplace.pan.dev/marketplace/'
+ mocker.patch.object(AdoptionMetrics, 'check_phishing_incidents', return_value=False)
+ mocker.patch.object(AdoptionMetrics, 'is_rapid_breach_response_installed', return_value=False)
+ res = get_use_cases()
+ assert res == {'use_cases_in_production': {'Case Management', 'Network Security'},
+ 'at_risk': {'Ransomware & Malware Coverage': f'{link}?useCase=Malware',
+ 'Business Email Compromise Coverage': f'{link}?useCase=Phishing',
+ 'Analytics & SIEM': f'{link}?category=Analytics+%26+SIEM',
+ 'Data Enrichment & Threat Intelligence':
+ f'{link}?category=Data+Enrichment+%26+Threat+Intelligence',
+ 'Vulnerability Management':
+ 'https://xsoar.pan.dev/marketplace/?category=Vulnerability%20Management',
+ 'Rapid Breach Response':
+ f'{link}details/MajorBreachesInvestigationandResponse/'}}
+
+
+def test_main(mocker):
+ """
+ Given:
+ - Use cases data.
+
+ When:
+ - Generating Markdown table.
+
+ Then:
+ - Markdown table is generated correctly.
+ """
+ import AdoptionMetrics
+ use_cases_data = {
+ 'use_cases_in_production': {'Ransomware & Malware Coverage', 'Network Security'},
+ 'at_risk': {'Business Email Compromise Coverage': 'https://xsoar.pan.dev/marketplace/?category=Email%2C%20Messaging'}
+ }
+ mocker.patch.object(AdoptionMetrics, 'get_use_cases', return_value=use_cases_data)
+ res = AdoptionMetrics.main()
+ assert '| Network Security | ✅ |' in res
+ assert 'Business Email Compromise Coverage | ❌ |' in res
+ assert 'Rapid Breach Response' not in res
diff --git a/Packs/CommonDashboards/Scripts/AdoptionMetrics/README.md b/Packs/CommonDashboards/Scripts/AdoptionMetrics/README.md
new file mode 100644
index 000000000000..e375bca1ceda
--- /dev/null
+++ b/Packs/CommonDashboards/Scripts/AdoptionMetrics/README.md
@@ -0,0 +1,20 @@
+This dashboard provides a high-level overview of Cortex XSOAR usage. It contains data points and metrics on enriched and observed threat intelligence, playbook and automation executions, and data sources being ingested into Cortex XSOAR. It is useful for determining automation maturity and overall return on investment.
+
+## Script Data
+
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | widget, dynamic-section |
+
+## Inputs
+
+---
+There are no inputs for this script.
+
+## Outputs
+
+---
+There are no outputs for this script.
diff --git a/Packs/CommonDashboards/pack_metadata.json b/Packs/CommonDashboards/pack_metadata.json
index 254add5811aa..843a63ba6c55 100644
--- a/Packs/CommonDashboards/pack_metadata.json
+++ b/Packs/CommonDashboards/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Dashboards",
"description": "Frequently used dashboards pack.",
"support": "xsoar",
- "currentVersion": "1.5.0",
+ "currentVersion": "1.7.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
index d1111f41fe32..e0d603e83269 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_Account_-_Generic_v2.yml
@@ -1304,6 +1304,7 @@ tasks:
message:
to:
simple: Analyst
+ simple:marketplacev2: Investigator
subject:
simple: Block Sensitive Account - User Verification Form
body:
@@ -1320,7 +1321,6 @@ tasks:
format: html
bcc:
cc:
- simple: Administrator
timings:
retriescount: 2
retriesinterval: 360
@@ -1385,6 +1385,7 @@ tasks:
message:
to:
simple: Analyst
+ simple:marketplacev2: Investigator
subject:
simple: Block Account - User Verification Form
body:
@@ -1399,7 +1400,6 @@ tasks:
format: html
bcc:
cc:
- simple: Administrator
timings:
retriescount: 2
retriesinterval: 360
@@ -2609,11 +2609,11 @@ inputs:
playbookInputQuery:
outputs:
- contextPath: Blocklist.Final
- description: Blocked accounts
+ description: Blocked accounts.
type: unknown
tests:
- No tests (auto formatted)
fromversion: 6.5.0
marketplaces:
- - xsoar
- - marketplacev2
\ No newline at end of file
+- xsoar
+- marketplacev2
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
index 20577bc8d831..86204f448bed 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_IP_-_Generic_v3.yml
@@ -1,7 +1,7 @@
id: Block IP - Generic v3
version: -1
name: Block IP - Generic v3
-description: "This playbook blocks malicious IP addresses using all integrations that are enabled. The direction of the traffic that will be blocked is determined by the XSOAR user (and set by default to outgoing)\nNote the following:\n- some of those integrations require specific parameters to run, which are based on the playbook inputs. Also, certain integrations use FW rules or appended network objects.\n- Note that the appended network objects should be specified in blocking rules inside the system later on. \n\n\nSupported integrations for this playbook [Network security products such as FW/WAF/IPs/etc.]: \n\n* Check Point Firewall\n* Palo Alto Networks PAN-OS\n* Zscaler\n* FortiGate\n* Aria Packet Intelligence\n* Cisco Firepower \n* Cisco Secure Cloud Analytics\n* Cisco ASA\n* Akamai WAF\n* F5 SilverLine\n* ThreatX\n* Signal Sciences WAF\n* Sophos Firewall\n\n"
+description: "This playbook blocks malicious IP addresses using all integrations that are enabled. The direction of the traffic that will be blocked is determined by the XSOAR user (and set by default to outgoing)\nNote the following:\n- some of those integrations require specific parameters to run, which are based on the playbook inputs. Also, certain integrations use FW rules or appended network objects.\n- Note that the appended network objects should be specified in blocking rules inside the system later on. \n\n\nSupported integrations for this playbook [Network security products such as FW/WAF/IPs/etc.]: \n\n* Check Point Firewall\n* Palo Alto Networks PAN-OS\n* Zscaler\n* FortiGate\n* Aria Packet Intelligence\n* Cisco Firepower \n* Cisco Secure Cloud Analytics\n* Cisco ASA\n* Akamai WAF\n* F5 SilverLine\n* ThreatX\n* Signal Sciences WAF\n* Sophos Firewall."
starttaskid: "0"
tasks:
"0":
@@ -1290,7 +1290,8 @@ tasks:
ignoreworker: false
message:
to:
- simple: Analyst,Administrator
+ simple: Analyst
+ simple:marketplacev2: Investigator
subject:
simple: Block IP Playbook - Analyst's Verification
body:
@@ -1307,7 +1308,6 @@ tasks:
format: html
bcc:
cc:
- simple: Administrator
timings:
retriescount: 2
retriesinterval: 360
@@ -3054,7 +3054,8 @@ tasks:
ignoreworker: false
message:
to:
- simple: Analyst,Administrator
+ simple: Analyst
+ simple:marketplacev2: Investigator
subject:
simple: Block IP Playbook - Analyst's Verification
body:
@@ -3069,7 +3070,6 @@ tasks:
format: html
bcc:
cc:
- simple: Administrator
timings:
retriescount: 2
retriesinterval: 360
@@ -3723,7 +3723,7 @@ inputs:
For prisma SASE usage - Specify the scope for a newly created security rule to be applied.
Remember, this input will only be used when there is no input to the CategoryName.
Default: Shared
- playbookInputQuery: null
+ playbookInputQuery:
outputs:
- contextPath: Aria.BlockDestSubnet.Rule
description: The rule name/ID which was created in the system for this playbook.
diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
index f7e487cbcbc0..8697ef00f7fc 100644
--- a/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
+++ b/Packs/CommonPlaybooks/Playbooks/playbook-Block_URL_-_Generic_v2.yml
@@ -13,7 +13,7 @@ description: |-
* Sophos
* Forcepoint
* Checkpoint
- * Netcraft
+ * Netcraft.
starttaskid: "0"
tasks:
"0":
@@ -814,7 +814,8 @@ tasks:
ignoreworker: false
message:
to:
- simple: Administrator,Analyst
+ simple: Analyst
+ simple:marketplacev2: Investigator
subject:
simple: 'User Verification - Block URLs (Inc #${incident.id})'
body:
@@ -1411,7 +1412,7 @@ inputs:
For prisma SASE usage - Specify the scope for a newly created security rule to be applied.
Remember, this input will only be used when there is no input to the CategoryName.
Default: Shared
- playbookInputQuery: null
+ playbookInputQuery:
outputs: []
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md
new file mode 100644
index 000000000000..03b567b57bd5
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md
@@ -0,0 +1,3 @@
+## Common Playbooks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_34.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_34.md
new file mode 100644
index 000000000000..045ae5e02115
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_34.md
@@ -0,0 +1,12 @@
+
+#### Playbooks
+
+##### Block IP - Generic v3
+
+- Fixed an issue in the data collection tasks so that emails are sent to the corresponding roles in Cortex XSIAM (Investigator) and Cortex XSOAR (Analyst).
+##### Block Account - Generic v2
+
+- Fixed an issue in the data collection tasks so that emails are sent to the corresponding roles in Cortex XSIAM (Investigator) and Cortex XSOAR (Analyst).
+##### Block URL - Generic v2
+
+- Fixed an issue in the data collection tasks so that emails are sent to the corresponding roles in Cortex XSIAM (Investigator) and Cortex XSOAR (Analyst).
diff --git a/Packs/CommonPlaybooks/doc_files/Calculate_Severity_3rd-party_integrations.png b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_3rd-party_integrations.png
new file mode 100644
index 000000000000..de851a24586c
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_3rd-party_integrations.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Critical_Assets_v2.png b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Critical_Assets_v2.png
new file mode 100644
index 000000000000..31dfc86c574e
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Critical_Assets_v2.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Indicators_DBotScore.png b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Indicators_DBotScore.png
new file mode 100644
index 000000000000..5197f0c388e2
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Calculate_Severity_Indicators_DBotScore.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/DBot_Indicator_Enrichment_Generic.png b/Packs/CommonPlaybooks/doc_files/DBot_Indicator_Enrichment_Generic.png
new file mode 100644
index 000000000000..abe26ed171c6
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/DBot_Indicator_Enrichment_Generic.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/File_Enrichment_File_reputation.png b/Packs/CommonPlaybooks/doc_files/File_Enrichment_File_reputation.png
new file mode 100644
index 000000000000..3387bae23f39
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/File_Enrichment_File_reputation.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/GenericPolling.png b/Packs/CommonPlaybooks/doc_files/GenericPolling.png
new file mode 100644
index 000000000000..e615e6f9c471
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/GenericPolling.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Isolate_Endpoint_Generic.png b/Packs/CommonPlaybooks/doc_files/Isolate_Endpoint_Generic.png
new file mode 100644
index 000000000000..f59637542a2c
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Isolate_Endpoint_Generic.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports.png b/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports.png
new file mode 100644
index 000000000000..eab94c305a95
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports_Job.png b/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports_Job.png
new file mode 100644
index 000000000000..c8e36cf85506
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Send_Investigation_Summary_Reports_Job.png differ
diff --git a/Packs/CommonPlaybooks/doc_files/Wait_Until_Datetime.png b/Packs/CommonPlaybooks/doc_files/Wait_Until_Datetime.png
new file mode 100644
index 000000000000..adf167bdb133
Binary files /dev/null and b/Packs/CommonPlaybooks/doc_files/Wait_Until_Datetime.png differ
diff --git a/Packs/CommonPlaybooks/pack_metadata.json b/Packs/CommonPlaybooks/pack_metadata.json
index 1cd9f11d9cb6..4087839b7437 100644
--- a/Packs/CommonPlaybooks/pack_metadata.json
+++ b/Packs/CommonPlaybooks/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Playbooks",
"description": "Frequently used playbooks pack.",
"support": "xsoar",
- "currentVersion": "2.6.32",
+ "currentVersion": "2.6.34",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonReports/ReleaseNotes/1_0_9.md b/Packs/CommonReports/ReleaseNotes/1_0_9.md
new file mode 100644
index 000000000000..29e7ddc36447
--- /dev/null
+++ b/Packs/CommonReports/ReleaseNotes/1_0_9.md
@@ -0,0 +1,3 @@
+## Common Reports
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonReports/pack_metadata.json b/Packs/CommonReports/pack_metadata.json
index d0896f5d82ea..c7bd4561a85e 100644
--- a/Packs/CommonReports/pack_metadata.json
+++ b/Packs/CommonReports/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Reports",
"description": "Frequently used reports pack.",
"support": "xsoar",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonScripts/.pack-ignore b/Packs/CommonScripts/.pack-ignore
index 8df93efda629..f316fe4a7910 100644
--- a/Packs/CommonScripts/.pack-ignore
+++ b/Packs/CommonScripts/.pack-ignore
@@ -175,6 +175,8 @@ qr
cv
unescape_url
unescape
+hyperlinks
+Auth
[file:ScheduleGenericPolling.yml]
ignore=BA124
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_10.md b/Packs/CommonScripts/ReleaseNotes/1_15_10.md
new file mode 100644
index 000000000000..6f89d493b29b
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_10.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ExtractHyperlinksFromOfficeFiles
+
+- Added support for parsing grouped shapes in PowerPoint files.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_11.md b/Packs/CommonScripts/ReleaseNotes/1_15_11.md
new file mode 100644
index 000000000000..52586043ba1b
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_11.md
@@ -0,0 +1,6 @@
+#### Scripts
+
+##### ReadPDFFileV2
+
+- Updated the Docker image to: *demisto/readpdf:1.0.0.98214*.
+- Fixed an issue where the script failed to open and extract the PDF's data.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_12.md b/Packs/CommonScripts/ReleaseNotes/1_15_12.md
new file mode 100644
index 000000000000..d5d8ab2971f2
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_12.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### FailedInstances
+
+- Fixed an issue where the script failed when a ServiceNow v2 instance was configured with OAuth flow enabled.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_13.json b/Packs/CommonScripts/ReleaseNotes/1_15_13.json
new file mode 100644
index 000000000000..976d4d67b477
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_13.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "The misnamed, recently-released ***PrintToParentAlert*** script is no longer available. Use ***PrintToParentIncident*** instead."
+}
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_13.md b/Packs/CommonScripts/ReleaseNotes/1_15_13.md
new file mode 100644
index 000000000000..08ab11365a9d
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_13.md
@@ -0,0 +1,6 @@
+#### Scripts
+
+##### PrintToParentIncident
+
+- Fixed an issue where the misnamed, recently-released ***PrintToParentAlert*** script was uploaded. It is no longer available; use ***PrintToParentIncident*** instead.
+- Updated the Docker image to: *demisto/python3:3.10.14.98471*.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_14.md b/Packs/CommonScripts/ReleaseNotes/1_15_14.md
new file mode 100644
index 000000000000..7786e23bdfe8
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_14.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CreateEmailHtmlBody
+
+Fixed an issue where the alerts field was not mapped in Cortex XSIAM.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_15.md b/Packs/CommonScripts/ReleaseNotes/1_15_15.md
new file mode 100644
index 000000000000..a785d8c333d1
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_15.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### DisplayHTMLWithImages
+- Updated the Docker image to: *demisto/python3:3.10.14.98889*.
+
+- Fixed an issue where no default background was set. The default background is now white.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_16.md b/Packs/CommonScripts/ReleaseNotes/1_15_16.md
new file mode 100644
index 000000000000..37776d4645fc
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_16.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GridFieldSetup
+
+Updated the script to include 10 additional inputs.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_17.md b/Packs/CommonScripts/ReleaseNotes/1_15_17.md
new file mode 100644
index 000000000000..5fc887a29acc
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_17.md
@@ -0,0 +1,8 @@
+
+#### Scripts
+
+##### ReadQRCode
+
+- Improved implementation of `stderr` redirection by removing `wurlitzer.pipes()` and redirecting `stderr` to a temporary file.
+- The automation now extracts QR codes exclusively to improve performance.
+- Updated the Docker image to: *demisto/qrcode:1.0.0.98232*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_18.md b/Packs/CommonScripts/ReleaseNotes/1_15_18.md
new file mode 100644
index 000000000000..c3ac61223750
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_18.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ReadQRCode
+
+- Fixed an issue where indicators were not extracted from the output.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_19.md b/Packs/CommonScripts/ReleaseNotes/1_15_19.md
new file mode 100644
index 000000000000..7126e2b6660d
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_19.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### ExtractIndicatorsFromTextFile
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+- Fixed an issue where the outputs were not displayed in the context data.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_20.md b/Packs/CommonScripts/ReleaseNotes/1_15_20.md
new file mode 100644
index 000000000000..a940536e0d64
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_20.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ContentPackInstaller
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_21.md b/Packs/CommonScripts/ReleaseNotes/1_15_21.md
new file mode 100644
index 000000000000..32c607c55ef5
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_21.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### New: DownloadAndArchivePythonLibrary
+
+- New: The script downloads a Python library using PIP, archives it, and returns the file to the war room.
+<~XSOAR> (Available from Cortex XSOAR 6.10.0).</~XSOAR>
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_22.md b/Packs/CommonScripts/ReleaseNotes/1_15_22.md
new file mode 100644
index 000000000000..3a897267b931
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_22.md
@@ -0,0 +1,6 @@
+#### Scripts
+
+##### ParseCSV
+
+- Fixed an issue where passing no `escapechar` would result in failure.
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_23.md b/Packs/CommonScripts/ReleaseNotes/1_15_23.md
new file mode 100644
index 000000000000..903013bf512d
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_23.md
@@ -0,0 +1,8 @@
+
+#### Scripts
+
+##### ExportAuditLogsToFile
+
+- Fixed an issue with Cortex XSOAR 6.x where the command failed because it sent the wrong request.
+- Updated the Docker image to: *demisto/python3:3.10.14.101217*.
+
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_24.md b/Packs/CommonScripts/ReleaseNotes/1_15_24.md
new file mode 100644
index 000000000000..f3efe5611584
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_24.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### SetGridField
+
+- Updated the Docker image to: *demisto/pandas:1.0.0.102566*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_25.md b/Packs/CommonScripts/ReleaseNotes/1_15_25.md
new file mode 100644
index 000000000000..c88e6cd09b73
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_25.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### VerifyIPv6Indicator
+- Updated the IPv6 formatter to remove unneeded prefixed characters.
+- Updated the Docker image to: *demisto/python3:3.10.14.101217*.
+
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_5.md b/Packs/CommonScripts/ReleaseNotes/1_15_5.md
new file mode 100644
index 000000000000..ce28a7b1ec15
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_5.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ExtractHyperlinksFromOfficeFiles
+- Updated the Docker image to: *demisto/office-utils:2.0.0.96781*.
+- Fixed an issue where images with hyperlinks were not extracted properly from docx files.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_6.md b/Packs/CommonScripts/ReleaseNotes/1_15_6.md
new file mode 100644
index 000000000000..2c724edb925f
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_6.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetListRow
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
+- Fixed an issue where new lines at the end of the list could lead to an exception.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_7.md b/Packs/CommonScripts/ReleaseNotes/1_15_7.md
new file mode 100644
index 000000000000..ed91b4927f33
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_7.md
@@ -0,0 +1,3 @@
+## Common Scripts
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_8.md b/Packs/CommonScripts/ReleaseNotes/1_15_8.md
new file mode 100644
index 000000000000..76e70187ac1c
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_8.md
@@ -0,0 +1,7 @@
+#### Scripts
+
+##### New: PrintToAlert
+Prints a value to the specified alert's war-room.
+
+##### New: PrintToParentIncident
+Prints a value to the parent incident's war-room of the current alert.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_9.md b/Packs/CommonScripts/ReleaseNotes/1_15_9.md
new file mode 100644
index 000000000000..674470b19f9f
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_9.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetIndicatorDBotScoreFromCache
+
+- Added support for additional special characters that were not escaped properly.
diff --git a/Packs/CommonScripts/Scripts/ContentPackInstaller/ContentPackInstaller.yml b/Packs/CommonScripts/Scripts/ContentPackInstaller/ContentPackInstaller.yml
index 2f4c2408290a..36d80eac77ee 100644
--- a/Packs/CommonScripts/Scripts/ContentPackInstaller/ContentPackInstaller.yml
+++ b/Packs/CommonScripts/Scripts/ContentPackInstaller/ContentPackInstaller.yml
@@ -38,7 +38,7 @@ tags:
- Content Management
timeout: 600ns
type: python
-dockerimage: demisto/xsoar-tools:1.0.0.83431
+dockerimage: demisto/xsoar-tools:1.0.0.99061
tests:
- ContentPackInstaller_Test
fromversion: 6.0.0
diff --git a/Packs/CommonScripts/Scripts/CreateEmailHtmlBody/CreateEmailHtmlBody.js b/Packs/CommonScripts/Scripts/CreateEmailHtmlBody/CreateEmailHtmlBody.js
index 0993e2a6d17b..a338d88c0b47 100644
--- a/Packs/CommonScripts/Scripts/CreateEmailHtmlBody/CreateEmailHtmlBody.js
+++ b/Packs/CommonScripts/Scripts/CreateEmailHtmlBody/CreateEmailHtmlBody.js
@@ -15,6 +15,8 @@ var getLabel = function(incident,path) {
};
var res = executeCommand("getList", {"listName": args.listTemplate});
+var platform = getDemistoVersion().platform // Could be 'xsoar' (for XSOAR) or 'x2' (for XSIAM).
+var XSIAM = 'x2'
if (res[0].Type == entryTypes.error) {
return res;
@@ -28,11 +30,11 @@ var map = {};
while (found = reg.exec(html)) {
var path = found[1];
- if (path.indexOf('incident.labels.') === 0) {
+ if (path.indexOf('incident.labels.') === 0 && platform !== XSIAM) {
logDebug("Field " + path + " is handled as label.")
map[path] = getLabel(incidents[0], path);
- } else if (path.indexOf('incident.') === 0) {
+ } else if (path.indexOf('incident.') === 0 && platform !== XSIAM) {
map[path] = dq({'incident': incidents[0]}, path);
// check if this path is actually in custom fields (not found directly under incident)
if (map[path] === null) {
@@ -40,6 +42,18 @@ while (found = reg.exec(html)) {
var customFieldPath = path.replace('incident.', 'incident.CustomFields.');
map[path] = dq({'incident': incidents[0]}, customFieldPath);
}
+ } else if (path.indexOf('alert.labels.') === 0 && platform === XSIAM) {
+ logDebug("Field " + path + " is handled as label.")
+ map[path] = getLabel(incidents[0], path);
+
+ } else if (path.indexOf('alert.') === 0 && platform === XSIAM) {
+ map[path] = dq({'alert': incidents[0]}, path);
+ // check if this path is actually in custom fields (not found directly under incident)
+ if (map[path] === null) {
+ logDebug("Field " + path + " is either custom or null. Handling as custom.")
+ var customFieldPath = path.replace('alert.', 'alert.CustomFields.');
+ map[path] = dq({'alert': incidents[0]}, customFieldPath);
+ }
} else if (path.indexOf('object.') === 0) {
logDebug("Field " + path + " is part of object.")
diff --git a/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.py b/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.py
index dcde453225eb..c6760bfe5720 100644
--- a/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.py
+++ b/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.py
@@ -58,6 +58,8 @@ def main(args):
attachments = incident.get('attachment', {})
files = demisto.context().get('File', [])
+ html_body = f'<div style="background-color:white;">{html_body}</div>'
+
if 'src="cid' in html_body:
entry_id_list = get_entry_id_list(attachments, files)
html_body = create_html_with_images(html_body, entry_id_list)
diff --git a/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.yml b/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.yml
index ad5db96cff05..ace519e5a127 100644
--- a/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.yml
+++ b/Packs/CommonScripts/Scripts/DisplayHTMLWithImages/DisplayHTMLWithImages.yml
@@ -11,6 +11,6 @@ tags:
system: true
scripttarget: 0
fromversion: 6.5.0
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.98889
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.py b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.py
new file mode 100644
index 000000000000..bfecd7b56b8d
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.py
@@ -0,0 +1,54 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+from pathlib import Path
+import shlex
+import subprocess
+from tempfile import mkdtemp
+import zipfile
+
+
+def installLibrary(dir_path: Path, library_name: str) -> str:
+ # Install the package using pip
+ demisto.debug(f"Running in {dir_path=}, {library_name=}")
+ cmd = f'python3 -m pip install --target {dir_path} {library_name}'
+ process = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = process.communicate()
+ demisto.debug(f"returned code: {process.returncode}")
+ if process.returncode != 0:
+ raise DemistoException(f"Failed to install the {library_name} library: {stderr.decode('utf-8')}")
+
+ # Create a zip file with maximum compression level
+ zip_path = dir_path / (library_name + '.zip')
+ with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zip_file:
+ for root, _dirs, files in os.walk(dir_path):
+ for file in files:
+ demisto.debug(f"{root=}, {file=}")
+ file_path = Path(root) / file
+ # Ensure the folder inside the ZIP file is named 'python'
+ arcname = Path('python') / file_path.relative_to(dir_path)
+ zip_file.write(file_path, arcname=arcname)
+ demisto.debug("Updated file")
+
+ # Read the zip file contents
+ with open(zip_path, 'rb') as zip_file:
+ zip_content = zip_file.read()
+
+ return fileResult(library_name + '.zip', zip_content)
+
+
+def main():
+ args = demisto.args()
+ library_name = args.get('library_name')
+ try:
+ dir_path = Path(mkdtemp(prefix='python'))
+ demisto.debug("Starting installing library.")
+ result = installLibrary(dir_path, library_name)
+ return_results(result)
+
+ except Exception as e:
+ return_error(f"An error occurred: {str(e)}")
+
+
+if __name__ in ('__builtin__', 'builtins'):
+ main()
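For reference, a quick way to see what the script above produces: every installed file is written into the archive under a top-level `python/` folder (that is what the `arcname` logic does). The sketch below is not part of the pack, and the archive name `requests.zip` is only a hypothetical example.

```python
# Minimal sketch (not part of the pack): list the members of the archive returned by
# DownloadAndArchivePythonLibrary to confirm the top-level "python/" folder layout.
import zipfile


def list_archive_members(zip_path: str) -> list:
    """Return the member names of the zip produced by installLibrary."""
    with zipfile.ZipFile(zip_path) as archive:
        return archive.namelist()


# Example with a hypothetical archive downloaded from the War Room:
# list_archive_members("requests.zip") -> ['python/requests/__init__.py', ...]
```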
diff --git a/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.yml b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.yml
new file mode 100644
index 000000000000..abc598d3608e
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary.yml
@@ -0,0 +1,23 @@
+commonfields:
+ id: DownloadAndArchivePythonLibrary
+ version: -1
+name: DownloadAndArchivePythonLibrary
+script: ''
+type: python
+tags:
+- Utility
+- file
+comment: The script downloads a Python library using PIP, archives it, and returns the file to the war room.
+enabled: true
+args:
+- name: library_name
+ required: true
+ description: The Python library you wish to download and archive.
+scripttarget: 0
+subtype: python3
+runonce: false
+dockerimage: demisto/py3-tools:1.0.0.100861
+runas: DBotWeakRole
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary_test.py b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary_test.py
new file mode 100644
index 000000000000..d27791b3f840
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/DownloadAndArchivePythonLibrary_test.py
@@ -0,0 +1,116 @@
+import unittest
+from unittest.mock import patch, MagicMock, mock_open
+from pathlib import Path
+import zipfile
+import subprocess
+
+# Import the functions from the script
+from DownloadAndArchivePythonLibrary import installLibrary, main
+
+
+class TestInstallLibrary(unittest.TestCase):
+ @patch('DownloadAndArchivePythonLibrary.subprocess.Popen')
+ @patch('DownloadAndArchivePythonLibrary.zipfile.ZipFile')
+ @patch('DownloadAndArchivePythonLibrary.os.walk')
+ @patch('DownloadAndArchivePythonLibrary.mkdtemp')
+ @patch('DownloadAndArchivePythonLibrary.open', new_callable=mock_open, read_data=b'test data')
+ @patch('DownloadAndArchivePythonLibrary.fileResult')
+ def test_installLibrary(self, mock_fileResult, mock_open, mock_mkdtemp, mock_os_walk, mock_zipfile, mock_popen):
+ # Prepare
+ mock_dir_path = Path('/fake/dir')
+ mock_mkdtemp.return_value = mock_dir_path
+
+ mock_popen_instance = MagicMock()
+ mock_popen_instance.communicate.return_value = (b'success', b'')
+ mock_popen_instance.returncode = 0
+ mock_popen.return_value = mock_popen_instance
+
+ mock_zipfile_instance = MagicMock()
+ mock_zipfile.return_value.__enter__.return_value = mock_zipfile_instance
+
+ mock_os_walk.return_value = [('/fake/dir', ('subdir',), ('file1.py', 'file2.py'))]
+
+ expected_result = {
+ 'Type': 3,
+ 'File': 'fake_library.zip',
+ 'FileID': 'fake_library.zip',
+ 'Contents': b'test data',
+ 'ContentsFormat': 'text'
+ }
+ mock_fileResult.return_value = expected_result
+
+ # Run
+ result = installLibrary(mock_dir_path, 'fake_library')
+
+ # Check
+ # Ensure subprocess was called with the correct command
+ mock_popen.assert_called_once_with(
+ ['python3', '-m', 'pip', 'install', '--target', str(mock_dir_path), 'fake_library'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
+
+ # Ensure zipfile was created with the correct path and mode
+ mock_zipfile.assert_called_once_with(mock_dir_path / 'fake_library.zip', 'w',
+ compression=zipfile.ZIP_DEFLATED, compresslevel=9)
+
+ # Ensure files were added to the zip archive
+ expected_arcnames = [Path('python') / 'file1.py', Path('python') / 'file2.py']
+ mock_zipfile_instance.write.assert_any_call(Path('/fake/dir/file1.py'), arcname=expected_arcnames[0])
+ mock_zipfile_instance.write.assert_any_call(Path('/fake/dir/file2.py'), arcname=expected_arcnames[1])
+
+ # Ensure the correct result is returned
+ assert result == expected_result
+
+ @patch('DownloadAndArchivePythonLibrary.installLibrary')
+ @patch('DownloadAndArchivePythonLibrary.demisto.args')
+ @patch('DownloadAndArchivePythonLibrary.return_results')
+ @patch('DownloadAndArchivePythonLibrary.return_error')
+ @patch('DownloadAndArchivePythonLibrary.Path')
+ @patch('DownloadAndArchivePythonLibrary.mkdtemp')
+ def test_main_success(self, mock_mkdtemp, mock_path, mock_return_error, mock_return_results, mock_args, mock_installLibrary):
+ # Prepare
+ mock_args.return_value = {'library_name': 'fake_library'}
+ mock_dir_path = Path('/fake/dir')
+ mock_mkdtemp.return_value = mock_dir_path
+
+ expected_result = {
+ 'Type': 3,
+ 'File': 'fake_library.zip',
+ 'FileID': 'fake_library.zip',
+ 'Contents': b'test data',
+ 'ContentsFormat': 'text'
+ }
+ mock_installLibrary.return_value = expected_result
+
+ # Run
+ main()
+
+ # Check
+ mock_return_results.assert_called_once_with(expected_result)
+ mock_return_error.assert_not_called()
+
+ @patch('DownloadAndArchivePythonLibrary.installLibrary')
+ @patch('DownloadAndArchivePythonLibrary.demisto.args')
+ @patch('DownloadAndArchivePythonLibrary.return_results')
+ @patch('DownloadAndArchivePythonLibrary.return_error')
+ @patch('DownloadAndArchivePythonLibrary.Path')
+ @patch('DownloadAndArchivePythonLibrary.mkdtemp')
+ def test_main_failure(self, mock_mkdtemp, mock_path, mock_return_error, mock_return_results, mock_args, mock_installLibrary):
+ # Prepare
+ mock_args.return_value = {'library_name': 'fake_library'}
+ mock_dir_path = Path('/fake/dir')
+ mock_mkdtemp.return_value = mock_dir_path
+
+ mock_installLibrary.side_effect = Exception('Test Exception')
+
+ # Run
+ main()
+
+ # Check
+ mock_return_error.assert_called_once_with('An error occurred: Test Exception')
+ mock_return_results.assert_not_called()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/README.md b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/README.md
new file mode 100644
index 000000000000..b5bbc2e8f1bb
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/DownloadAndArchivePythonLibrary/README.md
@@ -0,0 +1,24 @@
+The script downloads a Python library using PIP, archives it, and returns the file to the war room.
+
+## Script Data
+
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | Utility, file |
+| Cortex XSOAR Version | 6.10.0 |
+
+## Inputs
+
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| library_name | The Python library you wish to download and archive. |
+
+## Outputs
+
+---
+There are no outputs for this script.
diff --git a/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.py b/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.py
index 5bec8c862fbd..082150889d3d 100644
--- a/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.py
+++ b/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.py
@@ -82,7 +82,7 @@ def main(): # pragma: no cover
# if there are more events than the default size, page through and get them all
while len(audits) < total:
- if body.get("page"): # pagination for xsoar-6
+ if demisto_version.startswith("6"): # pagination for xsoar-6
body["page"] = page_num
else: # pagination for xsoar-8
body["request_data"]["search_from"] = page_num # type: ignore[index]
diff --git a/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.yml b/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.yml
index c1e6c4698f19..b8a4006659ab 100644
--- a/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.yml
+++ b/Packs/CommonScripts/Scripts/ExportAuditLogsToFile/ExportAuditLogsToFile.yml
@@ -25,7 +25,7 @@ contentitemexportablefields:
dependson:
must:
- core-api-post
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.101217
enabled: true
name: ExportAuditLogsToFile
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
index 8786248758d3..50b4af1b12f7 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
@@ -3,8 +3,10 @@
import openpyxl
from docx import Document
from pptx import Presentation
+from pptx.enum.shapes import MSO_SHAPE_TYPE
import zipfile
import pandas as pd
+from docx.opc.constants import RELATIONSHIP_TYPE as RT
def extract_hyperlinks_from_xlsx(file_path: str) -> Set:
@@ -34,10 +36,10 @@ def extract_hyperlinks_from_xlsx(file_path: str) -> Set:
def extract_hyperlinks_from_docx(file_path: str) -> Set:
doc = Document(file_path)
links = set()
- for para in doc.paragraphs:
- for hyper in para.hyperlinks:
- if hyper.address:
- links.add(hyper.address)
+ for rel in doc.part.rels.values():
+ if rel.reltype == RT.HYPERLINK and rel.is_external:
+ links.add(rel._target)
+
return links
@@ -51,7 +53,12 @@ def extract_hyperlinks_from_pptx(file_path: str) -> Set:
for run in paragraph.runs:
if run.hyperlink and run.hyperlink.address:
links.add(run.hyperlink.address)
- if shape.click_action and shape.click_action.hyperlink.address:
+ if shape.shape_type == MSO_SHAPE_TYPE.GROUP: # pylint: disable=E1101
+ group_shape = shape
+ for s in group_shape.shapes:
+ if s.click_action and s.click_action.hyperlink.address:
+ links.add(s.click_action.hyperlink.address)
+ elif shape.click_action and shape.click_action.hyperlink.address:
links.add(shape.click_action.hyperlink.address)
return links
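For context, grouped shapes in python-pptx expose their children through `.shapes`; the fix above walks one level of grouping, which is what the new p3.pptx test file exercises. A hypothetical recursive variant (not the pack's code) could handle nested groups as well:

```python
# Hypothetical sketch: recursively collect click-action hyperlinks, including nested groups.
from pptx.enum.shapes import MSO_SHAPE_TYPE


def iter_click_hyperlinks(shapes):
    for shape in shapes:
        if shape.shape_type == MSO_SHAPE_TYPE.GROUP:
            # Descend into the group's child shapes instead of reading click_action on the group.
            yield from iter_click_hyperlinks(shape.shapes)
        elif shape.click_action and shape.click_action.hyperlink.address:
            yield shape.click_action.hyperlink.address
```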
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
index 2842d24fe96a..e2ee7d1888d7 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
@@ -18,5 +18,5 @@ script: '-'
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/office-utils:2.0.0.88298
+dockerimage: demisto/office-utils:2.0.0.96781
fromversion: 5.5.0
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
index 7f26494c3a78..f4339697edc6 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
@@ -6,33 +6,39 @@
('test_data/d1.docx',
{'https://xsoar.pan.dev/', 'https://www.paloaltonetworks.com/', 'https://jobs.paloaltonetworks.com/en/'}),
('test_data/d2.docx', set()),
+ ('test_data/d3.docx', {'https://www.paloaltonetworks.com/', 'http://www.google.com'}),
('test_data/e1.xlsx', {'http://www.google.com', 'http://www.yahoo.de/'}),
('test_data/e2.xlsx', set()),
('test_data/e3.xlsx', {'https://www.paloaltonetworks.com/'}),
('test_data/p1.pptx', {'https://xsoar.pan.dev/', 'https://www.paloaltonetworks.com/'}),
('test_data/p2.pptx', set()),
+ ('test_data/p3.pptx', {'http://www.google.com'})
])
def test_basescript_dummy(file_path, expected_output):
"""
Given:
1. docx file with hyperlinks on a picture and text.
2. docx file without hyperlinks
- 3. excel file with hyperlinks on a picture and inside text cell.
- 4. excel file with no hyperlinks.
- 5. excel file with hyperlinks inside text cell.
- 6. power point file with hyperlinks on a picture and text.
- 7. power point file without hyperlinks.
+ 3. docx file with hyperlinks on a picture and in the document.
+ 4. excel file with hyperlinks on a picture and inside text cell.
+ 5. excel file with no hyperlinks.
+ 6. excel file with hyperlinks inside text cell.
+ 7. power point file with hyperlinks on a picture and text.
+ 8. power point file without hyperlinks.
+ 9. power point file with hyperlinks inside grouped shapes.
When:
Extracting hyperlinks from file using ExtractHyperlinksFromOfficeFiles script.
Then:
Validate that:
1. hyperlinks extracted from docx file
2. no hyperlinks extracted from docx file
- 3. hyperlinks extracted from excel file
- 4. no hyperlinks extracted from excel file
- 5. hyperlinks extracted from excel file
- 6. hyperlinks extracted from power point file
- 7. no hyperlinks extracted from power point file
+ 3. hyperlinks extracted from the docx file.
+ 4. hyperlinks extracted from excel file
+ 5. no hyperlinks extracted from excel file
+ 6. hyperlinks extracted from excel file
+ 7. hyperlinks extracted from power point file
+ 8. no hyperlinks extracted from power point file
+ 9. The grouped shapes are parsed correctly and the hyperlink is extracted.
"""
response = extract_hyperlink_by_file_type(file_name=file_path, file_path=file_path)
assert set(response.raw_response) == expected_output
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx
new file mode 100644
index 000000000000..83c45346f3c2
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p3.pptx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p3.pptx
new file mode 100644
index 000000000000..3d8d888c6105
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/p3.pptx differ
diff --git a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.py b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.py
index c87b48c257eb..a2fbdee5b005 100644
--- a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.py
+++ b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.py
@@ -68,7 +68,8 @@ def extract_indicators_from_file(args):
'Type': entryTypes['note'],
'ContentsFormat': formats['text'],
'Contents': indicators_hr,
- 'HumanReadable': string_to_markdown(indicators_hr)
+ 'HumanReadable': string_to_markdown(indicators_hr),
+ 'EntryContext': json.loads(indicators_hr)
}
diff --git a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.yml b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.yml
index de4a5a900a4b..ca7d9442ed4e 100644
--- a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.yml
+++ b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile.yml
@@ -52,4 +52,4 @@ tests:
- Extract Indicators From File - Generic v2 - Test
fromversion: 5.0.0
tags: []
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.99865
diff --git a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile_test.py b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile_test.py
index 2a8fa75c1e55..9d32f4c2f91a 100644
--- a/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile_test.py
+++ b/Packs/CommonScripts/Scripts/ExtractIndicatorsFromTextFile/ExtractIndicatorsFromTextFile_test.py
@@ -26,6 +26,7 @@ def test_extract_indicators(mocker):
results = extract_indicators_from_file(args)
assert {'Contents': '{"IP": ["1.1.1.1"]}',
'ContentsFormat': 'text',
+ 'EntryContext': {'IP': ['1.1.1.1']},
'HumanReadable': '### IP\n- 1.1.1.1\n',
'Type': 1} == results
diff --git a/Packs/CommonScripts/Scripts/FailedInstances/FailedInstances.js b/Packs/CommonScripts/Scripts/FailedInstances/FailedInstances.js
index 957eba620f16..c31cb9ddaef3 100644
--- a/Packs/CommonScripts/Scripts/FailedInstances/FailedInstances.js
+++ b/Packs/CommonScripts/Scripts/FailedInstances/FailedInstances.js
@@ -17,6 +17,12 @@ Object.keys(all).forEach(function(m) {
}])});
var res = executeCommand(cmd, {});
+ var content = res[0].Contents
+ var result = content.includes("Test button cannot be used") && all[m].brand === "ServiceNow v2";
+ if (result === true) {
+ cmd = 'servicenow-oauth-test'
+ res = executeCommand(cmd, {});
+ }
executeCommand("addEntries", {"entries": JSON.stringify([{
Type: entryTypes.note,
Contents: 'done testing **' + m + '**:\n' + res[0].Contents,
diff --git a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
index 044347f2954a..dd699b70af49 100644
--- a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
+++ b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
@@ -12,16 +12,19 @@ def escape_special_characters(text: str) -> str:
Returns:
return the value with the added escape char.
"""
- text = text.replace('\n', '\\n')
- text = text.replace('\t', '\\t')
- text = text.replace('\r', '\\r')
- text = text.replace('(', '\(')
- text = text.replace(')', '\)')
- text = text.replace('[', '\[')
- text = text.replace(']', '\]')
- text = text.replace('^', '\^')
- text = text.replace(':', '\:')
- return text
+ return (
+ text.replace("\\", r"\\")
+ .replace("\n", r"\n")
+ .replace("\t", r"\t")
+ .replace("\r", r"\r")
+ .replace("(", r"\(")
+ .replace(")", r"\)")
+ .replace("[", r"\[")
+ .replace("]", r"\]")
+ .replace("^", r"\^")
+ .replace(":", r"\:")
+ .replace('"', r"\"")
+ )
def main():
diff --git a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
index e395044b72d7..c42dd4492624 100644
--- a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
+++ b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
@@ -206,7 +206,9 @@ def test_no_iocs_returned_from_search_indicators(mocker):
('', ''),
('\t\r\n', '\\t\\r\\n'),
('([', '\(\['),
- ('^ASDF:', '\^ASDF\:')])
+ ('^ASDF:', '\^ASDF\:'),
+ ('aaa\gg123:', r'aaa\\gg123\:'),
+ ('"', '\\"')])
def test_escape_special_characters(input, expected_res):
"""
Given:
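The reworked escape helper above now escapes the backslash before any other character. A minimal, standalone sketch of why that ordering matters — reduced to a few of the replacements, not the full character set the script handles:

```python
def escape_special_characters(text: str) -> str:
    # Escape the backslash first; otherwise the backslashes introduced by the
    # later replacements (e.g. ":" -> "\:") would themselves be re-escaped.
    return (
        text.replace("\\", r"\\")
        .replace("\n", r"\n")
        .replace(":", r"\:")
        .replace('"', r"\"")
    )


if __name__ == "__main__":
    # A literal backslash followed by a colon, as in the new unit test case.
    assert escape_special_characters("aaa\\gg123:") == r"aaa\\gg123\:"
    assert escape_special_characters('"') == r"\""
```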
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
similarity index 96%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
index 452e578add24..faa6560fa0b7 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
@@ -17,7 +17,8 @@ def validate_header_exists(headers, header):
return_error("Error: The supplied header name was not found.")
-def list_to_headers_and_lines(list_data, list_separator: str):
+def list_to_headers_and_lines(list_data: str, list_separator: str):
+ list_data = list_data.strip()
lines_and_headers = [(line.replace("\r", "") if line.endswith("\r") else line).split(list_separator)
for line in list_data.split('\n')]
headers = lines_and_headers[0]
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
similarity index 97%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
index 5c3ce08266d3..6d0a4760f8a1 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
@@ -42,7 +42,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.13.80593
+dockerimage: demisto/python3:3.10.14.96411
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py b/Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
similarity index 92%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
index 9967ee8f9033..3e3f48244de2 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
@@ -208,3 +208,22 @@ def test_list_to_headers_and_lines(list_data, expected_headers, expected_lines):
headers, lines = list_to_headers_and_lines(list_data, ",")
assert expected_headers == headers
assert expected_lines == lines
+
+
+def test_parse_list_with_new_line_at_the_end(mocker):
+ """
+ Given:
+ - A list with a new line at the end.
+ When:
+ - Parsing the list.
+ Then:
+        - Make sure that no exception is raised and the code finishes gracefully.
+ """
+ list_with_new_line_at_the_end = """,mapping_framework,mapping_framework_version,capability_group,capability_id
+0,veris,1.3.7,action.hacking
+
+"""
+ from GetListRow import parse_list
+ mocker.patch.object(demisto, "executeCommand", return_value=[{"Contents": list_with_new_line_at_the_end}])
+ res = parse_list(parse_all='false', header="mapping_framework", value="veris", list_name='test_list', list_separator=',')
+ assert res
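The new `list_data.strip()` call guards against lists that end with blank lines. A small sketch of the failure mode it prevents, using a simplified version of the helper (the real script also normalizes `\r` line endings and takes the separator from the script arguments):

```python
def list_to_headers_and_lines(list_data: str, list_separator: str = ","):
    # Drop surrounding whitespace first, so a trailing newline does not
    # produce empty "rows" whose column count never matches the header.
    list_data = list_data.strip()
    rows = [line.rstrip("\r").split(list_separator) for line in list_data.split("\n")]
    return rows[0], rows[1:]


if __name__ == "__main__":
    raw = "id,framework\n0,veris\n\n"          # note the trailing blank lines
    headers, lines = list_to_headers_and_lines(raw)
    assert headers == ["id", "framework"]
    assert lines == [["0", "veris"]]           # no stray [''] row at the end
```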
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/README.md b/Packs/CommonScripts/Scripts/GetListRow/README.md
similarity index 100%
rename from Packs/CommonScripts/Scripts/GetlistRow/README.md
rename to Packs/CommonScripts/Scripts/GetListRow/README.md
diff --git a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
index 63087160bd54..3cc14938096e 100644
--- a/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
+++ b/Packs/CommonScripts/Scripts/GridFieldSetup/GridFieldSetup.yml
@@ -23,6 +23,26 @@ args:
name: val9
- description: A value for the 10th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
name: val10
+- description: A value for the 11th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val11
+- description: A value for the 12th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val12
+- description: A value for the 13th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val13
+- description: A value for the 14th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val14
+- description: A value for the 15th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val15
+- description: A value for the 16th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val16
+- description: A value for the 17th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val17
+- description: A value for the 18th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val18
+- description: A value for the 19th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val19
+- description: A value for the 20th key. (Can be a string or context path or `TIMESTAMP` to get the current timestamp in ISO format.)
+ name: val20
- description: Grid field to populate.
name: gridfield
required: true
@@ -39,7 +59,7 @@ comment: |-
commonfields:
id: GridFieldSetup
version: -1
-dockerimage: demisto/python3:3.10.13.89009
+dockerimage: demisto/python3:3.10.14.99144
enabled: true
name: GridFieldSetup
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.py b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.py
index fecc70f0f910..44ed5dbd78b7 100644
--- a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.py
+++ b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.py
@@ -54,7 +54,7 @@ def unicode_dict_reader(csv_data, **kwargs):
counter = 0
for val in value:
- col_name = 'NO_NAME_COLUMN_{}'.format(counter)
+ col_name = f'NO_NAME_COLUMN_{counter}'
row_dict[col_name] = val
counter += 1
@@ -77,7 +77,7 @@ def unicode_dict_reader(csv_data, **kwargs):
"""
first_row = arr[0]
for counter in range(no_name_columns_counter):
- first_row['NO_NAME_COLUMN_{}'.format(counter)] = ""
+ first_row[f'NO_NAME_COLUMN_{counter}'] = ""
return arr
@@ -92,7 +92,7 @@ def get_entry_by_file_name(file_name):
if file_name.lower() == fn.lower():
return entry
- raise ValueError('Was unable to find "{}" in the war room. Please ensure the file was uploaded.'.format(file_name))
+ raise ValueError(f'Was unable to find "{file_name}" in the war room. Please ensure the file was uploaded.')
csv_entry = None
@@ -124,7 +124,7 @@ def main():
parse_ip = int(d_args['ips']) if 'ips' in d_args else -1
parse_domain = int(d_args['domains']) if 'domains' in d_args else -1
parse_hash = int(d_args['hashes']) if 'hashes' in d_args else -1
- parse_all = True if d_args['parseAll'] == 'yes' else False
+ parse_all = d_args['parseAll'] == 'yes'
if parse_ip == -1 and parse_domain == -1 and parse_hash == -1 and not parse_all:
return_error('Select a field to extract or set parseAll=yes to parse the whole CSV file')
@@ -142,7 +142,7 @@ def main():
res = demisto.getFilePath(entry_id)
if not res:
- return_error("Entry {} not found".format(entry_id))
+ return_error(f"Entry {entry_id} not found")
file_path = res['path']
file_name = res['name']
@@ -186,7 +186,7 @@ def main():
if sum(1 for line in open(file_path)) <= 1: # checks if there are less than one line
return_error('No data to parse. CSV file might be empty or one-lined. try the `ParseAll=yes` argument.')
- with open(file_path, 'rU') as f:
+ with open(file_path) as f:
has_header = csv.Sniffer().has_header(f.read(1024))
f.seek(0)
csv_data = csv.reader(f)
diff --git a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.yml b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.yml
index 1452f74f2941..8c22afc497d3 100644
--- a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.yml
+++ b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV.yml
@@ -5,11 +5,11 @@ args:
- description: The name of the file. The file must be uploaded to the War Room.
name: file
deprecated: true
-- description: The column number that contains IP Addresses. Other IOC types should not be in that column. (First column is column 0)
+- description: The column number that contains IP Addresses. Other IOC types should not be in that column. (First column is column 0).
name: ips
-- description: The column number that contains domains. Other IOC types should not be in that column. (First column is column 0)
+- description: The column number that contains domains. Other IOC types should not be in that column. (First column is column 0).
name: domains
-- description: The column number that contains file hashes. Other IOC types should not be in that column. (First column is column 0)
+- description: The column number that contains file hashes. Other IOC types should not be in that column. (First column is column 0).
name: hashes
- auto: PREDEFINED
defaultValue: 'yes'
@@ -19,7 +19,7 @@ args:
- 'yes'
- 'no'
- defaultValue: utf-8
- description: The codec type used to parse the file. (some character sets are not UTF-8 supported)
+  description: The codec type used to parse the file. (Some character sets are not UTF-8 compatible).
name: codec
comment: This script will parse a CSV file and place the unique IPs, Domains and Hashes into the context.
commonfields:
@@ -58,4 +58,4 @@ runas: DBotWeakRole
tests:
- No tests
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.13.86272
+dockerimage: demisto/python3:3.10.14.99865
diff --git a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV_test.py b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV_test.py
index c6cdecb7d5ab..b7d63a324c21 100644
--- a/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV_test.py
+++ b/Packs/CommonScripts/Scripts/ParseCSV/ParseCSV_test.py
@@ -133,7 +133,7 @@ def test_parsecsv_with_iocs_same_column(self, mocker):
result = self.get_demisto_results()
ips_result = result.get('EntryContext', {}).get('IP', [])
- if ips_result and '1.1.1.1' != ips_result[0].get('Address'):
+ if ips_result and ips_result[0].get('Address') != '1.1.1.1':
result['EntryContext']['IP'].reverse()
domains_result = result.get('EntryContext', {}).get('Domain', [])
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py
new file mode 100644
index 000000000000..77ae224561a9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py
@@ -0,0 +1,42 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+def print_to_alert_command(current_alert_id: str, value: str, alert_id: str) -> None:
+ """Prints a value to the specified alert ID.
+
+ Args:
+ current_alert_id (str): The alert ID running the script.
+ value (str): The value to print.
+ alert_id (str): The alert ID to print to.
+ """
+ entry_note = json.dumps(
+ [{"Type": 1, "ContentsFormat": EntryFormat.MARKDOWN, "Contents": f"Entry from alert #{current_alert_id}:\n{value}"}]
+ )
+ entry_tags_res: list[dict[str, Any]] = demisto.executeCommand(
+ "addEntries", {"entries": entry_note, "id": alert_id, "reputationCalcAsync": True}
+ )
+ if isError(entry_tags_res[0]):
+ return_error(get_error(entry_tags_res))
+ else:
+ return_results(CommandResults(readable_output=f"Successfully printed to alert {alert_id}."))
+
+
+def main(): # pragma: no cover
+ try:
+ current_alert: dict[str, Any] = demisto.incident()
+ current_alert_id: str = current_alert["id"]
+ args = demisto.args()
+ value: str = args["value"]
+ alert_id = args["alert_id"]
+ print_to_alert_command(
+ current_alert_id=current_alert_id,
+ value=value,
+ alert_id=alert_id,
+ )
+ except Exception as ex:
+ return_error(f"Failed to execute PrintToAlert. Error: {str(ex)}")
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+ main()
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml
new file mode 100644
index 000000000000..19db3a7aefea
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml
@@ -0,0 +1,26 @@
+args:
+- description: The value to print to the war-room of the specified alert.
+ name: value
+ required: true
+- description: The alert ID to print to.
+ name: alert_id
+ required: true
+comment: Prints a value to the specified alert's war-room. The alert must be in status "Under Investigation".
+commonfields:
+ id: PrintToAlert
+ version: -1
+name: PrintToAlert
+script: '-'
+tags: []
+enabled: true
+scripttarget: 0
+timeout: '0'
+runas: DBotWeakRole
+type: python
+subtype: python3
+dockerimage: demisto/python3:3.10.14.97374
+fromversion: 8.7.0
+marketplaces:
+- marketplacev2
+tests:
+- No test - unit test
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py
new file mode 100644
index 000000000000..275e4ff12e18
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py
@@ -0,0 +1,75 @@
+import pytest
+from pytest_mock import MockerFixture
+import demistomock as demisto
+from CommonServerPython import EntryType
+
+
+def test_print_to_alert(mocker: MockerFixture):
+ """Tests print_to_alert_command when the executeCommand command succeeds.
+
+ Checks that the addEntries command is called with the right arguments.
+ """
+ from PrintToAlert import print_to_alert_command
+
+ execute_command_mocker = mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.NOTE,
+ "Contents": "done",
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ print_to_alert_command(
+ current_alert_id="5",
+ value="Hello",
+ alert_id="4",
+ )
+ # Right command is called
+ assert execute_command_mocker.call_args[0][0] == "addEntries"
+ # Right arguments are given
+ assert execute_command_mocker.call_args[0][1] == {
+ "entries": '[{"Type": 1, "ContentsFormat": "markdown", "Contents": "Entry from alert #5:\\nHello"}]',
+ "id": "4",
+ "reputationCalcAsync": True,
+ }
+ assert demisto.results.call_args[0][0]["HumanReadable"] == "Successfully printed to alert 4."
+
+
+def test_print_to_alert_error(mocker: MockerFixture):
+ """Tests print_to_alert_command when the executeCommand command fails.
+
+    Checks that the system exits and an error message is returned.
+ """
+ from PrintToAlert import print_to_alert_command
+
+ error_message = "Something went wrong"
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.ERROR,
+ "Contents": error_message,
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ with pytest.raises(SystemExit):
+ print_to_alert_command(
+ current_alert_id="5",
+ value="Hello",
+ alert_id="4",
+ )
+ assert demisto.results.call_args[0][0] == {
+ "Type": EntryType.ERROR,
+ "ContentsFormat": "text",
+ "Contents": error_message,
+ "EntryContext": None,
+ }
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/README.md b/Packs/CommonScripts/Scripts/PrintToAlert/README.md
new file mode 100644
index 000000000000..7db8b25ccbd9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/README.md
@@ -0,0 +1,35 @@
+Prints a value to the specified alert's war-room. The alert must be in status "Under Investigation".
+
+## Script Data
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | |
+| Cortex XSOAR Version | 8.7.0 |
+
+## Inputs
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| value | The value to print to the war-room of the specified alert. |
+| alert_id | The alert ID to print to. |
+
+## Outputs
+---
+There are no outputs for this script.
+
+
+## Script Example
+```!PrintToAlert alert_id=5 value="Hello from the other side"```
+
+## Context Example
+```json
+{}
+```
+
+## Human Readable Output
+
+>Successfully printed to alert 5.
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py
new file mode 100644
index 000000000000..58f2f700cad9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py
@@ -0,0 +1,63 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+def print_to_parent_incident(alert_id: str, value: str, parent_incident_id: str) -> None:
+ """Prints a value to the alert's parent incident.
+
+ Args:
+ alert_id (str): The alert ID running the script.
+ value (str): The value to print.
+ parent_incident_id (str): The parent incident's ID of the alert.
+ """
+ entry_note = json.dumps(
+ [{"Type": 1, "ContentsFormat": EntryFormat.MARKDOWN, "Contents": f"Entry from alert #{alert_id}:\n{value}"}]
+ )
+ entry_tags_res: list[dict[str, Any]] = demisto.executeCommand(
+ "addEntries", {"entries": entry_note, "id": parent_incident_id, "reputationCalcAsync": True}
+ )
+ if isError(entry_tags_res[0]):
+ return_error(get_error(entry_tags_res))
+ else:
+ return_results(CommandResults(readable_output=f"Successfully printed to parent incident {parent_incident_id}."))
+
+
+def validate_parent_incident_id(parent_incident_id: str, alert_id: str) -> str:
+ """Validates if the parent incident ID of the alert is not empty, and return it.
+
+ Args:
+ parent_incident_id (str): The parent incident ID of the alert.
+ alert_id (str): The alert ID running the script.
+
+ Raises:
+ DemistoException: If the parent incident ID is an empty string, meaning it couldn't be found.
+
+ Returns:
+ str: The parent incident ID if not empty.
+ """
+ if not parent_incident_id:
+ raise DemistoException(f"No parent incident was found for {alert_id =}")
+ return parent_incident_id
+
+
+def main(): # pragma: no cover
+ try:
+ args = demisto.args()
+ value: str = args["value"]
+ current_alert: dict[str, Any] = demisto.incident()
+ alert_id: str = current_alert["id"]
+ parent_incident_id: str = validate_parent_incident_id(
+ parent_incident_id=current_alert.get("parentXDRIncident", ""),
+ alert_id=alert_id,
+ )
+ print_to_parent_incident(
+ alert_id=alert_id,
+ value=value,
+ parent_incident_id=parent_incident_id,
+ )
+ except Exception as ex:
+ return_error(f"Failed to execute PrintToParentIncident. Error: {str(ex)}")
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+ main()
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml
new file mode 100644
index 000000000000..b2f0ba2845a1
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml
@@ -0,0 +1,25 @@
+args:
+- description: The value to print to the parent incident's war-room.
+ name: value
+ required: true
+comment: Prints a value to the war-room of the current alert's parent incident.
+commonfields:
+ id: PrintToParentIncident
+ version: -1
+name: PrintToParentIncident
+script: '-'
+tags: []
+enabled: true
+scripttarget: 0
+timeout: '0'
+runas: DBotWeakRole
+type: python
+subtype: python3
+dockerimage: demisto/python3:3.10.14.98471
+fromversion: 8.7.0
+marketplaces:
+- marketplacev2
+tests:
+- No test - unit test
+skipprepare:
+- script-name-incident-to-alert
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py
new file mode 100644
index 000000000000..73fa22ba5d98
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py
@@ -0,0 +1,83 @@
+import pytest
+from pytest_mock import MockerFixture
+import demistomock as demisto
+from CommonServerPython import EntryType, DemistoException
+
+
+def test_print_to_parent_incident(mocker: MockerFixture):
+ """Tests print_to_parent_incident when the executeCommand command succeeds.
+
+ Checks that the addEntries command is called with the right arguments.
+ """
+ from PrintToParentIncident import print_to_parent_incident
+
+ execute_command_mocker = mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.NOTE,
+ "Contents": "done",
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ print_to_parent_incident(
+ alert_id="4",
+ value="Hello",
+ parent_incident_id="INCIDENT-5",
+ )
+ # Right command is called
+ assert execute_command_mocker.call_args[0][0] == "addEntries"
+ # Right arguments are given
+ assert execute_command_mocker.call_args[0][1] == {
+ "entries": '[{"Type": 1, "ContentsFormat": "markdown", "Contents": "Entry from alert #4:\\nHello"}]',
+ "id": "INCIDENT-5",
+ "reputationCalcAsync": True,
+ }
+ assert demisto.results.call_args[0][0]["HumanReadable"] == "Successfully printed to parent incident INCIDENT-5."
+
+
+def test_print_to_alert_error(mocker: MockerFixture):
+ """Tests print_to_parent_incident when the executeCommand command fails.
+
+    Checks that the system exits and an error message is returned.
+ """
+ from PrintToParentIncident import print_to_parent_incident
+
+ error_message = "Something went wrong"
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.ERROR,
+ "Contents": error_message,
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ with pytest.raises(SystemExit):
+ print_to_parent_incident(
+ alert_id="4",
+ value="Hello",
+ parent_incident_id="INCIDENT-5",
+ )
+ assert demisto.results.call_args[0][0] == {
+ "Type": EntryType.ERROR,
+ "ContentsFormat": "text",
+ "Contents": error_message,
+ "EntryContext": None,
+ }
+
+
+def test_no_parent_incident_error():
+ """Check that we return an error when no parent incident is found"""
+ from PrintToParentIncident import validate_parent_incident_id
+
+ with pytest.raises(DemistoException):
+ validate_parent_incident_id(parent_incident_id="", alert_id=4)
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md b/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md
new file mode 100644
index 000000000000..f806a65e6367
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md
@@ -0,0 +1,34 @@
+Prints a value to the war-room of the current alert's parent incident.
+
+## Script Data
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | |
+| Cortex XSOAR Version | 8.7.0 |
+
+## Inputs
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| value | The value to print to the parent incident's war-room. |
+
+## Outputs
+---
+There are no outputs for this script.
+
+
+## Script Example
+```!PrintToParentIncident value="Parent of 6 I assume?"```
+
+## Context Example
+```json
+{}
+```
+
+## Human Readable Output
+
+>Successfully printed to parent incident INCIDENT-5.
diff --git a/Packs/CommonScripts/Scripts/ReadPDFFileV2/README.md b/Packs/CommonScripts/Scripts/ReadPDFFileV2/README.md
index 52c0b700959a..555d6ab673f3 100644
--- a/Packs/CommonScripts/Scripts/ReadPDFFileV2/README.md
+++ b/Packs/CommonScripts/Scripts/ReadPDFFileV2/README.md
@@ -18,6 +18,7 @@ Load a PDF file's content and metadata into context. Supports extraction of hash
| entryID | The War Room entryID of the file to read. |
| userPassword | The password for the file, if encrypted. |
| maxImages | The maximum number of images to extract from the PDF file. |
+| unescape_url | Whether to unescape URLs that have been escaped as part of the URL extraction. Invalid characters will be ignored. Default is true. |
## Outputs
---
diff --git a/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.py b/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.py
index 9c1282b82a19..c76c9c45e2de 100644
--- a/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.py
+++ b/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.py
@@ -240,9 +240,9 @@ def get_pdf_htmls_content(pdf_path: str, output_folder: str, unescape_url: bool
html_file_names = get_files_names_in_path(output_folder, "*.html")
html_content = ""
for file_name in html_file_names:
- with open(file_name) as f:
+ with open(file_name, "rb") as f:
for line in f:
- html_content += html.unescape(line) if unescape_url else line
+ html_content += html.unescape(str(line)) if unescape_url else str(line)
return html_content
diff --git a/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.yml b/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.yml
index a89eff38691d..04a84b503b35 100644
--- a/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.yml
+++ b/Packs/CommonScripts/Scripts/ReadPDFFileV2/ReadPDFFileV2.yml
@@ -129,7 +129,7 @@ tags:
- ingestion
timeout: "0"
type: python
-dockerimage: demisto/readpdf:1.0.0.93363
+dockerimage: demisto/readpdf:1.0.0.98214
runas: DBotRole
tests:
- Extract Indicators From File - Generic v2 - Test
diff --git a/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.py b/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.py
index 959022b26c36..5304c35a24ca 100644
--- a/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.py
+++ b/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.py
@@ -1,27 +1,46 @@
import demistomock as demisto # noqa
from CommonServerPython import * # noqa
+from typing import IO
from pyzbar import pyzbar
import cv2
-from wurlitzer import pipes
+import tempfile
# pylint: disable=E1101 # disable pylint not recognizing cv2's attributes.
+class StderrRedirect:
+ '''Context manager to redirect stderr.'''
+ temp_stderr: IO
+ old_stderr: int
+
+ def __enter__(self):
+ demisto.debug('entering StderrRedirect')
+ self.temp_stderr = tempfile.TemporaryFile()
+ self.old_stderr = os.dup(sys.stderr.fileno()) # make a copy of stderr
+ os.dup2(self.temp_stderr.fileno(), sys.stderr.fileno()) # redirect stderr to the temporary file
+
+ def __exit__(self, exc_type, exc_value, exc_traceback):
+ demisto.debug(f'exiting StderrRedirect: {exc_type=}, {exc_value=}, {exc_traceback=}')
+ self.temp_stderr.seek(0)
+ demisto.debug(f'stderr: {self.temp_stderr.read()}')
+ os.dup2(self.old_stderr, sys.stderr.fileno()) # restore stderr
+ os.close(self.old_stderr)
+ self.temp_stderr.close()
+
+
def read_qr_code(filename: str) -> list:
- debug_messages = [] # don't use demisto.debug under the context manager.
- with pipes() as (out, _):
+ with StderrRedirect(): # redirect stderr to catch cv2 warnings which are sent directly to stderr
+
img = cv2.imread(filename)
- text = [d.data.decode() for d in pyzbar.decode(img)]
+ demisto.debug(f'loaded file: {filename}')
+ text = [d.data.decode() for d in pyzbar.decode(img, symbols=[pyzbar.ZBarSymbol.QRCODE])]
+ demisto.debug(f'pybar decode: {text}')
if not text:
- debug_messages.append("Couldn't extract text with pyzbar, retrying with cv2.")
- detect = cv2.QRCodeDetector()
- text, *_ = detect.detectAndDecode(img)
-
- debug_messages.append(f'stdout: {out.read()}')
+ demisto.debug("Couldn't extract text with pyzbar, retrying with cv2.")
+ text = [cv2.QRCodeDetector().detectAndDecode(img)[0]]
- demisto.debug('\n'.join(debug_messages))
- return text if isinstance(text, list) else [text]
+ return text
def extract_indicators_from_text(text: list) -> dict:
@@ -42,15 +61,15 @@ def extract_info_from_qr_code(entry_id: str) -> CommandResults:
indicators = extract_indicators_from_text(text)
except (cv2.error, TypeError) as e: # generic error raised by cv2
raise DemistoException('Error parsing file. Please make sure it is a valid image file.') from e
- except ValueError: # raised by demisto.getFilePath when the entry_id is not found
- raise DemistoException(f'Invalid entry ID: {entry_id=}')
-
+ except ValueError as e: # raised by demisto.getFilePath when the entry_id is not found
+ demisto.debug(f'ValueError: {e}, {e.args}')
+ raise DemistoException(f'Invalid entry ID: {entry_id=}') from e
return CommandResults(
outputs_prefix='QRCodeReader',
outputs=({'Text': text} | indicators),
readable_output=tableToMarkdown(
'QR Code Read', {'Text': text}
- ),
+ )
)
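The `StderrRedirect` context manager above replaces the previous `wurlitzer`-based capture with plain file-descriptor duplication. A self-contained sketch of the same pattern, runnable outside the integration environment (`demisto.debug` is replaced here by a `captured` attribute and a `print`, and `__enter__` returns `self` so the captured output is easy to inspect):

```python
import os
import sys
import tempfile


class StderrRedirect:
    """Temporarily send OS-level stderr (fd 2) to a temporary file."""

    def __enter__(self):
        self.temp_stderr = tempfile.TemporaryFile()
        self.old_stderr = os.dup(sys.stderr.fileno())             # keep a copy of fd 2
        os.dup2(self.temp_stderr.fileno(), sys.stderr.fileno())   # point fd 2 at the temp file
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.temp_stderr.seek(0)
        self.captured = self.temp_stderr.read()                   # bytes written by C libraries, e.g. cv2
        os.dup2(self.old_stderr, sys.stderr.fileno())              # restore fd 2
        os.close(self.old_stderr)
        self.temp_stderr.close()


if __name__ == "__main__":
    with StderrRedirect() as redirect:
        os.write(2, b"low-level warning\n")   # what cv2 warnings look like to the OS
    print("captured:", redirect.captured)
```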
diff --git a/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.yml b/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.yml
index b79704330345..c8542ca5e564 100644
--- a/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.yml
+++ b/Packs/CommonScripts/Scripts/ReadQRCode/ReadQRCode.yml
@@ -26,7 +26,7 @@ tags: []
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/qrcode:1.0.0.87067
+dockerimage: demisto/qrcode:1.0.0.98232
fromversion: 6.10.0
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/SetGridField/README.md b/Packs/CommonScripts/Scripts/SetGridField/README.md
index 3cb97ccb0769..5e47ac1c3b71 100644
--- a/Packs/CommonScripts/Scripts/SetGridField/README.md
+++ b/Packs/CommonScripts/Scripts/SetGridField/README.md
@@ -123,7 +123,7 @@ Entry Context:
The first time you run `SetGridField` on a newly created grid field, you may see an error similar to the following:
-![Screen Shot 2021-12-21 at 10 36 03 PM](doc_files/troubleshoot.png)
+![Screen Shot 2021-12-21 at 10 36 03 PM](../../doc_files/troubleshoot.png)
To resolve the error:
1. Make sure the grid field is associated with the incident type the field is being used in.
diff --git a/Packs/CommonScripts/Scripts/SetGridField/SetGridField.yml b/Packs/CommonScripts/Scripts/SetGridField/SetGridField.yml
index dac3a7f2cf86..dfb1edb2c4bc 100644
--- a/Packs/CommonScripts/Scripts/SetGridField/SetGridField.yml
+++ b/Packs/CommonScripts/Scripts/SetGridField/SetGridField.yml
@@ -43,7 +43,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/pandas:1.0.0.86039
+dockerimage: demisto/pandas:1.0.0.102566
fromversion: 5.0.0
tests:
- No tests
diff --git a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.py b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.py
index c8ebdee25921..2534c0b85701 100644
--- a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.py
+++ b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.py
@@ -1,11 +1,11 @@
import demistomock as demisto
from CommonServerPython import *
-import ipaddress
+from ipaddress import IPv6Address
def is_valid_ipv6_address(address):
try:
- ipaddress.IPv6Address(address)
+ IPv6Address(address)
return True
except ValueError:
return False
@@ -13,11 +13,16 @@ def is_valid_ipv6_address(address):
def main():
the_input = demisto.args().get('input')
-
the_input = argToList(the_input)
entries_list = []
for item in the_input:
+
+ demisto.info(f'Got IPv6 {item}')
+ item = re.sub('[^a-f0-9:%th.]+', '', item)
+
+ demisto.info(f'Changed item to {str(item)}')
+
if is_valid_ipv6_address(item):
entries_list.append(item)
else:
diff --git a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.yml b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.yml
index 7adf46d1849a..e9afd916f4da 100644
--- a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.yml
+++ b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator.yml
@@ -15,7 +15,7 @@ tags:
- indicator-format
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.13.86272
+dockerimage: demisto/python3:3.10.14.101217
runas: DBotWeakRole
tests:
- No test
diff --git a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator_test.py b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator_test.py
index f9c63068189e..11335f90665d 100644
--- a/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator_test.py
+++ b/Packs/CommonScripts/Scripts/VerifyIPv6Indicator/VerifyIPv6Indicator_test.py
@@ -23,7 +23,14 @@ def test_set_limit(address, expected):
assert ipv6_address == expected
-def test_main(mocker):
+@pytest.mark.parametrize(
+ "address, expected",
+ [
+ ('00:16:45:00:46:91', ''),
+ ('"1:2:3:4:5:6:7:8', '1:2:3:4:5:6:7:8'),
+ ]
+)
+def test_main(mocker, address, expected):
"""
Given:
- MAC Address as input
@@ -32,10 +39,10 @@ def test_main(mocker):
Then:
- Ensure the MAC address is caught as invalid IPv6 and returns array with empty string
"""
- mocker.patch.object(demisto, 'args', return_value={'input': '00:16:45:00:46:91'})
+ mocker.patch.object(demisto, 'args', return_value={'input': address})
mocker.patch.object(demisto, 'results')
main()
- demisto.results.assert_called_with([''])
+ demisto.results.assert_called_with([expected])
@pytest.mark.skip(reason="Flaky test, issue #41552")
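A small illustration of the new sanitisation step, mirroring the character class used in the script (the `clean_and_validate` helper name is illustrative only): stray characters such as a leading quotation mark are removed before validation, while a MAC address is still rejected as an invalid IPv6 address.

```python
import re
from ipaddress import IPv6Address


def clean_and_validate(item: str) -> str:
    # Same character class as the script: keep hex digits, ':', '%', '.', 't' and 'h'.
    item = re.sub('[^a-f0-9:%th.]+', '', item)
    try:
        IPv6Address(item)
        return item
    except ValueError:
        return ''


if __name__ == "__main__":
    assert clean_and_validate('"1:2:3:4:5:6:7:8') == '1:2:3:4:5:6:7:8'  # leading quote dropped
    assert clean_and_validate('00:16:45:00:46:91') == ''                # MAC address still rejected
```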
diff --git a/Packs/CommonScripts/doc_files/88423600-dd54b900-cda0-11ea-89f4-83981367659a.png b/Packs/CommonScripts/doc_files/88423600-dd54b900-cda0-11ea-89f4-83981367659a.png
new file mode 100644
index 000000000000..d56eadba7421
Binary files /dev/null and b/Packs/CommonScripts/doc_files/88423600-dd54b900-cda0-11ea-89f4-83981367659a.png differ
diff --git a/Packs/CommonScripts/doc_files/99517136-efc5b480-2986-11eb-879c-a0a88923c4b9.png b/Packs/CommonScripts/doc_files/99517136-efc5b480-2986-11eb-879c-a0a88923c4b9.png
new file mode 100644
index 000000000000..4a5060db329c
Binary files /dev/null and b/Packs/CommonScripts/doc_files/99517136-efc5b480-2986-11eb-879c-a0a88923c4b9.png differ
diff --git a/Packs/CommonScripts/doc_files/99517219-0409b180-2987-11eb-9aa4-7e96b2a12238.png b/Packs/CommonScripts/doc_files/99517219-0409b180-2987-11eb-9aa4-7e96b2a12238.png
new file mode 100644
index 000000000000..12ec9ee406b5
Binary files /dev/null and b/Packs/CommonScripts/doc_files/99517219-0409b180-2987-11eb-9aa4-7e96b2a12238.png differ
diff --git a/Packs/CommonScripts/doc_files/99517256-0f5cdd00-2987-11eb-8a1f-1dc41d166b42.png b/Packs/CommonScripts/doc_files/99517256-0f5cdd00-2987-11eb-8a1f-1dc41d166b42.png
new file mode 100644
index 000000000000..058f7089bed0
Binary files /dev/null and b/Packs/CommonScripts/doc_files/99517256-0f5cdd00-2987-11eb-8a1f-1dc41d166b42.png differ
diff --git a/Packs/CommonScripts/Scripts/SetGridField/doc_files/grid.png b/Packs/CommonScripts/doc_files/grid.png
similarity index 100%
rename from Packs/CommonScripts/Scripts/SetGridField/doc_files/grid.png
rename to Packs/CommonScripts/doc_files/grid.png
diff --git a/Packs/CommonScripts/Scripts/SetGridField/doc_files/grid_key_value_update.png b/Packs/CommonScripts/doc_files/grid_key_value_update.png
similarity index 100%
rename from Packs/CommonScripts/Scripts/SetGridField/doc_files/grid_key_value_update.png
rename to Packs/CommonScripts/doc_files/grid_key_value_update.png
diff --git a/Packs/CommonScripts/Scripts/SetGridField/doc_files/grid_list_update.png b/Packs/CommonScripts/doc_files/grid_list_update.png
similarity index 100%
rename from Packs/CommonScripts/Scripts/SetGridField/doc_files/grid_list_update.png
rename to Packs/CommonScripts/doc_files/grid_list_update.png
diff --git a/Packs/CommonScripts/Scripts/SetGridField/doc_files/nested_dict_grid.png b/Packs/CommonScripts/doc_files/nested_dict_grid.png
similarity index 100%
rename from Packs/CommonScripts/Scripts/SetGridField/doc_files/nested_dict_grid.png
rename to Packs/CommonScripts/doc_files/nested_dict_grid.png
diff --git a/Packs/CommonScripts/Scripts/SetGridField/doc_files/troubleshoot.png b/Packs/CommonScripts/doc_files/troubleshoot.png
similarity index 100%
rename from Packs/CommonScripts/Scripts/SetGridField/doc_files/troubleshoot.png
rename to Packs/CommonScripts/doc_files/troubleshoot.png
diff --git a/Packs/CommonScripts/pack_metadata.json b/Packs/CommonScripts/pack_metadata.json
index bcf03fb64fdd..c595b541f1e6 100644
--- a/Packs/CommonScripts/pack_metadata.json
+++ b/Packs/CommonScripts/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Scripts",
"description": "Frequently used scripts pack.",
"support": "xsoar",
- "currentVersion": "1.15.4",
+ "currentVersion": "1.15.25",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonTypes/.pack-ignore b/Packs/CommonTypes/.pack-ignore
index 91c409f2aa8c..858beb6785fd 100644
--- a/Packs/CommonTypes/.pack-ignore
+++ b/Packs/CommonTypes/.pack-ignore
@@ -342,4 +342,5 @@ Cyberint
DN
PEM
SPKI
-subkeys
\ No newline at end of file
+subkeys
+formatter
\ No newline at end of file
diff --git a/Packs/CommonTypes/IncidentFields/incidentfield-External_Last_Updated_Time.json b/Packs/CommonTypes/IncidentFields/incidentfield-External_Last_Updated_Time.json
new file mode 100644
index 000000000000..4b7249ed9c8b
--- /dev/null
+++ b/Packs/CommonTypes/IncidentFields/incidentfield-External_Last_Updated_Time.json
@@ -0,0 +1,28 @@
+{
+ "id": "incident_externallastupdatedtime",
+ "version": -1,
+ "modified": "2024-07-01T18:11:20.250915274Z",
+ "name": "External Last Updated Time",
+ "ownerOnly": false,
+ "cliName": "externallastupdatedtime",
+ "type": "date",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedToAll": true,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.0.0"
+}
\ No newline at end of file
diff --git a/Packs/CommonTypes/IndicatorTypes/reputation-ipv6.json b/Packs/CommonTypes/IndicatorTypes/reputation-ipv6.json
index 8a6b49106a4e..b1e31422e088 100644
--- a/Packs/CommonTypes/IndicatorTypes/reputation-ipv6.json
+++ b/Packs/CommonTypes/IndicatorTypes/reputation-ipv6.json
@@ -7,7 +7,7 @@
"commitMessage": "",
"shouldPublish": false,
"shouldCommit": false,
- "regex": "(?i)(?P(?:[0-9a-f]{1,4}:){7,7}[0-9a-f]{1,4})|(?Pfe80:(?::[0-9a-f]{0,4}){0,4}%[0-9a-zA-Z]+)|(?P::(?:ffff(?::0{1,4}){0,1}:){0,1}(?:(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])[.]){3,3}(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])|(?:[0-9a-f]{1,4}:){1,4}:(?:(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])[.]){3,3}(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9]))|(?P:(?:(?::[0-9a-f]{1,4}){1,7})|[0-9a-f]{1,4}:(?:(?::[0-9a-f]{1,4}){1,6})|(?:[0-9a-f]{1,4}:){1,2}(?::[0-9a-f]{1,4}){1,5}|(?:[0-9a-f]{1,4}:){1,3}(?::[0-9a-f]{1,4}){1,4}|(?:[0-9a-f]{1,4}:){1,4}(?::[0-9a-f]{1,4}){1,3}|(?:[0-9a-f]{1,4}:){1,5}(?::[0-9a-f]{1,4}){1,2}|(?:[0-9a-f]{1,4}:){1,6}:[0-9a-f]{1,4}|(?:[0-9a-f]{1,4}:){1,7}:)",
+ "regex": "(?i)(?:\\W+|^)(?:(?P(?:[0-9a-f]{1,4}:){7,7}[0-9a-f]{1,4})|(?Pfe80:(?::[0-9a-f]{0,4}){0,4}%[0-9a-z]+)|(?P::(?:ffff(?::0{1,4}){0,1}:){0,1}(?:(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])[.]){3,3}(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])|(?:[0-9a-f]{1,4}:){1,4}:(?:(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])[.]){3,3}(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9]))|(?P:(?:(?::[0-9a-f]{1,4}){1,7})|[0-9a-f]{1,4}:(?:(?::[0-9a-f]{1,4}){1,6})|(?:[0-9a-f]{1,4}:){1,2}(?::[0-9a-f]{1,4}){1,5}|(?:[0-9a-f]{1,4}:){1,3}(?::[0-9a-f]{1,4}){1,4}|(?:[0-9a-f]{1,4}:){1,4}(?::[0-9a-f]{1,4}){1,3}|(?:[0-9a-f]{1,4}:){1,5}(?::[0-9a-f]{1,4}){1,2}|(?:[0-9a-f]{1,4}:){1,6}:[0-9a-f]{1,4}|(?:[0-9a-f]{1,4}:){1,7}:))",
"details": "IPv6",
"prevDetails": "IPv6",
"reputationScriptName": "",
diff --git a/Packs/CommonTypes/ReleaseNotes/3_5_5.md b/Packs/CommonTypes/ReleaseNotes/3_5_5.md
new file mode 100644
index 000000000000..10b832ef9b32
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_5_5.md
@@ -0,0 +1,3 @@
+## Common Types
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonTypes/ReleaseNotes/3_5_6.md b/Packs/CommonTypes/ReleaseNotes/3_5_6.md
new file mode 100644
index 000000000000..ac62d58899a3
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_5_6.md
@@ -0,0 +1,7 @@
+
+#### Incident Fields
+
+##### New: External Last Updated Time
+
+- New: External Last Updated Time.
+
diff --git a/Packs/CommonTypes/ReleaseNotes/3_5_7.md b/Packs/CommonTypes/ReleaseNotes/3_5_7.md
new file mode 100644
index 000000000000..78848e85de86
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_5_7.md
@@ -0,0 +1,5 @@
+
+#### Indicator Types
+
+##### IPv6
+Updated the regex to capture an extra character before the IPv6 address (handled by the formatter).
diff --git a/Packs/CommonTypes/pack_metadata.json b/Packs/CommonTypes/pack_metadata.json
index 80a2a1197364..04ca8acd4e35 100644
--- a/Packs/CommonTypes/pack_metadata.json
+++ b/Packs/CommonTypes/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Types",
"description": "This Content Pack will get you up and running in no-time and provide you with the most commonly used incident & indicator fields and types.",
"support": "xsoar",
- "currentVersion": "3.5.4",
+ "currentVersion": "3.5.7",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonWidgets/ReleaseNotes/1_2_50.md b/Packs/CommonWidgets/ReleaseNotes/1_2_50.md
new file mode 100644
index 000000000000..f9e87e14b2b5
--- /dev/null
+++ b/Packs/CommonWidgets/ReleaseNotes/1_2_50.md
@@ -0,0 +1,3 @@
+## Common Widgets
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonWidgets/ReleaseNotes/1_2_51.md b/Packs/CommonWidgets/ReleaseNotes/1_2_51.md
new file mode 100644
index 000000000000..619e37f18ef8
--- /dev/null
+++ b/Packs/CommonWidgets/ReleaseNotes/1_2_51.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### MyToDoTasksWidget
+
+- Fixed an issue where the incident link was broken in SaaS platforms.
+- Updated the Docker image to: *demisto/python3:3.10.14.97100*.
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
index 02a6f104a9d8..22a86f0e1d33 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
@@ -5,6 +5,13 @@
MAX_ENTRIES = 30
+def get_clickable_incident_id(incident_id):
+ incident_id_url = os.path.join("Custom/caseinfoid", incident_id)
+ if not is_xsiam_or_xsoar_saas():
+ incident_id_url = f'#/{incident_id_url}'
+ return f'[{incident_id}]({incident_id_url})'
+
+
def get_open_to_do_tasks_of_current_user() -> List[Dict]:
body = {
"dataType": "todos",
@@ -28,8 +35,6 @@ def get_open_to_do_tasks_of_current_user() -> List[Dict]:
title = task.get('title', '')
description = task.get('description', '')
task_id = task.get('id', '')
- incident_id = task.get('incidentId', '')
- clickable_incident_id = f'[{incident_id}]({os.path.join("#/Custom/caseinfoid", incident_id)})'
if sla := task.get('dueDate', ''):
sla_dt = parse(sla)
assert sla_dt is not None, f'could not parse {sla}'
@@ -41,7 +46,7 @@ def get_open_to_do_tasks_of_current_user() -> List[Dict]:
'Task ID': task_id,
'SLA': sla,
'Opened By': opened_by,
- 'Incident ID': clickable_incident_id
+ 'Incident ID': get_clickable_incident_id(incident_id=task.get('incidentId', ''))
})
else:
demisto.error(f'Failed running POST query to /v2/statistics/widgets/query.\n{str(todo_tasks_query_res)}')
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
index 3800bfec00f9..3c0fcc694ef5 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
@@ -9,7 +9,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.97100
fromversion: 6.1.0
tests:
- No test
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
index 6e90c90ebba5..6b346f64d177 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
@@ -1,7 +1,8 @@
import json
import demistomock as demisto
-from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user
+from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user, get_clickable_incident_id
+import pytest
def test_open_to_do_tasks_of_current_user(mocker):
@@ -91,3 +92,18 @@ def test_no_open_to_do_tasks(mocker):
table = get_open_to_do_tasks_of_current_user()
assert len(table) == 0
+
+
+@pytest.mark.parametrize('is_xsoar_8_or_xsiam', [True, False])
+def test_clickable_incident_id(mocker, is_xsoar_8_or_xsiam):
+ '''
+ Given:
+ - incident id to create clickable_incident_id
+ When:
+ - Running clickable_incident_id in XSIAM/XSOAR 8 and XSOAR 6
+ Then:
+ - Ensure '#/' is in the created link only in XSOAR 6.
+ '''
+ import MyToDoTasksWidget
+ mocker.patch.object(MyToDoTasksWidget, 'is_xsiam_or_xsoar_saas', return_value=is_xsoar_8_or_xsiam)
+ assert ('#/' in get_clickable_incident_id('1234')) == (not is_xsoar_8_or_xsiam)
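The widget now builds the incident link differently for XSOAR 6 versus the SaaS platforms. A sketch of the two resulting link formats, with the platform check passed in as a parameter so it runs without `demistomock` (assumes a POSIX path separator):

```python
import os


def get_clickable_incident_id(incident_id: str, is_saas: bool) -> str:
    # Same logic as the widget, with the platform check injected for the sketch.
    incident_id_url = os.path.join("Custom/caseinfoid", incident_id)
    if not is_saas:
        incident_id_url = f"#/{incident_id_url}"
    return f"[{incident_id}]({incident_id_url})"


if __name__ == "__main__":
    # XSOAR 6 keeps the hash-based route; XSIAM / XSOAR 8 (SaaS) drop it.
    assert get_clickable_incident_id("1234", is_saas=False) == "[1234](#/Custom/caseinfoid/1234)"
    assert get_clickable_incident_id("1234", is_saas=True) == "[1234](Custom/caseinfoid/1234)"
```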
diff --git a/Packs/CommonWidgets/pack_metadata.json b/Packs/CommonWidgets/pack_metadata.json
index 7eb06011ed8e..0ca859d3be24 100644
--- a/Packs/CommonWidgets/pack_metadata.json
+++ b/Packs/CommonWidgets/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Widgets",
"description": "Frequently used widgets pack.",
"support": "xsoar",
- "currentVersion": "1.2.49",
+ "currentVersion": "1.2.51",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommunityCommonDashboards/Dashboards/CISOMetrics.json b/Packs/CommunityCommonDashboards/Dashboards/CISOMetrics.json
index 17191eb04cbb..ca5d7147ba3a 100644
--- a/Packs/CommunityCommonDashboards/Dashboards/CISOMetrics.json
+++ b/Packs/CommunityCommonDashboards/Dashboards/CISOMetrics.json
@@ -1495,5 +1495,8 @@
"version": -1,
"fromVersion": "6.0.0",
"description": "",
- "isPredefined": true
-}
+ "isPredefined": true,
+ "marketplaces": [
+ "xsoar_on_prem"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/CommunityCommonDashboards/ReleaseNotes/2_0_3.md b/Packs/CommunityCommonDashboards/ReleaseNotes/2_0_3.md
new file mode 100644
index 000000000000..fc3712221fd1
--- /dev/null
+++ b/Packs/CommunityCommonDashboards/ReleaseNotes/2_0_3.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### CISOMetrics
+
+Added the CISOMetrics dashboard to the Cortex XSOAR on-prem marketplace.
diff --git a/Packs/CommunityCommonDashboards/pack_metadata.json b/Packs/CommunityCommonDashboards/pack_metadata.json
index 00b2ebaa033b..17c0f7c5da0d 100644
--- a/Packs/CommunityCommonDashboards/pack_metadata.json
+++ b/Packs/CommunityCommonDashboards/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Community Common Dashboards",
"description": "A pack that contains community dashboards",
"support": "community",
- "currentVersion": "2.0.2",
+ "currentVersion": "2.0.3",
"author": "Randy Uhrlaub",
"url": "",
"email": "",
diff --git a/Packs/CommunityCommonScripts/.pack-ignore b/Packs/CommunityCommonScripts/.pack-ignore
index 88126a7b17be..fa8528d26db2 100644
--- a/Packs/CommunityCommonScripts/.pack-ignore
+++ b/Packs/CommunityCommonScripts/.pack-ignore
@@ -1,5 +1,5 @@
[file:pack_metadata.json]
-ignore=PA125
+ignore=PA125,BA111
[file:README.md]
ignore=RM106
\ No newline at end of file
diff --git a/Packs/CommunityCommonScripts/CONTRIBUTORS.json b/Packs/CommunityCommonScripts/CONTRIBUTORS.json
new file mode 100644
index 000000000000..72f841543a29
--- /dev/null
+++ b/Packs/CommunityCommonScripts/CONTRIBUTORS.json
@@ -0,0 +1 @@
+["Mandar Naik"]
diff --git a/Packs/CommunityCommonScripts/ReleaseNotes/1_2_2.md b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_2.md
new file mode 100644
index 000000000000..54bf86fc2012
--- /dev/null
+++ b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_2.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### Defang
+
+- New: Defangs IP addresses, email addresses, and URLs to prevent them from being recognized.
diff --git a/Packs/CommunityCommonScripts/ReleaseNotes/1_2_3.md b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_3.md
new file mode 100644
index 000000000000..9982caee0048
--- /dev/null
+++ b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_3.md
@@ -0,0 +1,12 @@
+
+#### Scripts
+
+##### MarkdownToHTML
+
+- Updated the Docker image to: *demisto/bs4-py3:1.0.0.100299*.
+##### PHash
+
+- Updated the Docker image to: *demisto/python-phash:1.0.0.100267*.
+##### jq
+
+- Updated the Docker image to: *demisto/jq:1.0.0.100247*.
diff --git a/Packs/CommunityCommonScripts/ReleaseNotes/1_2_4.md b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_4.md
new file mode 100644
index 000000000000..ec0abe3698ca
--- /dev/null
+++ b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_4.md
@@ -0,0 +1,81 @@
+
+#### Scripts
+
+##### DateTimeToADTime
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### MaxList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### StripAccentMarksFromString
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### GetFields
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### MapRegex
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcReturnSubnetNetwork
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### DisplayTaggedWarroomEntries
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### BatchData
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### ConvertUTCEpochTimeToTimeStamp
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcReturnAddressIANAAllocation
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### SSLVerifier
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CreateArrayWithDuplicates
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### RandomElementFromList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### GetFilePathPreProcessing
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### InvertEveryTwoItems
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcReturnSubnetBroadcastAddress
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcCheckSubnetCollision
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CreateFileFromPathObject
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### CompareList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcReturnAddressBinary
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### isArrayItemInList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### IPCalcReturnSubnetAddresses
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### MinList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### delete_expired_indicator_with_exlusion
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### DisplayIndicatorReputationContent
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### RandomPhotoNasa
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/CommunityCommonScripts/ReleaseNotes/1_2_5.md b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_5.md
new file mode 100644
index 000000000000..585ab65d845a
--- /dev/null
+++ b/Packs/CommunityCommonScripts/ReleaseNotes/1_2_5.md
@@ -0,0 +1,15 @@
+
+#### Scripts
+
+##### VersionGreaterThan
+
+- Updated the Docker image to: *demisto/powershell:7.4.0.80528*.
+##### CalculateTimeSpan
+
+- Updated the Docker image to: *demisto/powershell:7.4.0.80528*.
+##### VersionEqualTo
+
+- Updated the Docker image to: *demisto/powershell:7.4.0.80528*.
+##### VersionLessThan
+
+- Updated the Docker image to: *demisto/powershell:7.4.0.80528*.
diff --git a/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.py b/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.py
index a0d4cc2a8d1f..e687918c3182 100644
--- a/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.py
+++ b/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.py
@@ -8,7 +8,7 @@
list_of_items = list(list_of_items.split(","))
batch_size = int(batch_size)
-batch_list = list()
+batch_list = []
for i in range(0, len(list_of_items), batch_size):
batch_list.append(list_of_items[i:i + batch_size])
diff --git a/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.yml b/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.yml
index 3cbfac200fe5..0b7526bd7572 100644
--- a/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.yml
+++ b/Packs/CommunityCommonScripts/Scripts/BatchData/BatchData.yml
@@ -14,8 +14,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: BatchData
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/CalculateTimeSpan/CalculateTimeSpan.yml b/Packs/CommunityCommonScripts/Scripts/CalculateTimeSpan/CalculateTimeSpan.yml
index f34d071e2e08..b96c4671e421 100644
--- a/Packs/CommunityCommonScripts/Scripts/CalculateTimeSpan/CalculateTimeSpan.yml
+++ b/Packs/CommunityCommonScripts/Scripts/CalculateTimeSpan/CalculateTimeSpan.yml
@@ -22,7 +22,7 @@ comment: |-
commonfields:
id: CalculateTimeSpan
version: -1
-dockerimage: demisto/powershell:7.2.1.26295
+dockerimage: demisto/powershell:7.4.0.80528
enabled: true
name: CalculateTimeSpan
outputs:
diff --git a/Packs/CommunityCommonScripts/Scripts/CompareList/CompareList.yml b/Packs/CommunityCommonScripts/Scripts/CompareList/CompareList.yml
index fd6f275e6d19..c94e4de1d21c 100644
--- a/Packs/CommunityCommonScripts/Scripts/CompareList/CompareList.yml
+++ b/Packs/CommunityCommonScripts/Scripts/CompareList/CompareList.yml
@@ -1,15 +1,15 @@
args:
- name: list1_name
required: true
- description: "First list name to compare."
+ description: First list name to compare.
- name: list2_name
required: true
- description: "Second list name to compare."
+ description: Second list name to compare.
commonfields:
id: CompareList
version: -1
-dockerimage: demisto/python3:3.10.12.63474
-comment: "Compares two lists."
+dockerimage: demisto/python3:3.10.14.100715
+comment: Compares two lists.
enabled: true
name: CompareList
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/ConvertUTCEpochTimeToTimeStamp/ConvertUTCEpochTimeToTimeStamp.yml b/Packs/CommunityCommonScripts/Scripts/ConvertUTCEpochTimeToTimeStamp/ConvertUTCEpochTimeToTimeStamp.yml
index e52ae3522109..a3c982805b4b 100644
--- a/Packs/CommunityCommonScripts/Scripts/ConvertUTCEpochTimeToTimeStamp/ConvertUTCEpochTimeToTimeStamp.yml
+++ b/Packs/CommunityCommonScripts/Scripts/ConvertUTCEpochTimeToTimeStamp/ConvertUTCEpochTimeToTimeStamp.yml
@@ -16,8 +16,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: ConvertUTCEpochTimeToTimeStamp
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/CreateArrayWithDuplicates/CreateArrayWithDuplicates.yml b/Packs/CommunityCommonScripts/Scripts/CreateArrayWithDuplicates/CreateArrayWithDuplicates.yml
index 6639d9280555..bd7e39f20039 100644
--- a/Packs/CommunityCommonScripts/Scripts/CreateArrayWithDuplicates/CreateArrayWithDuplicates.yml
+++ b/Packs/CommunityCommonScripts/Scripts/CreateArrayWithDuplicates/CreateArrayWithDuplicates.yml
@@ -8,19 +8,20 @@ args:
name: separator
- description: The key to place result array in context, by default will be "array".
name: contextKey
-comment: |-
- Will create an array object in context from a given string input , allowing for duplicate values to be retained
+comment: 'Will create an array object in context from a given string input, allowing for duplicate values to be retained
+
Output is to ContextKey.array as JSON does not permit duplicate key names
- e.g., ContextKey.array.value1, ContextKey.array.value2, ContextKey.array.value3, etc.
+
+ e.g., ContextKey.array.value1, ContextKey.array.value2, ContextKey.array.value3, etc.'
commonfields:
id: CreateArrayWithDuplicates
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CreateArrayWithDuplicates
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/CreateFileFromPathObject/CreateFileFromPathObject.yml b/Packs/CommunityCommonScripts/Scripts/CreateFileFromPathObject/CreateFileFromPathObject.yml
index 61701b043819..4e930ca41b8f 100644
--- a/Packs/CommunityCommonScripts/Scripts/CreateFileFromPathObject/CreateFileFromPathObject.yml
+++ b/Packs/CommunityCommonScripts/Scripts/CreateFileFromPathObject/CreateFileFromPathObject.yml
@@ -10,7 +10,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ''
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: CreateFileFromPathObject
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/DateTimeToADTime/DateTimeToADTime.yml b/Packs/CommunityCommonScripts/Scripts/DateTimeToADTime/DateTimeToADTime.yml
index 32f84ec61374..36813c97f87f 100644
--- a/Packs/CommunityCommonScripts/Scripts/DateTimeToADTime/DateTimeToADTime.yml
+++ b/Packs/CommunityCommonScripts/Scripts/DateTimeToADTime/DateTimeToADTime.yml
@@ -1,5 +1,5 @@
args:
-- defaultValue: "0"
+- defaultValue: '0'
   description: Number of days before today's date
name: days_ago
required: true
@@ -7,7 +7,7 @@ comment: Converts unix time to AD Integer8 time. This is used in many AD date fi
commonfields:
id: DateTimeToADTime
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: DateTimeToADTime
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/Defang/Defang.py b/Packs/CommunityCommonScripts/Scripts/Defang/Defang.py
new file mode 100644
index 000000000000..adfc1f773342
--- /dev/null
+++ b/Packs/CommunityCommonScripts/Scripts/Defang/Defang.py
@@ -0,0 +1,92 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+import re
+
+
+def defang(content, defang_options, mail_options, url_options):
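+    """Defang the requested indicator types inside the given text.
+
+    :param content: the text to defang.
+    :param defang_options: which indicator types to defang ("ip", "mail", "url").
+    :param mail_options: which email characters to wrap ("dot", "at").
+    :param url_options: which URL parts to defang ("dot", "http", "colon").
+    :return: a tuple of the defanged text and the context outputs dict.
+    """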
+ if "ip" in defang_options:
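+        # IPv4 addresses: replace each '.' with '[.]'.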
+ ip_regex = r"(\b25[0-5]|\b2[0-4][0-9]|\b[01]?[0-9][0-9]?)(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}"
+ content = re.sub(
+ ip_regex, lambda match: match.group(0).replace(".", "[.]"), content
+ )
+
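+    # Email addresses: wrap '.' and/or '@' in brackets, depending on mail_options.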
+ if "mail" in defang_options:
+ mail_regex = r"[^@ \t\r\n]+@[^@ \t\r\n]+\.[^@ \t\r\n]+"
+ if "dot" in mail_options and "at" in mail_options:
+ content = re.sub(
+ mail_regex,
+ lambda match: match.group(0).replace(".", "[.]").replace("@", "[@]"),
+ content,
+ )
+ elif "dot" in mail_options:
+ content = re.sub(
+ mail_regex, lambda match: match.group(0).replace(".", "[.]"), content
+ )
+ elif "at" in mail_options:
+ content = re.sub(
+ mail_regex, lambda match: match.group(0).replace("@", "[@]"), content
+ )
+
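+    # URLs: wrap '.', rewrite 'https' to 'hxxps', and/or bracket '://', depending on url_options.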
+ if "url" in defang_options:
+ url_regex = r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)"
+ if "dot" in url_options and "http" in url_options and "colon" in url_options:
+ content = re.sub(
+ url_regex,
+ lambda match: match.group(0)
+ .replace(".", "[.]")
+ .replace("https", "hxxps")
+ .replace("://", "[://]"),
+ content,
+ )
+ elif "dot" in url_options and "http" in url_options:
+ content = re.sub(
+ url_regex,
+ lambda match: match.group(0)
+ .replace(".", "[.]")
+ .replace("https", "hxxps"),
+ content,
+ )
+ elif "dot" in url_options and "colon" in url_options:
+ content = re.sub(
+ url_regex,
+ lambda match: match.group(0)
+ .replace(".", "[.]")
+ .replace("://", "[://]"),
+ content,
+ )
+ elif "http" in url_options and "colon" in url_options:
+ content = re.sub(
+ url_regex,
+ lambda match: match.group(0)
+ .replace("https", "hxxps")
+ .replace("://", "[://]"),
+ content,
+ )
+ elif "dot" in url_options:
+ content = re.sub(
+ url_regex, lambda match: match.group(0).replace(".", "[.]"), content
+ )
+ elif "http" in url_options:
+ content = re.sub(
+ url_regex,
+ lambda match: match.group(0).replace("https", "hxxps"),
+ content,
+ )
+ elif "colon" in url_options:
+ content = re.sub(
+ url_regex, lambda match: match.group(0).replace("://", "[://]"), content
+ )
+
+ outputs = {"Defang": {"output": content}}
+
+ return content, outputs
+
+
+if __name__ in ("__main__", "builtins", "__builtin__"):
+ try:
+        input_value = demisto.args().get("input")
+        defang_options = demisto.args().get("defang_options")
+        mail_options = demisto.args().get("mail_options")
+        url_options = demisto.args().get("url_options")
+        return_outputs(*defang(input_value, defang_options, mail_options, url_options))
+ except Exception as e:
+ return_error(f"Error occurred while running the command. Exception info:\n{str(e)}")
diff --git a/Packs/CommunityCommonScripts/Scripts/Defang/Defang.yml b/Packs/CommunityCommonScripts/Scripts/Defang/Defang.yml
new file mode 100644
index 000000000000..00c89e09118a
--- /dev/null
+++ b/Packs/CommunityCommonScripts/Scripts/Defang/Defang.yml
@@ -0,0 +1,53 @@
+args:
+- name: input
+ required: true
+ description: The input to be defanged.
+ type: unknown
+- name: defang_options
+ default: true
+ auto: PREDEFINED
+ predefined:
+ - ip
+ - mail
+ - url
+ description: Specify which IOC needs defanging.
+ isArray: true
+ defaultValue: ip, mail, url
+- name: mail_options
+ auto: PREDEFINED
+ predefined:
+ - dot
+ - at
+  description: Mail defanging can defang only the '.', only the '@', or both.
+ isArray: true
+ defaultValue: dot, at
+- name: url_options
+ auto: PREDEFINED
+ predefined:
+ - dot
+ - http
+ - colon
+  description: URL defanging can defang the '.', the 'https' scheme, the '://', or all of them.
+ isArray: true
+ defaultValue: dot, http, colon
+outputs:
+- contextPath: Defang.output
+ description: The defanged output.
+ type: string
+comment: Defangs IP, mail, and URL addresses to prevent them from being recognized.
+commonfields:
+ id: Defang
+ version: -1
+dockerimage: demisto/python3:3.10.14.99474
+enabled: true
+name: Defang
+runas: DBotWeakRole
+script: ''
+scripttarget: 0
+subtype: python3
+tags:
+- Utilities
+type: python
+fromversion: 6.1.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CommunityCommonScripts/Scripts/Defang/README.md b/Packs/CommunityCommonScripts/Scripts/Defang/README.md
new file mode 100755
index 000000000000..a6a26d399888
--- /dev/null
+++ b/Packs/CommunityCommonScripts/Scripts/Defang/README.md
@@ -0,0 +1,30 @@
+Defangs IP, mail, and URL addresses to prevent them from being recognized.
+
+## Script Data
+
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | Utilities |
+| Cortex XSOAR Version | 6.1.0 |
+
+## Inputs
+
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| input | The input to be defanged. |
+| defang_options | Specify which IOC needs defanging. |
+| mail_options | Mail defanging can defang only the '.', only the '@', or both. |
+| url_options | URL defanging can defang the '.', the 'https' scheme, the '://', or all of them. |
+
+## Outputs
+
+---
+
+| **Path** | **Description** | **Type** |
+| --- | --- | --- |
+| Defang.output | The defanged output | string |
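
To make the new script's behavior concrete, here is a minimal usage sketch of the defang() helper added above. It assumes defang() is imported in a context where demistomock and CommonServerPython can be resolved (as in the repo's test setup); the sample text and indicators are purely illustrative.

    from Defang import defang

    text = "Visit https://evil.example.com/path or mail admin@example.com from 192.168.1.10"
    readable, outputs = defang(text, "ip, mail, url", "dot, at", "dot, http, colon")
    print(readable)
    # Expected (roughly):
    # Visit hxxps[://]evil[.]example[.]com/path or mail admin[@]example[.]com from 192[.]168[.]1[.]10
    assert outputs == {"Defang": {"output": readable}}
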
diff --git a/Packs/CommunityCommonScripts/Scripts/DeleteExpiredIndicatorWithExlusion/DeleteExpiredIndicatorWithExlusion.yml b/Packs/CommunityCommonScripts/Scripts/DeleteExpiredIndicatorWithExlusion/DeleteExpiredIndicatorWithExlusion.yml
index 5dac140ecd32..466fbd9e3684 100644
--- a/Packs/CommunityCommonScripts/Scripts/DeleteExpiredIndicatorWithExlusion/DeleteExpiredIndicatorWithExlusion.yml
+++ b/Packs/CommunityCommonScripts/Scripts/DeleteExpiredIndicatorWithExlusion/DeleteExpiredIndicatorWithExlusion.yml
@@ -3,8 +3,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
comment: deletes expired indicators.
enabled: true
name: delete_expired_indicator_with_exlusion
diff --git a/Packs/CommunityCommonScripts/Scripts/DisplayIndicatorReputationContent/DisplayIndicatorReputationContent.yml b/Packs/CommunityCommonScripts/Scripts/DisplayIndicatorReputationContent/DisplayIndicatorReputationContent.yml
index a821b890ea78..a2f0035c0def 100644
--- a/Packs/CommunityCommonScripts/Scripts/DisplayIndicatorReputationContent/DisplayIndicatorReputationContent.yml
+++ b/Packs/CommunityCommonScripts/Scripts/DisplayIndicatorReputationContent/DisplayIndicatorReputationContent.yml
@@ -1,11 +1,11 @@
commonfields:
id: DisplayIndicatorReputationContent
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: DisplayIndicatorReputationContent
runas: DBotWeakRole
-comment: 'Display the indicator context object in markdown format in a dynamic section layout'
+comment: Display the indicator context object in markdown format in a dynamic section layout
script: ''
scripttarget: 0
subtype: python3
diff --git a/Packs/CommunityCommonScripts/Scripts/DisplayTaggedWarroomEntries/DisplayTaggedWarroomEntries.yml b/Packs/CommunityCommonScripts/Scripts/DisplayTaggedWarroomEntries/DisplayTaggedWarroomEntries.yml
index 86f139dfba06..9bafd11dd7f6 100644
--- a/Packs/CommunityCommonScripts/Scripts/DisplayTaggedWarroomEntries/DisplayTaggedWarroomEntries.yml
+++ b/Packs/CommunityCommonScripts/Scripts/DisplayTaggedWarroomEntries/DisplayTaggedWarroomEntries.yml
@@ -5,9 +5,9 @@ args:
commonfields:
id: DisplayTaggedWarroomEntries
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
-comment: "Display warroom entries in a dynamic section which are tagged with 'report'"
+comment: Display War Room entries that are tagged with 'report' in a dynamic section.
name: DisplayTaggedWarroomEntries
runas: DBotWeakRole
script: ''
diff --git a/Packs/CommunityCommonScripts/Scripts/GetFields/GetFields.yml b/Packs/CommunityCommonScripts/Scripts/GetFields/GetFields.yml
index 667cc6f24c89..73db679889fd 100644
--- a/Packs/CommunityCommonScripts/Scripts/GetFields/GetFields.yml
+++ b/Packs/CommunityCommonScripts/Scripts/GetFields/GetFields.yml
@@ -7,22 +7,22 @@ args:
description: The field to extract from each item (Optional).
name: getField
predefined:
- - ""
+ - ''
- auto: PREDEFINED
- defaultValue: "false"
+ defaultValue: 'false'
description: Whether the argument should be saved as a string (Optional).
name: stringify
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
comment: Retrieves fields from an object using dot notation
commonfields:
id: GetFields
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: GetFields
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/GetFilePathPreProcessing/GetFilePathPreProcessing.yml b/Packs/CommunityCommonScripts/Scripts/GetFilePathPreProcessing/GetFilePathPreProcessing.yml
index e443a3536e83..b5fd53437265 100644
--- a/Packs/CommunityCommonScripts/Scripts/GetFilePathPreProcessing/GetFilePathPreProcessing.yml
+++ b/Packs/CommunityCommonScripts/Scripts/GetFilePathPreProcessing/GetFilePathPreProcessing.yml
@@ -1,14 +1,15 @@
-comment: |-
- This is a pre-processing script that is used to create the attachments of incoming incidents in an existing incident, then drop the incoming incident.
+comment: 'This is a pre-processing script that is used to create the attachments of incoming incidents in an existing incident, then drop the incoming incident.
+
It should be configured as a pre-processing rule, and the logic for finding the right incident should be added to the code manually.
- The automation collects the paths and names of the attachments of the incoming incident and passes it to the "CreateFileFromPathObject" automation that is being executed on the existing incident
+
+  The automation collects the paths and names of the attachments of the incoming incident and passes them to the "CreateFileFromPathObject" automation, which is executed on the existing incident.'
commonfields:
id: GetFilePathPreProcessing
version: -1
contentitemexportablefields:
contentitemfields:
fromServerVersion: ''
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: GetFilePathPreProcessing
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcCheckSubnetCollision/IPCalcCheckSubnetCollision.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcCheckSubnetCollision/IPCalcCheckSubnetCollision.yml
index 25dcaf20d3bb..43a73d0978c6 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcCheckSubnetCollision/IPCalcCheckSubnetCollision.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcCheckSubnetCollision/IPCalcCheckSubnetCollision.yml
@@ -24,7 +24,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressBinary/IPCalcReturnAddressBinary.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressBinary/IPCalcReturnAddressBinary.yml
index 56b302ea4c4f..9976771f1851 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressBinary/IPCalcReturnAddressBinary.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressBinary/IPCalcReturnAddressBinary.yml
@@ -18,7 +18,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressIANAAllocation/IPCalcReturnAddressIANAAllocation.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressIANAAllocation/IPCalcReturnAddressIANAAllocation.yml
index 56db5c149a8c..f515edb65ac6 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressIANAAllocation/IPCalcReturnAddressIANAAllocation.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnAddressIANAAllocation/IPCalcReturnAddressIANAAllocation.yml
@@ -18,7 +18,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetAddresses/IPCalcReturnSubnetAddresses.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetAddresses/IPCalcReturnSubnetAddresses.yml
index 0245e7b9e89a..b3cc5581fc83 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetAddresses/IPCalcReturnSubnetAddresses.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetAddresses/IPCalcReturnSubnetAddresses.yml
@@ -15,7 +15,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetBroadcastAddress/IPCalcReturnSubnetBroadcastAddress.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetBroadcastAddress/IPCalcReturnSubnetBroadcastAddress.yml
index e67d89183cb4..43f75a1218e2 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetBroadcastAddress/IPCalcReturnSubnetBroadcastAddress.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetBroadcastAddress/IPCalcReturnSubnetBroadcastAddress.yml
@@ -15,7 +15,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetNetwork/IPCalcReturnSubnetNetwork.yml b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetNetwork/IPCalcReturnSubnetNetwork.yml
index 7c012e27616a..6017afcf2351 100644
--- a/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetNetwork/IPCalcReturnSubnetNetwork.yml
+++ b/Packs/CommunityCommonScripts/Scripts/IPCalcReturnSubnetNetwork/IPCalcReturnSubnetNetwork.yml
@@ -15,7 +15,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
tests:
- No tests
fromversion: 6.0.0
diff --git a/Packs/CommunityCommonScripts/Scripts/InvertEveryTwoItems/InvertEveryTwoItems.yml b/Packs/CommunityCommonScripts/Scripts/InvertEveryTwoItems/InvertEveryTwoItems.yml
index acea2e81a83d..00516aa27adb 100644
--- a/Packs/CommunityCommonScripts/Scripts/InvertEveryTwoItems/InvertEveryTwoItems.yml
+++ b/Packs/CommunityCommonScripts/Scripts/InvertEveryTwoItems/InvertEveryTwoItems.yml
@@ -1,15 +1,15 @@
args:
- isArray: true
name: value
- description: "Input list"
+ description: Input list
comment: "This transformer will invert every two items in an array.\nExample: \n[\"A\", \"B\", \"C\", \"D\"]\nResult:\n[\"B\", \"A\", \"D\", \"C\"]\n\nIf the total of items in the array is an odd number the last item will be removed\nExample:\n[\"A\", \"B\", \"C\", \"D\", \"E\"]\nResult:\n[\"B\", \"A\", \"D\", \"C\"]\n\nIf the item is not an array the output will be same passed object."
commonfields:
id: InvertEveryTwoItems
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: InvertEveryTwoItems
runas: DBotWeakRole
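
As a side note, here is a hedged sketch of the pair-inversion behavior the InvertEveryTwoItems comment above describes; the helper is illustrative only and is not the script's actual code.

    def invert_pairs(items):
        # Swap every two adjacent items; a trailing odd item is dropped and
        # non-list input is returned unchanged, matching the documented behavior.
        if not isinstance(items, list):
            return items
        pairs = zip(items[0::2], items[1::2])
        return [x for a, b in pairs for x in (b, a)]

    print(invert_pairs(["A", "B", "C", "D", "E"]))  # -> ['B', 'A', 'D', 'C']
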
diff --git a/Packs/CommunityCommonScripts/Scripts/Jq/Jq.yml b/Packs/CommunityCommonScripts/Scripts/Jq/Jq.yml
index 75b33acc578b..804004243dfc 100644
--- a/Packs/CommunityCommonScripts/Scripts/Jq/Jq.yml
+++ b/Packs/CommunityCommonScripts/Scripts/Jq/Jq.yml
@@ -14,7 +14,7 @@ contentitemexportablefields:
fromServerVersion: ''
dependson:
must: []
-dockerimage: demisto/jq:1.0.0.24037
+dockerimage: demisto/jq:1.0.0.100247
enabled: true
name: jq
outputs:
diff --git a/Packs/CommunityCommonScripts/Scripts/MapRegex/MapRegex.yml b/Packs/CommunityCommonScripts/Scripts/MapRegex/MapRegex.yml
index af60c624e2a8..edcf090b2639 100644
--- a/Packs/CommunityCommonScripts/Scripts/MapRegex/MapRegex.yml
+++ b/Packs/CommunityCommonScripts/Scripts/MapRegex/MapRegex.yml
@@ -4,24 +4,11 @@ args:
required: true
- description: A JSON dictionary that contains key:value pairs that represent the "Outcome":"Regex".
name: json_regex
-comment: |-
- This transformer will take in a value and transform it based on multiple regular expressions defined in a JSON dictionary structure. The key:value pair of the JSON dictionary should be:
-
- "desired outcome": "regex to match"
-
- For example:
-
- {
- "Match 1": ".*match 1.*",
- "Match 2": ".*match 2.*",
- "Catch all": ".*"
- }
-
- The transformer will match in order of dictionary entries.
+comment: "This transformer will take in a value and transform it based on multiple regular expressions defined in a JSON dictionary structure. The key:value pair of the JSON dictionary should be:\n\n\"desired outcome\": \"regex to match\"\n\nFor example:\n\n{\n \"Match 1\": \".*match 1.*\",\n \"Match 2\": \".*match 2.*\",\n \"Catch all\": \".*\"\n}\n\nThe transformer will match in order of dictionary entries."
commonfields:
id: MapRegex
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: MapRegex
runas: DBotWeakRole
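
For context, a minimal sketch of the first-match-wins mapping that the MapRegex comment above describes; the helper below is illustrative and not the script's actual implementation.

    import re

    def map_regex(value, json_regex):
        # Try each "outcome": "regex" pair in order and return the first outcome
        # whose regex matches; fall back to the original value otherwise.
        for outcome, pattern in json_regex.items():
            if re.match(pattern, value):
                return outcome
        return value

    mapping = {"Match 1": ".*match 1.*", "Match 2": ".*match 2.*", "Catch all": ".*"}
    print(map_regex("this is match 2", mapping))  # -> "Match 2"
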
diff --git a/Packs/CommunityCommonScripts/Scripts/MarkdownToHTML/MarkdownToHTML.yml b/Packs/CommunityCommonScripts/Scripts/MarkdownToHTML/MarkdownToHTML.yml
index 43b6e0bf2717..128ba5c0a39e 100644
--- a/Packs/CommunityCommonScripts/Scripts/MarkdownToHTML/MarkdownToHTML.yml
+++ b/Packs/CommunityCommonScripts/Scripts/MarkdownToHTML/MarkdownToHTML.yml
@@ -36,7 +36,7 @@ type: python
contentitemexportablefields:
contentitemfields:
fromServerVersion: ''
-dockerimage: demisto/bs4-py3:1.0.0.30051
+dockerimage: demisto/bs4-py3:1.0.0.100299
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/CommunityCommonScripts/Scripts/MaxList/MaxList.yml b/Packs/CommunityCommonScripts/Scripts/MaxList/MaxList.yml
index 3ed66fac3028..38a93389f306 100644
--- a/Packs/CommunityCommonScripts/Scripts/MaxList/MaxList.yml
+++ b/Packs/CommunityCommonScripts/Scripts/MaxList/MaxList.yml
@@ -8,7 +8,7 @@ comment: "Gets the maximum value from list\ne.g. [\"25\", \"10\", \"25\"] => \"
commonfields:
id: MaxList
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: MaxList
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/MinList/MinList.yml b/Packs/CommunityCommonScripts/Scripts/MinList/MinList.yml
index e5a6f3881904..ddc00b80a071 100644
--- a/Packs/CommunityCommonScripts/Scripts/MinList/MinList.yml
+++ b/Packs/CommunityCommonScripts/Scripts/MinList/MinList.yml
@@ -8,7 +8,7 @@ comment: "Gets the minimum value from list\ne.g. [\"25\", \"10\", \"25\"] => \"
commonfields:
id: MinList
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: MinList
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/PHash/PHash.yml b/Packs/CommunityCommonScripts/Scripts/PHash/PHash.yml
index c3e6a80e4f23..7ea13e598541 100644
--- a/Packs/CommunityCommonScripts/Scripts/PHash/PHash.yml
+++ b/Packs/CommunityCommonScripts/Scripts/PHash/PHash.yml
@@ -8,8 +8,8 @@ commonfields:
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python-phash:1.0.0.25389
+ fromServerVersion: ''
+dockerimage: demisto/python-phash:1.0.0.100267
enabled: true
name: PHash
outputs:
diff --git a/Packs/CommunityCommonScripts/Scripts/RandomElementFromList/RandomElementFromList.yml b/Packs/CommunityCommonScripts/Scripts/RandomElementFromList/RandomElementFromList.yml
index fb4a31cd4436..d7614a39b059 100644
--- a/Packs/CommunityCommonScripts/Scripts/RandomElementFromList/RandomElementFromList.yml
+++ b/Packs/CommunityCommonScripts/Scripts/RandomElementFromList/RandomElementFromList.yml
@@ -11,7 +11,7 @@ comment: randomly select elements from a list in Python
commonfields:
id: RandomElementFromList
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: RandomElementFromList
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/RandomPhotoNasa/RandomPhotoNasa.yml b/Packs/CommunityCommonScripts/Scripts/RandomPhotoNasa/RandomPhotoNasa.yml
index df90bab7862a..69b9f7be8e17 100644
--- a/Packs/CommunityCommonScripts/Scripts/RandomPhotoNasa/RandomPhotoNasa.yml
+++ b/Packs/CommunityCommonScripts/Scripts/RandomPhotoNasa/RandomPhotoNasa.yml
@@ -5,7 +5,7 @@ args:
commonfields:
id: RandomPhotoNasa
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: RandomPhotoNasa
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/SSLVerifier/SSLVerifier.yml b/Packs/CommunityCommonScripts/Scripts/SSLVerifier/SSLVerifier.yml
index 6a049355ee32..28bfd71c0c72 100644
--- a/Packs/CommunityCommonScripts/Scripts/SSLVerifier/SSLVerifier.yml
+++ b/Packs/CommunityCommonScripts/Scripts/SSLVerifier/SSLVerifier.yml
@@ -14,7 +14,7 @@ args:
- name: Port
default: true
description: Port to check
- defaultValue: "443"
+ defaultValue: '443'
outputs:
- contextPath: SSLVerifier.Certificate.Expiry
description: Time of expiration
@@ -30,7 +30,7 @@ outputs:
type: string
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
runas: DBotWeakRole
fromversion: 5.0.0
tests:
diff --git a/Packs/CommunityCommonScripts/Scripts/StripAccentMarksFromString/StripAccentMarksFromString.yml b/Packs/CommunityCommonScripts/Scripts/StripAccentMarksFromString/StripAccentMarksFromString.yml
index b0d1a4af1f31..2e0e25e8c9d6 100644
--- a/Packs/CommunityCommonScripts/Scripts/StripAccentMarksFromString/StripAccentMarksFromString.yml
+++ b/Packs/CommunityCommonScripts/Scripts/StripAccentMarksFromString/StripAccentMarksFromString.yml
@@ -2,14 +2,14 @@ args:
- name: value
required: true
description: Value to strip accent marks from
-comment: |-
+comment: |-
Strip accent marks (diacritics) from a given string.
For example: "Niño שָׁלוֹם Montréal اَلسَّلَامُ عَلَيْكُمْ"
Will return: "Nino שלום Montreal السلام عليكم"
commonfields:
id: StripAccentMarksFromString
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: StripAccentMarksFromString
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/VersionEqualTo/VersionEqualTo.yml b/Packs/CommunityCommonScripts/Scripts/VersionEqualTo/VersionEqualTo.yml
index 05a8380257bc..69f9f92b1d95 100644
--- a/Packs/CommunityCommonScripts/Scripts/VersionEqualTo/VersionEqualTo.yml
+++ b/Packs/CommunityCommonScripts/Scripts/VersionEqualTo/VersionEqualTo.yml
@@ -5,14 +5,13 @@ args:
- description: Version number to compare against left side
name: right
required: true
-comment: |
+comment: |-
Tests whether left side version number is equal to right side version number.
-
Version numbers need to have at least a major and minor version component to be considered valid. E.g. 1.0
commonfields:
id: VersionEqualTo
version: -1
-dockerimage: demisto/powershell:7.2.1.26295
+dockerimage: demisto/powershell:7.4.0.80528
enabled: true
name: VersionEqualTo
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/VersionGreaterThan/VersionGreaterThan.yml b/Packs/CommunityCommonScripts/Scripts/VersionGreaterThan/VersionGreaterThan.yml
index 71332952b6c4..86ec86d04866 100644
--- a/Packs/CommunityCommonScripts/Scripts/VersionGreaterThan/VersionGreaterThan.yml
+++ b/Packs/CommunityCommonScripts/Scripts/VersionGreaterThan/VersionGreaterThan.yml
@@ -5,14 +5,14 @@ args:
- description: Version number to compare against left side
name: right
required: true
-comment: |-
- Tests whether left side version number is greater than right side version number.
+comment: 'Tests whether left side version number is greater than right side version number.
- Version numbers need to have at least a major and minor version component to be considered valid. E.g. 1.0
+
+ Version numbers need to have at least a major and minor version component to be considered valid. E.g. 1.0'
commonfields:
id: VersionGreaterThan
version: -1
-dockerimage: demisto/powershell:7.2.1.26295
+dockerimage: demisto/powershell:7.4.0.80528
enabled: true
name: VersionGreaterThan
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/VersionLessThan/VersionLessThan.yml b/Packs/CommunityCommonScripts/Scripts/VersionLessThan/VersionLessThan.yml
index 05a5a1d12533..61d5579adf1b 100644
--- a/Packs/CommunityCommonScripts/Scripts/VersionLessThan/VersionLessThan.yml
+++ b/Packs/CommunityCommonScripts/Scripts/VersionLessThan/VersionLessThan.yml
@@ -5,14 +5,14 @@ args:
- description: Version number to compare against left side
name: right
required: true
-comment: |-
- Tests whether left side version number is less than right side version number.
+comment: 'Tests whether left side version number is less than right side version number.
- Version numbers need to have at least a major and minor version component to be considered valid. E.g. 1.0
+
+ Version numbers need to have at least a major and minor version component to be considered valid. E.g. 1.0'
commonfields:
id: VersionLessThan
version: -1
-dockerimage: demisto/powershell:7.2.1.26295
+dockerimage: demisto/powershell:7.4.0.80528
enabled: true
name: VersionLessThan
runas: DBotWeakRole
diff --git a/Packs/CommunityCommonScripts/Scripts/isArrayItemInList/IsArrayItemInList.yml b/Packs/CommunityCommonScripts/Scripts/isArrayItemInList/IsArrayItemInList.yml
index f39709366f8b..d0705a26a28a 100644
--- a/Packs/CommunityCommonScripts/Scripts/isArrayItemInList/IsArrayItemInList.yml
+++ b/Packs/CommunityCommonScripts/Scripts/isArrayItemInList/IsArrayItemInList.yml
@@ -6,17 +6,18 @@ args:
- description: the XSOAR system list name.
name: listName
required: true
-comment: |-
- This automation is for comparing array(list) data of context to existing lists on XSOAR server. You can avoid using loop of sub-playbook.
+comment: 'This automation compares array (list) data from context to an existing list on the XSOAR server, so you can avoid looping over a sub-playbook.
+
inputArray: the context array/list data
- listName: the XSOAR system list
+
+ listName: the XSOAR system list'
commonfields:
id: isArrayItemInList
version: -1
contentitemexportablefields:
contentitemfields:
- fromServerVersion: ""
-dockerimage: demisto/python3:3.10.12.63474
+ fromServerVersion: ''
+dockerimage: demisto/python3:3.10.14.100715
enabled: true
name: isArrayItemInList
runas: DBotWeakRole
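
For context, a hedged sketch of the comparison the isArrayItemInList comment above describes: checking which items of a context array also appear in an XSOAR system list. The helper name, the comma-separated list format, and the returned shape are illustrative assumptions, not the script's actual implementation.

    import demistomock as demisto  # noqa: F401

    def items_in_system_list(input_array, list_name):
        # Fetch the system list with the built-in getList command and keep the
        # context items that appear in it (assumes a comma-separated list body).
        res = demisto.executeCommand("getList", {"listName": list_name})
        list_items = {item.strip() for item in str(res[0].get("Contents", "")).split(",")}
        return [item for item in input_array if str(item).strip() in list_items]
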
diff --git a/Packs/CommunityCommonScripts/pack_metadata.json b/Packs/CommunityCommonScripts/pack_metadata.json
index 4a4b1f9cb15e..00da6dbe5711 100644
--- a/Packs/CommunityCommonScripts/pack_metadata.json
+++ b/Packs/CommunityCommonScripts/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Community Common Scripts",
"description": "A pack that contains community scripts",
"support": "community",
- "currentVersion": "1.2.1",
+ "currentVersion": "1.2.5",
"author": "",
"url": "https://live.paloaltonetworks.com/t5/cortex-xsoar-discussions/bd-p/Cortex_XSOAR_Discussions",
"email": "",
diff --git a/Packs/ComputerVisionEngine/Integrations/ComputerVisionEngine/ComputerVisionEngine.yml b/Packs/ComputerVisionEngine/Integrations/ComputerVisionEngine/ComputerVisionEngine.yml
index cbe8195bfdf8..9f6397accf68 100644
--- a/Packs/ComputerVisionEngine/Integrations/ComputerVisionEngine/ComputerVisionEngine.yml
+++ b/Packs/ComputerVisionEngine/Integrations/ComputerVisionEngine/ComputerVisionEngine.yml
@@ -3,10 +3,11 @@ commonfields:
id: Computer Vision Engine
version: -1
configuration: []
-description: |-
- This integration is processing images or movies and detects objects on them by using Machine Learning.
+description: 'This integration processes images or movies and detects objects in them using machine learning.
+
It is using OpenCV with:
- YOLO COCO
+
+ YOLO COCO'
display: Computer Vision Engine
name: Computer Vision Engine
script:
@@ -27,7 +28,7 @@ script:
- contextPath: ComputerVision
description: The key holds down the information about detected objects in the picture
type: Unknown
- dockerimage: demisto/yolo-coco:1.0.0.15530
+ dockerimage: demisto/yolo-coco:1.0.0.98891
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/ComputerVisionEngine/ReleaseNotes/1_0_2.md b/Packs/ComputerVisionEngine/ReleaseNotes/1_0_2.md
new file mode 100644
index 000000000000..ba2765438d83
--- /dev/null
+++ b/Packs/ComputerVisionEngine/ReleaseNotes/1_0_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Computer Vision Engine
+
+- Updated the Docker image to: *demisto/yolo-coco:1.0.0.98891*.
diff --git a/Packs/ComputerVisionEngine/doc_files/computervision.gif b/Packs/ComputerVisionEngine/doc_files/computervision.gif
new file mode 100644
index 000000000000..dc86c98c2751
Binary files /dev/null and b/Packs/ComputerVisionEngine/doc_files/computervision.gif differ
diff --git a/Packs/ComputerVisionEngine/pack_metadata.json b/Packs/ComputerVisionEngine/pack_metadata.json
index a2ec47a108fd..897add789252 100644
--- a/Packs/ComputerVisionEngine/pack_metadata.json
+++ b/Packs/ComputerVisionEngine/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ComputerVisionEngine",
"description": "The ComputerVision Integration by using the Deep-learning library yolo-coco and OpenCV is able to recognize objects on photographs, i.e. planes, luggage, people, dogs, cats, etc.\nThe integration can be used in playbooks to extract objects on rasterized websites in phishing campaigns and making IOCs out of them.\nAdditionally, the integration is useful in CCTV systems significantly reducing the number of false - positives and creating custom workflows in XSOAR playbooks for on-prem physical security!",
"support": "community",
- "currentVersion": "1.0.1",
+ "currentVersion": "1.0.2",
"author": "Maciej Drobniuch",
"url": "drobniuch.pl",
"email": "maciej@drobniuch.pl",
diff --git a/Packs/ContentManagement/ReleaseNotes/1_2_20.md b/Packs/ContentManagement/ReleaseNotes/1_2_20.md
new file mode 100644
index 000000000000..4148df9aa8cd
--- /dev/null
+++ b/Packs/ContentManagement/ReleaseNotes/1_2_20.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### ConfigurationSetup
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.96723*.
+##### ListCreator
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.96723*.
diff --git a/Packs/ContentManagement/ReleaseNotes/1_2_21.md b/Packs/ContentManagement/ReleaseNotes/1_2_21.md
new file mode 100644
index 000000000000..0439a9e60927
--- /dev/null
+++ b/Packs/ContentManagement/ReleaseNotes/1_2_21.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### GetIdsFromCustomContent
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
+##### CommitFiles
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
diff --git a/Packs/ContentManagement/ReleaseNotes/1_2_22.md b/Packs/ContentManagement/ReleaseNotes/1_2_22.md
new file mode 100644
index 000000000000..8d610f127dc8
--- /dev/null
+++ b/Packs/ContentManagement/ReleaseNotes/1_2_22.md
@@ -0,0 +1,9 @@
+
+#### Scripts
+
+##### CustomPackInstaller
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
+##### JobCreator
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
diff --git a/Packs/ContentManagement/Scripts/CommitFiles/CommitFiles.yml b/Packs/ContentManagement/Scripts/CommitFiles/CommitFiles.yml
index 2d49a163564a..63702eed562d 100644
--- a/Packs/ContentManagement/Scripts/CommitFiles/CommitFiles.yml
+++ b/Packs/ContentManagement/Scripts/CommitFiles/CommitFiles.yml
@@ -31,7 +31,7 @@ commonfields:
contentitemexportablefields:
contentitemfields:
fromServerVersion: ''
-dockerimage: demisto/xsoar-tools:1.0.0.69232
+dockerimage: demisto/xsoar-tools:1.0.0.99061
enabled: true
name: CommitFiles
outputs:
diff --git a/Packs/ContentManagement/Scripts/ConfigurationSetup/ConfigurationSetup.yml b/Packs/ContentManagement/Scripts/ConfigurationSetup/ConfigurationSetup.yml
index 4c1f6f0fa59f..aa82a36c9570 100644
--- a/Packs/ContentManagement/Scripts/ConfigurationSetup/ConfigurationSetup.yml
+++ b/Packs/ContentManagement/Scripts/ConfigurationSetup/ConfigurationSetup.yml
@@ -35,7 +35,7 @@ tags:
- Content Management
timeout: '0'
type: python
-dockerimage: demisto/xsoar-tools:1.0.0.36076
+dockerimage: demisto/xsoar-tools:1.0.0.96723
tests:
- No tests (auto formatted)
fromversion: 6.0.0
diff --git a/Packs/ContentManagement/Scripts/CustomPackInstaller/CustomPackInstaller.yml b/Packs/ContentManagement/Scripts/CustomPackInstaller/CustomPackInstaller.yml
index ada39aefb8ad..f9d8b4769b9f 100644
--- a/Packs/ContentManagement/Scripts/CustomPackInstaller/CustomPackInstaller.yml
+++ b/Packs/ContentManagement/Scripts/CustomPackInstaller/CustomPackInstaller.yml
@@ -33,7 +33,7 @@ tags:
- Content Management
timeout: '0'
type: python
-dockerimage: demisto/xsoar-tools:1.0.0.83431
+dockerimage: demisto/xsoar-tools:1.0.0.99061
tests:
- No tests (auto formatted)
fromversion: 6.0.0
diff --git a/Packs/ContentManagement/Scripts/GetIdsFromCustomContent/GetIdsFromCustomContent.yml b/Packs/ContentManagement/Scripts/GetIdsFromCustomContent/GetIdsFromCustomContent.yml
index 6cf5a18f4015..92b2c59ddab0 100644
--- a/Packs/ContentManagement/Scripts/GetIdsFromCustomContent/GetIdsFromCustomContent.yml
+++ b/Packs/ContentManagement/Scripts/GetIdsFromCustomContent/GetIdsFromCustomContent.yml
@@ -19,7 +19,7 @@ script: '-'
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/xsoar-tools:1.0.0.80199
+dockerimage: demisto/xsoar-tools:1.0.0.99061
fromversion: 6.8.0
tests:
- No tests (auto formatted)
diff --git a/Packs/ContentManagement/Scripts/JobCreator/JobCreator.yml b/Packs/ContentManagement/Scripts/JobCreator/JobCreator.yml
index ff9218317242..16a3bef6427b 100644
--- a/Packs/ContentManagement/Scripts/JobCreator/JobCreator.yml
+++ b/Packs/ContentManagement/Scripts/JobCreator/JobCreator.yml
@@ -19,7 +19,7 @@ tags:
- Content Management
timeout: '0'
type: python
-dockerimage: demisto/xsoar-tools:1.0.0.83431
+dockerimage: demisto/xsoar-tools:1.0.0.99061
tests:
- No tests (auto formatted)
fromversion: 6.0.0
diff --git a/Packs/ContentManagement/Scripts/ListCreator/ListCreator.yml b/Packs/ContentManagement/Scripts/ListCreator/ListCreator.yml
index fba4e2b4ebe4..e3720709b342 100644
--- a/Packs/ContentManagement/Scripts/ListCreator/ListCreator.yml
+++ b/Packs/ContentManagement/Scripts/ListCreator/ListCreator.yml
@@ -21,7 +21,7 @@ tags:
- Content Management
timeout: '0'
type: python
-dockerimage: demisto/xsoar-tools:1.0.0.19258
+dockerimage: demisto/xsoar-tools:1.0.0.96723
tests:
- No tests (auto formatted)
fromversion: 6.0.0
diff --git a/Packs/ContentManagement/pack_metadata.json b/Packs/ContentManagement/pack_metadata.json
index dff1640fb48e..f49072d77169 100644
--- a/Packs/ContentManagement/pack_metadata.json
+++ b/Packs/ContentManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "XSOAR CI/CD",
"description": "This pack enables you to orchestrate your XSOAR system configuration.",
"support": "xsoar",
- "currentVersion": "1.2.19",
+ "currentVersion": "1.2.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Core/.pack-ignore b/Packs/Core/.pack-ignore
index 2325038d26af..4b9f4becc21c 100644
--- a/Packs/Core/.pack-ignore
+++ b/Packs/Core/.pack-ignore
@@ -1,8 +1,5 @@
-[file:CoreIOCs.yml]
-ignore=IN124
-
[file:CortexCoreIR.yml]
-ignore=IN124,IN139
+ignore=IN139
[file:README.md]
ignore=RM104
diff --git a/Packs/Core/.secrets-ignore b/Packs/Core/.secrets-ignore
index ef4f536bc6ca..4d9d077190f3 100644
--- a/Packs/Core/.secrets-ignore
+++ b/Packs/Core/.secrets-ignore
@@ -85,4 +85,9 @@ dummy1@dummy.com
dummy2@dummy.com
dummy3@dummy.com
000001e7a228b2a7abdf7f7e404bc8522df32b725e86907dde32176bccbbbb27
-80.66.75.36
\ No newline at end of file
+80.66.75.36
+218.92.0.29
+ManagerEmail@test.com
+test2@test.com
+test@test.com
+f3322.net
\ No newline at end of file
diff --git a/Packs/Core/Author_image.png b/Packs/Core/Author_image.png
deleted file mode 100644
index 249fc6f403d6..000000000000
Binary files a/Packs/Core/Author_image.png and /dev/null differ
diff --git a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
index 00ced1fd7d9b..0ebfc0b757d3 100644
--- a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
+++ b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
@@ -87,7 +87,7 @@ script:
required: true
description: Disables IOCs in the Cortex server.
name: core-iocs-disable
- dockerimage: demisto/google-cloud-storage:1.0.0.96060
+ dockerimage: demisto/google-cloud-storage:1.0.0.100327
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
index 5cb9c5c61873..1224ab408e0b 100644
--- a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
+++ b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
@@ -186,7 +186,7 @@ def main(): # pragma: no cover
proxy=proxy,
verify=verify_cert,
headers=headers,
- timeout=timeout
+ timeout=timeout,
)
try:
diff --git a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
index a93ca0608bda..cc1e48175697 100644
--- a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
+++ b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
@@ -2943,7 +2943,7 @@ script:
script: '-'
subtype: python3
type: python
- dockerimage: demisto/google-cloud-storage:1.0.0.96060
+ dockerimage: demisto/google-cloud-storage:1.0.0.100327
tests:
- No tests
fromversion: 6.2.0
diff --git a/Packs/Core/Playbooks/playbook-Get_entity_alerts_by_MITRE_tactics.yml b/Packs/Core/Playbooks/playbook-Get_entity_alerts_by_MITRE_tactics.yml
index 946a8a6a933b..1c540a78ab6d 100644
--- a/Packs/Core/Playbooks/playbook-Get_entity_alerts_by_MITRE_tactics.yml
+++ b/Packs/Core/Playbooks/playbook-Get_entity_alerts_by_MITRE_tactics.yml
@@ -159,10 +159,10 @@ tasks:
isautoswitchedtoquietmode: false
"69":
id: "69"
- taskid: 23aeb319-9260-4ca8-88a5-a83e131b9b90
+ taskid: a6fafebb-c3f5-4df6-8151-d1e87e45dfcf
type: regular
task:
- id: 23aeb319-9260-4ca8-88a5-a83e131b9b90
+ id: a6fafebb-c3f5-4df6-8151-d1e87e45dfcf
version: -1
name: Add Reconnaissance to query
description: |-
@@ -194,7 +194,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Reconnaisance" or '
+ simple: 'mitreattcktactic:"TA0043 - Reconnaissance" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -422,10 +422,10 @@ tasks:
isautoswitchedtoquietmode: false
"76":
id: "76"
- taskid: 2de02662-7c12-46f4-8a5f-6de9d1f2c5a4
+ taskid: f8704cbd-2216-4667-80a6-0598eea4c297
type: regular
task:
- id: 2de02662-7c12-46f4-8a5f-6de9d1f2c5a4
+ id: f8704cbd-2216-4667-80a6-0598eea4c297
version: -1
name: Add Command and Control to query
description: |-
@@ -457,7 +457,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Command and Control" or '
+ simple: 'mitreattcktactic:"TA0011 - Command and Control" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1337,10 +1337,10 @@ tasks:
isautoswitchedtoquietmode: false
"98":
id: "98"
- taskid: d6dbd1f3-291a-41d3-8940-0f4bb01f3ec4
+ taskid: 6a7cbc3f-4505-4433-8824-547496ae78df
type: regular
task:
- id: d6dbd1f3-291a-41d3-8940-0f4bb01f3ec4
+ id: 6a7cbc3f-4505-4433-8824-547496ae78df
version: -1
name: Add Initial Access to query
description: |-
@@ -1372,7 +1372,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Initial Access" or '
+ simple: 'mitreattcktactic:"TA0001 - Initial Access" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1392,10 +1392,10 @@ tasks:
isautoswitchedtoquietmode: false
"99":
id: "99"
- taskid: ab51d8aa-5001-4fe7-87e1-c0af8ab73d9d
+ taskid: 24bb20be-c615-41e3-85bd-53684350c012
type: regular
task:
- id: ab51d8aa-5001-4fe7-87e1-c0af8ab73d9d
+ id: 24bb20be-c615-41e3-85bd-53684350c012
version: -1
name: Add Execution to query
description: |-
@@ -1427,7 +1427,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Execution" or '
+ simple: 'mitreattcktactic:"TA0002 - Execution" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1447,10 +1447,10 @@ tasks:
isautoswitchedtoquietmode: false
"100":
id: "100"
- taskid: ec7e90f7-2b52-453e-8416-0ad9230574f5
+ taskid: ababc384-4722-4441-83d8-ca5851899d1a
type: regular
task:
- id: ec7e90f7-2b52-453e-8416-0ad9230574f5
+ id: ababc384-4722-4441-83d8-ca5851899d1a
version: -1
name: Add Persistence to query
description: |-
@@ -1482,7 +1482,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Persistence" or '
+ simple: 'mitreattcktactic:"TA0003 - Persistence" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1502,10 +1502,10 @@ tasks:
isautoswitchedtoquietmode: false
"107":
id: "107"
- taskid: 04c5b9b8-5f78-4992-8e38-f67cdfd69de8
+ taskid: 3ec25bba-f072-46b5-853d-f7d32b698359
type: regular
task:
- id: 04c5b9b8-5f78-4992-8e38-f67cdfd69de8
+ id: 3ec25bba-f072-46b5-853d-f7d32b698359
version: -1
name: Add Privilege Escalation to query
description: |-
@@ -1537,7 +1537,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Privilege Escalation" or '
+ simple: 'mitreattcktactic:"TA0004 - Privilege Escalation" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1557,10 +1557,10 @@ tasks:
isautoswitchedtoquietmode: false
"108":
id: "108"
- taskid: 9419e7f5-1b4c-438a-809d-5da85187810e
+ taskid: 8ef51106-2ea7-4020-86df-3af2dc171900
type: regular
task:
- id: 9419e7f5-1b4c-438a-809d-5da85187810e
+ id: 8ef51106-2ea7-4020-86df-3af2dc171900
version: -1
name: Add Defense Evasion to query
description: |-
@@ -1592,7 +1592,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Defense Evasion" or '
+ simple: 'mitreattcktactic:"TA0005 - Defense Evasion" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1612,10 +1612,10 @@ tasks:
isautoswitchedtoquietmode: false
"109":
id: "109"
- taskid: 9ba1fbd5-87dd-4a85-8081-c7c8c01270af
+ taskid: 2f5a14c3-7d46-4c59-805b-8fbb811b4b51
type: regular
task:
- id: 9ba1fbd5-87dd-4a85-8081-c7c8c01270af
+ id: 2f5a14c3-7d46-4c59-805b-8fbb811b4b51
version: -1
name: Add Credential Access to query
description: |-
@@ -1647,7 +1647,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Credential Access" or '
+ simple: 'mitreattcktactic:"TA0006 - Credential Access" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1667,10 +1667,10 @@ tasks:
isautoswitchedtoquietmode: false
"110":
id: "110"
- taskid: 92bd64eb-f199-412c-8a38-b399c348dad8
+ taskid: bf903dce-296b-4bd0-8eb2-a6c7d07f2813
type: regular
task:
- id: 92bd64eb-f199-412c-8a38-b399c348dad8
+ id: bf903dce-296b-4bd0-8eb2-a6c7d07f2813
version: -1
name: Add Discovery to query
description: |-
@@ -1702,7 +1702,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Discovery" or '
+ simple: 'mitreattcktactic:"TA0007 - Discovery" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1722,10 +1722,10 @@ tasks:
isautoswitchedtoquietmode: false
"111":
id: "111"
- taskid: c10dd7b5-e0d9-4a37-832f-a59c6a747d13
+ taskid: c2386f08-8339-401d-8d25-4ec123c827db
type: regular
task:
- id: c10dd7b5-e0d9-4a37-832f-a59c6a747d13
+ id: c2386f08-8339-401d-8d25-4ec123c827db
version: -1
name: Add Lateral Movement to query
description: |-
@@ -1757,7 +1757,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Lateral Movement" or '
+ simple: 'mitreattcktactic:"TA0008 - Lateral Movement" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1777,10 +1777,10 @@ tasks:
isautoswitchedtoquietmode: false
"112":
id: "112"
- taskid: 3a171e3a-a6e1-4b39-86a1-7964da7a99b5
+ taskid: b88de95c-964b-4cc6-834e-50dfa3963736
type: regular
task:
- id: 3a171e3a-a6e1-4b39-86a1-7964da7a99b5
+ id: b88de95c-964b-4cc6-834e-50dfa3963736
version: -1
name: Add Collection to query
description: |-
@@ -1812,7 +1812,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Collection" or '
+ simple: 'mitreattcktactic:"TA0009 - Collection" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -1832,10 +1832,10 @@ tasks:
isautoswitchedtoquietmode: false
"113":
id: "113"
- taskid: 851cfe9e-0583-4f72-8a0b-ad46c721b1b0
+ taskid: 2cc2e6ef-3fa9-4dda-81a7-659b5ec94529
type: regular
task:
- id: 851cfe9e-0583-4f72-8a0b-ad46c721b1b0
+ id: 2cc2e6ef-3fa9-4dda-81a7-659b5ec94529
version: -1
name: Add Impact to query
description: |-
@@ -1867,7 +1867,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: 'categoryname:"Impact" or '
+ simple: 'mitreattcktactic:"TA0040 - Impact" or '
reputationcalc: 1
separatecontext: false
continueonerrortype: ""
@@ -2101,10 +2101,10 @@ tasks:
isautoswitchedtoquietmode: false
"117":
id: "117"
- taskid: f2da8ae7-bba0-475f-87d4-728005910653
+ taskid: a7fc38ac-74b6-42aa-8709-134b74d8de8e
type: regular
task:
- id: f2da8ae7-bba0-475f-87d4-728005910653
+ id: a7fc38ac-74b6-42aa-8709-134b74d8de8e
version: -1
name: Hunt all techniques
description: |-
@@ -2140,7 +2140,7 @@ tasks:
prefix: {}
suffix:
value:
- simple: :"${inputs.EntityID}" and (categoryname:"Reconnaisance" or categoryname:"Initial Access" or categoryname:"Execution" or categoryname:"Persistence" or categoryname:"Privilege Escalation" or categoryname:"Defense Evasion" or categoryname:"Credential Access" or categoryname:"Discovery" or categoryname:"Lateral Movement" or categoryname:"Collection" or categoryname:"Command and Control" or categoryname:"Impact")
+ simple: :"${inputs.EntityID}" and (mitreattcktactic:"TA0043 - Reconnaissance" or mitreattcktactic:"TA0001 - Initial Access" or mitreattcktactic:"TA0002 - Execution" or mitreattcktactic:"TA0003 - Persistence" or mitreattcktactic:"TA0004 - Privilege Escalation" or mitreattcktactic:"TA0005 - Defense Evasion" or mitreattcktactic:"TA0006 - Credential Access" or mitreattcktactic:"TA0007 - Discovery" or mitreattcktactic:"TA0008 - Lateral Movement" or mitreattcktactic:"TA0009 - Collection" or mitreattcktactic:"TA0011 - Command and Control" or mitreattcktactic:"TA0040 - Impact")
variable_markers: {}
reputationcalc: 1
separatecontext: false
diff --git a/Packs/Core/Playbooks/playbook-IOC_Alert.yml b/Packs/Core/Playbooks/playbook-IOC_Alert.yml
index 1cb6ccfe4e18..277d9902b841 100644
--- a/Packs/Core/Playbooks/playbook-IOC_Alert.yml
+++ b/Packs/Core/Playbooks/playbook-IOC_Alert.yml
@@ -219,71 +219,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3720
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "43":
- id: "43"
- taskid: b772f110-939d-4f2b-8850-088e8cf36343
- type: condition
- task:
- id: b772f110-939d-4f2b-8850-088e8cf36343
- version: -1
- name: Should continue with the investigation?
- description: Asks the user whether the alert investigation should continue.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "44"
- "yes":
- - "60"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 410,
- "y": 2530
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "44":
- id: "44"
- taskid: a0697551-115b-4674-8204-ba39af9247f5
- type: regular
- task:
- id: a0697551-115b-4674-8204-ba39af9247f5
- version: -1
- name: Continue with the incident investigation
- description: Continue with the incident investigation.
- type: regular
- iscommand: false
- brand: ""
- nexttasks:
- '#none#':
- - "60"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 710,
- "y": 2700
+ "y": 3380
}
}
note: false
@@ -338,7 +274,7 @@ tasks:
brand: ""
nexttasks:
'#default#':
- - "43"
+ - "60"
"yes":
- "83"
separatecontext: false
@@ -387,7 +323,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 2870
+ "y": 2530
}
}
note: false
@@ -689,7 +625,7 @@ tasks:
{
"position": {
"x": 750,
- "y": 3540
+ "y": 3200
}
}
note: false
@@ -735,7 +671,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3005
+ "y": 2665
}
}
note: false
@@ -781,7 +717,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3360
+ "y": 3020
}
}
note: false
@@ -827,7 +763,7 @@ tasks:
{
"position": {
"x": 760,
- "y": 3180
+ "y": 2840
}
}
note: false
@@ -963,7 +899,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "43"
+ - "60"
scriptarguments:
AutoContainment:
complex:
@@ -1055,7 +991,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "43"
+ - "60"
scriptarguments:
AutoEradicate:
complex:
@@ -1218,10 +1154,7 @@ tasks:
id: f77aa4eb-f400-4a50-829f-4ce59cad479f
version: -1
name: Ticket Management - Generic
- description: "`Ticket Management - Generic` allows you to open new tickets or
- update comments to the existing ticket in the following ticketing systems:\n-ServiceNow
- \n-Zendesk \nusing the following sub-playbooks:\n-`ServiceNow - Ticket Management`\n-`Zendesk
- - Ticket Management`\n"
+ description: "`Ticket Management - Generic` allows you to open new tickets or update comments to the existing ticket in the following ticketing systems:\n-ServiceNow \n-Zendesk \nusing the following sub-playbooks:\n-`ServiceNow - Ticket Management`\n-`Zendesk - Ticket Management`\n"
playbookName: Ticket Management - Generic
type: playbook
iscommand: false
@@ -1309,9 +1242,6 @@ view: |-
{
"linkLabelsPosition": {
"10_12_#default#": 0.52,
- "43_44_#default#": 0.49,
- "43_60_yes": 0.54,
- "54_43_#default#": 0.19,
"54_83_yes": 0.41,
"73_74_#default#": 0.44,
"73_75_yes": 0.41,
@@ -1321,7 +1251,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 3895,
+ "height": 3555,
"width": 1770,
"x": -230,
"y": -110
@@ -1536,6 +1466,6 @@ outputSections:
description: Generic group for outputs
outputs: []
tests:
-- No tests (auto formatted)
+- Test Playbook - IOC Alert
marketplaces: ["marketplacev2"]
-fromversion: 6.6.0
+fromversion: 6.6.0
\ No newline at end of file
diff --git a/Packs/Core/Playbooks/playbook-IOC_Alert_README.md b/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
index 6faf870a9001..0427ca362496 100644
--- a/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
+++ b/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
@@ -28,13 +28,13 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* Endpoint Investigation Plan
* Recovery Plan
* Eradication Plan
-* Ticket Management - Generic
-* Endpoint Investigation Plan
* Enrichment for Verdict
-* Containment Plan
* Handle False Positive Alerts
+* Ticket Management - Generic
+* Containment Plan
### Integrations
@@ -47,8 +47,8 @@ This playbook does not use any scripts.
### Commands
* extractIndicators
-* closeInvestigation
* setParentIncidentFields
+* closeInvestigation
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
index d0ecc4cab148..bcc255812f4e 100644
--- a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
+++ b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
@@ -1225,10 +1225,10 @@ tasks:
isautoswitchedtoquietmode: false
"52":
id: "52"
- taskid: bb00ca3d-82a5-4b82-8ce2-a5593eecb43b
+ taskid: 0ea2b397-c024-4c57-81d1-ccdf630043f6
type: regular
task:
- id: bb00ca3d-82a5-4b82-8ce2-a5593eecb43b
+ id: 0ea2b397-c024-4c57-81d1-ccdf630043f6
version: -1
name: Set Number of Related Alerts
description: |-
@@ -1268,7 +1268,7 @@ tasks:
fieldMapping:
- incidentfield: Number Of Found Related Alerts
output:
- simple: NumOfRelatedAlerts
+ simple: ${NumOfRelatedAlerts}
- incidentfield: Alert Search Results
output:
complex:
@@ -1306,14 +1306,15 @@ tasks:
- incidentfield: Failed Logon Events
output:
complex:
- root: AzureFailLoginCount
+ root: NumOfOktaFailedLogon
transformers:
- operator: append
args:
item:
value:
- simple: NumOfOktaFailedLogon
+ simple: AzureFailLoginCount
iscontext: true
+ - operator: SumList
- incidentfield: Email
output:
complex:
@@ -1642,6 +1643,6 @@ outputSections:
description: Generic group for outputs
outputs: []
tests:
-- No tests (auto formatted)
+- Test Playbook - Identity Analytics - Alert Handling
marketplaces: ["marketplacev2"]
-fromversion: 6.10.0
+fromversion: 6.10.0
\ No newline at end of file
diff --git a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
index d5669b650f4d..9227114a53e1 100644
--- a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
+++ b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
@@ -21,12 +21,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Azure - User Investigation
-* Okta - User Investigation
* Cloud IAM Enrichment - Generic
+* Azure - User Investigation
+* Cloud Credentials Rotation - Azure
* Containment Plan
+* Okta - User Investigation
* Account Enrichment - Generic v2.1
-* Cloud Credentials Rotation - Azure
* Get entity alerts by MITRE tactics
### Integrations
@@ -36,14 +36,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Scripts
-* Set
* SetAndHandleEmpty
+* Set
### Commands
-* core-get-cloud-original-alerts
-* ip
* closeInvestigation
+* ip
+* core-get-cloud-original-alerts
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-Impossible_Traveler.yml b/Packs/Core/Playbooks/playbook-Impossible_Traveler.yml
index f40697d24eea..fc4f2e233421 100644
--- a/Packs/Core/Playbooks/playbook-Impossible_Traveler.yml
+++ b/Packs/Core/Playbooks/playbook-Impossible_Traveler.yml
@@ -891,10 +891,10 @@ tasks:
continueonerrortype: ""
"117":
id: "117"
- taskid: 02daa225-5853-458d-8c40-fb3e671a5568
+ taskid: e190537d-1b4b-4055-8eff-1f60d7651da1
type: playbook
task:
- id: 02daa225-5853-458d-8c40-fb3e671a5568
+ id: e190537d-1b4b-4055-8eff-1f60d7651da1
version: -1
name: Containment Plan
description: "This playbook handles all the containment actions available with Cortex XSIAM, including: \n* Isolate endpoint\n* Disable account\n* Quarantine file\n* Block indicators\n* Clear user session (currently, the playbook supports only Okta)\n\nNote: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
@@ -940,6 +940,18 @@ tasks:
accessor: username
transformers:
- operator: uniq
+ AutoBlockIndicators:
+ simple: "True"
+ EndpointID:
+ complex:
+ root: alert
+ accessor: agentid
+ transformers:
+ - operator: uniq
+ HostContainment:
+ simple: "True"
+ UserVerification:
+ simple: "False"
separatecontext: true
loop:
iscommand: false
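The inputs added to the 'Containment Plan' sub-playbook call above make the containment step run unattended: 'AutoBlockIndicators' and 'HostContainment' are set to "True", 'UserVerification' to "False", and 'EndpointID' is fed from the alert itself. As a minimal sketch of the value syntax involved (assuming the standard complex-value notation), the 'EndpointID' input resolves to the deduplicated agent IDs on the alert:

EndpointID:
  complex:
    root: alert          # read from the alert context object
    accessor: agentid    # take the agentid field, i.e. ${alert.agentid}
    transformers:
    - operator: uniq     # drop duplicate endpoint IDs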
diff --git a/Packs/Core/Playbooks/playbook-Impossible_Traveler_-_Enrichment.yml b/Packs/Core/Playbooks/playbook-Impossible_Traveler_-_Enrichment.yml
index d8f9793d1b8e..e61008242545 100644
--- a/Packs/Core/Playbooks/playbook-Impossible_Traveler_-_Enrichment.yml
+++ b/Packs/Core/Playbooks/playbook-Impossible_Traveler_-_Enrichment.yml
@@ -806,6 +806,6 @@ outputs:
description: The user's manager information retrieved from MSGraphUser.
type: unknown
tests:
-- No tests (auto formatted)
+- Test Playbook - Impossible Traveler - Enrichment
marketplaces: ["marketplacev2"]
fromversion: 6.6.0
diff --git a/Packs/Core/Playbooks/playbook-Impossible_Traveler_README.md b/Packs/Core/Playbooks/playbook-Impossible_Traveler_README.md
index b2bf35c15e1e..24fbb8bf1267 100644
--- a/Packs/Core/Playbooks/playbook-Impossible_Traveler_README.md
+++ b/Packs/Core/Playbooks/playbook-Impossible_Traveler_README.md
@@ -36,9 +36,9 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Ticket Management - Generic
-* Impossible Traveler - Enrichment
* Containment Plan
+* Impossible Traveler - Enrichment
+* Ticket Management - Generic
### Integrations
@@ -46,14 +46,14 @@ This playbook does not use any integrations.
### Scripts
-* CreateArray
-* Set
* impossibleTravelerGetDistance
+* Set
+* CreateArray
### Commands
-* setParentIncidentFields
* closeInvestigation
+* setParentIncidentFields
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml
index 3fc0d9f6877b..57f4ebf5e13a 100644
--- a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml
+++ b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml
@@ -69,6 +69,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"2":
id: "2"
taskid: c91fc254-410b-425e-87b2-c69a28f65349
@@ -327,7 +328,7 @@ tasks:
{
"position": {
"x": 430,
- "y": 2075
+ "y": 2070
}
}
note: false
@@ -389,7 +390,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 2370
+ "y": 2250
}
}
note: false
@@ -497,7 +498,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 4100
+ "y": 3990
}
}
note: false
@@ -543,7 +544,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 3300
+ "y": 3310
}
}
note: false
@@ -589,7 +590,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 3640
+ "y": 3650
}
}
note: false
@@ -607,7 +608,7 @@ tasks:
task:
id: da1d7739-25a2-416b-8c41-98d80f898913
version: -1
- name: Continue with the incident investigation
+ name: Continue with the alert investigation
description: Continue manually with the alert investigation.
type: regular
iscommand: false
@@ -620,7 +621,7 @@ tasks:
{
"position": {
"x": 1270,
- "y": 3810
+ "y": 3820
}
}
note: false
@@ -651,7 +652,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 3145
+ "y": 3115
}
}
note: false
@@ -679,7 +680,7 @@ tasks:
{
"position": {
"x": 40,
- "y": 4170
+ "y": 4160
}
}
note: false
@@ -930,6 +931,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"61":
id: "61"
taskid: 109f06df-2f1f-4c89-8b00-e84b143cf481
@@ -1041,8 +1043,6 @@ tasks:
simple: Quarantine
HostContainment:
simple: 'True'
- IAMUserDomain:
- simple: ''
UserContainment:
simple: "True"
UserVerification:
@@ -1073,6 +1073,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"63":
id: "63"
taskid: 461f2351-9e50-443b-8486-795bc963d33c
@@ -1136,6 +1137,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"64":
id: "64"
taskid: f232d4e1-9457-469a-805a-2064f2f0c30e
@@ -1266,6 +1268,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"65":
id: "65"
taskid: d59aca07-8dd0-410e-8c3f-a971980653cf
@@ -1336,7 +1339,7 @@ tasks:
{
"position": {
"x": 900,
- "y": 3050
+ "y": 2940
}
}
note: false
@@ -1346,6 +1349,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"66":
id: "66"
taskid: 32acfe19-fb1e-43f7-8302-9f695f5173cb
@@ -1388,8 +1392,8 @@ tasks:
view: |-
{
"position": {
- "x": 1370,
- "y": 3470
+ "x": 1270,
+ "y": 3480
}
}
note: false
@@ -1399,6 +1403,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"67":
id: "67"
taskid: 58f335fb-7b1f-4de2-83e6-5c80421dbdc5
@@ -1656,7 +1661,7 @@ tasks:
{
"position": {
"x": 1760,
- "y": 3240
+ "y": 3140
}
}
note: false
@@ -1702,7 +1707,7 @@ tasks:
{
"position": {
"x": 430,
- "y": 1840
+ "y": 1815
}
}
note: false
@@ -1865,6 +1870,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ continueonerrortype: ""
"80":
id: "80"
taskid: 8ac754b3-8eb9-4480-8804-df4cf57620fa
@@ -2105,7 +2111,7 @@ tasks:
view: |-
{
"position": {
- "x": 1560,
+ "x": 1550,
"y": 1010
}
}
@@ -2136,7 +2142,7 @@ tasks:
view: |-
{
"position": {
- "x": 1110,
+ "x": 1130,
"y": 1010
}
}
@@ -2147,7 +2153,6 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
-system: true
view: |-
{
"linkLabelsPosition": {
@@ -2156,13 +2161,14 @@ view: |-
"2_81_True Positive": 0.62,
"34_39_No": 0.58,
"34_67_Yes": 0.54,
- "46_47_#default#": 0.54,
+ "46_47_#default#": 0.35,
"46_66_yes": 0.49,
"47_48_#default#": 0.5,
"55_35_#default#": 0.19,
"58_35_#default#": 0.33,
"6_35_Blocked": 0.12,
"6_57_#default#": 0.58,
+ "70_50_#default#": 0.18,
"70_71_Benign": 0.63,
"75_67_#default#": 0.67,
"78_39_no": 0.14,
@@ -2173,9 +2179,9 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 6155,
- "width": 1710,
- "x": 430,
+ "height": 6025,
+ "width": 2100,
+ "x": 40,
"y": -1800
}
}
@@ -2452,4 +2458,3 @@ tests:
fromversion: 6.6.0
marketplaces:
- marketplacev2
-
diff --git a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md
index 345214b4bbe3..d247b6477b5c 100644
--- a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md
+++ b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md
@@ -41,14 +41,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Wildfire Detonate and Analyze File
+* Handle False Positive Alerts
* Enrichment for Verdict
-* Recovery Plan
-* Endpoint Investigation Plan
* Ticket Management - Generic
-* Containment Plan
* Eradication Plan
-* Handle False Positive Alerts
+* Wildfire Detonate and Analyze File
+* Containment Plan
+* Endpoint Investigation Plan
+* Recovery Plan
### Integrations
@@ -61,12 +61,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Commands
+* core-report-incorrect-wildfire
* core-retrieve-file-details
-* setParentIncidentFields
* internal-wildfire-get-report
* closeInvestigation
+* setParentIncidentFields
* core-retrieve-files
-* core-report-incorrect-wildfire
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan.yml b/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan.yml
index 8f815cb60d8c..84005e2064a4 100644
--- a/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan.yml
+++ b/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan.yml
@@ -218,7 +218,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "16"
+ - "18"
scriptarguments:
AutoContainment:
complex:
@@ -304,7 +304,7 @@ tasks:
brand: ""
nexttasks:
'#default#':
- - "16"
+ - "18"
"yes":
- "25"
separatecontext: false
@@ -334,68 +334,6 @@ tasks:
isoversize: false
isautoswitchedtoquietmode: false
continueonerrortype: ""
- "16":
- id: "16"
- taskid: 6a4c1876-1b2f-426e-841e-7fa1a9a22df8
- type: title
- task:
- id: 6a4c1876-1b2f-426e-841e-7fa1a9a22df8
- version: -1
- name: Review
- type: title
- iscommand: false
- brand: ""
- description: ''
- nexttasks:
- '#none#':
- - "17"
- separatecontext: false
- view: |-
- {
- "position": {
- "x": 440,
- "y": 1650
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- continueonerrortype: ""
- "17":
- id: "17"
- taskid: 9c06759e-9696-45cd-8a45-78d3e3e552f3
- type: regular
- task:
- id: 9c06759e-9696-45cd-8a45-78d3e3e552f3
- version: -1
- name: Manual Review Internal Scan
- description: Manual review of the investigation.
- type: regular
- iscommand: false
- brand: ""
- nexttasks:
- '#none#':
- - "18"
- separatecontext: false
- view: |-
- {
- "position": {
- "x": 440,
- "y": 1790
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- continueonerrortype: ""
"18":
id: "18"
taskid: 14a8536b-2d4e-4124-8712-39d147e4c6d0
@@ -431,7 +369,7 @@ tasks:
{
"position": {
"x": 440,
- "y": 1960
+ "y": 1650
}
}
note: false
@@ -463,7 +401,7 @@ tasks:
{
"position": {
"x": 60,
- "y": 2130
+ "y": 1820
}
}
note: false
@@ -491,7 +429,7 @@ tasks:
{
"position": {
"x": 440,
- "y": 2490
+ "y": 2180
}
}
note: false
@@ -523,7 +461,7 @@ tasks:
{
"position": {
"x": 440,
- "y": 2310
+ "y": 2000
}
}
note: false
@@ -841,12 +779,11 @@ tasks:
view: |-
{
"linkLabelsPosition": {
- "15_16_#default#": 0.46,
"18_19_#default#": 0.4
},
"paper": {
"dimensions": {
- "height": 2695,
+ "height": 2385,
"width": 1260,
"x": -200,
"y": -140
@@ -1027,4 +964,4 @@ outputs: []
tests:
- No tests (auto formatted)
marketplaces: ["marketplacev2"]
-fromversion: 6.6.0
+fromversion: 6.6.0
\ No newline at end of file
diff --git a/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan_README.md b/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan_README.md
index 2ac4ef55d49d..bbf759d253c6 100644
--- a/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan_README.md
+++ b/Packs/Core/Playbooks/playbook-NGFW_Internal_Scan_README.md
@@ -25,11 +25,11 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* Account Enrichment - Generic v2.1
* Ticket Management - Generic
* Containment Plan
* Endpoint Investigation Plan
* Endpoint Enrichment - Generic v2.1
-* Account Enrichment - Generic v2.1
### Integrations
@@ -41,8 +41,8 @@ This playbook does not use any scripts.
### Commands
-* setParentIncidentFields
* closeInvestigation
+* setParentIncidentFields
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-Possible_External_RDP_Brute-Force_-_Set_Verdict.yml b/Packs/Core/Playbooks/playbook-Possible_External_RDP_Brute-Force_-_Set_Verdict.yml
index 9faa31739fe3..7a87554bfd63 100644
--- a/Packs/Core/Playbooks/playbook-Possible_External_RDP_Brute-Force_-_Set_Verdict.yml
+++ b/Packs/Core/Playbooks/playbook-Possible_External_RDP_Brute-Force_-_Set_Verdict.yml
@@ -178,10 +178,10 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 58c51221-40de-486f-8f63-ed71ce50ab4b
+ taskid: d3816b97-2a2f-4539-83ee-af6592faad20
type: condition
task:
- id: 58c51221-40de-486f-8f63-ed71ce50ab4b
+ id: d3816b97-2a2f-4539-83ee-af6592faad20
version: -1
name: Connection from unusual country?
description: Check if the connection was made from an unusual country.
@@ -267,10 +267,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 00bcc57e-7e18-4bab-8356-a1ef5348e646
+ taskid: bc3eca1f-c99b-4630-884d-9e573739a061
type: regular
task:
- id: 00bcc57e-7e18-4bab-8356-a1ef5348e646
+ id: bc3eca1f-c99b-4630-884d-9e573739a061
version: -1
name: Set related alerts
description: Set a value in context under the key you entered.
@@ -301,7 +301,7 @@ tasks:
timertriggers: []
ignoreworker: false
fieldMapping:
- - incidentfield: XDR Alert Search Results
+ - incidentfield: Alert Search Results
output:
complex:
root: inputs.RelatedAlerts
diff --git a/Packs/Core/ReleaseNotes/3_0_34.md b/Packs/Core/ReleaseNotes/3_0_34.md
new file mode 100644
index 000000000000..f93731ab7831
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_34.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+Updated the output value for the **'Number Of Found Related Alerts'** alert field mapping rule in the *'Set Number of Related Alerts'* task.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_35.md b/Packs/Core/ReleaseNotes/3_0_35.md
new file mode 100644
index 000000000000..43d17a0fa830
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_35.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### IOC Alert
+
+- Deleted two unnecessary manual tasks titled *'Should continue with the investigation?'* and *'Continue with the alert investigation'* to optimize playbook flow.
diff --git a/Packs/Core/ReleaseNotes/3_0_36.md b/Packs/Core/ReleaseNotes/3_0_36.md
new file mode 100644
index 000000000000..0fe4d3b8fab7
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_36.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+- Updated the mapping rule for the **'Failed Logon Events'** alert field in the *'Set Number of Related Alerts'* playbook task.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_37.md b/Packs/Core/ReleaseNotes/3_0_37.md
new file mode 100644
index 000000000000..4a973ee9c9a7
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_37.md
@@ -0,0 +1,3 @@
+## Core - Investigation and Response
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_38.md b/Packs/Core/ReleaseNotes/3_0_38.md
new file mode 100644
index 000000000000..6c4ee3e801c9
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_38.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Investigation & Response
+
+Fixed an issue in CoreIRApiModule regarding close reason resolution.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_39.md b/Packs/Core/ReleaseNotes/3_0_39.md
new file mode 100644
index 000000000000..e6d763d6005d
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_39.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Investigation & Response
+
+Fixed an issue in CoreIRApiModule where it was failing to parse a response.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_40.md b/Packs/Core/ReleaseNotes/3_0_40.md
new file mode 100644
index 000000000000..23040387f1c3
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_40.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### NGFW Internal Scan
+
+Deleted an unnecessary manual task titled *'Manual Review Internal Scan'* to optimize playbook flow.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_41.md b/Packs/Core/ReleaseNotes/3_0_41.md
new file mode 100644
index 000000000000..6983e6f84c34
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_41.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Possible External RDP Brute-Force - Set Verdict
+
+Updated the alert field configuration within the mapping rule of the *'Set related alerts'* playbook task.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_42.md b/Packs/Core/ReleaseNotes/3_0_42.md
new file mode 100644
index 000000000000..8cd4cc1383ed
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_42.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Impossible Traveler - Enrichment
+
+Internal code improvements.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_43.md b/Packs/Core/ReleaseNotes/3_0_43.md
new file mode 100644
index 000000000000..096a1d099593
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_43.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+Internal code improvements.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_44.md b/Packs/Core/ReleaseNotes/3_0_44.md
new file mode 100644
index 000000000000..428628268079
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_44.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Get entity alerts by MITRE tactics
+
+Updated the alert field where MITRE tactics are searched.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_45.md b/Packs/Core/ReleaseNotes/3_0_45.md
new file mode 100644
index 000000000000..078e2c68447b
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_45.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Impossible Traveler Response
+
+Updated the *'EndpointID'* playbook input value within the **'Containment Plan'** sub-playbook.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_46.md b/Packs/Core/ReleaseNotes/3_0_46.md
new file mode 100644
index 000000000000..0e31961df94e
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_46.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### IOC Alert
+
+Internal code improvements.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_47.md b/Packs/Core/ReleaseNotes/3_0_47.md
new file mode 100644
index 000000000000..d1911a89c22f
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_47.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Local Analysis alert Investigation
+
+Organized the playbook tasks.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_48.md b/Packs/Core/ReleaseNotes/3_0_48.md
new file mode 100644
index 000000000000..b84dd3772909
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_48.md
@@ -0,0 +1,15 @@
+<~XPANSE>
+
+#### Integrations
+
+##### Indicators detection
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
+- Updated the Docker image to: *demisto/google-cloud-storage:1.0.0.100327*.
+
+##### Investigation & Response
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
+- Updated the Docker image to: *demisto/google-cloud-storage:1.0.0.100327*.
+
+</~XPANSE>
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_49.md b/Packs/Core/ReleaseNotes/3_0_49.md
new file mode 100644
index 000000000000..31c4ccdda8ad
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_49.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Indicators detection
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
+
+##### Investigation & Response
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
diff --git a/Packs/Core/ReleaseNotes/3_0_50.md b/Packs/Core/ReleaseNotes/3_0_50.md
new file mode 100644
index 000000000000..672dcb686dc3
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_50.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+
+##### Indicators detection
+
+Updated the CoreIRApiModule with support for custom XSOAR close-reasons in XSOAR-XDR close-reason mapping.
+
+##### Investigation & Response
+
+Updated the CoreIRApiModule with support for custom XSOAR close-reasons in XSOAR-XDR close-reason mapping.
+
diff --git a/Packs/Core/TestPlaybooks/Test_Playbook_-_IOC_Alert.yml b/Packs/Core/TestPlaybooks/Test_Playbook_-_IOC_Alert.yml
new file mode 100644
index 000000000000..e38688eddff1
--- /dev/null
+++ b/Packs/Core/TestPlaybooks/Test_Playbook_-_IOC_Alert.yml
@@ -0,0 +1,535 @@
+id: Test Playbook - IOC Alert
+version: -1
+name: Test Playbook - IOC Alert
+description: |-
+ This playbook tests the ‘IOC Alert’ playbook, which is part of the ‘Core’ pack.
+
+ The following tests are conducted in the playbook:
+ 1- Verify that the parent incident fields have been populated.
+ 2- Ensure that the context data is correctly extracted.
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 9193802e-ca8d-4d1d-8fb6-ac843c6409e0
+ type: start
+ task:
+ id: 9193802e-ca8d-4d1d-8fb6-ac843c6409e0
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 300
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 97297dba-2ed4-45c7-8f88-63bd12b62ea8
+ type: regular
+ task:
+ id: 97297dba-2ed4-45c7-8f88-63bd12b62ea8
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ - "308"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 430
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: c3d897d5-026b-4763-8611-ec681594c84c
+ type: regular
+ task:
+ id: c3d897d5-026b-4763-8611-ec681594c84c
+ version: -1
+ name: Set Alert Fields
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "307"
+ scriptarguments:
+ details:
+ simple: DC1 uploaded 376.6MB to the external host f3322.net over 3 sessions in the last 24 hours. During that time, DC1 downloaded 434.7KB from the same external host. In the past 30 days, the host uploaded data via HTTPS protocol to this external host an average of 0 bytes a day. Similar events of data upload to this external host, was seen from other endpoints in the network 0 times in the last 30 days
+ ignore-outputs:
+ simple: "false"
+ name:
+ simple: '''IOC Alert'' Test Playbook'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 250,
+ "y": 600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: 350e14ec-8eae-4ea7-8ce7-ce5890e6c63e
+ type: title
+ task:
+ id: 350e14ec-8eae-4ea7-8ce7-ce5890e6c63e
+ version: -1
+ name: Start Tests
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "13"
+ - "310"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 930
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 4931b845-286f-4a29-8c88-8f36d375bd3e
+ type: title
+ task:
+ id: 4931b845-286f-4a29-8c88-8f36d375bd3e
+ version: -1
+ name: Check Context Data
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "70"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 850,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "70":
+ id: "70"
+ taskid: a87d5dd0-7b2f-40db-8fd1-3f2e611d8f76
+ type: condition
+ task:
+ id: a87d5dd0-7b2f-40db-8fd1-3f2e611d8f76
+ version: -1
+ name: Verify Extracted Indicators
+ description: Verify that the ‘ExtractedIndicators’ context key was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "71"
+ "yes":
+ - "306"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ExtractedIndicators
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 850,
+ "y": 1200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "71":
+ id: "71"
+ taskid: cee5c66c-f442-4deb-8200-de2684e4d81e
+ type: regular
+ task:
+ id: cee5c66c-f442-4deb-8200-de2684e4d81e
+ version: -1
+ name: Verify Context Error - Extracted Indicators
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ExtractedIndicators' context key was not extracted properly. One of the following may be the cause:
+ 1- The 'extractIndicators' script failed to execute in the 'Extract IOC' task.
+ 2- The 'text' input configuration was changed for the 'extractIndicators' automation used in the 'Extract IOC' task.
+ 3- The 'extractIndicators' automation outputs have been modified and no longer contain the 'ExtractedIndicators' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 850,
+ "y": 1445
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "306":
+ id: "306"
+ taskid: 277d4cdf-d375-490e-8974-bce285c77675
+ type: title
+ task:
+ id: 277d4cdf-d375-490e-8974-bce285c77675
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1380
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "307":
+ id: "307"
+ taskid: 6dca2d8d-dbc9-4f8d-82a4-3dce2773532c
+ type: playbook
+ task:
+ id: 6dca2d8d-dbc9-4f8d-82a4-3dce2773532c
+ version: -1
+ name: IOC Alert
+ description: "IOCs provide the ability to alert on known malicious objects on endpoints across the organization. \n\n**Analysis Actions:**\nThe playbook will use several enrichment sources to determine the IOC verdict. Additionally, will use the Analytics module to run a prevalence check for the IOC.\n\n**Response Actions:**\nThe playbook's first response action is a containment plan that is based on the playbook input. In that phase, the playbook will execute endpoint isolation\n\n**Investigative Actions:**\nWhen the playbook executes, it checks for additional abnormal activity using the Endpoint Investigation Plan playbook that can indicate the endpoint might be compromised.\n\n**Remediation Actions:**\nIn case results are found within the investigation phase, the playbook will execute remediation actions that include containment and eradication.\n\nThis phase will execute the following containment actions:\n\n* File quarantine\n* Endpoint isolation\n\nAnd the following eradication actions:\n\n* Manual process termination\n* Manual file deletion"
+ playbookName: IOC Alert
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ AutoContainment:
+ simple: "True"
+ AutoEradication:
+ simple: "True"
+ AutoRestoreEndpoint:
+ simple: "False"
+ BlockIndicatorsAutomatically:
+ simple: "False"
+ CommentToAdd:
+ simple: '${alert.name}. Alert ID: ${alert.id}'
+ FileRemediation:
+ simple: Quarantine
+ PreHostContainment:
+ simple: "False"
+ ShouldCloseAutomatically:
+ simple: "False"
+ ShouldHandleFPautomatically:
+ simple: "False"
+ ShouldOpenTicket:
+ simple: "False"
+ ZendeskSubject:
+ simple: XSIAM Incident ID - ${parentIncidentFields.incident_id}
+ addCommentPerEndpoint:
+ simple: "False"
+ description:
+ simple: ${parentIncidentFields.description}. ${parentIncidentFields.xdr_url}
+ serviceNowShortDescription:
+ simple: XSIAM Incident ID - ${parentIncidentFields.incident_id}
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 770
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "308":
+ id: "308"
+ taskid: 76bbbed3-71a6-4c35-81d5-582b311dfa92
+ type: regular
+ task:
+ id: 76bbbed3-71a6-4c35-81d5-582b311dfa92
+ version: -1
+ name: Set 'foundIncidents' to context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "307"
+ scriptarguments:
+ append:
+ simple: "true"
+ ignore-outputs:
+ simple: "false"
+ key:
+ simple: foundIncidents
+ value:
+ simple: '''IOC Alert'' Test Playbook'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 600
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "310":
+ id: "310"
+ taskid: 61dacdc9-30cc-4657-879c-317a9a75bfba
+ type: title
+ task:
+ id: 61dacdc9-30cc-4657-879c-317a9a75bfba
+ version: -1
+ name: Check Parent Incident Fields
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "311"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "311":
+ id: "311"
+ taskid: c2665084-634e-460a-87a3-94eefbf1b585
+ type: condition
+ task:
+ id: c2665084-634e-460a-87a3-94eefbf1b585
+ version: -1
+ name: Verify Manual Severity
+ description: Verify that the ‘manual_severity’ parent incident field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "312"
+ "yes":
+ - "306"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: parentIncidentFields.manual_severity
+ iscontext: true
+ right:
+ value:
+ simple: High
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "312":
+ id: "312"
+ taskid: 1bfb3810-4529-4033-8b51-3ab8d2392124
+ type: regular
+ task:
+ id: 1bfb3810-4529-4033-8b51-3ab8d2392124
+ version: -1
+ name: Verify Parent Incident Field Error - Manual Severity
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘manual_severity’ parent incident field was not populated correctly. One of the following may be the cause:
+ 1- The 'setParentIncidentFields' script failed to execute in the 'Set Alert Severity to High' task.
+ 2- The 'manual_severity' input configuration was changed for the 'setParentIncidentFields' automation used in the 'Set Alert Severity to High' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1445
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "311_306_yes": 0.11,
+ "70_306_yes": 0.17
+ },
+ "paper": {
+ "dimensions": {
+ "height": 1240,
+ "width": 1180,
+ "x": 50,
+ "y": 300
+ }
+ }
+ }
+inputs: []
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.6.0
+marketplaces:
+- marketplacev2
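The test playbook above follows a simple pattern: clear context and seed alert fields, run the tested 'IOC Alert' playbook as a sub-playbook, then branch on condition tasks that check the expected outputs and fall through to a PrintErrorEntry task when a check fails. A minimal sketch of one verification pair, assuming the same condition/error-task convention used throughout the file:

"70":
  type: condition
  task:
    name: Verify Extracted Indicators
  conditions:
  - label: "yes"
    condition:
    - - operator: isNotEmpty
        left:
          value:
            simple: ExtractedIndicators   # context key the tested playbook is expected to set
          iscontext: true
  nexttasks:
    "yes":
    - "306"          # Done
    '#default#':
    - "71"           # PrintErrorEntry describing the likely causes of the failure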
diff --git a/Packs/Core/TestPlaybooks/Test_Playbook_-_Identity_Analytics_-_Alert_Handling.yml b/Packs/Core/TestPlaybooks/Test_Playbook_-_Identity_Analytics_-_Alert_Handling.yml
new file mode 100644
index 000000000000..3c8e0833ca96
--- /dev/null
+++ b/Packs/Core/TestPlaybooks/Test_Playbook_-_Identity_Analytics_-_Alert_Handling.yml
@@ -0,0 +1,2197 @@
+id: Test Playbook - Identity Analytics - Alert Handling
+version: -1
+name: Test Playbook - Identity Analytics - Alert Handling
+description: |-
+ This playbook tests the ‘Identity Analytics - Alert Handling’ playbook, which is part of the ‘Core’ pack.
+
+ The following tests are conducted in the playbook:
+ 1- Confirm that alert fields were populated correctly by the custom output mapping rules and the ‘setAlert’ automation.
+ 2- Ensure that the context data is correctly extracted.
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: e94eaa55-648d-494c-8c5e-a57680aaf718
+ type: start
+ task:
+ id: e94eaa55-648d-494c-8c5e-a57680aaf718
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": -460
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: c2777100-cb8e-4bf0-8a85-876775dbf3f8
+ type: regular
+ task:
+ id: c2777100-cb8e-4bf0-8a85-876775dbf3f8
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "304"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": -330
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: f3aa9969-4bca-45f9-8798-a6c34c15a888
+ type: title
+ task:
+ id: f3aa9969-4bca-45f9-8798-a6c34c15a888
+ version: -1
+ name: Start Tests
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "13"
+ - "269"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 950
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 2a55e89d-e559-4863-8030-134d9b361df7
+ type: title
+ task:
+ id: 2a55e89d-e559-4863-8030-134d9b361df7
+ version: -1
+ name: Check Context Data
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "70"
+ - "302"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 1090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "70":
+ id: "70"
+ taskid: 2a9a2803-aa95-43fd-826a-a42bcebb768e
+ type: condition
+ task:
+ id: 2a9a2803-aa95-43fd-826a-a42bcebb768e
+ version: -1
+ name: Verify Number Of Related Alerts
+ description: Verify that the ‘NumOfRelatedAlerts’ context key was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "71"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: NumOfRelatedAlerts
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "71":
+ id: "71"
+ taskid: 80b5b923-6b57-4e9c-836b-0d499f6c09a3
+ type: regular
+ task:
+ id: 80b5b923-6b57-4e9c-836b-0d499f6c09a3
+ version: -1
+ name: Verify Context Error - Number Of Related Alerts
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'NumOfRelatedAlerts' context key was not extracted properly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'key' input configuration was changed for the 'SetAndHandleEmpty' automation used in the 'Set Number of Related Alerts' task.
+ 3- The 'value' input configuration was changed for the 'SetAndHandleEmpty' automation used in the 'Set Number of Related Alerts' task.
+ 4- The 'Get entity alerts by MITRE tactics' playbook did not return any results.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "256":
+ id: "256"
+ taskid: 1c672a25-12a7-4b67-8347-929cccad235b
+ type: title
+ task:
+ id: 1c672a25-12a7-4b67-8347-929cccad235b
+ version: -1
+ name: Done Verifying Context Data
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "306"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "265":
+ id: "265"
+ taskid: fb1aa7f2-8c65-47bc-83aa-758a3eb4e9d6
+ type: regular
+ task:
+ id: fb1aa7f2-8c65-47bc-83aa-758a3eb4e9d6
+ version: -1
+ name: Set Okta Username To Alert Field
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ username:
+ simple: demisto\testplaybookuser
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2260,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "266":
+ id: "266"
+ taskid: f8689a9e-9c3c-4632-82ec-52df5776de04
+ type: playbook
+ task:
+ id: f8689a9e-9c3c-4632-82ec-52df5776de04
+ version: -1
+ name: Identity Analytics - Alert Handling
+ description: |-
+ The `Identity Analytics - Alert Handling` playbook is designed to handle Identity Analytics alerts and executes the following:
+
+ Analysis:
+ - Enriches the IP and the account, providing additional context and information about these indicators.
+
+ Verdict:
+ - Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+ Investigation:
+ - Checks for related XDR alerts to the user by Mitre tactics to identify malicious activity.
+ - Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+ - Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+ Verdict Handling:
+ - Handles malicious alerts by initiating appropriate response actions, including blocking malicious IP and revoking or clearing user's sessions.
+ - Handles non-malicious alerts identified during the investigation.
+ playbookName: Identity Analytics - Alert Handling
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "313"
+ scriptarguments:
+ AutoContainment:
+ simple: "False"
+ AutoRemediation:
+ simple: "False"
+ AzureMfaFailedLogonThreshold:
+ simple: "1"
+ ClearUserSessions:
+ simple: "False"
+ FailedLogonThreshold:
+ simple: "1"
+ IAMRemediationType:
+ simple: Revoke
+ OktaSuspiciousEventsThreshold:
+ simple: "1"
+ RelatedAlertsThreshold:
+ simple: "1"
+ UserContainment:
+ simple: "False"
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 140
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "268":
+ id: "268"
+ taskid: ea4aeb45-e4c9-4671-81e7-fd392a3b470d
+ type: regular
+ task:
+ id: ea4aeb45-e4c9-4671-81e7-fd392a3b470d
+ version: -1
+ name: Set 'IP.Address' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: IP.Address
+ value:
+ simple: 8.8.8.8
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "269":
+ id: "269"
+ taskid: efd89723-30ed-4a96-8555-44a53cbb1b88
+ type: title
+ task:
+ id: efd89723-30ed-4a96-8555-44a53cbb1b88
+ version: -1
+ name: Check Alert Field
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "270"
+ - "272"
+ - "274"
+ - "276"
+ - "278"
+ - "280"
+ - "282"
+ - "284"
+ - "286"
+ - "288"
+ - "290"
+ - "300"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -170,
+ "y": 1090
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "270":
+ id: "270"
+ taskid: e8aba1f6-c825-4896-87db-27f5aff5bcfc
+ type: condition
+ task:
+ id: e8aba1f6-c825-4896-87db-27f5aff5bcfc
+ version: -1
+ name: Verify Number Of Found Related Alerts
+ description: Verify that the ‘numberoffoundrelatedalerts’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "271"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: alert.numberoffoundrelatedalerts
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -170,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "271":
+ id: "271"
+ taskid: 929a123f-9afd-435e-8966-56bdeca93cdb
+ type: regular
+ task:
+ id: 929a123f-9afd-435e-8966-56bdeca93cdb
+ version: -1
+ name: Verify Context Error - Number Of Found Related Alerts
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘numberoffoundrelatedalerts’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'key' input configuration was changed for the 'SetAndHandleEmpty' automation used in the 'Set Number of Related Alerts' task.
+ 3- The 'value' input configuration was changed for the 'SetAndHandleEmpty' automation used in the 'Set Number of Related Alerts' task.
+ 4- The 'Get entity alerts by MITRE tactics' playbook did not return any results.
+ 5- The mapping rule for the 'Number Of Found Related Alerts' alert field was removed.
+ 6- A change was made to the 'outputs' value for the 'Number Of Found Related Alerts' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -170,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "272":
+ id: "272"
+ taskid: 16f1a1c1-d49f-43ed-88ed-29970819bf43
+ type: condition
+ task:
+ id: 16f1a1c1-d49f-43ed-88ed-29970819bf43
+ version: -1
+ name: Verify Alert Search Results
+ description: Verify that the ‘alertsearchresults’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "273"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.alertsearchresults
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -570,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "273":
+ id: "273"
+ taskid: 82460a18-7046-4306-8ee7-3bb794a19033
+ type: regular
+ task:
+ id: 82460a18-7046-4306-8ee7-3bb794a19033
+ version: -1
+ name: Verify Context Error - Alert Search Results
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘alertsearchresults’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Get entity alerts by MITRE tactics' playbook did not return any results.
+ 3- The mapping rule for the 'Alert Search Results' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Alert Search Results' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -570,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "274":
+ id: "274"
+ taskid: 091ed34f-8049-4f7d-8107-8dc419404e30
+ type: condition
+ task:
+ id: 091ed34f-8049-4f7d-8107-8dc419404e30
+ version: -1
+ name: Verify User Risk Level
+ description: Verify that the ‘userrisklevel’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "275"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.userrisklevel
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -960,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "275":
+ id: "275"
+ taskid: dbabdf28-2f79-486b-8b1c-8a4cf2b4c357
+ type: regular
+ task:
+ id: dbabdf28-2f79-486b-8b1c-8a4cf2b4c357
+ version: -1
+ name: Verify Context Error - User Risk Level
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘userrisklevel’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Get entity alerts by MITRE tactics' playbook did not return any results.
+ 3- The mapping rule for the 'User Risk Level' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'User Risk Level' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -960,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "276":
+ id: "276"
+ taskid: 363cfe78-5987-44d0-8065-bbc01756da11
+ type: condition
+ task:
+ id: 363cfe78-5987-44d0-8065-bbc01756da11
+ version: -1
+ name: Verify Failed Logon Events
+ description: Verify that the ‘failedlogonevents’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "277"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.failedlogonevents
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1350,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "277":
+ id: "277"
+ taskid: 203bef92-3fdb-4d3e-8091-5f05b914c4c5
+ type: regular
+ task:
+ id: 203bef92-3fdb-4d3e-8091-5f05b914c4c5
+ version: -1
+ name: Verify Context Error - Failed Logon Events
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘failedlogonevents’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Azure - User Investigation' playbook did not return any results.
+ 3- The mapping rule for the 'Failed Logon Events' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Failed Logon Events' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1350,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "278":
+ id: "278"
+ taskid: 14a9b4f3-0f91-483d-8dfe-bf43772b5195
+ type: condition
+ task:
+ id: 14a9b4f3-0f91-483d-8dfe-bf43772b5195
+ version: -1
+ name: Verify Email
+ description: Verify that the ‘email’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "279"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.email
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1740,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "279":
+ id: "279"
+ taskid: ac6c5cb7-c231-46ea-8069-0587cae96ab4
+ type: regular
+ task:
+ id: ac6c5cb7-c231-46ea-8069-0587cae96ab4
+ version: -1
+ name: Verify Context Error - Email
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘email’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' and the 'Fetch cloud alert extra data' playbooks did not return any results.
+ 3- The mapping rule for the 'Email' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Email' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1740,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "280":
+ id: "280"
+ taskid: 344a08d4-d61d-4472-8fa4-d74bf25683cf
+ type: condition
+ task:
+ id: 344a08d4-d61d-4472-8fa4-d74bf25683cf
+ version: -1
+ name: Verify Account Member Of
+ description: Verify that the ‘accountmemberof’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "281"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.accountmemberof
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2140,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "281":
+ id: "281"
+ taskid: 4de0f33b-bbcd-4d9c-8bcc-4a64c5456cc6
+ type: regular
+ task:
+ id: 4de0f33b-bbcd-4d9c-8bcc-4a64c5456cc6
+ version: -1
+ name: Verify Context Error - Account Member Of
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘accountmemberof’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'Account Member Of' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Account Member Of' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2140,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "282":
+ id: "282"
+ taskid: 77e4c0e4-ba5f-4d65-89a2-2cc803251049
+ type: condition
+ task:
+ id: 77e4c0e4-ba5f-4d65-89a2-2cc803251049
+ version: -1
+ name: Verify Cloud Account ID
+ description: Verify that the ‘cloudaccountid’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "283"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.cloudaccountid
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2530,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "283":
+ id: "283"
+ taskid: 88a1825f-bf07-4524-859f-0d49a5c2b478
+ type: regular
+ task:
+ id: 88a1825f-bf07-4524-859f-0d49a5c2b478
+ version: -1
+ name: Verify Context Error - Cloud Account ID
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The ‘cloudaccountid’ alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'Cloud Account ID' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Cloud Account ID' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2530,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "284":
+ id: "284"
+ taskid: 4b9edee1-0038-42a9-8e22-737eecdca275
+ type: condition
+ task:
+ id: 4b9edee1-0038-42a9-8e22-737eecdca275
+ version: -1
+ name: Verify Account ID
+ description: Verify that the ‘accountid’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "285"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.accountid
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2920,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "285":
+ id: "285"
+ taskid: 8f2e6331-42f8-4ea1-8773-4b6962d1284f
+ type: regular
+ task:
+ id: 8f2e6331-42f8-4ea1-8773-4b6962d1284f
+ version: -1
+ name: Verify Context Error - Account ID
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'accountid' alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'Account ID' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'Account ID' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -2920,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "286":
+ id: "286"
+ taskid: b70f8dcb-6277-4572-82b8-49a0d27e6667
+ type: condition
+ task:
+ id: b70f8dcb-6277-4572-82b8-49a0d27e6667
+ version: -1
+ name: Verify SAM Account Name
+ description: Verify that the ‘sAMAccountName’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "287"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.samaccountname
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -3310,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "287":
+ id: "287"
+ taskid: 68f4ada0-c415-4ed9-80ac-1b3b6fafda28
+ type: regular
+ task:
+ id: 68f4ada0-c415-4ed9-80ac-1b3b6fafda28
+ version: -1
+ name: Verify Context Error - SAM Account Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'sAMAccountName' alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'sAMAccountName' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'sAMAccountName' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -3310,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "288":
+ id: "288"
+ taskid: f9e1593c-6532-4bb9-8822-1de376b7a13b
+ type: condition
+ task:
+ id: f9e1593c-6532-4bb9-8822-1de376b7a13b
+ version: -1
+ name: Verify Account Status
+ description: Verify that the ‘accountstatus’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "289"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.accountstatus
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -3700,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "289":
+ id: "289"
+ taskid: eb0204ef-e039-4a3a-8be0-92e19cccf861
+ type: regular
+ task:
+ id: eb0204ef-e039-4a3a-8be0-92e19cccf861
+ version: -1
+ name: Verify Context Error - Account Status
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'accountstatus' alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'accountstatus' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'accountstatus' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -3700,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "290":
+ id: "290"
+ taskid: 786a227b-b7b4-4aee-8fa7-c2d540090382
+ type: condition
+ task:
+ id: 786a227b-b7b4-4aee-8fa7-c2d540090382
+ version: -1
+ name: Verify Manager Email Address
+ description: Verify that the ‘manageremailaddress’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "291"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: alert.manageremailaddress
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -4090,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "291":
+ id: "291"
+ taskid: 3b8a8a6a-c037-4d3f-8ab5-918317cf3f70
+ type: regular
+ task:
+ id: 3b8a8a6a-c037-4d3f-8ab5-918317cf3f70
+ version: -1
+ name: Verify Context Error - Manager Email Address
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'manageremailaddress' alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'SetAndHandleEmpty' script failed to execute in the 'Set Number of Related Alerts' task.
+ 2- The 'Account Enrichment - Generic v2.1' playbook did not return any results.
+ 3- The mapping rule for the 'manageremailaddress' alert field was removed.
+ 4- A change was made to the 'outputs' value for the 'manageremailaddress' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -4090,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "292":
+ id: "292"
+ taskid: 24a4a254-1b5e-4dd4-8e2e-7d42f9e6307b
+ type: regular
+ task:
+ id: 24a4a254-1b5e-4dd4-8e2e-7d42f9e6307b
+ version: -1
+ name: Set 'foundIncidents' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: foundIncidents.severity
+ value:
+ simple: high
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "293":
+ id: "293"
+ taskid: eb799528-7bbc-430f-896c-d10e2f3641c3
+ type: regular
+ task:
+ id: eb799528-7bbc-430f-896c-d10e2f3641c3
+ version: -1
+ name: Set 'event.auth_identity' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: Core.OriginalAlert.event.auth_identity
+ stringify:
+ simple: "true"
+ value:
+ simple: testplaybookuser@demistodev.onmicrosoft.com
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1070,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "294":
+ id: "294"
+ taskid: c6578000-ce13-4d51-8b14-92a229990e40
+ type: regular
+ task:
+ id: c6578000-ce13-4d51-8b14-92a229990e40
+ version: -1
+ name: Set 'AzureFailLoginCount' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: AzureFailLoginCount
+ value:
+ simple: "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 670,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "295":
+ id: "295"
+ taskid: 7e06b281-7da2-449d-8aaa-152ffb1c5aec
+ type: regular
+ task:
+ id: 7e06b281-7da2-449d-8aaa-152ffb1c5aec
+ version: -1
+ name: Set 'NumOfOktaFailedLogon' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: NumOfOktaFailedLogon
+ value:
+ simple: "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 260,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "296":
+ id: "296"
+ taskid: a9117589-d789-45b8-8fd1-402218800940
+ type: regular
+ task:
+ id: a9117589-d789-45b8-8fd1-402218800940
+ version: -1
+ name: Set 'ActiveDirectory' to Context
+ description: Set multiple keys/values to the context.
+ scriptName: SetMultipleValues
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ keys:
+ simple: mail,memberOf,sAMAccountName
+ parent:
+ simple: ActiveDirectory
+ values:
+ simple: test2@test.com,CN=Domain Admins,test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "297":
+ id: "297"
+ taskid: d3a81198-6b69-4194-83f7-664b60fb1173
+ type: regular
+ task:
+ id: d3a81198-6b69-4194-83f7-664b60fb1173
+ version: -1
+ name: Set 'MSGraphUser' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: MSGraphUser.ID
+ value:
+ simple: "000000000001"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -550,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "298":
+ id: "298"
+ taskid: eeafd1e1-6ee7-4b81-8ca4-c643cf94c06d
+ type: regular
+ task:
+ id: eeafd1e1-6ee7-4b81-8ca4-c643cf94c06d
+ version: -1
+ name: Set 'Account' to Context
+ description: Set multiple keys/values to the context.
+ scriptName: SetMultipleValues
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ keys:
+ simple: ID,Status
+ parent:
+ simple: Account
+ values:
+ simple: 001,disable
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -950,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "299":
+ id: "299"
+ taskid: a2f4c034-7d5d-477e-8192-fab5ce17d730
+ type: regular
+ task:
+ id: a2f4c034-7d5d-477e-8192-fab5ce17d730
+ version: -1
+ name: Set 'UserManagerEmail' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ key:
+ simple: UserManagerEmail
+ value:
+ simple: ManagerEmail@test.com
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1350,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "300":
+ id: "300"
+ taskid: e844723a-f6db-421c-872c-01e48c733e97
+ type: condition
+ task:
+ id: e844723a-f6db-421c-872c-01e48c733e97
+ version: -1
+ name: Verify Verdict
+ description: Verify that the ‘Verdict’ alert field was populated correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "301"
+ "yes":
+ - "305"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: alert.verdict
+ iscontext: true
+ right:
+ value:
+ simple: Malicious
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -4480,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "301":
+ id: "301"
+ taskid: fb9495e2-ccc8-43cc-8677-4f4eb566fafe
+ type: regular
+ task:
+ id: fb9495e2-ccc8-43cc-8677-4f4eb566fafe
+ version: -1
+ name: Verify Context Error - Verdict
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Verdict' alert field was not populated correctly. One of the following may be the cause:
+ 1- The 'Set' script failed to execute in the 'Set Alert Verdict' task.
+ 2- The mapping rule for the 'Verdict' alert field was removed.
+ 3- A change was made to the 'outputs' value for the 'Verdict' alert field mapping rule.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -4480,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "302":
+ id: "302"
+ taskid: 9bbbf46e-c50c-4206-8024-0ac97f3be45b
+ type: condition
+ task:
+ id: 9bbbf46e-c50c-4206-8024-0ac97f3be45b
+ version: -1
+ name: Verify Verdict
+ description: Verify that the ‘Verdict’ context key was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "303"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: Verdict
+ iscontext: true
+ right:
+ value:
+ simple: Malicious
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "303":
+ id: "303"
+ taskid: 2fc2c831-5640-483a-8f94-52a2e545c443
+ type: regular
+ task:
+ id: 2fc2c831-5640-483a-8f94-52a2e545c443
+ version: -1
+ name: Verify Context Error - Verdict
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Verdict' context key was not extracted properly. One of the following may be the cause:
+ 1- The 'Set' script failed to execute in the 'Set Alert Verdict' task.
+ 2- The 'key' input configuration was changed for the 'Set' automation used in the 'Set Alert Verdict' task.
+ 3- The 'value' input configuration was changed for the 'Set' automation used in the 'Set Alert Verdict' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "304":
+ id: "304"
+ taskid: 48d0e3e8-8f1f-4af1-8efb-e76d51f17fa1
+ type: title
+ task:
+ id: 48d0e3e8-8f1f-4af1-8efb-e76d51f17fa1
+ version: -1
+ name: Set Information to Context
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "265"
+ - "268"
+ - "292"
+ - "294"
+ - "295"
+ - "296"
+ - "297"
+ - "298"
+ - "299"
+ - "308"
+ - "293"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": -170
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "305":
+ id: "305"
+ taskid: 4a673d19-0bd6-4b64-8f13-c9f24a5537b3
+ type: title
+ task:
+ id: 4a673d19-0bd6-4b64-8f13-c9f24a5537b3
+ version: -1
+ name: Done Verifying Alert Fields
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "306"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 230,
+ "y": 1435
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "306":
+ id: "306"
+ taskid: 48a4350b-11fd-4fe8-86f9-da0d7bafc36e
+ type: title
+ task:
+ id: 48a4350b-11fd-4fe8-86f9-da0d7bafc36e
+ version: -1
+ name: AD Tests
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 1590
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "308":
+ id: "308"
+ taskid: 92981484-2af2-4af1-8bdf-b901c99cf394
+ type: regular
+ task:
+ id: 92981484-2af2-4af1-8bdf-b901c99cf394
+ version: -1
+ name: Set 'RiskyUser.risk_level' to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "266"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: Core.RiskyUser.risk_level
+ stringify:
+ simple: "true"
+ value:
+ simple: Medium
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2660,
+ "y": -30
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "313":
+ id: "313"
+ taskid: 9b50288a-cc93-46b0-8fa6-b6eb5a925fe2
+ type: regular
+ task:
+ id: 9b50288a-cc93-46b0-8fa6-b6eb5a925fe2
+ version: -1
+ name: Set Azure Username To Alert Field
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "314"
+ scriptarguments:
+ username:
+ simple: testplaybookuser@demistodev.onmicrosoft.com
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 300
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "314":
+ id: "314"
+ taskid: e8f86d57-85a8-4d30-8bb3-b954cbc44f9d
+ type: playbook
+ task:
+ id: e8f86d57-85a8-4d30-8bb3-b954cbc44f9d
+ version: -1
+ name: Identity Analytics - Alert Handling
+ description: |-
+ The `Identity Analytics - Alert Handling` playbook is designed to handle Identity Analytics alerts and executes the following:
+
+ Analysis:
+ - Enriches the IP and the account, providing additional context and information about these indicators.
+
+ Verdict:
+ - Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+ Investigation:
+ - Checks for XDR alerts related to the user by MITRE tactics to identify malicious activity.
+ - Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+ - Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+ Verdict Handling:
+ - Handles malicious alerts by initiating appropriate response actions, including blocking the malicious IP and revoking or clearing the user's sessions.
+ - Handles non-malicious alerts identified during the investigation.
+ playbookName: Identity Analytics - Alert Handling
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "315"
+ scriptarguments:
+ AutoContainment:
+ simple: "False"
+ AutoRemediation:
+ simple: "False"
+ AzureMfaFailedLogonThreshold:
+ simple: "1"
+ ClearUserSessions:
+ simple: "False"
+ FailedLogonThreshold:
+ simple: "1"
+ IAMRemediationType:
+ simple: Revoke
+ OktaSuspiciousEventsThreshold:
+ simple: "1"
+ RelatedAlertsThreshold:
+ simple: "1"
+ UserContainment:
+ simple: "False"
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 460
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "315":
+ id: "315"
+ taskid: 6d53c538-9f36-4cd9-87fe-23a035091fa7
+ type: regular
+ task:
+ id: 6d53c538-9f36-4cd9-87fe-23a035091fa7
+ version: -1
+ name: Set AD Username To Alert Field
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "316"
+ scriptarguments:
+ username:
+ simple: demisto\DefaultAccount
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 620
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "316":
+ id: "316"
+ taskid: 3c9eae64-9836-433b-8964-219d0481054f
+ type: playbook
+ task:
+ id: 3c9eae64-9836-433b-8964-219d0481054f
+ version: -1
+ name: Identity Analytics - Alert Handling
+ description: |-
+ The `Identity Analytics - Alert Handling` playbook is designed to handle Identity Analytics alerts and executes the following:
+
+ Analysis:
+ - Enriches the IP and the account, providing additional context and information about these indicators.
+
+ Verdict:
+ - Determines the appropriate verdict based on the data collected from the enrichment phase.
+
+ Investigation:
+ - Checks for XDR alerts related to the user by MITRE tactics to identify malicious activity.
+ - Checks for specific arguments for malicious usage from Okta using the 'Okta User Investigation' sub-playbook.
+ - Checks for specific arguments for malicious usage from Azure using the 'Azure User Investigation' sub-playbook.
+
+ Verdict Handling:
+ - Handles malicious alerts by initiating appropriate response actions, including blocking the malicious IP and revoking or clearing the user's sessions.
+ - Handles non-malicious alerts identified during the investigation.
+ playbookName: Identity Analytics - Alert Handling
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ AutoContainment:
+ simple: "False"
+ AutoRemediation:
+ simple: "False"
+ AzureMfaFailedLogonThreshold:
+ simple: "1"
+ ClearUserSessions:
+ simple: "False"
+ FailedLogonThreshold:
+ simple: "1"
+ IAMRemediationType:
+ simple: Revoke
+ OktaSuspiciousEventsThreshold:
+ simple: "1"
+ RelatedAlertsThreshold:
+ simple: "1"
+ UserContainment:
+ simple: "False"
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 785
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "302_256_yes": 0.17,
+ "70_256_yes": 0.11
+ },
+ "paper": {
+ "dimensions": {
+ "height": 2115,
+ "width": 7520,
+ "x": -4480,
+ "y": -460
+ }
+ }
+ }
+inputs: []
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.6.0
+marketplaces:
+- marketplacev2
diff --git a/Packs/Core/TestPlaybooks/Test_Playbook_-_Impossible_Traveler_-_Enrichment.yml b/Packs/Core/TestPlaybooks/Test_Playbook_-_Impossible_Traveler_-_Enrichment.yml
new file mode 100644
index 000000000000..210154372e5e
--- /dev/null
+++ b/Packs/Core/TestPlaybooks/Test_Playbook_-_Impossible_Traveler_-_Enrichment.yml
@@ -0,0 +1,7718 @@
+id: Test Playbook - Impossible Traveler - Enrichment
+version: -1
+name: Test Playbook - Impossible Traveler - Enrichment
+description: |-
+ This playbook tests the ‘Impossible Traveler - Enrichment’ playbook, which is part of the ‘Core’ pack.
+
+ The following tests are conducted in the playbook:
+ 1- Review and validate the playbook’s output.
+ 2- Ensure that the context data is correctly extracted.
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 9193802e-ca8d-4d1d-8fb6-ac843c6409e0
+ type: start
+ task:
+ id: 9193802e-ca8d-4d1d-8fb6-ac843c6409e0
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 97297dba-2ed4-45c7-8f88-63bd12b62ea8
+ type: regular
+ task:
+ id: 97297dba-2ed4-45c7-8f88-63bd12b62ea8
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ - "49"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -80
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: 07bc794e-3229-45e0-8d0c-0ff51c87d4b7
+ type: regular
+ task:
+ id: 07bc794e-3229-45e0-8d0c-0ff51c87d4b7
+ version: -1
+ name: Get Available Users - AD
+ description: Retrieves detailed information about a user account. The user can be specified by name, email address, or as an Active Directory Distinguished Name (DN). If no filter is specified, all users are returned.
+ script: '|||ad-get-user'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ extend-context:
+ simple: AD.AvailableUsers=
+ ignore-outputs:
+ simple: "true"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 240,
+ "y": 90
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 3fb47b10-a41e-40d3-8b78-2eec7841e754
+ type: condition
+ task:
+ id: 3fb47b10-a41e-40d3-8b78-2eec7841e754
+ version: -1
+ name: Users Retrieved?
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "4"
+ "yes":
+ - "266"
+ - "267"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AD.AvailableUsers.attributes.displayName
+ iscontext: true
+ right:
+ value: {}
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraph.AvailableUsers.displayName
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 260
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 4f4763ac-f7b5-41e2-87f8-c57a32f30b08
+ type: regular
+ task:
+ id: 4f4763ac-f7b5-41e2-87f8-c57a32f30b08
+ version: -1
+ name: Get Users Error - No Available Users
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "264"
+ scriptarguments:
+ message:
+ simple: "No available users were found. \nThis may indicate the following:\n1- The 'msgraph-user-list' or the 'ad-get-user' 'scripts failed to execute.\n2. results were not returned by 'msgraph-user-list' or 'ad-get-user'."
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1290,
+ "y": 430
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: b4c3f65d-215a-4ff7-8aee-0d601ac92f25
+ type: title
+ task:
+ id: b4c3f65d-215a-4ff7-8aee-0d601ac92f25
+ version: -1
+ name: Start Tests
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "13"
+ - "78"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 930
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 4931b845-286f-4a29-8c88-8f36d375bd3e
+ type: title
+ task:
+ id: 4931b845-286f-4a29-8c88-8f36d375bd3e
+ version: -1
+ name: Check Context Data
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "70"
+ - "72"
+ - "74"
+ - "76"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 660,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: f73b0448-ed1a-4086-8c0c-ccb0abba3b49
+ type: condition
+ task:
+ id: f73b0448-ed1a-4086-8c0c-ccb0abba3b49
+ version: -1
+ name: Verify IP Address
+ description: Verify that the ‘IP.Address’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "19"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Address
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: 15483d21-0147-43db-80a1-c3fdc15a543b
+ type: regular
+ task:
+ id: 15483d21-0147-43db-80a1-c3fdc15a543b
+ version: -1
+ name: Verify Context Error - IP Address
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Address' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Address' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'IP.Address' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "49":
+ id: "49"
+ taskid: 0e44ae7e-9147-41d8-8e3e-d133eb2fda02
+ type: regular
+ task:
+ id: 0e44ae7e-9147-41d8-8e3e-d133eb2fda02
+ version: -1
+ name: Get Available Users - MSGraph
+ description: |-
+ Retrieves a list of user objects.
+ Permissions: - User.ReadBasic.All (Delegated) - User.Read.All (Application).
+ script: '|||msgraph-user-list'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ extend-context:
+ simple: MSGraph.AvailableUsers=
+ ignore-outputs:
+ simple: "true"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 90
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "51":
+ id: "51"
+ taskid: 05324788-6a9a-48d8-87b5-02fccf81b479
+ type: playbook
+ task:
+ id: 05324788-6a9a-48d8-87b5-02fccf81b479
+ version: -1
+ name: Impossible Traveler - Enrichment
+ description: |-
+ This playbook gets as input all of the involved IP addresses and identities from the Impossible Traveler playbook alert, and enriches them based on the following:
+ * Geo location
+ * Active Directory
+ * IP enrichment, e.g. VirusTotal, AbuseIPDB, etc.
+ playbookName: Impossible Traveler - Enrichment
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ domain:
+ simple: demisto
+ sourceip:
+ simple: 218.92.0.29
+ username:
+ simple: ${AvailableUsers}
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 770
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "52":
+ id: "52"
+ taskid: 00f270df-7927-4507-826b-cb8d6bde9e70
+ type: condition
+ task:
+ id: 00f270df-7927-4507-826b-cb8d6bde9e70
+ version: -1
+ name: Verify IP Geo Country
+ description: Verify that the ‘IP.Geo.Country’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "53"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Geo.Country
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "53":
+ id: "53"
+ taskid: 24112c27-ebce-47a5-8174-717355649450
+ type: regular
+ task:
+ id: 24112c27-ebce-47a5-8174-717355649450
+ version: -1
+ name: Verify Context Error - IP Geo Country
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Geo.Country' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Geo.Country' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'IP.Geo.Country' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "54":
+ id: "54"
+ taskid: 56479104-da2a-474d-82ed-611866aed637
+ type: condition
+ task:
+ id: 56479104-da2a-474d-82ed-611866aed637
+ version: -1
+ name: Verify IP Geo Country Code
+ description: Verify that the ‘IP.Geo.CountryCode’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "55"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Geo.CountryCode
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "55":
+ id: "55"
+ taskid: 0901d1ad-648d-43ab-8b9d-f736fa46e3f3
+ type: regular
+ task:
+ id: 0901d1ad-648d-43ab-8b9d-f736fa46e3f3
+ version: -1
+ name: Verify Context Error - IP Geo Country Code
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Geo.CountryCode' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Geo.CountryCode' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'IP.Geo.CountryCode' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "56":
+ id: "56"
+ taskid: f3686487-781e-481c-826b-78e2906927e9
+ type: condition
+ task:
+ id: f3686487-781e-481c-826b-78e2906927e9
+ version: -1
+ name: Verify Malicious IP - Vendor
+ description: Verify that the ‘IP.Malicious.Vendor’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "57"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Malicious.Vendor
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1860,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "57":
+ id: "57"
+ taskid: 7a67bab5-4c95-4f14-8daa-ba62ce83cf23
+ type: regular
+ task:
+ id: 7a67bab5-4c95-4f14-8daa-ba62ce83cf23
+ version: -1
+ name: Verify Context Error - Malicious IP - Vendor
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Malicious.Vendor' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Malicious.Vendor' context key.
+ 3- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ 4- The playbook outputs have been modified and no longer contain the 'IP.Malicious.Vendor' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1860,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "58":
+ id: "58"
+ taskid: c78f1f20-a6b6-49e6-86ed-0ff8b14a8af5
+ type: condition
+ task:
+ id: c78f1f20-a6b6-49e6-86ed-0ff8b14a8af5
+ version: -1
+ name: Verify Malicious IP - Detections
+ description: Verify that the ‘IP.Malicious.Detections’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "59"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Malicious.Detections
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2260,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "59":
+ id: "59"
+ taskid: a9836722-5832-4597-82ae-3244b41c44e5
+ type: regular
+ task:
+ id: a9836722-5832-4597-82ae-3244b41c44e5
+ version: -1
+ name: Verify Context Error - Malicious IP - Detections
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Malicious.Detections' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Malicious.Detections' context key.
+ 3- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ 4- The playbook outputs have been modified and no longer contain the 'IP.Malicious.Detections' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2260,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "60":
+ id: "60"
+ taskid: 97864b41-cfae-4f2d-898f-2ff3cf43166a
+ type: condition
+ task:
+ id: 97864b41-cfae-4f2d-898f-2ff3cf43166a
+ version: -1
+ name: Verify Malicious IP - Description
+ description: Verify that the ‘IP.Malicious.Description’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "61"
+ "yes":
+ - "258"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: IP.Malicious.Description
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2650,
+ "y": 2340
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "61":
+ id: "61"
+ taskid: d3ea8bfb-d784-4eb0-837e-39e311e1a2a0
+ type: regular
+ task:
+ id: d3ea8bfb-d784-4eb0-837e-39e311e1a2a0
+ version: -1
+ name: Verify Context Error - Malicious IP - Description
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'IP.Malicious.Description' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'IP.Malicious.Description' context key.
+ 3- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ 4- The playbook outputs have been modified and no longer contain the 'IP.Malicious.Description' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2650,
+ "y": 2525
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "62":
+ id: "62"
+ taskid: de429f6d-ebea-4ce0-8430-9db4f4b586dc
+ type: condition
+ task:
+ id: de429f6d-ebea-4ce0-8430-9db4f4b586dc
+ version: -1
+ name: Verify Score
+ description: Verify that the ‘DBotScore.Score’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "63"
+ "yes":
+ - "257"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: DBotScore.Score
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 1820
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "63":
+ id: "63"
+ taskid: 2091b1f2-bf27-40b5-8032-db6fd6d30cd8
+ type: regular
+ task:
+ id: 2091b1f2-bf27-40b5-8032-db6fd6d30cd8
+ version: -1
+ name: Verify Context Error - Score
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'DBotScore.Score' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'DBotScore.Score' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'DBotScore.Score' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2025
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "64":
+ id: "64"
+ taskid: 3a7657e0-bfaa-42a8-8d75-fd7760ab260a
+ type: condition
+ task:
+ id: 3a7657e0-bfaa-42a8-8d75-fd7760ab260a
+ version: -1
+ name: Verify Vendor
+ description: Verify that the ‘DBotScore.Vendor’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "65"
+ "yes":
+ - "257"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: DBotScore.Vendor
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 1820
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "65":
+ id: "65"
+ taskid: 46d71f39-33ef-4fd3-8345-8f6c96875b07
+ type: regular
+ task:
+ id: 46d71f39-33ef-4fd3-8345-8f6c96875b07
+ version: -1
+ name: Verify Context Error - Vendor
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'DBotScore.Vendor' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'DBotScore.Vendor' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'DBotScore.Vendor' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 2025
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "66":
+ id: "66"
+ taskid: 89066a6e-25a9-405a-8d6c-d24112541734
+ type: condition
+ task:
+ id: 89066a6e-25a9-405a-8d6c-d24112541734
+ version: -1
+ name: Verify Indicator
+ description: Verify that the ‘DBotScore.Indicator’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "67"
+ "yes":
+ - "257"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: DBotScore.Indicator
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 1820
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "67":
+ id: "67"
+ taskid: 179171a6-6d53-4f69-8f32-b2405c6ae1ed
+ type: regular
+ task:
+ id: 179171a6-6d53-4f69-8f32-b2405c6ae1ed
+ version: -1
+ name: Verify Context Error - Indicator
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'DBotScore.Indicator' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'DBotScore.Indicator' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'DBotScore.Indicator' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 2025
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "68":
+ id: "68"
+ taskid: ba1b5e68-4c2e-4cd2-8313-2793a5a59840
+ type: condition
+ task:
+ id: ba1b5e68-4c2e-4cd2-8313-2793a5a59840
+ version: -1
+ name: Verify Indicator Type
+ description: Verify that the ‘DBotScore.Type’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "69"
+ "yes":
+ - "257"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: DBotScore.Type
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1890,
+ "y": 1820
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "69":
+ id: "69"
+ taskid: 9b5f181e-e56f-47ce-8d92-82db592eaf7a
+ type: regular
+ task:
+ id: 9b5f181e-e56f-47ce-8d92-82db592eaf7a
+ version: -1
+ name: Verify Context Error - Indicator Type
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'DBotScore.Type' playbook output was not extracted properly. One of the following may be the cause:
+ 1- 'IP' script did not return any results.
+ 2- The 'IP' automation outputs have been modified and no longer contain the 'DBotScore.Type' context key.
+ 3- The playbook outputs have been modified and no longer contain the 'DBotScore.Type' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1890,
+ "y": 2025
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "70":
+ id: "70"
+ taskid: cd3bea67-4b56-4341-837c-ecefdb6a8550
+ type: condition
+ task:
+ id: cd3bea67-4b56-4341-837c-ecefdb6a8550
+ version: -1
+ name: Verify HTTP Request Response
+ description: Verify that the ‘HttpRequest.Response’ context key was deleted.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "71"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotExists
+ left:
+ value:
+ simple: HttpRequest.Response
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 660,
+ "y": 1210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "71":
+ id: "71"
+ taskid: 6bcd7ef2-caa6-4d7b-84ea-2c40cfdd32a2
+ type: regular
+ task:
+ id: 6bcd7ef2-caa6-4d7b-84ea-2c40cfdd32a2
+ version: -1
+ name: Verify Context Error - HTTP Request Response
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'HttpRequest.Response' context key was not deleted. One of the following may be the cause:
+ 1- The 'DeleteContext' script failed to execute.
+ 2- The 'key' input configuration was changed for the 'DeleteContext' automation used in the 'Delete HTTP Response Context' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 660,
+ "y": 1415
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "72":
+ id: "72"
+ taskid: e842cfc5-1a08-4473-8efb-2ae7feb7bb97
+ type: condition
+ task:
+ id: e842cfc5-1a08-4473-8efb-2ae7feb7bb97
+ version: -1
+ name: Verify Coordinates
+ description: Verify that the ‘Coordinates’ context key was deleted.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "73"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotExists
+ left:
+ value:
+ simple: Coordinates
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1060,
+ "y": 1210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "73":
+ id: "73"
+ taskid: 92aa06cb-4327-4cce-8f8a-763bd51328c9
+ type: regular
+ task:
+ id: 92aa06cb-4327-4cce-8f8a-763bd51328c9
+ version: -1
+ name: Verify Context Error - Coordinates
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Coordinates' context key was not deleted. One of the following may be the cause:
+ 1- The 'DeleteContext' script failed to execute.
+ 2- The 'key' input configuration was changed for the 'DeleteContext' automation used in the 'Delete Coordinates Context' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1060,
+ "y": 1415
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "74":
+ id: "74"
+ taskid: 3b46bd3b-cef6-4204-87b0-a040f85149b2
+ type: condition
+ task:
+ id: 3b46bd3b-cef6-4204-87b0-a040f85149b2
+ version: -1
+ name: Verify Events
+ description: Verify that the ‘Events’ context key was deleted.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "75"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotExists
+ left:
+ value:
+ simple: Events
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1460,
+ "y": 1210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "75":
+ id: "75"
+ taskid: 7ef51d87-0822-47f9-8345-d1df3cc747b7
+ type: regular
+ task:
+ id: 7ef51d87-0822-47f9-8345-d1df3cc747b7
+ version: -1
+ name: Verify Context Error - Events
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Events' context key was not deleted. One of the following may be the cause:
+ 1- The 'DeleteContext' script failed to execute.
+ 2- The 'key' input configuration was changed for the 'DeleteContext' automation used in the 'Delete Events Context' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1460,
+ "y": 1415
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "76":
+ id: "76"
+ taskid: 127857c6-7608-48ed-89ed-84e4c6578ef8
+ type: condition
+ task:
+ id: 127857c6-7608-48ed-89ed-84e4c6578ef8
+ version: -1
+ name: Verify Impossible Traveler Events
+ description: Verify that the 'ImpossibleTraveler.Events' context key was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "77"
+ "yes":
+ - "256"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ImpossibleTraveler.Events
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1860,
+ "y": 1210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "77":
+ id: "77"
+ taskid: d3b8fd4c-7ff4-47e7-828b-480cd7117aaf
+ type: regular
+ task:
+ id: d3b8fd4c-7ff4-47e7-828b-480cd7117aaf
+ version: -1
+ name: Verify Context Error - Impossible Traveler Events
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ImpossibleTraveler.Events' context key was not extracted properly. One of the following may be the cause:
+ 1- An error occurred in the 'Set' script of the 'Create Impossible Traveler Events' task.
+ 2- The 'key' input configuration was changed for the 'Set' automation used in the 'Create Impossible Traveler Events' task.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1860,
+ "y": 1415
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "78":
+ id: "78"
+ taskid: aff7f8e8-f8f7-47d0-834f-ca38bf90772b
+ type: title
+ task:
+ id: aff7f8e8-f8f7-47d0-834f-ca38bf90772b
+ version: -1
+ name: Check Playbook Outputs
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "254"
+ - "255"
+ - "186"
+ - "215"
+ - "125"
+ - "102"
+ - "268"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1530
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "79":
+ id: "79"
+ taskid: 8a08c38e-c961-4df8-84cb-71bc573178b6
+ type: condition
+ task:
+ id: 8a08c38e-c961-4df8-84cb-71bc573178b6
+ version: -1
+ name: Verify Manager ID
+ description: Verify that the 'MSGraphUserManager.Manager.ID' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "80"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.ID
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "80":
+ id: "80"
+ taskid: 17bd05a7-b9f6-4c6c-89be-7175cfc598fc
+ type: regular
+ task:
+ id: 17bd05a7-b9f6-4c6c-89be-7175cfc598fc
+ version: -1
+ name: Verify Context Error - Manager ID
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.ID' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.ID' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.ID' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "81":
+ id: "81"
+ taskid: fa59f131-440c-4051-87ed-50d123acbbff
+ type: title
+ task:
+ id: fa59f131-440c-4051-87ed-50d123acbbff
+ version: -1
+ name: MS Graph User Manager
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "79"
+ - "82"
+ - "84"
+ - "86"
+ - "88"
+ - "90"
+ - "92"
+ - "94"
+ - "96"
+ - "98"
+ - "100"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3870
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "82":
+ id: "82"
+ taskid: 343646fb-061c-4843-8242-f7ed9177503f
+ type: condition
+ task:
+ id: 343646fb-061c-4843-8242-f7ed9177503f
+ version: -1
+ name: Verify Display Name
+ description: Verify that the 'MSGraphUserManager.Manager.DisplayName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "83"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.DisplayName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "83":
+ id: "83"
+ taskid: 5cd384f3-c97e-4785-8839-492fc18d2538
+ type: regular
+ task:
+ id: 5cd384f3-c97e-4785-8839-492fc18d2538
+ version: -1
+ name: Verify Context Error - Display Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.DisplayName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.DisplayName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.DisplayName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "84":
+ id: "84"
+ taskid: b937e02e-d630-44da-8173-eedc7e93971c
+ type: condition
+ task:
+ id: b937e02e-d630-44da-8173-eedc7e93971c
+ version: -1
+ name: Verify Given Name
+ description: Verify that the 'MSGraphUserManager.Manager.GivenName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "85"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.GivenName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "85":
+ id: "85"
+ taskid: 4cec9187-f3ba-4cbe-86ae-0c1fbd71caa9
+ type: regular
+ task:
+ id: 4cec9187-f3ba-4cbe-86ae-0c1fbd71caa9
+ version: -1
+ name: Verify Context Error - Given Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.GivenName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.GivenName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.GivenName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "86":
+ id: "86"
+ taskid: a4fad89a-9fee-4c0a-80ca-779b84349b76
+ type: condition
+ task:
+ id: a4fad89a-9fee-4c0a-80ca-779b84349b76
+ version: -1
+ name: Verify Business Phones
+ description: Verify that the 'MSGraphUserManager.Manager.BusinessPhones' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "87"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.BusinessPhones
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "87":
+ id: "87"
+ taskid: b1f89149-eee9-4588-811b-d2200399b040
+ type: regular
+ task:
+ id: b1f89149-eee9-4588-811b-d2200399b040
+ version: -1
+ name: Verify Context Error - Business Phones
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.BusinessPhones' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.BusinessPhones' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.BusinessPhones' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "88":
+ id: "88"
+ taskid: 282f0f69-4c6d-4dfc-8fb3-a18e83d4de44
+ type: condition
+ task:
+ id: 282f0f69-4c6d-4dfc-8fb3-a18e83d4de44
+ version: -1
+ name: Verify Job Title
+ description: Verify that the 'MSGraphUserManager.Manager.JobTitle' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "89"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.JobTitle
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2270,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "89":
+ id: "89"
+ taskid: f2f53af0-7c5c-4b49-8415-a5d974cbefd4
+ type: regular
+ task:
+ id: f2f53af0-7c5c-4b49-8415-a5d974cbefd4
+ version: -1
+ name: Verify Context Error - Job Title
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.JobTitle' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.JobTitle' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.JobTitle' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2270,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "90":
+ id: "90"
+ taskid: 053e1e88-488c-4fa1-80e1-eaa966ab903d
+ type: condition
+ task:
+ id: 053e1e88-488c-4fa1-80e1-eaa966ab903d
+ version: -1
+ name: Verify Mail
+ description: Verify that the 'MSGraphUserManager.Manager.Mail' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "91"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.Mail
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2670,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "91":
+ id: "91"
+ taskid: a16218a5-1f60-49f5-8089-33776c624e6e
+ type: regular
+ task:
+ id: a16218a5-1f60-49f5-8089-33776c624e6e
+ version: -1
+ name: Verify Context Error - Mail
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.Mail' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.Mail' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.Mail' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2670,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "92":
+ id: "92"
+ taskid: d937c8b3-8ab7-44f1-83d9-a8850b70cc0a
+ type: condition
+ task:
+ id: d937c8b3-8ab7-44f1-83d9-a8850b70cc0a
+ version: -1
+ name: Verify Mobile Phone
+ description: Verify that the 'MSGraphUserManager.Manager.MobilePhone' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "93"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.MobilePhone
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3070,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "93":
+ id: "93"
+ taskid: 1223c98a-ed7f-4825-8e6c-5f4ba476ba36
+ type: regular
+ task:
+ id: 1223c98a-ed7f-4825-8e6c-5f4ba476ba36
+ version: -1
+ name: Verify Context Error - Mobile Phone
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.MobilePhone' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.MobilePhone' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.MobilePhone' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3070,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "94":
+ id: "94"
+ taskid: 3f447135-6301-46b6-8712-19cf476d1efd
+ type: condition
+ task:
+ id: 3f447135-6301-46b6-8712-19cf476d1efd
+ version: -1
+ name: Verify Office Location
+ description: Verify that the 'MSGraphUserManager.Manager.OfficeLocation' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "95"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.OfficeLocation
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3470,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "95":
+ id: "95"
+ taskid: ea5c6ae1-7d24-46e9-8932-6be2874995a9
+ type: regular
+ task:
+ id: ea5c6ae1-7d24-46e9-8932-6be2874995a9
+ version: -1
+ name: Verify Context Error - Office Location
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.OfficeLocation' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.OfficeLocation' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.OfficeLocation' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3470,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "96":
+ id: "96"
+ taskid: 35410f8f-b4dc-4c7d-8102-218ec298502b
+ type: condition
+ task:
+ id: 35410f8f-b4dc-4c7d-8102-218ec298502b
+ version: -1
+ name: Verify Preferred Language
+ description: Verify that the 'MSGraphUserManager.Manager.PreferredLanguage' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "97"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.PreferredLanguage
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3860,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "97":
+ id: "97"
+ taskid: 6ab7bc52-ec49-45da-8b0d-bc9cc05b5128
+ type: regular
+ task:
+ id: 6ab7bc52-ec49-45da-8b0d-bc9cc05b5128
+ version: -1
+ name: Verify Context Error - Preferred Language
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.PreferredLanguage' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.PreferredLanguage' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.PreferredLanguage' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3860,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "98":
+ id: "98"
+ taskid: ef9e0300-d8f5-40e5-895f-b43b94a62363
+ type: condition
+ task:
+ id: ef9e0300-d8f5-40e5-895f-b43b94a62363
+ version: -1
+ name: Verify Manager Surname
+ description: Verify that the 'MSGraphUserManager.Manager.Surname' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "99"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.Surname
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4260,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "99":
+ id: "99"
+ taskid: 3f701158-a0dc-4d98-841b-c745f45ecb81
+ type: regular
+ task:
+ id: 3f701158-a0dc-4d98-841b-c745f45ecb81
+ version: -1
+ name: Verify Context Error - Manager Surname
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.Surname' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.Surname' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.Surname' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4260,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "100":
+ id: "100"
+ taskid: 8737dc7b-72d6-453c-88a2-d973855d02a6
+ type: condition
+ task:
+ id: 8737dc7b-72d6-453c-88a2-d973855d02a6
+ version: -1
+ name: Verify User Principal Name
+ description: Verify that the 'MSGraphUserManager.Manager.UserPrincipalName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "101"
+ "yes":
+ - "260"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUserManager.Manager.UserPrincipalName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4660,
+ "y": 4010
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "101":
+ id: "101"
+ taskid: ecb03cbb-4d2d-4641-8db1-9b179e62a48c
+ type: regular
+ task:
+ id: ecb03cbb-4d2d-4641-8db1-9b179e62a48c
+ version: -1
+ name: Verify Context Error - User Principal Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUserManager.Manager.UserPrincipalName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get-manager' script failed to execute.
+ 2- The 'msgraph-user-get-manager' script did not return any results.
+ 3- The 'msgraph-user-get-manager' automation outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.UserPrincipalName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUserManager.Manager.UserPrincipalName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4660,
+ "y": 4195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "102":
+ id: "102"
+ taskid: 73811fe8-cea1-4965-8ae2-0867b9d54ab2
+ type: title
+ task:
+ id: 73811fe8-cea1-4965-8ae2-0867b9d54ab2
+ version: -1
+ name: MS Graph User
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "103"
+ - "123"
+ - "113"
+ - "105"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "103":
+ id: "103"
+ taskid: de246c5a-c481-4fde-81c4-2b931e3ac5e7
+ type: condition
+ task:
+ id: de246c5a-c481-4fde-81c4-2b931e3ac5e7
+ version: -1
+ name: Verify User ID
+ description: Verify that the 'MSGraphUser.ID' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "104"
+ "yes":
+ - "261"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraphUser.ID
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "104":
+ id: "104"
+ taskid: 918e7c1c-f76f-477b-8842-2a77c8a8cf93
+ type: regular
+ task:
+ id: 918e7c1c-f76f-477b-8842-2a77c8a8cf93
+ version: -1
+ name: Verify Context Error - User ID
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUser.ID' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get' script failed to execute.
+ 2- The 'msgraph-user-get' script did not return any results.
+ 3- The 'msgraph-user-get' automation outputs have been modified and no longer contain the 'MSGraphUser.ID' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUser.ID' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3535
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "105":
+ id: "105"
+ taskid: 246744a1-dd77-4c7c-8b71-1bb767138b7e
+ type: condition
+ task:
+ id: 246744a1-dd77-4c7c-8b71-1bb767138b7e
+ version: -1
+ name: Verify Display Name
+ description: Verify that the 'MSGraphUser.DisplayName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "106"
+ "yes":
+ - "261"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: MSGraphUser.DisplayName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 3350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "106":
+ id: "106"
+ taskid: ccef8d97-c66f-405f-8e60-87410081c201
+ type: regular
+ task:
+ id: ccef8d97-c66f-405f-8e60-87410081c201
+ version: -1
+ name: Verify Context Error - Display Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUser.DisplayName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get' script failed to execute.
+ 2- The 'msgraph-user-get' script did not return any results.
+ 3- The 'msgraph-user-get' automation outputs have been modified and no longer contain the 'MSGraphUser.DisplayName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUser.DisplayName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 3535
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "113":
+ id: "113"
+ taskid: 65aea3a7-b0b7-4364-8091-26c43ccf797b
+ type: condition
+ task:
+ id: 65aea3a7-b0b7-4364-8091-26c43ccf797b
+ version: -1
+ name: Verify User Mail
+ description: Verify that the 'MSGraphUser.Mail' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "114"
+ "yes":
+ - "261"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: MSGraphUser.Mail
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 3350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "114":
+ id: "114"
+ taskid: 5043ec16-4ea6-4da8-8c5b-5d8d5237700a
+ type: regular
+ task:
+ id: 5043ec16-4ea6-4da8-8c5b-5d8d5237700a
+ version: -1
+ name: Verify Context Error - User Mail
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUser.Mail' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get' script failed to execute.
+ 2- The 'msgraph-user-get' script did not return any results.
+ 3- The 'msgraph-user-get' automation outputs have been modified and no longer contain the 'MSGraphUser.Mail' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUser.Mail' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 3535
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "123":
+ id: "123"
+ taskid: 2ec0b59a-b42c-4330-8d90-3b4236aaba48
+ type: condition
+ task:
+ id: 2ec0b59a-b42c-4330-8d90-3b4236aaba48
+ version: -1
+ name: Verify Principal Name
+ description: Verify that the 'MSGraphUser.UserPrincipalName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "124"
+ "yes":
+ - "261"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: MSGraphUser.UserPrincipalName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1890,
+ "y": 3350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "124":
+ id: "124"
+ taskid: 1dd10b06-88aa-4025-832f-8b2413b12298
+ type: regular
+ task:
+ id: 1dd10b06-88aa-4025-832f-8b2413b12298
+ version: -1
+ name: Verify Context Error - Principal Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'MSGraphUser.UserPrincipalName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'msgraph-user-get' script failed to execute.
+ 2- The 'msgraph-user-get' script did not return any results.
+ 3- The 'msgraph-user-get' automation outputs have been modified and no longer contain the 'MSGraphUser.UserPrincipalName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'MSGraphUser.UserPrincipalName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1890,
+ "y": 3535
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "125":
+ id: "125"
+ taskid: d878fd66-4361-4f88-85fe-e996d364d24d
+ type: title
+ task:
+ id: d878fd66-4361-4f88-85fe-e996d364d24d
+ version: -1
+ name: Active Directory Users
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "126"
+ - "128"
+ - "130"
+ - "132"
+ - "134"
+ - "136"
+ - "140"
+ - "142"
+ - "144"
+ - "146"
+ - "148"
+ - "150"
+ - "152"
+ - "154"
+ - "156"
+ - "158"
+ - "160"
+ - "162"
+ - "164"
+ - "166"
+ - "168"
+ - "170"
+ - "172"
+ - "174"
+ - "176"
+ - "178"
+ - "180"
+ - "182"
+ - "184"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4880
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "126":
+ id: "126"
+ taskid: 7fe24d5d-2784-4853-8ace-ef5a9d200bde
+ type: condition
+ task:
+ id: 7fe24d5d-2784-4853-8ace-ef5a9d200bde
+ version: -1
+ name: Verify User DN
+ description: Verify that the 'ActiveDirectory.Users.dn' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "127"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.dn
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "127":
+ id: "127"
+ taskid: c4efe020-1274-485d-8e05-67212779b302
+ type: regular
+ task:
+ id: c4efe020-1274-485d-8e05-67212779b302
+ version: -1
+ name: Verify Context Error - User DN
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.dn' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.dn' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.dn' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "128":
+ id: "128"
+ taskid: daf07b9d-f691-43d9-89b6-25e8eef82e33
+ type: condition
+ task:
+ id: daf07b9d-f691-43d9-89b6-25e8eef82e33
+ version: -1
+ name: Verify Display Name
+ description: Verify that the 'ActiveDirectory.Users.displayName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "129"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.displayName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "129":
+ id: "129"
+ taskid: 28fb613d-05a6-43e5-8dde-672e48893122
+ type: regular
+ task:
+ id: 28fb613d-05a6-43e5-8dde-672e48893122
+ version: -1
+ name: Verify Context Error - Display Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.displayName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.displayName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.displayName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "130":
+ id: "130"
+ taskid: b7cb39d8-5281-4cf1-8167-8ddb34e5a2a4
+ type: condition
+ task:
+ id: b7cb39d8-5281-4cf1-8167-8ddb34e5a2a4
+ version: -1
+ name: Verify User Name
+ description: Verify that the 'ActiveDirectory.Users.name' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "131"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.name
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "131":
+ id: "131"
+ taskid: f416a717-c015-42ed-86c0-07d278d2b386
+ type: regular
+ task:
+ id: f416a717-c015-42ed-86c0-07d278d2b386
+ version: -1
+ name: Verify Context Error - User Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.name' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.name' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.name' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1490,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "132":
+ id: "132"
+ taskid: 5c304fd1-d686-4859-8a54-73cd502b1ed2
+ type: condition
+ task:
+ id: 5c304fd1-d686-4859-8a54-73cd502b1ed2
+ version: -1
+ name: Verify SAM Account Name
+ description: Verify that the 'ActiveDirectory.Users.sAMAccountName' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "133"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.sAMAccountName
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1880,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "133":
+ id: "133"
+ taskid: a2158f83-8827-4838-8f2c-a19b1e590355
+ type: regular
+ task:
+ id: a2158f83-8827-4838-8f2c-a19b1e590355
+ version: -1
+ name: Verify Context Error - SAM Account Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.sAMAccountName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.sAMAccountName' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.sAMAccountName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1880,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "134":
+ id: "134"
+ taskid: 09f4579e-6050-492c-8597-c58f8edccdac
+ type: condition
+ task:
+ id: 09f4579e-6050-492c-8597-c58f8edccdac
+ version: -1
+ name: Verify User Account Control
+ description: Verify that the 'ActiveDirectory.Users.userAccountControl' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "135"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControl
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2280,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "135":
+ id: "135"
+ taskid: 92fbd9ff-e293-4cb1-81bd-db589bed1ebf
+ type: regular
+ task:
+ id: 92fbd9ff-e293-4cb1-81bd-db589bed1ebf
+ version: -1
+ name: Verify Context Error - User Account Control
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControl' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControl' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControl' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2280,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "136":
+ id: "136"
+ taskid: 1b787f09-408e-40fd-8930-8422e153428a
+ type: condition
+ task:
+ id: 1b787f09-408e-40fd-8930-8422e153428a
+ version: -1
+ name: Verify User Mail
+ description: Verify that the 'ActiveDirectory.Users.mail' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "137"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.mail
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2680,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "137":
+ id: "137"
+ taskid: be77799a-b488-4adc-8e9a-b5949e855841
+ type: regular
+ task:
+ id: be77799a-b488-4adc-8e9a-b5949e855841
+ version: -1
+ name: Verify Context Error - User Mail
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.mail' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.mail' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.mail' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2680,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "140":
+ id: "140"
+ taskid: 40ea9d06-4d31-4cbb-82dd-0b4f7b5f0081
+ type: condition
+ task:
+ id: 40ea9d06-4d31-4cbb-82dd-0b4f7b5f0081
+ version: -1
+ name: Verify Member Of
+ description: Verify that the 'ActiveDirectory.Users.memberOf' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "141"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: ActiveDirectory.Users.memberOf
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3090,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "141":
+ id: "141"
+ taskid: 85253fd6-c89e-4b03-8b8f-d1a25b92d1e1
+ type: regular
+ task:
+ id: 85253fd6-c89e-4b03-8b8f-d1a25b92d1e1
+ version: -1
+ name: Verify Context Error - Member Of
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.memberOf' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.memberOf' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.memberOf' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3090,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "142":
+ id: "142"
+ taskid: 2d82588d-dd2e-425f-8a3b-fc0e8f1c57f6
+ type: condition
+ task:
+ id: 2d82588d-dd2e-425f-8a3b-fc0e8f1c57f6
+ version: -1
+ name: Verify User Account Control Fields - Script
+ description: Verify that the 'ActiveDirectory.Users.userAccountControlFields.SCRIPT' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "143"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.SCRIPT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3490,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "143":
+ id: "143"
+ taskid: c3827dbf-eaa8-4040-80ac-5bd6ed68771a
+ type: regular
+ task:
+ id: c3827dbf-eaa8-4040-80ac-5bd6ed68771a
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Script
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.SCRIPT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SCRIPT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SCRIPT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3490,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "144":
+ id: "144"
+ taskid: a17c6413-c9af-4ea0-89ff-5365a743b191
+ type: condition
+ task:
+ id: a17c6413-c9af-4ea0-89ff-5365a743b191
+ version: -1
+ name: Verify User Account Control Fields - Account Disable
+ description: Verify that the 'ActiveDirectory.Users.userAccountControlFields.ACCOUNTDISABLE' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "145"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.ACCOUNTDISABLE
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3890,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "145":
+ id: "145"
+ taskid: c950675f-c6c7-4c73-8c5e-0d34efb51e75
+ type: regular
+ task:
+ id: c950675f-c6c7-4c73-8c5e-0d34efb51e75
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Account Disable
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.ACCOUNTDISABLE' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.ACCOUNTDISABLE' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.ACCOUNTDISABLE' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3890,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "146":
+ id: "146"
+ taskid: 51aa1654-5a35-4a7a-8aa9-ba2b3b737c5a
+ type: condition
+ task:
+ id: 51aa1654-5a35-4a7a-8aa9-ba2b3b737c5a
+ version: -1
+ name: Verify User Account Control Fields - Home Folder Required
+ description: Verify that the 'ActiveDirectory.Users.userAccountControlFields.HOMEDIR_REQUIRED' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "147"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.HOMEDIR_REQUIRED
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4300,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "147":
+ id: "147"
+ taskid: 71671819-9ae4-4e9e-875c-c627d0edf397
+ type: regular
+ task:
+ id: 71671819-9ae4-4e9e-875c-c627d0edf397
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Home Folder Required
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.HOMEDIR_REQUIRED' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.HOMEDIR_REQUIRED' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.HOMEDIR_REQUIRED' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4300,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "148":
+ id: "148"
+ taskid: c0c26286-1ec6-4a00-822e-3ec22f26cfc3
+ type: condition
+ task:
+ id: c0c26286-1ec6-4a00-822e-3ec22f26cfc3
+ version: -1
+ name: Verify User Account Control Fields - Lockout
+ description: Verify that the 'ActiveDirectory.Users.userAccountControlFields.LOCKOUT' playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "149"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.LOCKOUT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4710,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "149":
+ id: "149"
+ taskid: e9bd6445-52a7-43cc-8bf2-66b88b4dfed9
+ type: regular
+ task:
+ id: e9bd6445-52a7-43cc-8bf2-66b88b4dfed9
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Lockout
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.LOCKOUT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- The 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.LOCKOUT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.LOCKOUT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4710,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "150":
+ id: "150"
+ taskid: 0988a61c-b2b8-45e7-86d0-0af5628297e4
+ type: condition
+ task:
+ id: 0988a61c-b2b8-45e7-86d0-0af5628297e4
+ version: -1
+ name: Verify User Account Control Fields - Password Not Required
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.PASSWD_NOTREQD’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "151"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.PASSWD_NOTREQD
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5110,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "151":
+ id: "151"
+ taskid: 5d088f89-b4d8-4ff2-8aeb-e6a8ceb14fcf
+ type: regular
+ task:
+ id: 5d088f89-b4d8-4ff2-8aeb-e6a8ceb14fcf
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Password Not Required
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.PASSWD_NOTREQD' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWD_NOTREQD' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWD_NOTREQD' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5110,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "152":
+ id: "152"
+ taskid: 1f4c843a-b704-46d2-86c4-2b12239fa708
+ type: condition
+ task:
+ id: 1f4c843a-b704-46d2-86c4-2b12239fa708
+ version: -1
+ name: Verify User Account Control Fields - Password Can't Change
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.PASSWD_CANT_CHANGE’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "153"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.PASSWD_CANT_CHANGE
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5510,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "153":
+ id: "153"
+ taskid: 5a396993-4bd1-4d55-8ef2-39d5ebb6f502
+ type: regular
+ task:
+ id: 5a396993-4bd1-4d55-8ef2-39d5ebb6f502
+ version: -1
+ name: "Verify Context Error - User Account Control Fields - \nPassword Cant Change"
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.PASSWD_CANT_CHANGE' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWD_CANT_CHANGE' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWD_CANT_CHANGE' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5510,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "154":
+ id: "154"
+ taskid: f4fece23-6b99-4c3c-88c1-991123ee88fc
+ type: condition
+ task:
+ id: f4fece23-6b99-4c3c-88c1-991123ee88fc
+ version: -1
+ name: Verify User Account Control Fields - Encrypted Text Password Allowed
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.ENCRYPTED_TEXT_PWD_ALLOWED’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "155"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.ENCRYPTED_TEXT_PWD_ALLOWED
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5910,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "155":
+ id: "155"
+ taskid: b464a3a6-9c90-4856-8514-7f600f0bca2c
+ type: regular
+ task:
+ id: b464a3a6-9c90-4856-8514-7f600f0bca2c
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Encrypted Text Password Allowed
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.ENCRYPTED_TEXT_PWD_ALLOWED' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.ENCRYPTED_TEXT_PWD_ALLOWED' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.ENCRYPTED_TEXT_PWD_ALLOWED' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5910,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "156":
+ id: "156"
+ taskid: 25a48a2e-d00d-4121-810e-1c7b83beba54
+ type: condition
+ task:
+ id: 25a48a2e-d00d-4121-810e-1c7b83beba54
+ version: -1
+ name: Verify User Account Control Fields - Temporary Duplicate Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.TEMP_DUPLICATE_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "157"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.TEMP_DUPLICATE_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6310,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "157":
+ id: "157"
+ taskid: c13c01c2-3f5a-4a44-8e42-58f591da1d3b
+ type: regular
+ task:
+ id: c13c01c2-3f5a-4a44-8e42-58f591da1d3b
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Temporary Duplicate Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.TEMP_DUPLICATE_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TEMP_DUPLICATE_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TEMP_DUPLICATE_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6310,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "158":
+ id: "158"
+ taskid: 68b44f5e-efba-4ad9-829d-d6ca83c9ea7b
+ type: condition
+ task:
+ id: 68b44f5e-efba-4ad9-829d-d6ca83c9ea7b
+ version: -1
+ name: Verify User Account Control Fields - Normal Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.NORMAL_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "159"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.NORMAL_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6720,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "159":
+ id: "159"
+ taskid: d4ca1518-7d79-4157-8de1-596ca1ee514c
+ type: regular
+ task:
+ id: d4ca1518-7d79-4157-8de1-596ca1ee514c
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Normal Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.NORMAL_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.NORMAL_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.NORMAL_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6720,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "160":
+ id: "160"
+ taskid: 19220a5b-6e6c-4214-864b-3c07cb02369f
+ type: condition
+ task:
+ id: 19220a5b-6e6c-4214-864b-3c07cb02369f
+ version: -1
+ name: 'Verify User Account Control Fields - Interdomain Trust Account'
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.INTERDOMAIN_TRUST_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "161"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.INTERDOMAIN_TRUST_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7120,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "161":
+ id: "161"
+ taskid: 8a823d33-b6e2-483e-8b56-744798384f1e
+ type: regular
+ task:
+ id: 8a823d33-b6e2-483e-8b56-744798384f1e
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Interdomain Trust Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.INTERDOMAIN_TRUST_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.INTERDOMAIN_TRUST_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.INTERDOMAIN_TRUST_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7120,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "162":
+ id: "162"
+ taskid: ecd4b528-20df-4470-8ff9-5cc67d96434c
+ type: condition
+ task:
+ id: ecd4b528-20df-4470-8ff9-5cc67d96434c
+ version: -1
+ name: Verify User Account Control Fields - Workstation Trust Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.WORKSTATION_TRUST_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "163"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.WORKSTATION_TRUST_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7530,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "163":
+ id: "163"
+ taskid: d253640a-4cc8-4c84-80a7-b7fe2c30e24f
+ type: regular
+ task:
+ id: d253640a-4cc8-4c84-80a7-b7fe2c30e24f
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Workstation Trust Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.WORKSTATION_TRUST_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.WORKSTATION_TRUST_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.WORKSTATION_TRUST_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7530,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "164":
+ id: "164"
+ taskid: dc3b1ceb-16b0-4d6b-8c44-1bced24216ae
+ type: condition
+ task:
+ id: dc3b1ceb-16b0-4d6b-8c44-1bced24216ae
+ version: -1
+ name: Verify User Account Control Fields - Server Trust Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.SERVER_TRUST_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "165"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.SERVER_TRUST_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7930,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "165":
+ id: "165"
+ taskid: 67848ee0-8912-4523-8178-8da99ab95d0e
+ type: regular
+ task:
+ id: 67848ee0-8912-4523-8178-8da99ab95d0e
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Server Trust Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.SERVER_TRUST_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SERVER_TRUST_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SERVER_TRUST_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7930,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "166":
+ id: "166"
+ taskid: 75fde378-ccef-4b75-8373-585b0f045689
+ type: condition
+ task:
+ id: 75fde378-ccef-4b75-8373-585b0f045689
+ version: -1
+ name: Verify User Account Control Fields - Don't Expire Password
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.DONT_EXPIRE_PASSWORD’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "167"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.DONT_EXPIRE_PASSWORD
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 8330,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "167":
+ id: "167"
+ taskid: 30f43232-1a90-4a9e-853a-2d62bda4f75a
+ type: regular
+ task:
+ id: 30f43232-1a90-4a9e-853a-2d62bda4f75a
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Don't Expire Password
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.DONT_EXPIRE_PASSWORD' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.DONT_EXPIRE_PASSWORD' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.DONT_EXPIRE_PASSWORD' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 8330,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "168":
+ id: "168"
+ taskid: eb31ac97-82c0-43d0-87b0-d059de8da586
+ type: condition
+ task:
+ id: eb31ac97-82c0-43d0-87b0-d059de8da586
+ version: -1
+ name: Verify User Account Control Fields - MNS Logon Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.MNS_LOGON_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "169"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.MNS_LOGON_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 8720,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "169":
+ id: "169"
+ taskid: c4a4c92e-5d29-452b-8a7e-3dff2868bdf5
+ type: regular
+ task:
+ id: c4a4c92e-5d29-452b-8a7e-3dff2868bdf5
+ version: -1
+ name: Verify Context Error - User Account Control Fields - MNS Logon Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.MNS_LOGON_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.MNS_LOGON_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.MNS_LOGON_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 8720,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "170":
+ id: "170"
+ taskid: ad079d5d-cf1d-4aa7-86c7-81143defed54
+ type: condition
+ task:
+ id: ad079d5d-cf1d-4aa7-86c7-81143defed54
+ version: -1
+ name: Verify User Account Control Fields - Smart Card Required
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.SMARTCARD_REQUIRED’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "171"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.SMARTCARD_REQUIRED
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9110,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "171":
+ id: "171"
+ taskid: d5ab4f1b-d5cd-48b8-8fc4-06ef4c5a3abb
+ type: regular
+ task:
+ id: d5ab4f1b-d5cd-48b8-8fc4-06ef4c5a3abb
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Smart Card Required
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.SMARTCARD_REQUIRED' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SMARTCARD_REQUIRED' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.SMARTCARD_REQUIRED' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9110,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "172":
+ id: "172"
+ taskid: 90c30cbc-0de4-4231-800c-6615eb78d795
+ type: condition
+ task:
+ id: 90c30cbc-0de4-4231-800c-6615eb78d795
+ version: -1
+ name: Verify User Account Control Fields - Trusted For Delegation
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.TRUSTED_FOR_DELEGATION’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "173"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.TRUSTED_FOR_DELEGATION
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9510,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "173":
+ id: "173"
+ taskid: 1f3e7ae3-225c-4ed7-85aa-616295041c8f
+ type: regular
+ task:
+ id: 1f3e7ae3-225c-4ed7-85aa-616295041c8f
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Trusted For Delegation
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_FOR_DELEGATION' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_FOR_DELEGATION' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_FOR_DELEGATION' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9510,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "174":
+ id: "174"
+ taskid: f8b6629a-9a89-47d3-8246-b6c699d121e8
+ type: condition
+ task:
+ id: f8b6629a-9a89-47d3-8246-b6c699d121e8
+ version: -1
+ name: Verify User Account Control Fields - Not Delegated
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.NOT_DELEGATED’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "175"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.NOT_DELEGATED
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9910,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "175":
+ id: "175"
+ taskid: da84670e-388b-41ca-83d0-90c9e5f32415
+ type: regular
+ task:
+ id: da84670e-388b-41ca-83d0-90c9e5f32415
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Not Delegated
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.NOT_DELEGATED' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.NOT_DELEGATED' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.NOT_DELEGATED' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 9910,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "176":
+ id: "176"
+ taskid: 6e8a9d1a-02dc-4ccf-8196-c3f01ae6af02
+ type: condition
+ task:
+ id: 6e8a9d1a-02dc-4ccf-8196-c3f01ae6af02
+ version: -1
+ name: Verify User Account Control Fields - Use Data Encryption Standard Only
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.USE_DES_KEY_ONLY’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "177"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.USE_DES_KEY_ONLY
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10310,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "177":
+ id: "177"
+ taskid: 69d6b05e-0f9f-47f4-8292-8b7682f8950e
+ type: regular
+ task:
+ id: 69d6b05e-0f9f-47f4-8292-8b7682f8950e
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Use Data Encryption Standard Only
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.USE_DES_KEY_ONLY' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.USE_DES_KEY_ONLY' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.USE_DES_KEY_ONLY' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10310,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "178":
+ id: "178"
+ taskid: 212adf24-0b44-4d03-8930-5cf402803c41
+ type: condition
+ task:
+ id: 212adf24-0b44-4d03-8930-5cf402803c41
+ version: -1
+ name: Verify User Account Control Fields - Don't Require Pre-authentication
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.DONT_REQ_PREAUTH’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "179"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.DONT_REQ_PREAUTH
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10720,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "179":
+ id: "179"
+ taskid: 6577a414-ba29-4505-88d7-31d71746f787
+ type: regular
+ task:
+ id: 6577a414-ba29-4505-88d7-31d71746f787
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Don't Require Pre-authentication
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.DONT_REQ_PREAUTH' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.DONT_REQ_PREAUTH' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.DONT_REQ_PREAUTH' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 10720,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "180":
+ id: "180"
+ taskid: 3d0dab56-af8c-423c-8c94-d309f2f41a45
+ type: condition
+ task:
+ id: 3d0dab56-af8c-423c-8c94-d309f2f41a45
+ version: -1
+ name: Verify User Account Control Fields - Password Expired
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.PASSWORD_EXPIRED’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "181"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.PASSWORD_EXPIRED
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11120,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "181":
+ id: "181"
+ taskid: f52bd379-6fb8-4ae3-8539-945d29bc4e5b
+ type: regular
+ task:
+ id: f52bd379-6fb8-4ae3-8539-945d29bc4e5b
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Password Expired
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.PASSWORD_EXPIRED' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWORD_EXPIRED' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PASSWORD_EXPIRED' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11120,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "182":
+ id: "182"
+ taskid: 8780d1ee-f64c-4387-8cf4-59c83bcd0815
+ type: condition
+ task:
+ id: 8780d1ee-f64c-4387-8cf4-59c83bcd0815
+ version: -1
+ name: Verify User Account Control Fields - Trusted to Authenticate For Delegation
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.TRUSTED_TO_AUTH_FOR_DELEGATION’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "183"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.TRUSTED_TO_AUTH_FOR_DELEGATION
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11530,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "183":
+ id: "183"
+ taskid: bf93c18e-fddf-47e0-8649-48686b820207
+ type: regular
+ task:
+ id: bf93c18e-fddf-47e0-8649-48686b820207
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Trusted to Authenticate For Delegation
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_TO_AUTH_FOR_DELEGATION' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_TO_AUTH_FOR_DELEGATION' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.TRUSTED_TO_AUTH_FOR_DELEGATION' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11530,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "184":
+ id: "184"
+ taskid: d3b5b3ad-d765-4358-87c4-8204723b625d
+ type: condition
+ task:
+ id: d3b5b3ad-d765-4358-87c4-8204723b625d
+ version: -1
+ name: Verify User Account Control Fields - Partial Secrets Account
+ description: Verify that the ‘ActiveDirectory.Users.userAccountControlFields.PARTIAL_SECRETS_ACCOUNT’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "185"
+ "yes":
+ - "263"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: ActiveDirectory.Users.userAccountControlFields.PARTIAL_SECRETS_ACCOUNT
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11940,
+ "y": 5020
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "185":
+ id: "185"
+ taskid: 4cdb01d3-732b-44dd-8764-0a0ae04d483a
+ type: regular
+ task:
+ id: 4cdb01d3-732b-44dd-8764-0a0ae04d483a
+ version: -1
+ name: Verify Context Error - User Account Control Fields - Partial Secrets Account
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'ActiveDirectory.Users.userAccountControlFields.PARTIAL_SECRETS_ACCOUNT' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' script failed to execute.
+ 2- 'ad-get-user' script did not return any results.
+ 3- The 'ad-get-user' automation outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PARTIAL_SECRETS_ACCOUNT' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'ActiveDirectory.Users.userAccountControlFields.PARTIAL_SECRETS_ACCOUNT' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 11940,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "186":
+ id: "186"
+ taskid: 1ad179f0-8728-4246-8119-e5a23611342d
+ type: title
+ task:
+ id: 1ad179f0-8728-4246-8119-e5a23611342d
+ version: -1
+ name: Account
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "187"
+ - "189"
+ - "191"
+ - "195"
+ - "197"
+ - "213"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2710
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "187":
+ id: "187"
+ taskid: f96a4cc8-e6e5-4aa7-8219-4b9a25fe792b
+ type: condition
+ task:
+ id: f96a4cc8-e6e5-4aa7-8219-4b9a25fe792b
+ version: -1
+ name: Verify Display Name
+ description: Verify that the ‘Account.DisplayName’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "188"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.DisplayName
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "188":
+ id: "188"
+ taskid: 29aed759-be10-45ac-8df6-8e4bd9713c98
+ type: regular
+ task:
+ id: 29aed759-be10-45ac-8df6-8e4bd9713c98
+ version: -1
+ name: Verify Context Error - Display Name
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.DisplayName' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.DisplayName' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "189":
+ id: "189"
+ taskid: d37ea21b-53c6-4243-8193-6d245c34eecf
+ type: condition
+ task:
+ id: d37ea21b-53c6-4243-8193-6d245c34eecf
+ version: -1
+ name: Verify Email
+ description: Verify that the ‘Account.Email’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "190"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.Email
+ iscontext: true
+ ignorecase: true
+ - - operator: isExists
+ left:
+ value:
+ simple: Account.Email.Address
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "190":
+ id: "190"
+ taskid: b40d3d28-6765-410f-86bc-b9bb0f983bc5
+ type: regular
+ task:
+ id: b40d3d28-6765-410f-86bc-b9bb0f983bc5
+ version: -1
+ name: Verify Context Error - Email
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.Email' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.Email' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1080,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "191":
+ id: "191"
+ taskid: 178333b6-7cc8-4587-8a8b-46835b9cce86
+ type: condition
+ task:
+ id: 178333b6-7cc8-4587-8a8b-46835b9cce86
+ version: -1
+ name: Verify Username
+ description: Verify that the ‘Account.Username’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "192"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.Username
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "192":
+ id: "192"
+ taskid: c1b5824f-cff7-472e-8562-88b96700c14c
+ type: regular
+ task:
+ id: c1b5824f-cff7-472e-8562-88b96700c14c
+ version: -1
+ name: Verify Context Error - Username
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.Username' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.Username' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "195":
+ id: "195"
+ taskid: 56347122-637b-4249-8368-1d9b8657eff1
+ type: condition
+ task:
+ id: 56347122-637b-4249-8368-1d9b8657eff1
+ version: -1
+ name: Verify Groups
+ description: Verify that the ‘Account.Groups’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "196"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.Groups
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "196":
+ id: "196"
+ taskid: cddc21f2-b878-4527-8cd5-261ec2b7b62e
+ type: regular
+ task:
+ id: cddc21f2-b878-4527-8cd5-261ec2b7b62e
+ version: -1
+ name: Verify Context Error - Groups
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.Groups' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.Groups' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "197":
+ id: "197"
+ taskid: 75c5987a-8b7c-4312-825f-e3605e520463
+ type: condition
+ task:
+ id: 75c5987a-8b7c-4312-825f-e3605e520463
+ version: -1
+ name: Verify Account ID
+ description: Verify that the ‘Account.ID’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "198"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.ID
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2260,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "198":
+ id: "198"
+ taskid: 29d74a63-cff0-40c6-848f-37a4a9c3c914
+ type: regular
+ task:
+ id: 29d74a63-cff0-40c6-848f-37a4a9c3c914
+ version: -1
+ name: Verify Context Error - Account ID
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.ID' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.ID' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2260,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "213":
+ id: "213"
+ taskid: f2a222d8-fc42-4a05-8085-dca5deb35bd9
+ type: condition
+ task:
+ id: f2a222d8-fc42-4a05-8085-dca5deb35bd9
+ version: -1
+ name: Verify Type
+ description: Verify that the ‘Account.Type’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "214"
+ "yes":
+ - "259"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Account.Type
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2660,
+ "y": 2850
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "214":
+ id: "214"
+ taskid: 8e5e36dd-6874-402c-89cc-33370ee283c6
+ type: regular
+ task:
+ id: 8e5e36dd-6874-402c-89cc-33370ee283c6
+ version: -1
+ name: Verify Context Error - Type
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'Account.Type' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ad-get-user' and the 'msgraph-user-get' scripts failed to execute.
+ 2- The 'ad-get-user' and the 'msgraph-user-get' scripts did not return any results.
+ 3- The playbook outputs have been modified and no longer contain the 'Account.Type' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2660,
+ "y": 3035
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "215":
+ id: "215"
+ taskid: 1d9f515f-3105-4c0b-8bbc-34f161b07d87
+ type: title
+ task:
+ id: 1d9f515f-3105-4c0b-8bbc-34f161b07d87
+ version: -1
+ name: AbuseIPDB
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "216"
+ - "218"
+ - "220"
+ - "224"
+ - "226"
+ - "228"
+ - "230"
+ - "232"
+ - "234"
+ - "236"
+ - "238"
+ - "240"
+ - "242"
+ - "244"
+ - "246"
+ - "248"
+ - "250"
+ - "252"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4355
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "216":
+ id: "216"
+ taskid: 13e8b46b-d313-417b-8527-4b3bb2d0f8e4
+ type: condition
+ task:
+ id: 13e8b46b-d313-417b-8527-4b3bb2d0f8e4
+ version: -1
+ name: Verify IP Address
+ description: Verify that the ‘AbuseIPDB.IP.Address’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "217"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Address
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "217":
+ id: "217"
+ taskid: 1b1e1e4e-30fb-4ff1-80b5-39dc79407791
+ type: regular
+ task:
+ id: 1b1e1e4e-30fb-4ff1-80b5-39dc79407791
+ version: -1
+ name: Verify Context Error - IP Address
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Address' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Address' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Address' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "218":
+ id: "218"
+ taskid: 8c614108-acf4-4a1f-8a16-62d30ff9f77c
+ type: condition
+ task:
+ id: 8c614108-acf4-4a1f-8a16-62d30ff9f77c
+ version: -1
+ name: Verify Abuse Confidence Score
+ description: Verify that the ‘AbuseIPDB.IP.AbuseConfidenceScore’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "219"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.AbuseConfidenceScore
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "219":
+ id: "219"
+ taskid: 82609079-1def-432e-8212-a55b9234655e
+ type: regular
+ task:
+ id: 82609079-1def-432e-8212-a55b9234655e
+ version: -1
+ name: Verify Context Error - Abuse Confidence Score
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.AbuseConfidenceScore' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.AbuseConfidenceScore' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.AbuseConfidenceScore' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "220":
+ id: "220"
+ taskid: e62157da-c716-4fde-87e4-623b993a8bdc
+ type: condition
+ task:
+ id: e62157da-c716-4fde-87e4-623b993a8bdc
+ version: -1
+ name: Verify Total Reports
+ description: Verify that the ‘AbuseIPDB.IP.TotalReports’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "221"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.TotalReports
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "221":
+ id: "221"
+ taskid: fff29355-7afc-44da-8389-218537154b46
+ type: regular
+ task:
+ id: fff29355-7afc-44da-8389-218537154b46
+ version: -1
+ name: Verify Context Error - Total Reports
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.TotalReports' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.TotalReports' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.TotalReports' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1480,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "224":
+ id: "224"
+ taskid: 0e862c40-a842-419e-8fe9-fbfe85471771
+ type: condition
+ task:
+ id: 0e862c40-a842-419e-8fe9-fbfe85471771
+ version: -1
+ name: Verify IP Version
+ description: Verify that the ‘AbuseIPDB.IP.IpVersion’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "225"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.IpVersion
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "225":
+ id: "225"
+ taskid: cd633ab1-9db8-46ca-83d7-a90e01addc0f
+ type: regular
+ task:
+ id: cd633ab1-9db8-46ca-83d7-a90e01addc0f
+ version: -1
+ name: Verify Context Error - IP Version
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.IpVersion' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.IpVersion' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.IpVersion' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 1870,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "226":
+ id: "226"
+ taskid: e7fcd7cb-af65-4ef9-81f6-f090e1789247
+ type: condition
+ task:
+ id: e7fcd7cb-af65-4ef9-81f6-f090e1789247
+ version: -1
+ name: Verify Is Public
+ description: Verify that the ‘AbuseIPDB.IP.IsPublic’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "227"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.IsPublic
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2270,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "227":
+ id: "227"
+ taskid: bdf32275-4e7e-4fc9-8ffd-5ca0cc9abb68
+ type: regular
+ task:
+ id: bdf32275-4e7e-4fc9-8ffd-5ca0cc9abb68
+ version: -1
+ name: Verify Context Error - Is Public
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.IsPublic' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsPublic' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsPublic' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2270,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "228":
+ id: "228"
+ taskid: 583311d4-557c-44a9-8cfd-afd67fdcd3c8
+ type: condition
+ task:
+ id: 583311d4-557c-44a9-8cfd-afd67fdcd3c8
+ version: -1
+ name: Verify Is Tor
+ description: Verify that the ‘AbuseIPDB.IP.IsTor’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "229"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: AbuseIPDB.IP.IsTor
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2660,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "229":
+ id: "229"
+ taskid: d4eaf6a6-f66d-4407-8b76-81b975d6f46e
+ type: regular
+ task:
+ id: d4eaf6a6-f66d-4407-8b76-81b975d6f46e
+ version: -1
+ name: Verify Context Error - Is Tor
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.IsTor' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsTor' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsTor' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2660,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "230":
+ id: "230"
+ taskid: 76d26e5c-1bad-4ff3-8f4b-ddd2ec7ae615
+ type: condition
+ task:
+ id: 76d26e5c-1bad-4ff3-8f4b-ddd2ec7ae615
+ version: -1
+ name: Verify Is Whitelisted
+ description: Verify that the ‘AbuseIPDB.IP.IsWhitelisted’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "231"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: AbuseIPDB.IP.IsWhitelisted
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3060,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "231":
+ id: "231"
+ taskid: 37b90df7-f915-4d53-885d-7885fd075f7f
+ type: regular
+ task:
+ id: 37b90df7-f915-4d53-885d-7885fd075f7f
+ version: -1
+ name: Verify Context Error - Is Whitelisted
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.IsWhitelisted' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsWhitelisted' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.IsWhitelisted' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3060,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "232":
+ id: "232"
+ taskid: bd991560-9533-4e5d-887d-85e4aa0bec43
+ type: condition
+ task:
+ id: bd991560-9533-4e5d-887d-85e4aa0bec43
+ version: -1
+ name: Verify Last Reported At
+ description: Verify that the ‘AbuseIPDB.IP.LastReportedAt’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "233"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.LastReportedAt
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3450,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "233":
+ id: "233"
+ taskid: d2dde1c6-f9a3-4100-8440-7c036d01d832
+ type: regular
+ task:
+ id: d2dde1c6-f9a3-4100-8440-7c036d01d832
+ version: -1
+ name: Verify Context Error - Last Reported At
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.LastReportedAt' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.LastReportedAt' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.LastReportedAt' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3450,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "234":
+ id: "234"
+ taskid: 1cd578e1-ab08-4390-824d-cb24e3d80747
+ type: condition
+ task:
+ id: 1cd578e1-ab08-4390-824d-cb24e3d80747
+ version: -1
+ name: Verify Number Distinct Users
+ description: Verify that the ‘AbuseIPDB.IP.NumDistinctUsers’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "235"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.NumDistinctUsers
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3840,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "235":
+ id: "235"
+ taskid: 31d04a2b-2d41-49d1-8891-a40d01402da7
+ type: regular
+ task:
+ id: 31d04a2b-2d41-49d1-8891-a40d01402da7
+ version: -1
+ name: Verify Context Error - Number Distinct Users
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.NumDistinctUsers' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.NumDistinctUsers' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.NumDistinctUsers' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 3840,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "236":
+ id: "236"
+ taskid: 9118e6be-5064-4e72-828d-3cf008c82202
+ type: condition
+ task:
+ id: 9118e6be-5064-4e72-828d-3cf008c82202
+ version: -1
+ name: Verify Reports
+ description: Verify that the ‘AbuseIPDB.IP.Reports’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "237"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Reports
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4230,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "237":
+ id: "237"
+ taskid: a1c4df6f-8c28-4968-8d55-661fc5621af5
+ type: regular
+ task:
+ id: a1c4df6f-8c28-4968-8d55-661fc5621af5
+ version: -1
+ name: Verify Context Error - Reports
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Reports' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Reports' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Reports' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4230,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "238":
+ id: "238"
+ taskid: 41a1a628-934b-4eb9-8837-759ea11cf28b
+ type: condition
+ task:
+ id: 41a1a628-934b-4eb9-8837-759ea11cf28b
+ version: -1
+ name: Verify ISP
+ description: Verify that the ‘AbuseIPDB.IP.ISP’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "239"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.ISP
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4620,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "239":
+ id: "239"
+ taskid: c1a7316b-7862-48d5-893a-b3afa1765951
+ type: regular
+ task:
+ id: c1a7316b-7862-48d5-893a-b3afa1765951
+ version: -1
+ name: Verify Context Error - ISP
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.ISP' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.ISP' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.ISP' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 4620,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "240":
+ id: "240"
+ taskid: 19aeb501-3e6c-4960-8ffc-6046ca939960
+ type: condition
+ task:
+ id: 19aeb501-3e6c-4960-8ffc-6046ca939960
+ version: -1
+ name: Verify Usage Type
+ description: Verify that the ‘AbuseIPDB.IP.UsageType’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "241"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.UsageType
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5010,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "241":
+ id: "241"
+ taskid: 476a6ea9-362d-44cb-8838-cbddc7134f83
+ type: regular
+ task:
+ id: 476a6ea9-362d-44cb-8838-cbddc7134f83
+ version: -1
+ name: Verify Context Error - Usage Type
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.UsageType' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.UsageType' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.UsageType' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5010,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "242":
+ id: "242"
+ taskid: 514f5dd9-386b-4da5-8c9f-d7eb9b399175
+ type: condition
+ task:
+ id: 514f5dd9-386b-4da5-8c9f-d7eb9b399175
+ version: -1
+ name: Verify Domain
+ description: Verify that the ‘AbuseIPDB.IP.Domain’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "243"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Domain
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5410,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "243":
+ id: "243"
+ taskid: db1c6316-ab8f-4a5d-8412-7d79445f592f
+ type: regular
+ task:
+ id: db1c6316-ab8f-4a5d-8412-7d79445f592f
+ version: -1
+ name: Verify Context Error - Domain
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Domain' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Domain' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Domain' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5410,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "244":
+ id: "244"
+ taskid: 2166ca39-8be7-451d-806f-f66c6dbfa9e3
+ type: condition
+ task:
+ id: 2166ca39-8be7-451d-806f-f66c6dbfa9e3
+ version: -1
+ name: Verify Geolocation Country
+ description: Verify that the ‘AbuseIPDB.IP.Geo.Country’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "245"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Geo.Country
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5810,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "245":
+ id: "245"
+ taskid: 7fd70ad6-a514-4fd0-83e3-d32da3c3ec86
+ type: regular
+ task:
+ id: 7fd70ad6-a514-4fd0-83e3-d32da3c3ec86
+ version: -1
+ name: Verify Context Error - Geolocation Country
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Geo.Country' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Geo.Country' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Geo.Country' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 5810,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "246":
+ id: "246"
+ taskid: 56609324-8054-48c0-82b3-debde0a08671
+ type: condition
+ task:
+ id: 56609324-8054-48c0-82b3-debde0a08671
+ version: -1
+ name: Verify Country Code
+ description: Verify that the ‘AbuseIPDB.IP.Geo.CountryCode’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "247"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Geo.CountryCode
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6210,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "247":
+ id: "247"
+ taskid: f1d2602d-2b8e-4e1e-8c7e-0192e6288d5d
+ type: regular
+ task:
+ id: f1d2602d-2b8e-4e1e-8c7e-0192e6288d5d
+ version: -1
+ name: Verify Context Error - Country Code
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Geo.CountryCode' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Geo.CountryCode' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Geo.CountryCode' context key.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6210,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "248":
+ id: "248"
+ taskid: 79cfde99-e839-4d82-8792-973d594f8c64
+ type: condition
+ task:
+ id: 79cfde99-e839-4d82-8792-973d594f8c64
+ version: -1
+ name: Verify Malicious IP - Vendor
+ description: Verify that the ‘AbuseIPDB.IP.Malicious.Vendor’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "249"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Malicious.Vendor
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6600,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "249":
+ id: "249"
+ taskid: 86e0af56-c9bf-4630-885f-159801d99e01
+ type: regular
+ task:
+ id: 86e0af56-c9bf-4630-885f-159801d99e01
+ version: -1
+ name: Verify Context Error - Malicious IP - Vendor
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Malicious.Vendor' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Vendor' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Vendor' context key.
+ 5- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 6600,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "250":
+ id: "250"
+ taskid: 05601bd7-465c-4ad7-8046-15a01c178d93
+ type: condition
+ task:
+ id: 05601bd7-465c-4ad7-8046-15a01c178d93
+ version: -1
+ name: Verify Malicious IP - Detections
+ description: Verify that the ‘AbuseIPDB.IP.Malicious.Detections’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "251"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Malicious.Detections
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7000,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "251":
+ id: "251"
+ taskid: 3feba5c0-0453-4e02-8706-fb5b347366ac
+ type: regular
+ task:
+ id: 3feba5c0-0453-4e02-8706-fb5b347366ac
+ version: -1
+ name: Verify Context Error - Malicious IP - Detections
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Malicious.Detections' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Detections' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Detections' context key.
+ 5- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7000,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "252":
+ id: "252"
+ taskid: 23e99e2f-60b6-427a-82c6-df6f403e316d
+ type: condition
+ task:
+ id: 23e99e2f-60b6-427a-82c6-df6f403e316d
+ version: -1
+ name: Verify Malicious IP - Description
+ description: Verify that the ‘AbuseIPDB.IP.Malicious.Description’ playbook output was extracted correctly.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "253"
+ "yes":
+ - "262"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AbuseIPDB.IP.Malicious.Description
+ iscontext: true
+ right:
+ value: {}
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7400,
+ "y": 4510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "253":
+ id: "253"
+ taskid: 844dfd58-64a1-412e-8346-8a9e83fdd08f
+ type: regular
+ task:
+ id: 844dfd58-64a1-412e-8346-8a9e83fdd08f
+ version: -1
+ name: Verify Context Error - Malicious IP - Description
+ description: Prints an error entry with a given message
+ scriptName: PrintErrorEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: |-
+ The 'AbuseIPDB.IP.Malicious.Description' playbook output was not extracted properly. One of the following may be the cause:
+ 1- The 'ip' script failed to execute.
+ 2- 'ip' script did not return any results.
+ 3- The 'ip' automation outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Description' context key.
+ 4- The playbook outputs have been modified and no longer contain the 'AbuseIPDB.IP.Malicious.Description' context key.
+ 5- The IP address configured within the 'sourceip' playbook input is no longer malicious.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 7400,
+ "y": 4695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "254":
+ id: "254"
+ taskid: f5944b39-3ac7-447b-8b29-b5d5e7fb35e2
+ type: title
+ task:
+ id: f5944b39-3ac7-447b-8b29-b5d5e7fb35e2
+ version: -1
+ name: DBotScore
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "62"
+ - "64"
+ - "66"
+ - "68"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 1670
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "255":
+ id: "255"
+ taskid: 0afe5d86-ff31-499e-8263-771f5ddd4750
+ type: title
+ task:
+ id: 0afe5d86-ff31-499e-8263-771f5ddd4750
+ version: -1
+ name: IP
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "18"
+ - "52"
+ - "54"
+ - "56"
+ - "58"
+ - "60"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 2200
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "256":
+ id: "256"
+ taskid: a9a1c41e-894a-466e-8e4a-41f22b84dde4
+ type: title
+ task:
+ id: a9a1c41e-894a-466e-8e4a-41f22b84dde4
+ version: -1
+ name: Done Verifying Context Data
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 1480
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "257":
+ id: "257"
+ taskid: b534bd63-550d-450f-8ca4-536416cfca0e
+ type: title
+ task:
+ id: b534bd63-550d-450f-8ca4-536416cfca0e
+ version: -1
+ name: Done Verifying 'DBotScore' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 2055
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "258":
+ id: "258"
+ taskid: d92f2c48-4ec6-475e-8e0b-9df61219b633
+ type: title
+ task:
+ id: d92f2c48-4ec6-475e-8e0b-9df61219b633
+ version: -1
+ name: Done Verifying 'IP' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 2540
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "259":
+ id: "259"
+ taskid: f24f2129-1257-47e2-83e4-1b6fdaa05aee
+ type: title
+ task:
+ id: f24f2129-1257-47e2-83e4-1b6fdaa05aee
+ version: -1
+ name: Done Verifying 'Account' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 3080
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "260":
+ id: "260"
+ taskid: a9e578e7-dff4-4d4a-8deb-59cae78d6007
+ type: title
+ task:
+ id: a9e578e7-dff4-4d4a-8deb-59cae78d6007
+ version: -1
+ name: Done Verifying 'MS Graph User Manager' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 4270
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "261":
+ id: "261"
+ taskid: 723ca966-bc65-4ee3-8d26-a1e2bafde8be
+ type: title
+ task:
+ id: 723ca966-bc65-4ee3-8d26-a1e2bafde8be
+ version: -1
+ name: Done Verifying 'MS Graph User' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 3605
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "262":
+ id: "262"
+ taskid: cad480af-13e6-400e-8421-685487c40da7
+ type: title
+ task:
+ id: cad480af-13e6-400e-8421-685487c40da7
+ version: -1
+ name: Done Verifying 'AbuseIPDB' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 4710
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "263":
+ id: "263"
+ taskid: 60827778-1d58-4b1a-8337-69d613a6eec7
+ type: title
+ task:
+ id: 60827778-1d58-4b1a-8337-69d613a6eec7
+ version: -1
+ name: Done Verifying 'Active Directory Users' Playbook Output
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "264"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12340,
+ "y": 5205
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "264":
+ id: "264"
+ taskid: 53d1fdf9-1b26-4385-86fb-16d104ae0ac3
+ type: title
+ task:
+ id: 53d1fdf9-1b26-4385-86fb-16d104ae0ac3
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 12580,
+ "y": 5350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "265":
+ id: "265"
+ taskid: 8fd0087e-1bab-4973-89c6-9784dcbd4e38
+ type: regular
+ task:
+ id: 8fd0087e-1bab-4973-89c6-9784dcbd4e38
+ version: -1
+ name: Set Username To Alert Field
+ description: commands.local.cmd.set.incident
+ script: Builtin|||setAlert
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "51"
+ scriptarguments:
+ username:
+ simple: ${AvailableUsers}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 605
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "266":
+ id: "266"
+ taskid: c75cd852-ebe7-4828-8cc4-838a1db14b2d
+ type: regular
+ task:
+ id: c75cd852-ebe7-4828-8cc4-838a1db14b2d
+ version: -1
+ name: Set AD Available User
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "265"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: AvailableUsers
+ stringify:
+ simple: "true"
+ value:
+ complex:
+ root: AD.AvailableUsers.attributes
+ filters:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AD.AvailableUsers.attributes.name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AD.AvailableUsers.attributes.displayName
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AD.AvailableUsers.attributes.mail
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: AD.AvailableUsers.attributes.sAMAccountName
+ iscontext: true
+ accessor: sAMAccountName
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 250,
+ "y": 430
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "267":
+ id: "267"
+ taskid: 417eac52-e6a7-4a0c-8443-674ce61eb83c
+ type: regular
+ task:
+ id: 417eac52-e6a7-4a0c-8443-674ce61eb83c
+ version: -1
+ name: Set MSGraphUser Available User
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "265"
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: AvailableUsers
+ stringify:
+ simple: "true"
+ value:
+ complex:
+ root: MSGraph.AvailableUsers.id
+ filters:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: MSGraph.AvailableUsers.id
+ iscontext: true
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 650,
+ "y": 430
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "268":
+ id: "268"
+ taskid: 91349414-9d56-4c91-8905-dc4251b77eb8
+ type: condition
+ task:
+ id: 91349414-9d56-4c91-8905-dc4251b77eb8
+ version: -1
+ name: Is there a manager?
+ description: Checks whether the 'MSGraphUserManager' context key exists (i.e., a manager was returned) before its outputs are verified.
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "260"
+ "yes":
+ - "81"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ simple: MSGraphUserManager
+ iscontext: true
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 690,
+ "y": 3710
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {
+ "187_259_yes": 0.11,
+ "189_259_yes": 0.1,
+ "18_258_yes": 0.1,
+ "191_259_yes": 0.1,
+ "195_259_yes": 0.1,
+ "197_259_yes": 0.1,
+ "213_259_yes": 0.16,
+ "52_258_yes": 0.1,
+ "54_258_yes": 0.11,
+ "56_258_yes": 0.16,
+ "58_258_yes": 0.23,
+ "60_258_yes": 0.34,
+ "62_257_yes": 0.12,
+ "64_257_yes": 0.11,
+ "66_257_yes": 0.1,
+ "68_257_yes": 0.25,
+ "70_256_yes": 0.11,
+ "72_256_yes": 0.1,
+ "74_256_yes": 0.1,
+ "76_256_yes": 0.27
+ },
+ "paper": {
+ "dimensions": {
+ "height": 5625,
+ "width": 12720,
+ "x": 240,
+ "y": -210
+ }
+ }
+ }
+inputs: []
+outputs: []
+tests:
+- No tests (auto formatted)
+fromversion: 6.6.0
+marketplaces:
+- marketplacev2
\ No newline at end of file
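The 'Verify <key>' / 'Verify Context Error - <key>' pairs added above all instantiate the same two-task pattern. A minimal sketch of one pair follows; the task numbers "2NN"/"2MM" and the <Field> placeholder are illustrative only (taskid, view, and other boilerplate fields omitted), and only the context key, the error message, and the two nexttasks targets vary between instances:

  "2NN":
    id: "2NN"
    type: condition
    task:
      name: Verify <Field>
      description: Verify that the 'AbuseIPDB.IP.<Field>' playbook output was extracted correctly.
      type: condition
      iscommand: false
      brand: ""
    nexttasks:
      '#default#':
      - "2MM"   # routes to the matching error task below
      "yes":
      - "262"   # Done Verifying 'AbuseIPDB' Playbook Output
    separatecontext: false
    conditions:
    - label: "yes"
      condition:
      - - operator: isNotEmpty
          left:
            value:
              simple: AbuseIPDB.IP.<Field>
            iscontext: true
  "2MM":
    id: "2MM"
    type: regular
    task:
      name: Verify Context Error - <Field>
      description: Prints an error entry with a given message
      scriptName: PrintErrorEntry
      type: regular
      iscommand: false
      brand: ""
    scriptarguments:
      message:
        simple: The 'AbuseIPDB.IP.<Field>' playbook output was not extracted properly.
    separatecontext: false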
diff --git a/Packs/Core/doc_files/IOC_Alert.png b/Packs/Core/doc_files/IOC_Alert.png
index 87cf7dbe8c14..4d92033b0679 100644
Binary files a/Packs/Core/doc_files/IOC_Alert.png and b/Packs/Core/doc_files/IOC_Alert.png differ
diff --git a/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png b/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png
index f7a6fdda057c..fa0972c8a88d 100644
Binary files a/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png and b/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png differ
diff --git a/Packs/Core/doc_files/NGFW_Internal_Scan.png b/Packs/Core/doc_files/NGFW_Internal_Scan.png
index 4571ee9c39cb..9a611508a126 100644
Binary files a/Packs/Core/doc_files/NGFW_Internal_Scan.png and b/Packs/Core/doc_files/NGFW_Internal_Scan.png differ
diff --git a/Packs/Core/doc_files/T1036_-_Masquerading.png b/Packs/Core/doc_files/T1036_-_Masquerading.png
index 93086a2622ea..2ea37193a603 100644
Binary files a/Packs/Core/doc_files/T1036_-_Masquerading.png and b/Packs/Core/doc_files/T1036_-_Masquerading.png differ
diff --git a/Packs/Core/pack_metadata.json b/Packs/Core/pack_metadata.json
index 6aeb0b160a0b..938281cded61 100644
--- a/Packs/Core/pack_metadata.json
+++ b/Packs/Core/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core - Investigation and Response",
"description": "Automates incident response",
"support": "xsoar",
- "currentVersion": "3.0.33",
+ "currentVersion": "3.0.50",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md b/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md
new file mode 100644
index 000000000000..650557e14e61
--- /dev/null
+++ b/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md
@@ -0,0 +1,3 @@
+## Core Alert Fields
+
+- Locked the pack's dependencies to ensure stability for versioned core packs. No functional changes in this release.
\ No newline at end of file
diff --git a/Packs/CoreAlertFields/pack_metadata.json b/Packs/CoreAlertFields/pack_metadata.json
index 299acde06b75..79d12adf4799 100644
--- a/Packs/CoreAlertFields/pack_metadata.json
+++ b/Packs/CoreAlertFields/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core Alert Fields",
"description": "This Content Pack will provide you with the core alert fields.",
"support": "xsoar",
- "currentVersion": "1.0.34",
+ "currentVersion": "1.0.35",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexAttackSurfaceManagement/Author_image.png b/Packs/CortexAttackSurfaceManagement/Author_image.png
deleted file mode 100644
index 249fc6f403d6..000000000000
Binary files a/Packs/CortexAttackSurfaceManagement/Author_image.png and /dev/null differ
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert.yml
index 02614240b777..05889ab17039 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 79a7c8e5-9a6b-426f-8ed3-21ff1fe9124e
+ taskid: b7b07d0b-fee5-4b51-8e6b-928088c82c42
type: start
task:
- id: 79a7c8e5-9a6b-426f-8ed3-21ff1fe9124e
+ id: b7b07d0b-fee5-4b51-8e6b-928088c82c42
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
continueonerrortype: ""
"4":
id: "4"
- taskid: 548459c6-a71d-49ac-8910-fa07334e477c
+ taskid: c300597e-a947-4a0e-8551-7e862c5caa53
type: title
task:
- id: 548459c6-a71d-49ac-8910-fa07334e477c
+ id: c300597e-a947-4a0e-8551-7e862c5caa53
version: -1
name: Complete
type: title
@@ -64,10 +64,10 @@ tasks:
continueonerrortype: ""
"5":
id: "5"
- taskid: 205caac4-7e93-4b53-8028-4798bd309ad4
+ taskid: c77b9c64-6bba-4ba5-85b5-726d41384dab
type: title
task:
- id: 205caac4-7e93-4b53-8028-4798bd309ad4
+ id: c77b9c64-6bba-4ba5-85b5-726d41384dab
version: -1
name: Triage and Qualifier Stage
type: title
@@ -95,10 +95,10 @@ tasks:
continueonerrortype: ""
"6":
id: "6"
- taskid: 8cdc4fa7-9865-4a60-8254-a3fe2c1face9
+ taskid: 6e545db5-41cc-4414-8755-1094a1c03fe8
type: title
task:
- id: 8cdc4fa7-9865-4a60-8254-a3fe2c1face9
+ id: 6e545db5-41cc-4414-8755-1094a1c03fe8
version: -1
name: Enrichment Stage
type: title
@@ -107,7 +107,7 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "97"
+ - "185"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -126,10 +126,10 @@ tasks:
isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: 952a8f66-7583-43da-8753-657b3f1425e2
+ taskid: 1b80b617-5189-4039-8315-7b11be105d7d
type: title
task:
- id: 952a8f66-7583-43da-8753-657b3f1425e2
+ id: 1b80b617-5189-4039-8315-7b11be105d7d
version: -1
name: Decision Stage
type: title
@@ -138,7 +138,7 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "120"
+ - "186"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -157,10 +157,10 @@ tasks:
isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: b78597f7-5761-4cd4-8a0b-f648f78192a7
+ taskid: 4a19c6fd-f41a-40d5-87b9-811c1235dc71
type: title
task:
- id: b78597f7-5761-4cd4-8a0b-f648f78192a7
+ id: 4a19c6fd-f41a-40d5-87b9-811c1235dc71
version: -1
name: Notification Stage
type: title
@@ -175,8 +175,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 1570
+ "x": 690,
+ "y": 1610
}
}
note: false
@@ -188,10 +188,10 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: 1c823068-38b4-47b9-8aef-f70adbaf768d
+ taskid: b149b6cb-541e-45b8-8b18-27fecce33932
type: title
task:
- id: 1c823068-38b4-47b9-8aef-f70adbaf768d
+ id: b149b6cb-541e-45b8-8b18-27fecce33932
version: -1
name: Validation Stage
type: title
@@ -219,10 +219,10 @@ tasks:
isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: a1e55a91-099f-4100-829c-5d1caea841b3
+ taskid: e70c7cd7-c4d1-4f49-8ca1-8d1522da9d6a
type: title
task:
- id: a1e55a91-099f-4100-829c-5d1caea841b3
+ id: e70c7cd7-c4d1-4f49-8ca1-8d1522da9d6a
version: -1
name: Remediation Stage
type: title
@@ -231,7 +231,7 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "167"
+ - "187"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -250,10 +250,10 @@ tasks:
isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: 3d818954-51ff-4ae6-87bb-1951dd4c56c6
+ taskid: e460ad6f-77f0-4911-887d-9954b478ff75
type: title
task:
- id: 3d818954-51ff-4ae6-87bb-1951dd4c56c6
+ id: e460ad6f-77f0-4911-887d-9954b478ff75
version: -1
name: Summarization Stage
type: title
@@ -281,10 +281,10 @@ tasks:
isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 078c10a4-ae07-4e7e-8aeb-d593bef0a521
+ taskid: c428c373-5c79-44cb-80e7-de555c354c34
type: condition
task:
- id: 078c10a4-ae07-4e7e-8aeb-d593bef0a521
+ id: c428c373-5c79-44cb-80e7-de555c354c34
version: -1
name: Service exposure still observable?
description: Determines if service was still observable and also if automated remediation was done.
@@ -339,10 +339,10 @@ tasks:
isautoswitchedtoquietmode: false
"21":
id: "21"
- taskid: 0c54afdb-6281-4862-8f93-2c9108347625
+ taskid: 2076296d-2bfe-4b4c-8cdf-7bb538290e97
type: collection
task:
- id: 0c54afdb-6281-4862-8f93-2c9108347625
+ id: 2076296d-2bfe-4b4c-8cdf-7bb538290e97
version: -1
name: What to do if Remediation Confirmation Scan is inconclusive?
description: Determines if alert should be closed or kept open for auto-closure based on analyst input.
@@ -408,10 +408,10 @@ tasks:
isautoswitchedtoquietmode: false
"48":
id: "48"
- taskid: 1c7b6823-28e7-4618-8ab0-5ef8da8b63fc
+ taskid: 21578536-97b4-4a27-842c-4b119942084d
type: condition
task:
- id: 1c7b6823-28e7-4618-8ab0-5ef8da8b63fc
+ id: 21578536-97b4-4a27-842c-4b119942084d
version: -1
name: What was data collection task response?
description: Determines the next action (remediation or ticket) based on the previous data collection task.
@@ -431,6 +431,8 @@ tasks:
- "10"
Jira ticket:
- "156"
+ Slack message:
+ - "183"
separatecontext: false
conditions:
- label: Automated remediation
@@ -465,6 +467,14 @@ tasks:
right:
value:
simple: Automated remediation by creating empty S3 bucket
+ - left:
+ iscontext: true
+ value:
+ simple: alert.asmdatacollection.selected
+ operator: isEqualString
+ right:
+ value:
+ simple: Automated remediation by patching vulnerable software
- label: Manual remediation
condition:
- - operator: isEqualString
@@ -513,6 +523,16 @@ tasks:
right:
value:
simple: File a Jira Ticket
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmdatacollection.selected
+ operator: isEqualString
+ right:
+ value:
+ simple: Send a Slack message
+ label: Slack message
continueonerrortype: ""
view: |-
{
@@ -530,10 +550,10 @@ tasks:
isautoswitchedtoquietmode: false
"50":
id: "50"
- taskid: 363d6454-984f-4e75-8e39-fce60e8a7aa5
+ taskid: 4d07e99c-e580-4bdf-8883-f051bd03c896
type: collection
task:
- id: 363d6454-984f-4e75-8e39-fce60e8a7aa5
+ id: 4d07e99c-e580-4bdf-8883-f051bd03c896
version: -1
name: Verify service exposure resolved by service owner
description: Determines if manual remediation (by service owner or otherwise) was completed via analyst input.
@@ -548,8 +568,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 2380
+ "x": 690,
+ "y": 2400
}
}
note: false
@@ -597,10 +617,10 @@ tasks:
isautoswitchedtoquietmode: false
"60":
id: "60"
- taskid: d4b249ba-f931-4211-86ec-fed38bd28f2c
+ taskid: e04643ac-8595-484f-82b5-e5afcd13544f
type: title
task:
- id: d4b249ba-f931-4211-86ec-fed38bd28f2c
+ id: e04643ac-8595-484f-82b5-e5afcd13544f
version: -1
name: Notification Stage
type: title
@@ -615,8 +635,8 @@ tasks:
view: |-
{
"position": {
- "x": 1220,
- "y": 1500
+ "x": 1080,
+ "y": 1600
}
}
note: false
@@ -628,12 +648,12 @@ tasks:
isautoswitchedtoquietmode: false
"63":
id: "63"
- taskid: 2a74ea33-e35a-4e91-89ad-b4a454295dfc
+ taskid: 4e9b77ee-dc7b-47da-83a3-3e9c0e6f878f
type: title
task:
- id: 2a74ea33-e35a-4e91-89ad-b4a454295dfc
+ id: 4e9b77ee-dc7b-47da-83a3-3e9c0e6f878f
version: -1
- name: Remediation Stage
+ name: Manual Remediation Stage
type: title
iscommand: false
brand: ""
@@ -659,10 +679,10 @@ tasks:
isautoswitchedtoquietmode: false
"69":
id: "69"
- taskid: 5a25a6a7-fd0f-42ba-8aee-499d94ad8d4c
+ taskid: f2dd6603-f4fa-4499-8958-14daaf688193
type: regular
task:
- id: 5a25a6a7-fd0f-42ba-8aee-499d94ad8d4c
+ id: f2dd6603-f4fa-4499-8958-14daaf688193
version: -1
name: Set remediation grid field
description: |-
@@ -708,10 +728,10 @@ tasks:
isautoswitchedtoquietmode: false
"72":
id: "72"
- taskid: a7f0052c-3310-433c-8a1d-b68b6d53ce1c
+ taskid: 78bbbc1a-fe99-4e9a-8435-8960b0af1087
type: condition
task:
- id: a7f0052c-3310-433c-8a1d-b68b6d53ce1c
+ id: 78bbbc1a-fe99-4e9a-8435-8960b0af1087
version: -1
name: Manual remediation selected?
description: Determines if "manual remediation" was selected on main data collection task.
@@ -769,10 +789,10 @@ tasks:
isautoswitchedtoquietmode: false
"73":
id: "73"
- taskid: 400a5d2c-27fe-4cee-833a-a3d9951384f2
+ taskid: 0b8598c4-c5e0-4cce-8383-c91f58facf2d
type: regular
task:
- id: 400a5d2c-27fe-4cee-833a-a3d9951384f2
+ id: 0b8598c4-c5e0-4cce-8383-c91f58facf2d
version: -1
name: Set remediation grid field
description: |-
@@ -827,10 +847,10 @@ tasks:
isautoswitchedtoquietmode: false
"81":
id: "81"
- taskid: 3029317d-a0aa-47bd-8667-a14c4704b52d
+ taskid: b7f68d25-ba83-4275-8340-e9b0d96fd9e3
type: regular
task:
- id: 3029317d-a0aa-47bd-8667-a14c4704b52d
+ id: b7f68d25-ba83-4275-8340-e9b0d96fd9e3
version: -1
name: Set alert summary grid field
description: |-
@@ -897,10 +917,10 @@ tasks:
isautoswitchedtoquietmode: false
"86":
id: "86"
- taskid: df2185f7-5dea-4431-8a00-1dff0d6b2d4d
+ taskid: 7760cd6c-8132-4e21-8eb5-9769a3834bc9
type: playbook
task:
- id: df2185f7-5dea-4431-8a00-1dff0d6b2d4d
+ id: 7760cd6c-8132-4e21-8eb5-9769a3834bc9
version: -1
name: Cortex ASM - Detect Service
description: Playbook that looks at what ASM sub-type the alert is and directs it to different pre/post mitigation scans (such as NMAP).
@@ -949,10 +969,10 @@ tasks:
isautoswitchedtoquietmode: false
"88":
id: "88"
- taskid: f80b1685-d83c-4d3f-8219-8176c830ac92
+ taskid: f3b899dd-18a9-48f6-8503-7cf6263aca64
type: collection
task:
- id: f80b1685-d83c-4d3f-8219-8176c830ac92
+ id: f3b899dd-18a9-48f6-8503-7cf6263aca64
version: -1
name: Complete manual remediation
description: Determines if manual remediation (by service owner or otherwise) was completed via analyst input.
@@ -1014,56 +1034,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "97":
- id: "97"
- taskid: 71dcd381-9a60-4115-88fb-e1513cbb913b
- type: playbook
- task:
- id: 71dcd381-9a60-4115-88fb-e1513cbb913b
- version: -1
- name: Cortex ASM - Enrichment
- description: Used as a container folder for all enrichments of ASM alerts.
- type: playbook
- iscommand: false
- brand: ""
- playbookId: Cortex ASM - Enrichment
- nexttasks:
- '#none#':
- - "125"
- separatecontext: true
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 70,
- "y": -870
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- loop:
- exitCondition: ""
- iscommand: false
- max: 100
- wait: 1
- scriptarguments:
- AWSAssumeRoleName:
- simple: ${inputs.AWSAssumeRoleName}
- RemoteIP:
- complex:
- accessor: remoteip
- root: alert
"103":
id: "103"
- taskid: 9cd375bf-e85e-45fd-8fa4-ee8ef8bf3d10
+ taskid: 4ac1a981-fe3b-4cad-807b-3b40afbba041
type: regular
task:
- id: 9cd375bf-e85e-45fd-8fa4-ee8ef8bf3d10
+ id: 4ac1a981-fe3b-4cad-807b-3b40afbba041
version: -1
name: Set remediation grid field
description: |-
@@ -1108,10 +1084,10 @@ tasks:
isautoswitchedtoquietmode: false
"110":
id: "110"
- taskid: 75485187-2003-44f4-8476-54d27fac1cf7
+ taskid: 9384d7c9-9814-45cb-863d-6cecba635833
type: condition
task:
- id: 75485187-2003-44f4-8476-54d27fac1cf7
+ id: 9384d7c9-9814-45cb-863d-6cecba635833
version: -1
name: Close or keep open?
description: Determines if the alert will be closed or kept open.
@@ -1214,10 +1190,10 @@ tasks:
isautoswitchedtoquietmode: false
"111":
id: "111"
- taskid: faa2ac25-d58c-4d2b-8ad8-8ae1e27ec1c3
+ taskid: 9243de06-b239-4956-80c8-b0c36a28ce99
type: regular
task:
- id: faa2ac25-d58c-4d2b-8ad8-8ae1e27ec1c3
+ id: 9243de06-b239-4956-80c8-b0c36a28ce99
version: -1
name: Close alert (close now)
description: Close the current alert because auto-close was selected.
@@ -1251,10 +1227,10 @@ tasks:
isautoswitchedtoquietmode: false
"112":
id: "112"
- taskid: 3484098d-2bfe-4d6c-8fc6-07c1cb7ad591
+ taskid: eac13f7d-5da0-4dfa-8b41-568ab413edec
type: regular
task:
- id: 3484098d-2bfe-4d6c-8fc6-07c1cb7ad591
+ id: eac13f7d-5da0-4dfa-8b41-568ab413edec
version: -1
name: Close alert (remediated automatically)
description: Close the current alert because Automated Remediation path was selected.
@@ -1288,10 +1264,10 @@ tasks:
isautoswitchedtoquietmode: false
"113":
id: "113"
- taskid: f9691189-aae2-4973-89b1-45d513d7b4b1
+ taskid: b94080d0-cf5d-408b-8a5f-d1a3e781f33b
type: regular
task:
- id: f9691189-aae2-4973-89b1-45d513d7b4b1
+ id: b94080d0-cf5d-408b-8a5f-d1a3e781f33b
version: -1
name: Close alert (close now) with rule match
description: Close the current alert because auto-close was selected.
@@ -1334,10 +1310,10 @@ tasks:
isautoswitchedtoquietmode: false
"114":
id: "114"
- taskid: 0d6b5b4a-6676-4565-83c3-263bbb709ccd
+ taskid: 9cce412b-6cb5-403e-8ba5-1092762c0fb6
type: regular
task:
- id: 0d6b5b4a-6676-4565-83c3-263bbb709ccd
+ id: 9cce412b-6cb5-403e-8ba5-1092762c0fb6
version: -1
name: Close alert (remediated automatically) with rule match
description: Close the current alert because Automated Remediation path was selected.
@@ -1378,57 +1354,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "120":
- id: "120"
- taskid: bbcc2ebf-4475-4f87-8692-4b6960536228
- type: playbook
- task:
- id: bbcc2ebf-4475-4f87-8692-4b6960536228
- version: -1
- name: Cortex ASM - Remediation Path Rules
- type: playbook
- iscommand: false
- brand: ""
- playbookId: Cortex ASM - Remediation Path Rules
- description: ''
- nexttasks:
- '#none#':
- - "145"
- scriptarguments:
- BypassDevCheck:
- complex:
- root: inputs.BypassDevCheck
- ExternallyDetectedProviders:
- complex:
- root: ASM.ExternalService
- accessor: externally_detected_providers
- separatecontext: true
- continueonerrortype: ""
- loop:
- iscommand: false
- exitCondition: ""
- wait: 1
- max: 100
- view: |-
- {
- "position": {
- "x": 70,
- "y": 470
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"121":
id: "121"
- taskid: abef44ae-926e-466b-8030-33c6078e12f4
+ taskid: 598c7420-4eeb-48fc-8b5f-b539d6085c05
type: title
task:
- id: abef44ae-926e-466b-8030-33c6078e12f4
+ id: 598c7420-4eeb-48fc-8b5f-b539d6085c05
version: -1
name: Analysis Stage
type: title
@@ -1456,10 +1387,10 @@ tasks:
isautoswitchedtoquietmode: false
"124":
id: "124"
- taskid: 7697188d-a525-43a6-8f5f-02d63024d1f0
+ taskid: 83cd02a3-ed01-4a17-8376-a2b8c394944c
type: regular
task:
- id: 7697188d-a525-43a6-8f5f-02d63024d1f0
+ id: 83cd02a3-ed01-4a17-8376-a2b8c394944c
version: -1
name: Set playbook stage grid field (triage)
description: |-
@@ -1499,10 +1430,10 @@ tasks:
isautoswitchedtoquietmode: false
"125":
id: "125"
- taskid: 73acc4cd-f162-4da9-8e80-bb677b24c0fa
+ taskid: 422e3206-c215-49c0-8c50-0225df53a429
type: regular
task:
- id: 73acc4cd-f162-4da9-8e80-bb677b24c0fa
+ id: 422e3206-c215-49c0-8c50-0225df53a429
version: -1
name: Set playbook stage grid field (enrichment)
description: |-
@@ -1542,10 +1473,10 @@ tasks:
isautoswitchedtoquietmode: false
"126":
id: "126"
- taskid: e00a985a-489a-449d-805d-929b07974977
+ taskid: 4d3f708f-412f-409d-815c-d8daf22f1000
type: regular
task:
- id: e00a985a-489a-449d-805d-929b07974977
+ id: 4d3f708f-412f-409d-815c-d8daf22f1000
version: -1
name: Set playbook stage grid field (analysis)
description: |-
@@ -1585,10 +1516,10 @@ tasks:
isautoswitchedtoquietmode: false
"128":
id: "128"
- taskid: daacfc48-eb95-4aa6-8b43-92ad68d6b62c
+ taskid: 028eece5-24f0-4890-89f8-3803d33144e0
type: regular
task:
- id: daacfc48-eb95-4aa6-8b43-92ad68d6b62c
+ id: 028eece5-24f0-4890-89f8-3803d33144e0
version: -1
name: Set playbook stage grid field (decision)
description: |-
@@ -1628,10 +1559,10 @@ tasks:
isautoswitchedtoquietmode: false
"129":
id: "129"
- taskid: fd339520-4a29-4412-8675-9a7ffb073f0c
+ taskid: d814ba48-42c5-414d-86c9-dfeb842f02f0
type: regular
task:
- id: fd339520-4a29-4412-8675-9a7ffb073f0c
+ id: d814ba48-42c5-414d-86c9-dfeb842f02f0
version: -1
name: Set playbook stage grid field (decision)
description: |-
@@ -1659,7 +1590,7 @@ tasks:
{
"position": {
"x": -700,
- "y": 1120
+ "y": 1125
}
}
note: false
@@ -1671,10 +1602,10 @@ tasks:
isautoswitchedtoquietmode: false
"130":
id: "130"
- taskid: 569e4b63-d599-4604-8c97-7c7200c53c59
+ taskid: b48cba1e-0a6c-4a12-8bf7-9f2bbf47b37a
type: regular
task:
- id: 569e4b63-d599-4604-8c97-7c7200c53c59
+ id: b48cba1e-0a6c-4a12-8bf7-9f2bbf47b37a
version: -1
name: Set playbook stage grid field (decision)
description: |-
@@ -1701,8 +1632,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 1105
+ "x": 690,
+ "y": 1145
}
}
note: false
@@ -1714,10 +1645,10 @@ tasks:
isautoswitchedtoquietmode: false
"131":
id: "131"
- taskid: 33010b40-f580-4f68-8ee7-95208f3b4b5e
+ taskid: 768ae5de-aa06-4975-8e4c-aa9ae75ece4b
type: regular
task:
- id: 33010b40-f580-4f68-8ee7-95208f3b4b5e
+ id: 768ae5de-aa06-4975-8e4c-aa9ae75ece4b
version: -1
name: Set playbook stage grid field (decision)
description: |-
@@ -1744,8 +1675,8 @@ tasks:
view: |-
{
"position": {
- "x": 1220,
- "y": 1025
+ "x": 1080,
+ "y": 1125
}
}
note: false
@@ -1757,10 +1688,10 @@ tasks:
isautoswitchedtoquietmode: false
"133":
id: "133"
- taskid: d0336db6-38a5-4c68-8e16-39fba3c1c026
+ taskid: cd2d1025-4773-4743-8baf-dec1932028bd
type: regular
task:
- id: d0336db6-38a5-4c68-8e16-39fba3c1c026
+ id: cd2d1025-4773-4743-8baf-dec1932028bd
version: -1
name: Set playbook stage grid field (remediation)
description: |-
@@ -1800,10 +1731,10 @@ tasks:
isautoswitchedtoquietmode: false
"134":
id: "134"
- taskid: a25d63b0-fffe-4304-8357-d10c0f1a5f34
+ taskid: 238955d2-13ea-4e94-816f-d28db1defaf2
type: regular
task:
- id: a25d63b0-fffe-4304-8357-d10c0f1a5f34
+ id: 238955d2-13ea-4e94-816f-d28db1defaf2
version: -1
name: Set playbook stage grid field (notification)
description: |-
@@ -1830,8 +1761,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 1965
+ "x": 690,
+ "y": 2005
}
}
note: false
@@ -1843,10 +1774,10 @@ tasks:
isautoswitchedtoquietmode: false
"135":
id: "135"
- taskid: b89b04c5-a0d2-4ada-85e8-fb84fd8c0174
+ taskid: e33d4d1a-08ec-4517-8930-445a3b108f08
type: regular
task:
- id: b89b04c5-a0d2-4ada-85e8-fb84fd8c0174
+ id: e33d4d1a-08ec-4517-8930-445a3b108f08
version: -1
name: Set playbook stage grid field (validation)
description: |-
@@ -1886,10 +1817,10 @@ tasks:
isautoswitchedtoquietmode: false
"136":
id: "136"
- taskid: ea15fff7-a44e-4f31-8e63-54f069aafdec
+ taskid: 8c17fd19-2790-46fa-878d-ad53d3fff9f8
type: regular
task:
- id: ea15fff7-a44e-4f31-8e63-54f069aafdec
+ id: 8c17fd19-2790-46fa-878d-ad53d3fff9f8
version: -1
name: Set playbook stage grid field (summarization)
description: |-
@@ -1929,10 +1860,10 @@ tasks:
isautoswitchedtoquietmode: false
"141":
id: "141"
- taskid: 5fc53955-e98a-4b57-82dc-185bb48a79bb
+ taskid: 6152a9df-a84b-4bbe-8a9f-59afec75c4ae
type: title
task:
- id: 5fc53955-e98a-4b57-82dc-185bb48a79bb
+ id: 6152a9df-a84b-4bbe-8a9f-59afec75c4ae
version: -1
name: continue
type: title
@@ -1960,10 +1891,10 @@ tasks:
isautoswitchedtoquietmode: false
"142":
id: "142"
- taskid: 3759b6d2-40aa-4639-8de0-d7a5768f0f78
+ taskid: 0e38bd21-c3da-422c-8ac8-a2ebcaf93a39
type: regular
task:
- id: 3759b6d2-40aa-4639-8de0-d7a5768f0f78
+ id: 0e38bd21-c3da-422c-8ac8-a2ebcaf93a39
version: -1
name: Close alert (accepted risk)
description: Close the current alert because the service is no longer observed.
@@ -1997,10 +1928,10 @@ tasks:
isautoswitchedtoquietmode: false
"144":
id: "144"
- taskid: df8b6de0-ebf2-4344-808f-773d0a40298c
+ taskid: 167fe7ef-89a1-413f-85cb-94d9c086d6d1
type: condition
task:
- id: df8b6de0-ebf2-4344-808f-773d0a40298c
+ id: 167fe7ef-89a1-413f-85cb-94d9c086d6d1
version: -1
name: Are there any matches from accepted risk lists?
description: Checks if the system IDs, folders, and projects are present in the accepted risk lists.
@@ -2138,10 +2069,10 @@ tasks:
isautoswitchedtoquietmode: false
"145":
id: "145"
- taskid: 4766c871-f8c3-4896-8a95-02019ce271fd
+ taskid: 87cca956-c103-4e7c-84a4-0bfa10d39b81
type: condition
task:
- id: 4766c871-f8c3-4896-8a95-02019ce271fd
+ id: 87cca956-c103-4e7c-84a4-0bfa10d39b81
version: -1
name: What is the remediation action?
description: Determines what the remediation action is (direct action or data collection task).
@@ -2161,6 +2092,8 @@ tasks:
- "163"
ServiceNow Ticket:
- "162"
+ Slack:
+ - "181"
separatecontext: false
conditions:
- label: Automated Remediation
@@ -2218,6 +2151,16 @@ tasks:
right:
value:
simple: Jira
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: RemediationAction
+ operator: isEqualString
+ right:
+ value:
+ simple: Slack
+ label: Slack
continueonerrortype: ""
view: |-
{
@@ -2235,10 +2178,10 @@ tasks:
isautoswitchedtoquietmode: false
"146":
id: "146"
- taskid: 0bcc9690-5c1b-4eb9-88de-d991f31820f1
+ taskid: 5cc9450d-b22b-4100-8390-f3ec343b9319
type: title
task:
- id: 0bcc9690-5c1b-4eb9-88de-d991f31820f1
+ id: 5cc9450d-b22b-4100-8390-f3ec343b9319
version: -1
name: Automated Remediation
type: title
@@ -2266,10 +2209,10 @@ tasks:
isautoswitchedtoquietmode: false
"152":
id: "152"
- taskid: a4b94f6b-2c42-4a0e-8dfc-210138a74131
+ taskid: 3ca6aa20-4977-4c33-8d6f-4506f9f5af07
type: title
task:
- id: a4b94f6b-2c42-4a0e-8dfc-210138a74131
+ id: 3ca6aa20-4977-4c33-8d6f-4506f9f5af07
version: -1
name: Manual Remediation
type: title
@@ -2297,10 +2240,10 @@ tasks:
isautoswitchedtoquietmode: false
"156":
id: "156"
- taskid: 3d527f85-362a-4c68-8380-316a62e1924f
+ taskid: 79daba2b-1791-4d5c-855c-fb9d663191ee
type: title
task:
- id: 3d527f85-362a-4c68-8380-316a62e1924f
+ id: 79daba2b-1791-4d5c-855c-fb9d663191ee
version: -1
name: Notification Stage
type: title
@@ -2315,7 +2258,7 @@ tasks:
view: |-
{
"position": {
- "x": 370,
+ "x": 290,
"y": 1630
}
}
@@ -2328,10 +2271,10 @@ tasks:
isautoswitchedtoquietmode: false
"157":
id: "157"
- taskid: ee8d767a-885a-444c-8465-7c0382db096d
+ taskid: 9c6555fc-9a4f-480d-8292-0aa18468581c
type: regular
task:
- id: ee8d767a-885a-444c-8465-7c0382db096d
+ id: 9c6555fc-9a4f-480d-8292-0aa18468581c
version: -1
name: Set playbook stage grid field (decision)
description: |-
@@ -2358,7 +2301,7 @@ tasks:
view: |-
{
"position": {
- "x": 370,
+ "x": 290,
"y": 1175
}
}
@@ -2371,10 +2314,10 @@ tasks:
isautoswitchedtoquietmode: false
"161":
id: "161"
- taskid: f12b3c1f-6c94-4918-807a-e758e467373b
+ taskid: 5617a3f5-278a-492e-8d16-09e896120c1d
type: title
task:
- id: f12b3c1f-6c94-4918-807a-e758e467373b
+ id: 5617a3f5-278a-492e-8d16-09e896120c1d
version: -1
name: Jira Ticket
type: title
@@ -2389,7 +2332,7 @@ tasks:
view: |-
{
"position": {
- "x": 370,
+ "x": 290,
"y": 1040
}
}
@@ -2402,10 +2345,10 @@ tasks:
isautoswitchedtoquietmode: false
"162":
id: "162"
- taskid: bcef69e4-70a6-497d-8373-e29789f4c2e1
+ taskid: 0d78ff20-65b6-48a7-8f3e-c0d99809a5bf
type: title
task:
- id: bcef69e4-70a6-497d-8373-e29789f4c2e1
+ id: 0d78ff20-65b6-48a7-8f3e-c0d99809a5bf
version: -1
name: ServiceNow Ticket
type: title
@@ -2420,8 +2363,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 970
+ "x": 690,
+ "y": 1010
}
}
note: false
@@ -2433,10 +2376,10 @@ tasks:
isautoswitchedtoquietmode: false
"163":
id: "163"
- taskid: 2313dded-570c-4388-8dd7-0de43fbbf80c
+ taskid: 68147b67-a457-4d09-817b-bbf1a7539f20
type: title
task:
- id: 2313dded-570c-4388-8dd7-0de43fbbf80c
+ id: 68147b67-a457-4d09-817b-bbf1a7539f20
version: -1
name: Email Notification
type: title
@@ -2451,8 +2394,8 @@ tasks:
view: |-
{
"position": {
- "x": 1220,
- "y": 890
+ "x": 1080,
+ "y": 990
}
}
note: false
@@ -2464,10 +2407,10 @@ tasks:
isautoswitchedtoquietmode: false
"164":
id: "164"
- taskid: 83b8f6d3-b38f-4c08-884c-0a8f684e3b39
+ taskid: cf574345-4885-46c3-8781-6fe82703cd32
type: regular
task:
- id: 83b8f6d3-b38f-4c08-884c-0a8f684e3b39
+ id: cf574345-4885-46c3-8781-6fe82703cd32
version: -1
name: Set remediation grid field
description: |-
@@ -2498,7 +2441,7 @@ tasks:
view: |-
{
"position": {
- "x": -700,
+ "x": -690,
"y": 1860
}
}
@@ -2511,10 +2454,10 @@ tasks:
isautoswitchedtoquietmode: false
"165":
id: "165"
- taskid: ec8df44c-301c-4a09-84ba-5c8855f89129
+ taskid: 3b65d35e-d409-446e-8200-8c7a2d26b927
type: condition
task:
- id: ec8df44c-301c-4a09-84ba-5c8855f89129
+ id: 3b65d35e-d409-446e-8200-8c7a2d26b927
version: -1
name: Are there service owners with emails?
description: Verifies if we have emails for service owners.
@@ -2552,7 +2495,7 @@ tasks:
view: |-
{
"position": {
- "x": -700,
+ "x": -690,
"y": 2210
}
}
@@ -2565,10 +2508,10 @@ tasks:
isautoswitchedtoquietmode: false
"166":
id: "166"
- taskid: e9e352cd-b065-465f-899b-190f21c9ce72
+ taskid: ddad11be-1a2e-45a3-8030-1696ba3fe875
type: regular
task:
- id: e9e352cd-b065-465f-899b-190f21c9ce72
+ id: ddad11be-1a2e-45a3-8030-1696ba3fe875
version: -1
name: Send remediation notification email to service owners
description: Send an email to service owners regarding the status of the automated remediation action that was taken.
@@ -2615,52 +2558,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "167":
- id: "167"
- taskid: 7d17750d-0e91-4915-887a-43ddbbfbe339
- type: playbook
- task:
- id: 7d17750d-0e91-4915-887a-43ddbbfbe339
- version: -1
- name: Cortex ASM - Remediation
- description: This playbook contains all the cloud provider sub playbooks for remediation.
- type: playbook
- iscommand: false
- brand: ""
- playbookId: Cortex ASM - Remediation
- nexttasks:
- '#none#':
- - "164"
- separatecontext: true
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -690,
- "y": 1685
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- loop:
- exitCondition: ""
- iscommand: false
- max: 100
- wait: 1
- scriptarguments:
- AWSAssumeRoleName:
- simple: ${inputs.AWSAssumeRoleName}
"168":
id: "168"
- taskid: e28aa7c4-0b2f-4e12-8041-731c10074f0b
+ taskid: 3f1c6f44-04d4-4590-838c-d88bc36fbd79
type: regular
task:
- id: e28aa7c4-0b2f-4e12-8041-731c10074f0b
+ id: 3f1c6f44-04d4-4590-838c-d88bc36fbd79
version: -1
name: Set playbook stage grid field (remediation)
description: |-
@@ -2700,10 +2603,10 @@ tasks:
isautoswitchedtoquietmode: false
"169":
id: "169"
- taskid: ee06492a-abbb-4981-8851-732e9045e95b
+ taskid: ef95362b-ca5f-483b-8f1c-b49eaa591bb5
type: regular
task:
- id: ee06492a-abbb-4981-8851-732e9045e95b
+ id: ef95362b-ca5f-483b-8f1c-b49eaa591bb5
version: -1
name: Set playbook stage grid field (notification)
description: |-
@@ -2743,10 +2646,10 @@ tasks:
isautoswitchedtoquietmode: false
"171":
id: "171"
- taskid: a44a0f99-00de-4901-8cd8-85bc94ed366a
+ taskid: 1b619bad-e5c8-41f8-85f8-54c7ad48c8bc
type: regular
task:
- id: a44a0f99-00de-4901-8cd8-85bc94ed366a
+ id: 1b619bad-e5c8-41f8-85f8-54c7ad48c8bc
version: -1
name: Initialize grid fields
description: Clear ASM grid fields in case the playbook is rerun.
@@ -2808,10 +2711,10 @@ tasks:
isautoswitchedtoquietmode: false
"172":
id: "172"
- taskid: aae9af2f-62f1-49cf-8987-9bbc9912fab4
+ taskid: 24dedd51-5427-4637-8cfc-b5dc476678a7
type: regular
task:
- id: aae9af2f-62f1-49cf-8987-9bbc9912fab4
+ id: 24dedd51-5427-4637-8cfc-b5dc476678a7
version: -1
name: GenerateSummaryReport - Analysis
description: Generate an ASM Alert Summary report.
@@ -2914,10 +2817,10 @@ tasks:
isautoswitchedtoquietmode: false
"175":
id: "175"
- taskid: 1c18b32d-27ea-4d7d-8387-d96f9b6422af
+ taskid: 27b52a79-73b2-46f1-8df7-8f139873b8f4
type: playbook
task:
- id: 1c18b32d-27ea-4d7d-8387-d96f9b6422af
+ id: 27b52a79-73b2-46f1-8df7-8f139873b8f4
version: -1
name: Cortex ASM - Remediation Guidance
type: playbook
@@ -2946,10 +2849,10 @@ tasks:
isautoswitchedtoquietmode: false
"176":
id: "176"
- taskid: a68a7331-0bff-40c2-8231-c42822b96c12
+ taskid: e7101013-512c-48bf-8a4c-6a70b08dec6d
type: playbook
task:
- id: a68a7331-0bff-40c2-8231-c42822b96c12
+ id: e7101013-512c-48bf-8a4c-6a70b08dec6d
version: -1
name: Cortex ASM - Email Notification
description: This playbook is used to send email notifications to service owners to notify them of their internet exposures.
@@ -2980,8 +2883,8 @@ tasks:
view: |-
{
"position": {
- "x": 1220,
- "y": 1635
+ "x": 1080,
+ "y": 1795
}
}
note: false
@@ -2993,10 +2896,10 @@ tasks:
isautoswitchedtoquietmode: false
"177":
id: "177"
- taskid: c5ee8b3b-80c8-4e66-8d23-fed4fe1714d7
+ taskid: ac48f670-dfa1-4419-8bd3-a0bdc2b54c40
type: regular
task:
- id: c5ee8b3b-80c8-4e66-8d23-fed4fe1714d7
+ id: ac48f670-dfa1-4419-8bd3-a0bdc2b54c40
version: -1
name: GenerateSummaryReport - Summary
description: Generate an ASM Alert Summary report.
@@ -3103,10 +3006,10 @@ tasks:
isautoswitchedtoquietmode: false
"178":
id: "178"
- taskid: 38b28acb-6ef9-4c39-88a9-f396fd067a70
+ taskid: 31a1fbd0-1f8f-462a-81fd-79b5f5c2d462
type: playbook
task:
- id: 38b28acb-6ef9-4c39-88a9-f396fd067a70
+ id: 31a1fbd0-1f8f-462a-81fd-79b5f5c2d462
version: -1
name: Cortex ASM - Jira Notification
type: playbook
@@ -3137,8 +3040,8 @@ tasks:
view: |-
{
"position": {
- "x": 370,
- "y": 1780
+ "x": 290,
+ "y": 1830
}
}
note: false
@@ -3150,10 +3053,10 @@ tasks:
isautoswitchedtoquietmode: false
"179":
id: "179"
- taskid: a9ddad5d-48a5-408c-8ffa-0befe07917c4
+ taskid: 70be4075-98e6-4a30-85bd-2a2fd6c3bbbd
type: playbook
task:
- id: a9ddad5d-48a5-408c-8ffa-0befe07917c4
+ id: 70be4075-98e6-4a30-85bd-2a2fd6c3bbbd
version: -1
name: Cortex ASM - ServiceNow Notification
type: playbook
@@ -3184,8 +3087,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 1755
+ "x": 690,
+ "y": 1815
}
}
note: false
@@ -3197,10 +3100,10 @@ tasks:
isautoswitchedtoquietmode: false
"180":
id: "180"
- taskid: 21d391e7-543a-4bd0-8b0b-bc56ac517439
+ taskid: bd53973d-cb69-4fd3-838f-093a8acec197
type: regular
task:
- id: 21d391e7-543a-4bd0-8b0b-bc56ac517439
+ id: bd53973d-cb69-4fd3-838f-093a8acec197
version: -1
name: Delete context
description: |-
@@ -3235,6 +3138,281 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "181":
+ continueonerrortype: ""
+ id: "181"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "182"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: ab2861ef-0661-4556-82e8-ca82ca8247cd
+ iscommand: false
+ name: Slack Message
+ type: title
+ version: -1
+ description: ''
+ taskid: ab2861ef-0661-4556-82e8-ca82ca8247cd
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 960
+ }
+ }
+ "182":
+ continueonerrortype: ""
+ id: "182"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "183"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmplaybookstage
+ keys:
+ simple: stage,timestamp
+ val1:
+ simple: decision
+ val2:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Instead of a value, you can enter `TIMESTAMP` to get the current timestamp in ISO format. Example of the command:
+        `!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfield="gridfield"`
+ id: 98805312-6ca7-42e5-8c15-1d823f925f77
+ iscommand: false
+ name: Set playbook stage grid field (decision)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 98805312-6ca7-42e5-8c15-1d823f925f77
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 1085
+ }
+ }
+ "183":
+ continueonerrortype: ""
+ id: "183"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "184"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 27daf64d-b14d-4a42-8a5b-cf42d0953d35
+ iscommand: false
+ name: Notification Stage
+ type: title
+ version: -1
+ description: ''
+ taskid: 27daf64d-b14d-4a42-8a5b-cf42d0953d35
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 1580
+ }
+ }
+ "184":
+ continueonerrortype: ""
+ id: "184"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ loop:
+ exitCondition: ""
+ iscommand: false
+ max: 100
+ wait: 1
+ nexttasks:
+ '#none#':
+ - "134"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ InstantMessageChannel:
+ simple: ${inputs.InstantMessageChannel}
+ OwnerNotificationBody:
+ simple: ${inputs.OwnerNotificationBody}
+ RemediationGuidance:
+ simple: ${RemediationGuidance}
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+      description: This playbook is used to send instant messages to service owners to notify them of their internet exposures.
+ id: 0f2ae625-39be-4cb7-8d21-7e1632aa18a0
+ iscommand: false
+ name: 'Cortex ASM - Instant Message'
+ playbookId: 'Cortex ASM - Instant Message'
+ type: playbook
+ version: -1
+ taskid: 0f2ae625-39be-4cb7-8d21-7e1632aa18a0
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": 1470,
+ "y": 1780
+ }
+ }
+ "185":
+ continueonerrortype: ""
+ id: "185"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ loop:
+ exitCondition: ""
+ iscommand: false
+ max: 0
+ wait: 1
+ nexttasks:
+ '#none#':
+ - "125"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ AWSAssumeRoleName:
+ simple: ${inputs.AWSAssumeRoleName}
+ RemoteIP:
+ complex:
+ accessor: remoteip
+ root: alert
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 937819ab-91bd-438e-8329-4b00b1d6dfeb
+ iscommand: false
+ name: Cortex ASM - Enrichment
+ playbookId: Cortex ASM - Enrichment
+ type: playbook
+ version: -1
+ description: ''
+ taskid: 937819ab-91bd-438e-8329-4b00b1d6dfeb
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": 70,
+ "y": -870
+ }
+ }
+ "186":
+ continueonerrortype: ""
+ id: "186"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ loop:
+ exitCondition: ""
+ iscommand: false
+ max: 0
+ wait: 1
+ nexttasks:
+ '#none#':
+ - "145"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ BypassDevCheck:
+ simple: ${inputs.BypassDevCheck}
+ ExternallyDetectedProviders:
+ simple: ${ASM.ExternalService.externally_detected_providers}
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 3898edf1-25e6-484d-8f4e-572a9c84455c
+ iscommand: false
+ name: Cortex ASM - Remediation Path Rules
+ playbookId: Cortex ASM - Remediation Path Rules
+ type: playbook
+ version: -1
+ description: ''
+ taskid: 3898edf1-25e6-484d-8f4e-572a9c84455c
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": 70,
+ "y": 480
+ }
+ }
+ "187":
+ continueonerrortype: ""
+ id: "187"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ loop:
+ exitCondition: ""
+ iscommand: false
+ max: 0
+ wait: 1
+ nexttasks:
+ '#none#':
+ - "164"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ AWSAssumeRoleName:
+ simple: ${inputs.AWSAssumeRoleName}
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 647b684a-7cfd-4edb-883d-4de8c9516d5b
+ iscommand: false
+ name: Cortex ASM - Remediation
+ playbookId: Cortex ASM - Remediation
+ type: playbook
+ version: -1
+ description: ''
+ taskid: 647b684a-7cfd-4edb-883d-4de8c9516d5b
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": -690,
+ "y": 1675
+ }
+ }
view: |-
{
"linkLabelsPosition": {
@@ -3245,13 +3423,14 @@ view: |-
"145_163_Notification Email": 0.74,
"20_135_no": 0.53,
"48_10_ServiceNow ticket": 0.82,
+ "48_183_Slack message": 0.87,
"48_60_Notification email": 0.88,
"48_63_Manual remediation": 0.52
},
"paper": {
"dimensions": {
"height": 7105,
- "width": 2710,
+ "width": 2960,
"x": -1110,
"y": -1780
}
@@ -3342,7 +3521,32 @@ inputs:
playbookInputQuery:
required: false
value: {}
+- description: Channel to send instant messages for notification purposes. For Slack, this will be the channel ID.
+ key: InstantMessageChannel
+ playbookInputQuery:
+ required: false
+ value: {}
outputs: []
tests:
- No tests (auto formatted)
fromversion: 6.10.0
+inputSections:
+- description: Generic group for inputs
+ inputs:
+ - OwnerNotificationSubject
+ - OwnerNotificationBody
+ - NotificationTicketType
+ - RemediationNotificationSubject
+ - RemediationNotificationHTMLBody
+ - BypassDevCheck
+ - AcceptedRiskDs
+ - AcceptedRiskProjects
+ - AcceptedRiskOther
+ - JiraProjectKey
+ - AWSAssumeRoleName
+ - InstantMessageChannel
+ name: General (Inputs group)
+outputSections:
+- description: Generic group for outputs
+ name: General (Outputs group)
+ outputs: []
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert_README.md
index 1c7a445dbbc7..e1d1f3d08e69 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_ASM_Alert_README.md
@@ -6,14 +6,15 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Cortex ASM - Remediation Guidance
+* Cortex ASM - Detect Service
+* Cortex ASM - Email Notification
+* Cortex ASM - Enrichment
+* Cortex ASM - Instant Message
* Cortex ASM - Jira Notification
* Cortex ASM - Remediation
-* Cortex ASM - Detect Service
+* Cortex ASM - Remediation Guidance
* Cortex ASM - Remediation Path Rules
* Cortex ASM - ServiceNow Notification
-* Cortex ASM - Enrichment
-* Cortex ASM - Email Notification
### Integrations
@@ -21,15 +22,15 @@ This playbook does not use any integrations.
### Scripts
-* GridFieldSetup
* DeleteContext
* GenerateASMReport
+* GridFieldSetup
### Commands
-* setAlert
-* send-mail
* closeInvestigation
+* send-mail
+* setAlert
## Playbook Inputs
@@ -48,6 +49,7 @@ This playbook does not use any integrations.
| AcceptedRiskOther | Comma-separated list of other items that are considered an accepted risk and that should be closed. For example, a list of folders numbers in GCP and subscription IDs in Azure. | | Optional |
| JiraProjectKey | The Jira project key to associate with the issue. | | Required |
| AWSAssumeRoleName | If assuming roles for AWS, this is the name of the role to assume \(should be the same for all organizations\). | | Optional |
+| InstantMessageChannel | Channel to send instant messages for notification purposes. For Slack, this will be the channel ID. | | Optional |
## Playbook Outputs
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment.yml
index 8dbf47955514..3feef25deeff 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 51c3f602-b00e-4cdb-8389-4874746a7e01
+ taskid: f2a59ef0-cb1e-4798-8bd4-32f87bdcb801
type: start
task:
- id: 51c3f602-b00e-4cdb-8389-4874746a7e01
+ id: f2a59ef0-cb1e-4798-8bd4-32f87bdcb801
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: 2d771cd2-db77-4f80-8aec-9594c2347039
+ taskid: 8eca27f9-7709-4c5c-85ea-9723f1ba9817
type: title
task:
- id: 2d771cd2-db77-4f80-8aec-9594c2347039
+ id: 8eca27f9-7709-4c5c-85ea-9723f1ba9817
version: -1
name: Set Field
description: commands.local.cmd.set.incident
@@ -52,6 +52,7 @@ tasks:
- "9"
- "22"
- "29"
+ - "64"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -70,10 +71,10 @@ tasks:
isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: 5263a753-00be-4ec4-83ae-2b9f77092ca5
+ taskid: 7f2a1f3a-a3f2-4914-8757-5e8750a9954e
type: regular
task:
- id: 5263a753-00be-4ec4-83ae-2b9f77092ca5
+ id: 7f2a1f3a-a3f2-4914-8757-5e8750a9954e
version: -1
name: Set private IP grid field
description: |-
@@ -115,10 +116,10 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 5a02ff40-8e58-4578-81ab-ee2af7d68478
+ taskid: 14d35979-d26d-4475-811b-ca37e4fea28c
type: condition
task:
- id: 5a02ff40-8e58-4578-81ab-ee2af7d68478
+ id: 14d35979-d26d-4475-811b-ca37e4fea28c
version: -1
name: Is there EC2 information?
description: Determines if there is EC2 information to set the private IP, cloud, and tags fields.
@@ -159,10 +160,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 782d1aa8-51b1-4cfb-845b-51ab382cdbb1
+ taskid: b2537a81-ff0e-4dec-8e3c-d27d839bd677
type: title
task:
- id: 782d1aa8-51b1-4cfb-845b-51ab382cdbb1
+ id: b2537a81-ff0e-4dec-8e3c-d27d839bd677
version: -1
name: Private IP
type: title
@@ -190,10 +191,10 @@ tasks:
isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: c175b36e-f61d-4fed-81dc-4aac584d57e5
+ taskid: 7c312963-025c-4236-8a8a-490376d80924
type: title
task:
- id: c175b36e-f61d-4fed-81dc-4aac584d57e5
+ id: 7c312963-025c-4236-8a8a-490376d80924
version: -1
name: System IDs
type: title
@@ -208,7 +209,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 720
}
}
@@ -221,10 +222,10 @@ tasks:
isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: 70e6bd2a-1018-4417-82a9-4ce3f948fea3
+ taskid: a0c545b4-7ca6-4d1e-8ec4-9e7d5f906410
type: condition
task:
- id: 70e6bd2a-1018-4417-82a9-4ce3f948fea3
+ id: a0c545b4-7ca6-4d1e-8ec4-9e7d5f906410
version: -1
name: Is there EC2 and security group information?
description: Determines if there is EC2 and security group information to set in the system IDs field.
@@ -258,7 +259,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 515
}
}
@@ -271,10 +272,10 @@ tasks:
isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: c2790638-0379-47ee-86a2-b6f270b30023
+ taskid: d5e9eeba-26f8-403e-82d4-d81ddb1b2862
type: regular
task:
- id: c2790638-0379-47ee-86a2-b6f270b30023
+ id: d5e9eeba-26f8-403e-82d4-d81ddb1b2862
version: -1
name: Set system IDs grid field (VPC)
description: |-
@@ -305,7 +306,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1020
}
}
@@ -318,10 +319,10 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: 17605a41-dcf6-4bc6-82ac-35b74c405b65
+ taskid: af4fcae6-fa3a-4036-86f2-a044c2e2b150
type: regular
task:
- id: 17605a41-dcf6-4bc6-82ac-35b74c405b65
+ id: af4fcae6-fa3a-4036-86f2-a044c2e2b150
version: -1
name: Set system IDs grid field (SG)
description: |-
@@ -352,7 +353,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1190
}
}
@@ -365,10 +366,10 @@ tasks:
isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: 068282df-bda6-4f5e-8e14-caf76cbe3ed7
+ taskid: fa4088df-9a3a-4480-8e61-7672d7a4ea69
type: regular
task:
- id: 068282df-bda6-4f5e-8e14-caf76cbe3ed7
+ id: fa4088df-9a3a-4480-8e61-7672d7a4ea69
version: -1
name: Set system IDs grid field (subnet ID)
description: |-
@@ -399,7 +400,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1360
}
}
@@ -412,10 +413,10 @@ tasks:
isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: cd819f74-7964-456c-8dfa-709434039bf3
+ taskid: aee30d2e-a57d-406f-8973-da95d14221b2
type: regular
task:
- id: cd819f74-7964-456c-8dfa-709434039bf3
+ id: aee30d2e-a57d-406f-8973-da95d14221b2
version: -1
name: Set system IDs grid field (NIC)
description: |-
@@ -446,7 +447,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1540
}
}
@@ -459,10 +460,10 @@ tasks:
isautoswitchedtoquietmode: false
"14":
id: "14"
- taskid: c83ea01a-4333-40f5-8cd5-7a176c34e1cd
+ taskid: ff76595f-a547-4bff-8d63-e3a322dadc06
type: regular
task:
- id: c83ea01a-4333-40f5-8cd5-7a176c34e1cd
+ id: ff76595f-a547-4bff-8d63-e3a322dadc06
version: -1
name: Set system IDs grid field (EC2 ID)
description: |-
@@ -493,7 +494,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1710
}
}
@@ -506,10 +507,10 @@ tasks:
isautoswitchedtoquietmode: false
"15":
id: "15"
- taskid: 7048ede0-a0e8-4093-8933-a89715d9dfd6
+ taskid: 4bcc6bc5-345b-4561-8386-36c1f7622f90
type: title
task:
- id: 7048ede0-a0e8-4093-8933-a89715d9dfd6
+ id: 4bcc6bc5-345b-4561-8386-36c1f7622f90
version: -1
name: Cloud
type: title
@@ -537,10 +538,10 @@ tasks:
isautoswitchedtoquietmode: false
"16":
id: "16"
- taskid: c7c33d43-fb9a-43b6-8660-9dc57994afe3
+ taskid: b5715924-3d84-472a-8c05-65d6902a7ce0
type: regular
task:
- id: c7c33d43-fb9a-43b6-8660-9dc57994afe3
+ id: b5715924-3d84-472a-8c05-65d6902a7ce0
version: -1
name: Set cloud grid field
description: |-
@@ -592,10 +593,10 @@ tasks:
isautoswitchedtoquietmode: false
"17":
id: "17"
- taskid: 4809e879-3600-4207-8053-7af1e9884404
+ taskid: 16fe96c8-1930-477e-8ab7-8b5fcce4efed
type: title
task:
- id: 4809e879-3600-4207-8053-7af1e9884404
+ id: 16fe96c8-1930-477e-8ab7-8b5fcce4efed
version: -1
name: Tags
type: title
@@ -623,10 +624,10 @@ tasks:
isautoswitchedtoquietmode: false
"18":
id: "18"
- taskid: 2d8c53e5-5c46-4bfe-8f54-101c5e4143d9
+ taskid: d5122bf5-b3f5-42b7-8df4-a13c3be6f51e
type: regular
task:
- id: 2d8c53e5-5c46-4bfe-8f54-101c5e4143d9
+ id: d5122bf5-b3f5-42b7-8df4-a13c3be6f51e
version: -1
name: Set tags grid field
description: |-
@@ -672,10 +673,10 @@ tasks:
isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: 448283e1-7429-4854-80f7-8dc8ebfcf306
+ taskid: fdf7bd3a-cb0d-4de5-8b17-5bfa14d2cfe3
type: playbook
task:
- id: 448283e1-7429-4854-80f7-8dc8ebfcf306
+ id: fdf7bd3a-cb0d-4de5-8b17-5bfa14d2cfe3
version: -1
name: AWS - Enrichment
description: Given the IP address this playbook enriches EC2 and IAM information.
@@ -698,7 +699,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 150
+ "y": 170
}
}
note: false
@@ -710,10 +711,10 @@ tasks:
isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 36ad2f54-2ba5-4f0a-81ed-9f709d27b1c5
+ taskid: 99e7c32f-4435-48a8-8f27-05f7f3de346f
type: title
task:
- id: 36ad2f54-2ba5-4f0a-81ed-9f709d27b1c5
+ id: 99e7c32f-4435-48a8-8f27-05f7f3de346f
version: -1
name: Closing Steps
type: title
@@ -741,10 +742,10 @@ tasks:
- "51"
"22":
id: "22"
- taskid: 6962e5f9-ce36-492a-8fad-4e4368f6b379
+ taskid: c43230f7-e626-4657-86c0-ca663895f99c
type: condition
task:
- id: 6962e5f9-ce36-492a-8fad-4e4368f6b379
+ id: c43230f7-e626-4657-86c0-ca663895f99c
version: -1
name: Are there EC2 tags?
description: Determines if there is EC2 tag information to set the tags fields.
@@ -786,10 +787,10 @@ tasks:
isautoswitchedtoquietmode: false
"23":
id: "23"
- taskid: c083d6a8-8589-478d-8fe3-5ff4b401a959
+ taskid: 8b5f68d9-28ff-4f5e-8330-93fd0c76919d
type: playbook
task:
- id: c083d6a8-8589-478d-8fe3-5ff4b401a959
+ id: 8b5f68d9-28ff-4f5e-8330-93fd0c76919d
version: -1
name: AWS - Unclaimed S3 Bucket Validation
description: The playbook sends a HTTP get response to the hostname and validates if there is missing bucket information.
@@ -805,7 +806,7 @@ tasks:
view: |-
{
"position": {
- "x": -530,
+ "x": -1510,
"y": 810
}
}
@@ -818,10 +819,10 @@ tasks:
isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: cf0429d6-e359-4b5e-85b5-faf866471e35
+ taskid: ea9e07e7-7ce3-41bc-86a5-17e0195a004b
type: condition
task:
- id: cf0429d6-e359-4b5e-85b5-faf866471e35
+ id: ea9e07e7-7ce3-41bc-86a5-17e0195a004b
version: -1
name: Was an S3 bucket passed back?
description: Determines if an S3 bucket was passed back or not.
@@ -847,7 +848,7 @@ tasks:
view: |-
{
"position": {
- "x": -530,
+ "x": -1510,
"y": 995
}
}
@@ -860,10 +861,10 @@ tasks:
isautoswitchedtoquietmode: false
"28":
id: "28"
- taskid: dc5e8c03-8cdd-4451-8308-4ef5d5cf82d9
+ taskid: 6e45383f-a0d9-4c67-8ead-a718cb785920
type: regular
task:
- id: dc5e8c03-8cdd-4451-8308-4ef5d5cf82d9
+ id: 6e45383f-a0d9-4c67-8ead-a718cb785920
version: -1
name: Set system IDs grid field (BucketName)
description: |-
@@ -893,7 +894,7 @@ tasks:
view: |-
{
"position": {
- "x": -530,
+ "x": -1510,
"y": 1455
}
}
@@ -906,10 +907,10 @@ tasks:
isautoswitchedtoquietmode: false
"29":
id: "29"
- taskid: 070d4c16-2e9b-4a9f-8946-f8b81fa95a5b
+ taskid: 2e89202d-f805-4a8d-8d34-dd3c0075d17f
type: condition
task:
- id: 070d4c16-2e9b-4a9f-8946-f8b81fa95a5b
+ id: 2e89202d-f805-4a8d-8d34-dd3c0075d17f
version: -1
name: What is the sub-alert type?
description: Determines the ASM sub-alert type for different scans.
@@ -944,7 +945,7 @@ tasks:
view: |-
{
"position": {
- "x": -410,
+ "x": -1390,
"y": 505
}
}
@@ -957,10 +958,10 @@ tasks:
isautoswitchedtoquietmode: false
"30":
id: "30"
- taskid: 519f41b6-9cea-47d4-8c31-c2aea0f3c57e
+ taskid: 083b4657-6d36-4ee6-8f3a-5aae7feafdec
type: regular
task:
- id: 519f41b6-9cea-47d4-8c31-c2aea0f3c57e
+ id: 083b4657-6d36-4ee6-8f3a-5aae7feafdec
version: -1
name: Set true flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -996,10 +997,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 2bd2d10d-ec05-4363-8227-ce5275114507
+ taskid: 79d0de58-5b4f-428e-8d6e-ec8ce48f3288
type: regular
task:
- id: 2bd2d10d-ec05-4363-8227-ce5275114507
+ id: 79d0de58-5b4f-428e-8d6e-ec8ce48f3288
version: -1
name: Set false flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1035,10 +1036,10 @@ tasks:
isautoswitchedtoquietmode: false
"33":
id: "33"
- taskid: dea20ab0-84b6-4885-880b-8d1abc2d4d5c
+ taskid: be750499-2442-4db5-8d97-ba7b7e68b011
type: regular
task:
- id: dea20ab0-84b6-4885-880b-8d1abc2d4d5c
+ id: be750499-2442-4db5-8d97-ba7b7e68b011
version: -1
name: Set true flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1074,10 +1075,10 @@ tasks:
isautoswitchedtoquietmode: false
"34":
id: "34"
- taskid: c0cad7ce-44b8-4a76-8226-5a7287be663a
+ taskid: bbbaa31e-c07e-4788-8e3a-a591a6bb57d2
type: regular
task:
- id: c0cad7ce-44b8-4a76-8226-5a7287be663a
+ id: bbbaa31e-c07e-4788-8e3a-a591a6bb57d2
version: -1
name: Set true flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1113,10 +1114,10 @@ tasks:
isautoswitchedtoquietmode: false
"38":
id: "38"
- taskid: 54d33a30-9d4b-4b1d-8112-cb019c18c419
+ taskid: bcfc39b2-9ec1-47d9-8c59-372270fcc548
type: regular
task:
- id: 54d33a30-9d4b-4b1d-8112-cb019c18c419
+ id: bcfc39b2-9ec1-47d9-8c59-372270fcc548
version: -1
name: Set false flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1139,8 +1140,8 @@ tasks:
view: |-
{
"position": {
- "x": 520,
- "y": 1480
+ "x": -430,
+ "y": 1635
}
}
note: false
@@ -1152,10 +1153,10 @@ tasks:
isautoswitchedtoquietmode: false
"39":
id: "39"
- taskid: 9f2bc905-fc3d-4c37-8199-7e2ef2d8a6b8
+ taskid: d5bf97bc-5f7e-4e2e-8d74-57be7f721dc0
type: regular
task:
- id: 9f2bc905-fc3d-4c37-8199-7e2ef2d8a6b8
+ id: d5bf97bc-5f7e-4e2e-8d74-57be7f721dc0
version: -1
name: Set true flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1170,7 +1171,7 @@ tasks:
append:
simple: "true"
key:
- simple: asm_enrichment_flag_aws
+ simple: asm_enrichment_flag_aws_ssm
value:
simple: "true"
separatecontext: false
@@ -1178,7 +1179,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 1880
}
}
@@ -1191,10 +1192,10 @@ tasks:
isautoswitchedtoquietmode: false
"40":
id: "40"
- taskid: fa715bac-0be6-4767-8235-d6810ae4bd10
+ taskid: 6d6f9409-62c3-4c9d-8f84-e497b6497872
type: regular
task:
- id: fa715bac-0be6-4767-8235-d6810ae4bd10
+ id: 6d6f9409-62c3-4c9d-8f84-e497b6497872
version: -1
name: Set true flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1217,7 +1218,7 @@ tasks:
view: |-
{
"position": {
- "x": -530,
+ "x": -1510,
"y": 1865
}
}
@@ -1230,10 +1231,10 @@ tasks:
isautoswitchedtoquietmode: false
"41":
id: "41"
- taskid: 9e6312e8-0a0f-4647-8ba5-973e0b5ed91f
+ taskid: ee76e6eb-a96d-437d-8ef2-f5db8eadb783
type: regular
task:
- id: 9e6312e8-0a0f-4647-8ba5-973e0b5ed91f
+ id: ee76e6eb-a96d-437d-8ef2-f5db8eadb783
version: -1
name: Set false flag for completed enrichment
description: Set a value in context under the key you entered.
@@ -1256,7 +1257,7 @@ tasks:
view: |-
{
"position": {
- "x": -310,
+ "x": -1290,
"y": 1635
}
}
@@ -1269,10 +1270,10 @@ tasks:
isautoswitchedtoquietmode: false
"43":
id: "43"
- taskid: 456a42f9-d42f-4005-884d-82c8f1af4c5d
+ taskid: 27a03fef-3842-46bb-81ba-0c3b556ff3c2
type: regular
task:
- id: 456a42f9-d42f-4005-884d-82c8f1af4c5d
+ id: 27a03fef-3842-46bb-81ba-0c3b556ff3c2
version: -1
name: Set ASM enrichment status to true (s3)
description: |-
@@ -1314,10 +1315,10 @@ tasks:
isautoswitchedtoquietmode: false
"44":
id: "44"
- taskid: 39cba846-8cb8-4153-8778-57b3eb0640a4
+ taskid: 4afafe5c-7a00-4f78-8f0f-2ed2f09fe76c
type: regular
task:
- id: 39cba846-8cb8-4153-8778-57b3eb0640a4
+ id: 4afafe5c-7a00-4f78-8f0f-2ed2f09fe76c
version: -1
name: Set ASM enrichment status to true (AWS)
description: |-
@@ -1359,10 +1360,10 @@ tasks:
isautoswitchedtoquietmode: false
"51":
id: "51"
- taskid: ac24bdff-06e4-421e-8fdf-fc9e21998c84
+ taskid: 94b9ccf0-0c2c-422c-80f0-ac07e330085d
type: condition
task:
- id: ac24bdff-06e4-421e-8fdf-fc9e21998c84
+ id: 94b9ccf0-0c2c-422c-80f0-ac07e330085d
version: -1
name: Was enrichment performed?
description: Check if enrichment was performed by checking for a value of true in the relevant flag variable.
@@ -1439,10 +1440,10 @@ tasks:
isautoswitchedtoquietmode: false
"52":
id: "52"
- taskid: 11325ebe-a5d7-4461-8f01-2405f328c059
+ taskid: 1381ba16-f89d-40b5-86e2-54143a790af2
type: title
task:
- id: 11325ebe-a5d7-4461-8f01-2405f328c059
+ id: 1381ba16-f89d-40b5-86e2-54143a790af2
version: -1
name: Both
type: title
@@ -1471,10 +1472,10 @@ tasks:
isautoswitchedtoquietmode: false
"53":
id: "53"
- taskid: 2b3f9482-232a-42af-84b8-94341cb27760
+ taskid: 32d50c6b-2f9a-4302-829b-8c5aee0ca3c2
type: regular
task:
- id: 2b3f9482-232a-42af-84b8-94341cb27760
+ id: 32d50c6b-2f9a-4302-829b-8c5aee0ca3c2
version: -1
name: Set ASM enrichment status to false (AWS)
description: |-
@@ -1516,10 +1517,10 @@ tasks:
isautoswitchedtoquietmode: false
"54":
id: "54"
- taskid: 44cd6e08-91e4-4c1d-89cb-4ca2ea6bf109
+ taskid: f946c5cc-65b6-4e5d-86ee-f730fe0ea9ad
type: regular
task:
- id: 44cd6e08-91e4-4c1d-89cb-4ca2ea6bf109
+ id: f946c5cc-65b6-4e5d-86ee-f730fe0ea9ad
version: -1
name: Set ASM enrichment status to false (s3)
description: |-
@@ -1561,10 +1562,10 @@ tasks:
isautoswitchedtoquietmode: false
"55":
id: "55"
- taskid: 61f966c0-67d1-4a51-8be0-bb7a82a52b1a
+ taskid: 37aea528-4d0b-4237-82e9-9f15d40007c0
type: title
task:
- id: 61f966c0-67d1-4a51-8be0-bb7a82a52b1a
+ id: 37aea528-4d0b-4237-82e9-9f15d40007c0
version: -1
name: Complete
type: title
@@ -1589,10 +1590,10 @@ tasks:
isautoswitchedtoquietmode: false
"56":
id: "56"
- taskid: aa1861eb-40a6-43c1-8dc1-fe409506a657
+ taskid: aefccfa5-4c36-4a60-8aec-0df67d820a0c
type: regular
task:
- id: aa1861eb-40a6-43c1-8dc1-fe409506a657
+ id: aefccfa5-4c36-4a60-8aec-0df67d820a0c
version: -1
name: Set system IDs grid field (type)
description: Sets the type of cloud asset to the grid field for the ASM system IDs object.
@@ -1619,7 +1620,7 @@ tasks:
view: |-
{
"position": {
- "x": 90,
+ "x": -890,
"y": 855
}
}
@@ -1632,10 +1633,10 @@ tasks:
isautoswitchedtoquietmode: false
"57":
id: "57"
- taskid: acba7940-e548-49be-88e6-a78d4d37a720
+ taskid: 7df56655-292b-4db0-8d27-a2724d72203b
type: regular
task:
- id: acba7940-e548-49be-88e6-a78d4d37a720
+ id: 7df56655-292b-4db0-8d27-a2724d72203b
version: -1
name: Set system IDs grid field (type)
description: |-
@@ -1664,7 +1665,7 @@ tasks:
view: |-
{
"position": {
- "x": -530,
+ "x": -1510,
"y": 1235
}
}
@@ -1716,13 +1717,13 @@ tasks:
task:
brand: ""
description: Set a value in context under the key you entered.
- id: cec9ceb0-c771-4e25-8958-aaa841c0cc5a
+ id: 8196e4aa-6d02-4315-848f-ff9487841f41
iscommand: false
name: Set org
script: Set
type: regular
version: -1
- taskid: cec9ceb0-c771-4e25-8958-aaa841c0cc5a
+ taskid: 8196e4aa-6d02-4315-848f-ff9487841f41
timertriggers: []
type: regular
view: |-
@@ -1753,13 +1754,13 @@ tasks:
task:
brand: ""
description: Set a value in context under the key you entered.
- id: 1884cb2d-a89e-44cc-8b28-cff7deb2edde
+ id: 9d212a96-b43b-46a7-8417-0e836cd68a79
iscommand: false
name: Set org to n/a
script: Set
type: regular
version: -1
- taskid: 1884cb2d-a89e-44cc-8b28-cff7deb2edde
+ taskid: 9d212a96-b43b-46a7-8417-0e836cd68a79
timertriggers: []
type: regular
view: |-
@@ -1808,12 +1809,12 @@ tasks:
task:
brand: ""
description: Determines if there is AWS organization information to set in the cloud field.
- id: e5d551f1-b2a2-4e40-8683-3c6ee7d13dd1
+ id: d47da135-9824-4f1a-8f35-9f955ae78114
iscommand: false
name: Is there an organization id?
type: condition
version: -1
- taskid: e5d551f1-b2a2-4e40-8683-3c6ee7d13dd1
+ taskid: d47da135-9824-4f1a-8f35-9f955ae78114
timertriggers: []
type: condition
view: |-
@@ -1851,13 +1852,13 @@ tasks:
task:
brand: Builtin
description: commands.local.cmd.set.incident
- id: 0ccf9ad4-ce5f-4473-8961-176dfb6a8cf5
+ id: 67f0fcb4-0526-4c92-80be-4f89df7ec211
iscommand: true
name: Set hierarchy field
script: Builtin|||setAlert
type: regular
version: -1
- taskid: 0ccf9ad4-ce5f-4473-8961-176dfb6a8cf5
+ taskid: 67f0fcb4-0526-4c92-80be-4f89df7ec211
timertriggers: []
type: regular
view: |-
@@ -1867,6 +1868,450 @@ tasks:
"y": 1470
}
}
+ "64":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: AWS.SSM.InventoryEntry.Entries.[0].InstanceStatus
+ operator: isEqualString
+ right:
+ value:
+ simple: Active
+ root: AWS.SSM
+ operator: isNotEmpty
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "64"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "72"
+ "yes":
+ - "65"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is SSM information to set in the system IDs field.
+ id: 31a242a0-5a16-433c-8034-e9712abe8f9a
+ iscommand: false
+ name: Is there AWS SSM information?
+ type: condition
+ version: -1
+ taskid: 31a242a0-5a16-433c-8034-e9712abe8f9a
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 515
+ }
+ }
+ "65":
+ continueonerrortype: ""
+ id: "65"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "66"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-TYPE
+ val2:
+ simple: AWS-SSM
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: Sets the type of cloud asset to the grid field for the ASM system IDs object.
+ id: 627c9db8-e0bf-4d1e-87c2-de7a9b566d8a
+ iscommand: false
+ name: Set system IDs grid field (type)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 627c9db8-e0bf-4d1e-87c2-de7a9b566d8a
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 720
+ }
+ }
+ "66":
+ continueonerrortype: ""
+ id: "66"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "67"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-SSM-AGENT-STATUS
+ val2:
+ complex:
+ accessor: Entries
+ root: AWS.SSM.InventoryEntry
+ transformers:
+ - operator: FirstArrayElement
+ - args:
+ field:
+ value:
+ simple: InstanceStatus
+ operator: getField
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: b685526c-05d1-49a6-8976-54cec87051fd
+ iscommand: false
+ name: Set system IDs grid field (SSMAgentStatus)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: b685526c-05d1-49a6-8976-54cec87051fd
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 880
+ }
+ }
+ "67":
+ continueonerrortype: ""
+ id: "67"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "68"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-SSM-ID
+ val2:
+ complex:
+ accessor: Entries
+ root: AWS.SSM.InventoryEntry
+ transformers:
+ - operator: FirstArrayElement
+ - args:
+ field:
+ value:
+ simple: InstanceId
+ operator: getField
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 6b837a41-225c-4a41-880d-e04829a897db
+ iscommand: false
+ name: Set system IDs grid field (SSMInstanceID)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 6b837a41-225c-4a41-880d-e04829a897db
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 1055
+ }
+ }
+ "68":
+ continueonerrortype: ""
+ id: "68"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "69"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-SSM-PLATFORM-NAME
+ val2:
+ complex:
+ accessor: Entries
+ root: AWS.SSM.InventoryEntry
+ transformers:
+ - operator: FirstArrayElement
+ - args:
+ field:
+ value:
+ simple: PlatformName
+ operator: getField
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 0385d4a3-a23d-4cbc-8bdb-bb49f09687bd
+ iscommand: false
+ name: Set system IDs grid field (SSMPlatformName)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 0385d4a3-a23d-4cbc-8bdb-bb49f09687bd
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 1235
+ }
+ }
+ "69":
+ continueonerrortype: ""
+ id: "69"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "70"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-SSM-PLATFORM-TYPE
+ val2:
+ complex:
+ accessor: Entries
+ root: AWS.SSM.InventoryEntry
+ transformers:
+ - operator: FirstArrayElement
+ - args:
+ field:
+ value:
+ simple: PlatformType
+ operator: getField
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: d261ca8f-cb4e-4a63-8879-fc8b88e79b57
+ iscommand: false
+ name: Set system IDs grid field (SSMPlatformType)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: d261ca8f-cb4e-4a63-8879-fc8b88e79b57
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 1395
+ }
+ }
+ "70":
+ continueonerrortype: ""
+ id: "70"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "71"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: ASSET-SSM-PLATFORM-VERSION
+ val2:
+ complex:
+ accessor: Entries
+ root: AWS.SSM.InventoryEntry
+ transformers:
+ - operator: FirstArrayElement
+ - args:
+ field:
+ value:
+ simple: PlatformVersion
+ operator: getField
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: a84f696e-8f71-45a0-8878-b1235db97942
+ iscommand: false
+ name: Set system IDs grid field (SSMPlatformVersion)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: a84f696e-8f71-45a0-8878-b1235db97942
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 1565
+ }
+ }
+ "71":
+ continueonerrortype: ""
+ id: "71"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "20"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: asm_enrichment_flag_aws_ssm
+ value:
+ simple: "true"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: a8d26022-1fc4-4030-87e4-be61c8133b4e
+ iscommand: false
+ name: Set true flag for completed enrichment
+ script: Set
+ type: regular
+ version: -1
+ taskid: a8d26022-1fc4-4030-87e4-be61c8133b4e
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 40,
+ "y": 1730
+ }
+ }
+ "72":
+ continueonerrortype: ""
+ id: "72"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "20"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: asm_enrichment_flag_aws_ssm
+ value:
+ simple: "false"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: ca8eb5bb-d6c2-4cbc-8633-d2d62d1e2466
+ iscommand: false
+ name: Set false flag for completed enrichment
+ script: Set
+ type: regular
+ version: -1
+ taskid: ca8eb5bb-d6c2-4cbc-8633-d2d62d1e2466
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 460,
+ "y": 1340
+ }
+ }
view: |-
{
"linkLabelsPosition": {
@@ -1879,8 +2324,8 @@ view: |-
"paper": {
"dimensions": {
"height": 3285,
- "width": 3000,
- "x": -530,
+ "width": 3980,
+ "x": -1510,
"y": 0
}
}
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment_README.md
index b0087f4b10da..9734ea2f5280 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_AWS_Enrichment_README.md
@@ -15,8 +15,8 @@ This playbook does not use any integrations.
### Scripts
-* Set
* GridFieldSetup
+* Set
### Commands
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml
new file mode 100644
index 000000000000..9bc421b21f88
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml
@@ -0,0 +1,782 @@
+description: Playbook to enrich certificate information.
+id: 'Cortex ASM - Certificate Enrichment'
+inputSections:
+- description: Generic group for inputs
+ inputs:
+ - Hostname
+ name: General (Inputs group)
+inputs:
+- description: Input for Certificate enrichment
+ key: Hostname
+ playbookInputQuery:
+ required: false
+ value:
+ simple: ${alert.hostname}
+name: Cortex ASM - Certificate Enrichment
+outputSections:
+- description: Generic group for outputs
+ name: General (Outputs group)
+ outputs: []
+outputs: []
+starttaskid: "0"
+tasks:
+ "0":
+ continueonerrortype: ""
+ id: "0"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "16"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 0804bf72-274c-489c-86fa-0c78f7930d42
+ iscommand: false
+ name: ""
+ version: -1
+ description: ''
+ taskid: 0804bf72-274c-489c-86fa-0c78f7930d42
+ timertriggers: []
+ type: start
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -80
+ }
+ }
+ "1":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.brand
+ operator: isEqualString
+ right:
+ value:
+ simple: VenafiTLSProtect
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.state
+ operator: isEqualString
+ right:
+ value:
+ simple: active
+ root: modules
+ operator: isExists
+ right:
+ value: {}
+ - - left:
+ iscontext: true
+ value:
+ simple: inputs.Hostname
+ operator: isExists
+ label: "yes"
+ continueonerrortype: ""
+ id: "1"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "2"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+      description: Checks whether an active instance of the Venafi TLS Protect integration is enabled and the Hostname input is defined.
+ id: de7247d4-2023-4a4b-8094-57e373d36ffd
+ iscommand: false
+ name: Is Venafi TLS Protect enabled and Input defined?
+ type: condition
+ version: -1
+ taskid: de7247d4-2023-4a4b-8094-57e373d36ffd
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 215
+ }
+ }
+ "2":
+ continueonerrortype: ""
+ id: "2"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "9"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ CN:
+ simple: ${inputs.Hostname}
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: VenafiTLSProtect
+      description: 'Queries Venafi for certificates. All dates are in 2016-11-12T00:00:00.0000000Z format. For additional field information, see: https://ao-tlspd.dev.ven-eco.com/aperture/help/Content/SDK/WebSDK/r-SDK-Certificates-search-attribute.htm and https://ao-tlspd.dev.ven-eco.com/aperture/help/Content/SDK/WebSDK/r-SDK-Certificates-search-status.htm'
+ id: c8fc2b77-e986-4817-8daa-8386909b3e4b
+ iscommand: true
+ name: Get Certificate ID from Venafi
+ script: VenafiTLSProtect|||venafi-get-certificates
+ type: regular
+ version: -1
+ taskid: c8fc2b77-e986-4817-8daa-8386909b3e4b
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 440
+ }
+ }
+ "3":
+ continueonerrortype: ""
+ id: "3"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 6e37cbda-0468-468d-8bc2-fa3f2f2040d8
+ iscommand: false
+ name: Done
+ type: title
+ version: -1
+ description: ''
+ taskid: 6e37cbda-0468-468d-8bc2-fa3f2f2040d8
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 2510
+ }
+ }
+ "5":
+ continueonerrortype: ""
+ id: "5"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "17"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ guid:
+ simple: ${Venafi.Certificate.ID}
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: VenafiTLSProtect
+ description: Uses a certificate GUID to extract more details from the certificate store.
+ id: d5cc8b99-47db-4db3-8c2e-f9098ced9989
+ iscommand: true
+      name: Extract additional details from the Venafi certificate store.
+ script: VenafiTLSProtect|||venafi-get-certificate-details
+ type: regular
+ version: -1
+ taskid: d5cc8b99-47db-4db3-8c2e-f9098ced9989
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 840
+ }
+ }
+ "7":
+ continueonerrortype: ""
+ id: "7"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmenrichmentstatus
+ keys:
+ simple: source,record_exists,timestamp
+ val1:
+ simple: Certificate
+ val2:
+ simple: "true"
+ val3:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+        Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or assign context paths to different values. Instead of a value you can enter `TIMESTAMP` to get the current timestamp in ISO format. For example:
+        `!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfield="gridfield"`
+ id: 06891dbc-f1d9-4683-83cd-e6c877ee86c3
+ iscommand: false
+ name: Set ASM enrichment status to true
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 06891dbc-f1d9-4683-83cd-e6c877ee86c3
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -900,
+ "y": 2260
+ }
+ }
+ "8":
+ continueonerrortype: ""
+ id: "8"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmenrichmentstatus
+ keys:
+ simple: source,record_exists,timestamp
+ val1:
+ simple: Certificate
+ val2:
+ simple: "false"
+ val3:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+        Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or assign context paths to different values. Instead of a value you can enter `TIMESTAMP` to get the current timestamp in ISO format. For example:
+        `!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfield="gridfield"`
+ id: 183d7b27-a308-455c-8bca-f5715de02c12
+ iscommand: false
+ name: Set ASM enrichment status to false
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 183d7b27-a308-455c-8bca-f5715de02c12
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -390,
+ "y": 2260
+ }
+ }
+ "9":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: Venafi.Certificate.X509.CN
+ operator: inList
+ right:
+ iscontext: true
+ value:
+ simple: inputs.Hostname
+ root: Venafi.Certificate.X509.CN
+ operator: isNotEmpty
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "9"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "5"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Check if there are results from Venafi.
+ id: e02f629f-b980-4090-84a3-3601fffe17ca
+ iscommand: false
+ name: Are there results from Venafi?
+ type: condition
+ version: -1
+ taskid: e02f629f-b980-4090-84a3-3601fffe17ca
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 630
+ }
+ }
+ "10":
+ continueonerrortype: ""
+ id: "10"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "15"
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: asm_fields_set_for_certificate
+ value:
+ simple: "true"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: b4946827-072f-43c9-8afd-55c42b9ba184
+ iscommand: false
+ name: Set true flag for completed enrichment
+ script: Set
+ type: regular
+ version: -1
+ taskid: b4946827-072f-43c9-8afd-55c42b9ba184
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1800
+ }
+ }
+ "11":
+ continueonerrortype: ""
+ id: "11"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "12"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 6efaf535-f561-4465-8dc5-ebffce303c83
+ iscommand: false
+ name: Service Owner
+ type: title
+ version: -1
+ description: ''
+ taskid: 6efaf535-f561-4465-8dc5-ebffce303c83
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1140
+ }
+ }
+ "12":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: Venafi.Certificate.CertificateDetails.Subject
+ operator: containsGeneral
+ right:
+ value:
+ simple: '@'
+ root: Venafi.Certificate.CertificateDetails.Subject
+ transformers:
+ - args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: '[\w-\.]+@[\w-]+\.+[\w-]{2,4}'
+ unpack_matches: {}
+ operator: RegexExtractAll
+ operator: isNotEmpty
+ label: "yes"
+ continueonerrortype: ""
+ id: "12"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "13"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+      description: Check whether there are any email addresses in the certificate Subject.
+ id: d67f3e5e-8afb-4521-8ab2-2514809b5846
+ iscommand: false
+ name: Are there emails in Certificate Subject?
+ type: condition
+ version: -1
+ taskid: d67f3e5e-8afb-4521-8ab2-2514809b5846
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1320
+ }
+ }
+ "13":
+ continueonerrortype: ""
+ id: "13"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmserviceownerunrankedraw
+ keys:
+ simple: name,email,source,timestamp
+ val1:
+ simple: n/a
+ val2:
+ complex:
+ accessor: Subject
+ root: Venafi.Certificate.CertificateDetails
+ transformers:
+ - args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: '[\w-\.]+@[\w-]+\.+[\w-]{2,4}'
+ unpack_matches: {}
+ operator: RegexExtractAll
+ val3:
+ simple: Certificate-Venafi
+ val4:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+        Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or assign context paths to different values. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 436ba802-82ee-47f1-823e-19f0c231ef09
+ iscommand: false
+ name: Set service owner grid field
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 436ba802-82ee-47f1-823e-19f0c231ef09
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1565
+ }
+ }
+ "14":
+ continueonerrortype: ""
+ id: "14"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "15"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 000c1044-7744-4fb0-8168-5a2e14e6d6d9
+ iscommand: false
+ name: Closing stage
+ type: title
+ version: -1
+ description: ''
+ taskid: 000c1044-7744-4fb0-8168-5a2e14e6d6d9
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": -660,
+ "y": 1825
+ }
+ }
+ "15":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: asm_fields_set_for_certificate
+ operator: isTrue
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "15"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "8"
+ "yes":
+ - "7"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Check if enrichment is performed.
+ id: d9180488-10ff-4a1e-85ea-81a5b09ef5f0
+ iscommand: false
+ name: Was enrichment performed?
+ type: condition
+ version: -1
+ taskid: d9180488-10ff-4a1e-85ea-81a5b09ef5f0
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": -660,
+ "y": 2080
+ }
+ }
+ "16":
+ continueonerrortype: ""
+ id: "16"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "1"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 22628f06-3656-4c77-8876-eeb4ebe83a25
+ iscommand: false
+ name: Venafi enrichment
+ type: title
+ version: -1
+ description: ''
+ taskid: 22628f06-3656-4c77-8876-eeb4ebe83a25
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 60
+ }
+ }
+ "17":
+ continueonerrortype: ""
+ id: "17"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "11"
+ - "18"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 9e6da600-66c4-47f4-8b6a-ecd99f4470e7
+ iscommand: false
+ name: Set fields
+ type: title
+ version: -1
+ description: ''
+ taskid: 9e6da600-66c4-47f4-8b6a-ecd99f4470e7
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1000
+ }
+ }
+ "18":
+ continueonerrortype: ""
+ id: "18"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "19"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 9aa996ab-38c4-4e2c-85d9-07f65777702d
+ iscommand: false
+ name: System IDs
+ type: title
+ version: -1
+ description: ''
+ taskid: 9aa996ab-38c4-4e2c-85d9-07f65777702d
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 750,
+ "y": 1140
+ }
+ }
+ "19":
+ continueonerrortype: ""
+ id: "19"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: CERTIFICATE-VENAFI-ID
+ val2:
+ simple: ${Venafi.Certificate.ID}
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+        Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or assign context paths to different values. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 578b389a-0877-4f32-88aa-127256ad227b
+ iscommand: false
+ name: Set system IDs grid field (Certificate ID)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 578b389a-0877-4f32-88aa-127256ad227b
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 750,
+ "y": 1320
+ }
+ }
+version: -1
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 2655,
+ "width": 2030,
+ "x": -900,
+ "y": -80
+ }
+ }
+ }
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
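
For context on the service-owner extraction above: tasks 12 and 13 of this new playbook run a RegexExtractAll transformer over `Venafi.Certificate.CertificateDetails.Subject` to pull out email addresses. A minimal Python sketch of that extraction follows; it is illustrative only, the function name is made up for this note, and the character class is reordered (`[\w.-]` instead of `[\w-\.]`) so Python's `re` module accepts it.

```python
import re

# Approximates the playbook's RegexExtractAll transformer that pulls owner emails
# out of the certificate Subject returned by Venafi (tasks 12 and 13 above).
# Character class reordered so Python's re accepts it; the server-side transformer
# engine may behave slightly differently.
EMAIL_PATTERN = re.compile(r"[\w.-]+@[\w-]+\.+[\w-]{2,4}")


def extract_owner_emails(subject):
    """Return all email-like substrings found in a certificate Subject string."""
    return EMAIL_PATTERN.findall(subject or "")


if __name__ == "__main__":
    print(extract_owner_emails("CN=app.example.com, O=Example Corp, E=owner@example.com"))
    # ['owner@example.com']
```
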
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md
new file mode 100644
index 000000000000..99854e35ea5b
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md
@@ -0,0 +1,42 @@
+Playbook to enrich certificate information.
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+This playbook does not use any sub-playbooks.
+
+### Integrations
+
+* VenafiTLSProtect
+
+### Scripts
+
+* Set
+* GridFieldSetup
+
+### Commands
+
+* venafi-get-certificates
+* venafi-get-certificate-details
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| Hostname | Input for Certificate enrichment | ${alert.hostname} | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex ASM - Certificate Enrichment](../doc_files/Cortex_ASM_-_Certificate_Enrichment.png)
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
index f3dd4dd5aa26..d45174ef68f6 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 66f369ad-a7a1-4875-8411-a50d64741291
+ taskid: 744b37b8-b653-4703-8f32-a0e2b9d75916
type: start
task:
- id: 66f369ad-a7a1-4875-8411-a50d64741291
+ id: 744b37b8-b653-4703-8f32-a0e2b9d75916
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: c3f3c01d-aa4c-4c4d-88af-4d79f497f147
+ taskid: 032b86e0-f734-4bfc-828b-506b3f38a87a
type: condition
task:
- id: c3f3c01d-aa4c-4c4d-88af-4d79f497f147
+ id: 032b86e0-f734-4bfc-828b-506b3f38a87a
version: -1
name: Is there an IP address?
description: Determines if the IP address has been supplied to proceed with cloud enrichment.
@@ -91,10 +91,10 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 7d47dd96-27ca-427d-8018-bc23630b713d
+ taskid: bb873f5f-e58e-47d1-84fc-5b88c2f11e7c
type: title
task:
- id: 7d47dd96-27ca-427d-8018-bc23630b713d
+ id: bb873f5f-e58e-47d1-84fc-5b88c2f11e7c
version: -1
name: ServiceNow Enrichment
type: title
@@ -122,10 +122,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 85521f3f-c1eb-4e92-86c7-69ddd3d82e54
+ taskid: 0d23935f-2f92-4bf1-853c-d81a4b68886e
type: condition
task:
- id: 85521f3f-c1eb-4e92-86c7-69ddd3d82e54
+ id: 0d23935f-2f92-4bf1-853c-d81a4b68886e
version: -1
name: Was there a result?
description: Determines if there was a result from the previous command to continue cloud enrichment.
@@ -165,10 +165,10 @@ tasks:
isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: 4c9b1a4b-a562-449a-803a-e0939cd6b5ad
+ taskid: ea81e04a-2e71-4da4-893f-a72fc6d12df1
type: condition
task:
- id: 4c9b1a4b-a562-449a-803a-e0939cd6b5ad
+ id: ea81e04a-2e71-4da4-893f-a72fc6d12df1
version: -1
name: What provider is this service?
description: Determines which cloud provider the service is in order to direct to the correct enrichment.
@@ -321,10 +321,10 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: bf678446-745d-46e1-8044-afe38da7b09c
+ taskid: 05b6f167-bf9b-48e2-8b4e-5619aa2993a8
type: condition
task:
- id: bf678446-745d-46e1-8044-afe38da7b09c
+ id: 05b6f167-bf9b-48e2-8b4e-5619aa2993a8
version: -1
name: Is Cortex ASM enabled and is there a service?
description: Determines if the "Cortex Attack Surface Management" integration instance is configured and that there is a service to continue with enrichment.
@@ -389,10 +389,10 @@ tasks:
isautoswitchedtoquietmode: false
"35":
id: "35"
- taskid: 41731e1d-66f2-4769-835c-9cd48e4a7baa
+ taskid: c584c4e7-34dc-40b5-8689-cbbc271cba76
type: title
task:
- id: 41731e1d-66f2-4769-835c-9cd48e4a7baa
+ id: c584c4e7-34dc-40b5-8689-cbbc271cba76
version: -1
name: Cloud Enrichment
type: title
@@ -420,10 +420,10 @@ tasks:
isautoswitchedtoquietmode: false
"38":
id: "38"
- taskid: a12624d9-2014-4ca2-8a55-816d0c509b05
+ taskid: 8cd46d49-a0e1-4f1a-8431-fcfdb649a1e3
type: title
task:
- id: a12624d9-2014-4ca2-8a55-816d0c509b05
+ id: 8cd46d49-a0e1-4f1a-8431-fcfdb649a1e3
version: -1
name: Complete
type: title
@@ -436,7 +436,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5770
+ "y": 6080
}
}
note: false
@@ -448,10 +448,10 @@ tasks:
isautoswitchedtoquietmode: false
"61":
id: "61"
- taskid: e4455d18-e7a6-498f-8d71-773abbd250fa
+ taskid: d3c24b8e-eb31-4b78-8556-d0dde1462c97
type: playbook
task:
- id: e4455d18-e7a6-498f-8d71-773abbd250fa
+ id: d3c24b8e-eb31-4b78-8556-d0dde1462c97
version: -1
name: Cortex ASM - ServiceNow CMDB Enrichment
type: playbook
@@ -489,10 +489,10 @@ tasks:
isautoswitchedtoquietmode: false
"62":
id: "62"
- taskid: e88c0ff6-68e2-4851-8fbd-ad487ed5a3ab
+ taskid: afed84f0-457f-40ac-8b2c-237668c34d89
type: title
task:
- id: e88c0ff6-68e2-4851-8fbd-ad487ed5a3ab
+ id: afed84f0-457f-40ac-8b2c-237668c34d89
version: -1
name: Tenable.io Enrichment
type: title
@@ -520,10 +520,10 @@ tasks:
isautoswitchedtoquietmode: false
"63":
id: "63"
- taskid: e12638c0-cee1-4666-8377-d5eb6b08eac3
+ taskid: 43ca061a-8750-4536-82e7-af3001b49845
type: playbook
task:
- id: e12638c0-cee1-4666-8377-d5eb6b08eac3
+ id: 43ca061a-8750-4536-82e7-af3001b49845
version: -1
name: Cortex ASM - Tenable.io Enrichment
description: Given the IP address this playbook enriches Tenable.io information relevant to ASM alerts.
@@ -563,10 +563,10 @@ tasks:
isautoswitchedtoquietmode: false
"66":
id: "66"
- taskid: 89573dea-4626-4d8f-8ea4-259828f9392b
+ taskid: 9b65a442-4af7-4d64-80e0-d3aed6284a96
type: regular
task:
- id: 89573dea-4626-4d8f-8ea4-259828f9392b
+ id: 9b65a442-4af7-4d64-80e0-d3aed6284a96
version: -1
name: Get external service information
description: Get service details according to the service ID.
@@ -606,10 +606,10 @@ tasks:
isautoswitchedtoquietmode: false
"67":
id: "67"
- taskid: 91d43e90-31d9-4a14-83c6-456e71a371f9
+ taskid: ed07dbf6-da24-4f6a-880c-6661cca1e249
type: regular
task:
- id: 91d43e90-31d9-4a14-83c6-456e71a371f9
+ id: ed07dbf6-da24-4f6a-880c-6661cca1e249
version: -1
name: Set protocol
description: commands.local.cmd.set.incident
@@ -643,10 +643,10 @@ tasks:
isautoswitchedtoquietmode: false
"68":
id: "68"
- taskid: effa61bc-0398-40a0-83f6-4a9b53e90669
+ taskid: 547c9c5b-f2c2-4742-87a6-7a747d916e58
type: regular
task:
- id: effa61bc-0398-40a0-83f6-4a9b53e90669
+ id: 547c9c5b-f2c2-4742-87a6-7a747d916e58
version: -1
name: Infer whether service is used for development (vs. production)
description: Identify whether the service is a "development" server. Development servers have no external users and run no production workflows. These servers might be named "dev", but they might also be named "qa", "pre-production", "user acceptance testing", or use other non-production terms. This automation uses both public data visible to anyone (`active_classifications` as derived by Xpanse ASM) as well as checking internal data for AI-learned indicators of development systems (`asm_tags` as derived from integrations with non-public systems).
@@ -688,7 +688,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5420
+ "y": 5730
}
}
note: false
@@ -700,10 +700,10 @@ tasks:
isautoswitchedtoquietmode: false
"69":
id: "69"
- taskid: 118f3ab7-3a3b-490c-84f3-65f3c4727262
+ taskid: 35daf2a7-7116-4c04-82f2-fdebe9d1aad5
type: playbook
task:
- id: 118f3ab7-3a3b-490c-84f3-65f3c4727262
+ id: 35daf2a7-7116-4c04-82f2-fdebe9d1aad5
version: -1
name: Cortex ASM - Azure Enrichment
description: Given the IP address, this playbook enriches Azure information relevant to ASM alerts.
@@ -741,10 +741,10 @@ tasks:
isautoswitchedtoquietmode: false
"70":
id: "70"
- taskid: 2aeb3492-5b5d-44d4-8164-4e9dfd5ef0c4
+ taskid: bdae52ae-233c-4a58-806a-b111be48239e
type: title
task:
- id: 2aeb3492-5b5d-44d4-8164-4e9dfd5ef0c4
+ id: bdae52ae-233c-4a58-806a-b111be48239e
version: -1
name: Splunk Enrichment
type: title
@@ -772,10 +772,10 @@ tasks:
isautoswitchedtoquietmode: false
"71":
id: "71"
- taskid: 2e978533-d06c-4ff8-8ebe-d367dce58690
+ taskid: b52ba66b-4088-416c-8d8f-19e32d460833
type: playbook
task:
- id: 2e978533-d06c-4ff8-8ebe-d367dce58690
+ id: b52ba66b-4088-416c-8d8f-19e32d460833
version: -1
name: Cortex ASM - Splunk Enrichment
description: 'Given the IP address this playbook enriches information from Splunk results relevant to ASM alerts. '
@@ -815,10 +815,10 @@ tasks:
isautoswitchedtoquietmode: false
"72":
id: "72"
- taskid: e849c4e9-d53d-4548-85dd-5cd7704dfc8b
+ taskid: 1f4edd55-a394-4fa5-8286-b1199ecd75fb
type: playbook
task:
- id: e849c4e9-d53d-4548-85dd-5cd7704dfc8b
+ id: 1f4edd55-a394-4fa5-8286-b1199ecd75fb
version: -1
name: Cortex ASM - Rapid7 Enrichment
description: Given the IP address this playbook enriches Rapid7 InsightVM (Nexpose) information relevant to ASM alerts.
@@ -858,10 +858,10 @@ tasks:
isautoswitchedtoquietmode: false
"73":
id: "73"
- taskid: 918cca53-7ffd-4aa1-8eb8-305ebaa60ff4
+ taskid: 6fd8b6d1-162e-493f-837f-8be3c652bc54
type: title
task:
- id: 918cca53-7ffd-4aa1-8eb8-305ebaa60ff4
+ id: 6fd8b6d1-162e-493f-837f-8be3c652bc54
version: -1
name: Rapid7 Enrichment
type: title
@@ -889,10 +889,10 @@ tasks:
isautoswitchedtoquietmode: false
"74":
id: "74"
- taskid: 234ac145-3757-4cb1-8501-319f48302242
+ taskid: 3b3e5e95-1326-4e54-87d4-874f97d089b8
type: title
task:
- id: 234ac145-3757-4cb1-8501-319f48302242
+ id: 3b3e5e95-1326-4e54-87d4-874f97d089b8
version: -1
name: Qualys Enrichment
type: title
@@ -920,10 +920,10 @@ tasks:
isautoswitchedtoquietmode: false
"75":
id: "75"
- taskid: d9b7ea21-8bc1-4907-8e5f-b6e2ec013c84
+ taskid: 6800c14b-ef4d-4cc6-8913-1dea41f0da00
type: playbook
task:
- id: d9b7ea21-8bc1-4907-8e5f-b6e2ec013c84
+ id: 6800c14b-ef4d-4cc6-8913-1dea41f0da00
version: -1
name: Cortex ASM - Qualys Enrichment
description: Given the IP address this playbook enriches information from Qualys assets.
@@ -963,10 +963,10 @@ tasks:
isautoswitchedtoquietmode: false
"76":
id: "76"
- taskid: e110e2fe-ca56-4ec8-8ad9-f534f67e89d2
+ taskid: 832fc7bd-8848-447e-8b74-154e1a19a150
type: playbook
task:
- id: e110e2fe-ca56-4ec8-8ad9-f534f67e89d2
+ id: 832fc7bd-8848-447e-8b74-154e1a19a150
version: -1
name: Cortex ASM - GCP Enrichment
description: Given the IP address this playbook enriches GCP information relevant to ASM alerts.
@@ -995,10 +995,10 @@ tasks:
isautoswitchedtoquietmode: false
"78":
id: "78"
- taskid: 6d560875-540c-4624-8246-384bd62bb57c
+ taskid: 0e9be24c-1be3-4998-8f38-85c309d65072
type: playbook
task:
- id: 6d560875-540c-4624-8246-384bd62bb57c
+ id: 0e9be24c-1be3-4998-8f38-85c309d65072
version: -1
name: Cortex ASM - Service Ownership
type: playbook
@@ -1015,7 +1015,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5600
+ "y": 5910
}
}
note: false
@@ -1027,10 +1027,10 @@ tasks:
isautoswitchedtoquietmode: false
"79":
id: "79"
- taskid: 31483d73-2007-41cc-8866-2f99563d049c
+ taskid: 56a54faf-ee4a-493a-80a1-fe4087457bed
type: playbook
task:
- id: 31483d73-2007-41cc-8866-2f99563d049c
+ id: 56a54faf-ee4a-493a-80a1-fe4087457bed
version: -1
name: Cortex ASM - Prisma Cloud Enrichment
description: Given the IP address this playbook enriches information from Prisma Cloud.
@@ -1074,10 +1074,10 @@ tasks:
isautoswitchedtoquietmode: false
"80":
id: "80"
- taskid: 7679ffb5-fe99-4842-8b58-c651b5f3418b
+ taskid: 6610fa32-db2d-46be-86be-c2d2ccf23db8
type: condition
task:
- id: 7679ffb5-fe99-4842-8b58-c651b5f3418b
+ id: 6610fa32-db2d-46be-86be-c2d2ccf23db8
version: -1
name: Are there any emails in tags?
description: Checks if there is email in the tags.
@@ -1137,10 +1137,10 @@ tasks:
isautoswitchedtoquietmode: false
"81":
id: "81"
- taskid: e57b4529-9731-4ac9-8edb-b2ed951efa1c
+ taskid: 428ebd93-7405-4d3f-80d7-ed88a14e8d4c
type: title
task:
- id: e57b4529-9731-4ac9-8edb-b2ed951efa1c
+ id: 428ebd93-7405-4d3f-80d7-ed88a14e8d4c
version: -1
name: Service Owner from Tags
type: title
@@ -1168,10 +1168,10 @@ tasks:
isautoswitchedtoquietmode: false
"83":
id: "83"
- taskid: 618150ab-c218-432b-8453-0f870c7ca88f
+ taskid: 2a9e6074-cb1c-49fd-8d7a-a0276165b3eb
type: regular
task:
- id: 618150ab-c218-432b-8453-0f870c7ca88f
+ id: 2a9e6074-cb1c-49fd-8d7a-a0276165b3eb
version: -1
name: Set service owners from Tag grid field
description: |-
@@ -1256,10 +1256,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 5b4e9439-349c-47c4-85e6-ec133c44402f
+ taskid: 65488c43-6f4c-47a6-8b71-0fc785792485
type: playbook
task:
- id: 5b4e9439-349c-47c4-85e6-ec133c44402f
+ id: 65488c43-6f4c-47a6-8b71-0fc785792485
version: -1
name: Cortex ASM - AWS Enrichment
type: playbook
@@ -1303,10 +1303,10 @@ tasks:
isautoswitchedtoquietmode: false
"85":
id: "85"
- taskid: 160ce35c-1f12-40fc-8048-1e8b6da77d36
+ taskid: 674f71b4-615a-4ca0-8181-115f7cfc7325
type: regular
task:
- id: 160ce35c-1f12-40fc-8048-1e8b6da77d36
+ id: 674f71b4-615a-4ca0-8181-115f7cfc7325
version: -1
name: Sleep for 1 hour
description: Sleep for X seconds
@@ -1340,10 +1340,10 @@ tasks:
isautoswitchedtoquietmode: false
"86":
id: "86"
- taskid: a208d92b-e40d-413a-835d-0fd47eb16143
+ taskid: 2e66d2cb-fc68-4ec0-8886-4320d80612da
type: condition
task:
- id: a208d92b-e40d-413a-835d-0fd47eb16143
+ id: 2e66d2cb-fc68-4ec0-8886-4320d80612da
version: -1
name: Was there a result?
description: Determines if there was a result from the previous command to continue cloud enrichment.
@@ -1383,10 +1383,10 @@ tasks:
isautoswitchedtoquietmode: false
"87":
id: "87"
- taskid: 8d4c19d4-c500-49c1-8fd4-61f1b131b42f
+ taskid: b3ec329a-381f-414f-8181-4b094102fbf4
type: regular
task:
- id: 8d4c19d4-c500-49c1-8fd4-61f1b131b42f
+ id: b3ec329a-381f-414f-8181-4b094102fbf4
version: -1
name: Get external service information
description: Get service details according to the service ID.
@@ -1426,10 +1426,10 @@ tasks:
isautoswitchedtoquietmode: false
'88':
id: '88'
- taskid: 376af535-c66f-459c-886f-406efe90345f
+ taskid: 25042dc4-d44e-4cce-8bdf-931ac8c3571c
type: playbook
task:
- id: 376af535-c66f-459c-886f-406efe90345f
+ id: 25042dc4-d44e-4cce-8bdf-931ac8c3571c
version: -1
name: Cortex ASM - On Prem Enrichment
type: playbook
@@ -1475,10 +1475,10 @@ tasks:
isautoswitchedtoquietmode: false
'89':
id: '89'
- taskid: fa3e165b-ff83-4c4d-8de3-7c4d4531f66f
+ taskid: 0e10ac8f-f7da-41bd-871f-a9e472a2db2b
type: playbook
task:
- id: fa3e165b-ff83-4c4d-8de3-7c4d4531f66f
+ id: 0e10ac8f-f7da-41bd-871f-a9e472a2db2b
version: -1
name: Cortex ASM - ServiceNow ITSM Enrichment
type: playbook
@@ -1563,13 +1563,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: e943b4d8-cc63-484a-811c-ca45bffb05ae
+ id: 0dd047f2-f372-459e-8324-5833abaedeff
iscommand: false
name: Prisma Cloud Enrichment
type: title
version: -1
description: ''
- taskid: e943b4d8-cc63-484a-811c-ca45bffb05ae
+ taskid: 0dd047f2-f372-459e-8324-5833abaedeff
timertriggers: []
type: title
view: |-
@@ -1603,13 +1603,13 @@ tasks:
task:
brand: ""
description: This playbook is used to pull information from Cortex Endpoint (XSIAM/XDR) systems for enrichment purposes.
- id: cc58ddab-6536-4f67-8d3e-eaba90832c78
+ id: 8b02b5a9-d5f3-44f7-84b4-3c8886d6f379
iscommand: false
name: Cortex ASM - Cortex Endpoint Enrichment
playbookId: Cortex ASM - Cortex Endpoint Enrichment
type: playbook
version: -1
- taskid: cc58ddab-6536-4f67-8d3e-eaba90832c78
+ taskid: 8b02b5a9-d5f3-44f7-84b4-3c8886d6f379
timertriggers: []
type: playbook
view: |-
@@ -1634,13 +1634,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: 9d50af6a-4665-4fc3-84d9-9ad558fc031e
+ id: 3bf7ff76-98e9-42aa-84f0-03be04298a7c
iscommand: false
name: Cortex Endpoint Enrichment
type: title
version: -1
description: ''
- taskid: 9d50af6a-4665-4fc3-84d9-9ad558fc031e
+ taskid: 3bf7ff76-98e9-42aa-84f0-03be04298a7c
timertriggers: []
type: title
view: |-
@@ -1663,7 +1663,7 @@ tasks:
wait: 1
nexttasks:
'#none#':
- - "68"
+ - "99"
note: false
quietmode: 0
scriptarguments:
@@ -1674,13 +1674,13 @@ tasks:
task:
brand: ""
description: Playbook to enriches Service ownership info in Azure and On-Prem Active Directory.
- id: 52aab316-1470-447a-8517-3ef2b5bf05bf
+ id: 4d50a364-ef5f-48bd-829a-b9a203560745
iscommand: false
name: Cortex ASM - Active Directory Enrichment
playbookId: Cortex ASM - Active Directory Enrichment
type: playbook
version: -1
- taskid: 52aab316-1470-447a-8517-3ef2b5bf05bf
+ taskid: 4d50a364-ef5f-48bd-829a-b9a203560745
timertriggers: []
type: playbook
view: |-
@@ -1705,13 +1705,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: d0aa855c-4b6a-48bb-8610-974a8667ee1e
+ id: 785f967c-452c-4ffa-847f-1684751fabb9
iscommand: false
name: Active Directory Enrichment
type: title
version: -1
description: ''
- taskid: d0aa855c-4b6a-48bb-8610-974a8667ee1e
+ taskid: 785f967c-452c-4ffa-847f-1684751fabb9
timertriggers: []
type: title
view: |-
@@ -1721,6 +1721,69 @@ tasks:
"y": 5100
}
}
+ "98":
+ continueonerrortype: ""
+ id: "98"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "68"
+ note: false
+ quietmode: 0
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+ id: de7fd69a-c449-4f9f-82f3-c03e2184606d
+ iscommand: false
+ name: 'Cortex ASM - Certificate Enrichment'
+ playbookId: 'Cortex ASM - Certificate Enrichment'
+ type: playbook
+ version: -1
+ description: ''
+ taskid: de7fd69a-c449-4f9f-82f3-c03e2184606d
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": 110,
+ "y": 5560
+ }
+ }
+ "99":
+ continueonerrortype: ""
+ id: "99"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "98"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 68ab2492-9ebc-4ed8-871e-be2b5e6954ce
+ iscommand: false
+ name: Certificate Enrichment
+ type: title
+ version: -1
+ description: ''
+ taskid: 68ab2492-9ebc-4ed8-871e-be2b5e6954ce
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 110,
+ "y": 5420
+ }
+ }
view: |-
{
"linkLabelsPosition": {
@@ -1734,7 +1797,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 5965,
+ "height": 6275,
"width": 1610,
"x": 110,
"y": -130
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
index 75f8814c0cb3..0d7ba212c1bc 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
@@ -9,6 +9,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
* Cortex ASM - AWS Enrichment
* Cortex ASM - Active Directory Enrichment
* Cortex ASM - Azure Enrichment
+* Cortex ASM - Certificate Enrichment
* Cortex ASM - Cortex Endpoint Enrichment_Core_Combo
* Cortex ASM - GCP Enrichment
* Cortex ASM - On Prem Enrichment
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message.yml
new file mode 100644
index 000000000000..54aba3346e9c
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message.yml
@@ -0,0 +1,338 @@
+contentitemexportablefields:
+ contentitemfields: {}
+description: This playbook is used to send instant messages to service owners to notify them of their internet exposures.
+id: 'Cortex ASM - Instant Message'
+inputSections:
+- description: Generic group for inputs
+ inputs:
+ - OwnerNotificationBody
+ - InstantMessageChannel
+ - RemediationGuidance
+ name: General (Inputs group)
+inputs:
+- description: Body of the notification (email or ticket) sent to the potential service owner.
+ key: OwnerNotificationBody
+ playbookInputQuery:
+ required: true
+ value: {}
+- description: Channel to send instant messages for notification purposes. For Slack, this will be the channel ID.
+ key: InstantMessageChannel
+ playbookInputQuery:
+ required: true
+ value: {}
+- description: Remediation Guidance of the Attack Surface Rule.
+ key: RemediationGuidance
+ playbookInputQuery:
+ required: true
+ value: {}
+name: Cortex ASM - Instant Message
+outputSections:
+- description: Generic group for outputs
+ name: General (Outputs group)
+ outputs: []
+outputs: []
+starttaskid: "0"
+tasks:
+ "0":
+ continueonerrortype: ""
+ id: "0"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "11"
+ - "13"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 18a8542a-f6ca-474e-82bb-cd023bdf2a69
+ iscommand: false
+ name: ""
+ version: -1
+ description: ''
+ taskid: 18a8542a-f6ca-474e-82bb-cd023bdf2a69
+ timertriggers: []
+ type: start
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -170
+ }
+ }
+ "5":
+ continueonerrortype: ""
+ id: "5"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: e28fdfca-d524-4853-8dc3-285981f0050f
+ iscommand: false
+ name: Complete
+ type: title
+ version: -1
+ description: ''
+ taskid: e28fdfca-d524-4853-8dc3-285981f0050f
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 520
+ }
+ }
+ "8":
+ continueonerrortype: ""
+ id: "8"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "5"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmnotification
+ keys:
+ simple: type,value,url,timestamp
+ val1:
+ simple: Slack
+ val2:
+ simple: ${Slack.Thread.ID}
+ val3:
+ simple: n/a
+ val4:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+        Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or assign context paths to different values. For example:
+        `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: e529a3dc-af23-41e5-8445-64473ce4b094
+ iscommand: false
+ name: Set notification grid field
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: e529a3dc-af23-41e5-8445-64473ce4b094
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 360
+ }
+ }
+ "10":
+ continueonerrortype: ""
+ id: "10"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "8"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ blocks:
+ simple: |-
+ [
+ {
+ "type": "rich_text",
+ "elements": [
+ {
+ "type": "rich_text_section",
+ "elements": [
+ {
+ "type": "link",
+ "text": "Cortex ASM Alert: ${alert.name}",
+ "url": "${AlertURL}"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "type": "divider"
+ },
+ {
+ "type": "rich_text",
+ "elements": [
+ {
+ "type": "rich_text_section",
+ "elements": [
+ {
+ "type": "text",
+ "text": "${FormattedBody}"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "type": "divider"
+ }
+ ]
+ channel_id:
+ simple: ${inputs.InstantMessageChannel}
+ separatecontext: false
+ skipunavailable: true
+ task:
+ brand: SlackV3
+ description: Sends a message to a user, group, or channel.
+ id: dfb76202-de06-43c1-82b4-7fe51c9828d5
+ iscommand: true
+ name: Send Slack message
+ script: SlackV3|||send-notification
+ type: regular
+ version: -1
+ taskid: dfb76202-de06-43c1-82b4-7fe51c9828d5
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 195
+ }
+ }
+ "11":
+ continueonerrortype: ""
+ id: "11"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ key:
+ simple: FormattedBody
+ value:
+ complex:
+ root: inputs.OwnerNotificationBody
+ transformers:
+ - args:
+ prefix: {}
+ suffix:
+ iscontext: true
+ value:
+ simple: inputs.RemediationGuidance
+ operator: concat
+ - args:
+ limit: {}
+ replaceWith: {}
+ toReplace:
+ value:
+ simple:
+ operator: replace
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 1539852c-e767-4018-8a82-fa641fb3bb54
+ iscommand: false
+ name: Format message body
+ script: Set
+ type: regular
+ version: -1
+ taskid: 1539852c-e767-4018-8a82-fa641fb3bb54
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 230,
+ "y": 10
+ }
+ }
+ "13":
+ continueonerrortype: ""
+ id: "13"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ key:
+ simple: AlertURL
+ value:
+ complex:
+ accessor: server
+ root: demistoUrls
+ transformers:
+ - args:
+ prefix: {}
+ suffix:
+ value:
+ simple: /alerts/
+ operator: concat
+ - args:
+ prefix:
+ iscontext: true
+ suffix:
+ iscontext: true
+ value:
+ simple: alert.investigationId
+ operator: concat
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 05f590df-8779-4f09-897b-6815ebb3c939
+ iscommand: false
+ name: Get alert URL
+ script: Set
+ type: regular
+ version: -1
+ taskid: 05f590df-8779-4f09-897b-6815ebb3c939
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 670,
+ "y": 10
+ }
+ }
+version: -1
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 755,
+ "width": 820,
+ "x": 230,
+ "y": -170
+ }
+ }
+ }
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
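
For reference, tasks 13, 11, and 10 above build the alert link from `demistoUrls.server` and `alert.investigationId`, concatenate the notification body with the remediation guidance, and post the result as Slack rich_text blocks via SlackV3's send-notification command. A rough Python sketch of that assembly follows; the function names are hypothetical, and the real playbook does this with Set tasks and transformers, not a script.

```python
import json

# Sketch of the message assembly done by "Get alert URL", "Format message body",
# and "Send Slack message" in this playbook. Illustrative only.


def build_alert_url(server_url, investigation_id):
    # demistoUrls.server + "/alerts/" + alert.investigationId
    return "{}/alerts/{}".format(server_url.rstrip("/"), investigation_id)


def build_message_blocks(alert_name, alert_url, notification_body, remediation_guidance):
    # The notification body is concatenated with the remediation guidance, then
    # embedded in Slack rich_text blocks together with a link to the alert.
    formatted_body = "{}{}".format(notification_body, remediation_guidance)
    blocks = [
        {"type": "rich_text", "elements": [{"type": "rich_text_section", "elements": [
            {"type": "link", "text": "Cortex ASM Alert: {}".format(alert_name), "url": alert_url},
        ]}]},
        {"type": "divider"},
        {"type": "rich_text", "elements": [{"type": "rich_text_section", "elements": [
            {"type": "text", "text": formatted_body},
        ]}]},
        {"type": "divider"},
    ]
    return json.dumps(blocks)


if __name__ == "__main__":
    url = build_alert_url("https://example.com", "12345")  # example values
    print(build_message_blocks("RDP Server", url, "Please review this exposure. ", "Restrict access to RDP."))
```
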
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message_README.md
new file mode 100644
index 000000000000..1012d616d9ca
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Instant_Message_README.md
@@ -0,0 +1,43 @@
+This playbook is used to send instant messages to service owners to notify them of their internet exposures.
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+This playbook does not use any sub-playbooks.
+
+### Integrations
+
+* SlackV3
+
+### Scripts
+
+* Set
+* GridFieldSetup
+
+### Commands
+
+* send-notification
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| OwnerNotificationBody | Body of the notification \(email or ticket\) sent to the potential service owner. | | Required |
+| InstantMessageChannel | Channel to send instant messages for notification purposes. For Slack, this will be the channel ID. | | Required |
+| RemediationGuidance | Remediation Guidance of the Attack Surface Rule. | | Required |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex ASM - Instant Message](../doc_files/Cortex_ASM_-_Instant_Message.png)
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation.yml
index f5f91eb25de0..7cec40a459e1 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation.yml
@@ -6,10 +6,10 @@ starttaskid: '0'
tasks:
'0':
id: '0'
- taskid: e8d9de22-8cb1-45cb-8336-7a3634a2eaef
+ taskid: 41328bf8-d0ab-4322-8df0-3ef3a6f7911b
type: start
task:
- id: e8d9de22-8cb1-45cb-8336-7a3634a2eaef
+ id: 41328bf8-d0ab-4322-8df0-3ef3a6f7911b
version: -1
name: ''
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
'3':
id: '3'
- taskid: 4d5d4d16-ec25-4600-8aa3-9db0085d2be4
+ taskid: 27c5352a-022a-4aff-87d4-8d948fa788bc
type: condition
task:
- id: 4d5d4d16-ec25-4600-8aa3-9db0085d2be4
+ id: 27c5352a-022a-4aff-87d4-8d948fa788bc
version: -1
name: What provider is this service?
description: Determines which cloud provider the service is in order to direct to the correct enrichment.
@@ -50,7 +50,7 @@ tasks:
'#default#':
- '4'
AWS:
- - '10'
+ - "15"
Azure:
- '6'
Cortex Endpoint:
@@ -163,10 +163,10 @@ tasks:
isautoswitchedtoquietmode: false
'4':
id: '4'
- taskid: 025137d8-71d5-4a03-87fc-593dc78f0167
+ taskid: f8177daa-c79e-4080-8d8a-40ea707367aa
type: title
task:
- id: 025137d8-71d5-4a03-87fc-593dc78f0167
+ id: f8177daa-c79e-4080-8d8a-40ea707367aa
version: -1
name: Completed
type: title
@@ -178,8 +178,8 @@ tasks:
view: |-
{
"position": {
- "x": 510,
- "y": 1060
+ "x": 520,
+ "y": 1360
}
}
note: false
@@ -191,10 +191,10 @@ tasks:
isautoswitchedtoquietmode: false
'6':
id: '6'
- taskid: 8755ee7e-a021-424c-8a4c-0159367c490a
+ taskid: 57d9147d-2a07-41d4-8cc9-f54a8950643f
type: playbook
task:
- id: 8755ee7e-a021-424c-8a4c-0159367c490a
+ id: 57d9147d-2a07-41d4-8cc9-f54a8950643f
version: -1
name: Azure - Network Security Group Remediation
description: |-
@@ -259,8 +259,8 @@ tasks:
view: |-
{
"position": {
- "x": 1070,
- "y": 560
+ "x": 1220,
+ "y": 610
}
}
note: false
@@ -272,10 +272,10 @@ tasks:
isautoswitchedtoquietmode: false
'7':
id: '7'
- taskid: 4a5a1b3f-8f19-486d-8778-6bdadca1adc9
+ taskid: c03179f9-4132-4a54-8f0f-4a63d31a6f40
type: playbook
task:
- id: 4a5a1b3f-8f19-486d-8778-6bdadca1adc9
+ id: c03179f9-4132-4a54-8f0f-4a63d31a6f40
version: -1
name: AWS - Unclaimed S3 Bucket Remediation
description: The playbook will create the unclaimed S3 bucket.
@@ -310,8 +310,8 @@ tasks:
view: |-
{
"position": {
- "x": 0,
- "y": 550
+ "x": -110,
+ "y": 635
}
}
note: false
@@ -323,10 +323,10 @@ tasks:
isautoswitchedtoquietmode: false
'8':
id: '8'
- taskid: ac918c29-4d5f-48b5-8060-94a4c15cc060
+ taskid: 0d8eb068-70f3-4fe0-80ef-ca218e2cd125
type: playbook
task:
- id: ac918c29-4d5f-48b5-8060-94a4c15cc060
+ id: 0d8eb068-70f3-4fe0-80ef-ca218e2cd125
version: -1
name: AWS - Security Group Remediation v2
description: This playbook takes in some information about an EC2 instance (ID and public_ip) and with provided port and protocol, determines what security groups on the primary interface of an EC2 instance are over-permissive. It uses an automation to determine what interface on an EC2 instance has an over-permissive security group on, determine which security groups have over-permissive rules and to replace them with a copy of the security group that has only the over-permissive portion removed. Over-permissive is defined as sensitive ports (SSH, RDP, etc) being exposed to the internet via IPv4.
@@ -381,8 +381,8 @@ tasks:
view: |-
{
"position": {
- "x": -260,
- "y": 835
+ "x": -420,
+ "y": 1160
}
}
note: false
@@ -394,10 +394,10 @@ tasks:
isautoswitchedtoquietmode: false
'9':
id: '9'
- taskid: 3bfc76d9-be4e-4402-84eb-1b09f3af599f
+ taskid: 3008b5df-87a9-407c-894b-e346377b1145
type: playbook
task:
- id: 3bfc76d9-be4e-4402-84eb-1b09f3af599f
+ id: 3008b5df-87a9-407c-894b-e346377b1145
version: -1
name: GCP - Firewall Remediation
type: playbook
@@ -472,8 +472,8 @@ tasks:
view: |-
{
"position": {
- "x": 740,
- "y": 750
+ "x": 790,
+ "y": 730
}
}
note: false
@@ -485,10 +485,10 @@ tasks:
isautoswitchedtoquietmode: false
'10':
id: '10'
- taskid: 7c022be5-c22d-4413-854c-d2a87249e532
+ taskid: 231659ad-e527-4ca9-806c-e9392b78dabb
type: condition
task:
- id: 7c022be5-c22d-4413-854c-d2a87249e532
+ id: 231659ad-e527-4ca9-806c-e9392b78dabb
version: -1
name: Is AWSAssumeRoleName Input defined?
       description: Determines whether the AWSAssumeRoleName input is defined in order to direct to the correct AWS remediation path.
@@ -514,8 +514,8 @@ tasks:
view: |-
{
"position": {
- "x": -260,
- "y": 400
+ "x": -600,
+ "y": 760
}
}
note: false
@@ -527,10 +527,10 @@ tasks:
isautoswitchedtoquietmode: false
'11':
id: '11'
- taskid: 85ddd43d-66b3-48f5-8861-a1c60e51024e
+ taskid: 438b1fed-2bbe-4214-8883-bcfd8f9e2000
type: playbook
task:
- id: 85ddd43d-66b3-48f5-8861-a1c60e51024e
+ id: 438b1fed-2bbe-4214-8883-bcfd8f9e2000
version: -1
name: AWS - Security Group Remediation v2
description: This playbook takes in some information about an EC2 instance (ID and public_ip) and with provided port and protocol, determines what security groups on the primary interface of an EC2 instance are over-permissive. It uses an automation to determine what interface on an EC2 instance has an over-permissive security group on, determine which security groups have over-permissive rules and to replace them with a copy of the security group that has only the over-permissive portion removed. Over-permissive is defined as sensitive ports (SSH, RDP, etc) being exposed to the internet via IPv4.
@@ -606,8 +606,8 @@ tasks:
view: |-
{
"position": {
- "x": -510,
- "y": 655
+ "x": -810,
+ "y": 1000
}
}
note: false
@@ -619,10 +619,10 @@ tasks:
isautoswitchedtoquietmode: false
'12':
id: '12'
- taskid: 0300188b-1a4f-4da1-8d6f-559597a8873c
+ taskid: c8e87bd8-3019-42db-8ac9-2e8193077cc6
type: playbook
task:
- id: 0300188b-1a4f-4da1-8d6f-559597a8873c
+ id: c8e87bd8-3019-42db-8ac9-2e8193077cc6
version: -1
name: Cortex ASM - On Prem Remediation
description: "This playbook adds new block rule(s) to on-prem firewall vendors in order to block internet access for internet exposures.\n\nConditions:\nThis is currently limited to stand-alone firewalls for PAN-OS."
@@ -697,8 +697,8 @@ tasks:
view: |-
{
"position": {
- "x": 1300,
- "y": 390
+ "x": 1650,
+ "y": 480
}
}
note: false
@@ -710,10 +710,10 @@ tasks:
isautoswitchedtoquietmode: false
'13':
id: '13'
- taskid: 56f329df-f61c-49b0-8b5d-048a4330f190
+ taskid: 9597ebaa-a391-45d6-8b94-367e5f110b7f
type: playbook
task:
- id: 56f329df-f61c-49b0-8b5d-048a4330f190
+ id: 9597ebaa-a391-45d6-8b94-367e5f110b7f
version: -1
name: Cortex ASM - Cortex Endpoint Remediation
type: playbook
@@ -753,8 +753,8 @@ tasks:
view: |-
{
"position": {
- "x": 300,
- "y": 750
+ "x": 250,
+ "y": 1050
}
}
note: false
@@ -764,14 +764,139 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "14":
+ continueonerrortype: ""
+ id: "14"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ loop:
+ exitCondition: ""
+ iscommand: false
+ max: 100
+ wait: 1
+ nexttasks:
+ '#none#':
+ - "4"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ ASM Rule ID:
+ simple: ${alert.asmattacksurfaceruleid}
+ Account ID:
+ simple: ${alert.asmcloud.project}
+ Assume Role:
+ simple: ${inputs.AWSAssumeRoleName}
+ Instance ID:
+ complex:
+ accessor: id
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmsystemids.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ASSET-ID
+ root: alert.asmsystemids
+ Region:
+ complex:
+ accessor: region
+ root: alert.asmcloud
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: true
+ skipunavailable: true
+ task:
+ brand: ""
+      description: This playbook upgrades supported packages on an AWS EC2 instance using AWS Systems Manager.
+ id: d22ca941-9360-417f-8d13-c86eae687622
+ iscommand: false
+ name: AWS - Package Upgrade
+ playbookId: AWS - Package Upgrade
+ type: playbook
+ version: -1
+ taskid: d22ca941-9360-417f-8d13-c86eae687622
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": -1250,
+ "y": 785
+ }
+ }
+ "15":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: id
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmsystemids.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ASSET-SSM-PLATFORM-NAME
+ root: alert.asmsystemids
+ operator: isNotEmpty
+ right:
+ value: {}
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmdatacollection.selected
+ operator: isEqualString
+ right:
+ value:
+ simple: Automated remediation by patching vulnerable software
+ label: "yes"
+ continueonerrortype: ""
+ id: "15"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "10"
+ "yes":
+ - "14"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+      description: Checks whether the AWS Systems Manager remediation option was selected for this alert.
+ id: 428b2bb0-e540-4d7e-8f02-044e35dbcd33
+ iscommand: false
+      name: Is AWS Systems Manager option selected for remediation?
+ type: condition
+ version: -1
+ taskid: 428b2bb0-e540-4d7e-8f02-044e35dbcd33
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": -830,
+ "y": 450
+ }
+ }
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 1105,
- "width": 2190,
- "x": -510,
+ "height": 1405,
+ "width": 3280,
+ "x": -1250,
"y": 20
}
}
@@ -783,6 +908,6 @@ inputs:
description: If assuming roles for AWS, this is the name of the role to assume (should be the same for all organizations)
playbookInputQuery:
outputs: []
-fromversion: 6.5.0
+fromversion: 6.10.0
tests:
- No tests (auto formatted)
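
The new task 15 above gates the AWS branch: when the asset has an SSM platform name recorded in `alert.asmsystemids` and the data-collection response chose automated patching, the playbook hands off to the "AWS - Package Upgrade" sub-playbook (task 14); otherwise it falls back to the existing security-group path through task 10. A hypothetical Python sketch of that routing decision, for illustration only:

```python
# Illustrative only: the real check is a playbook condition, not a script.


def use_ssm_package_upgrade(asm_system_ids, selected_remediation):
    """Mirror of task 15: route to AWS - Package Upgrade only when both checks pass."""
    has_ssm_platform = any(
        entry.get("type") == "ASSET-SSM-PLATFORM-NAME" and entry.get("id")
        for entry in asm_system_ids or []
    )
    patching_selected = selected_remediation == "Automated remediation by patching vulnerable software"
    return has_ssm_platform and patching_selected


# Example: an SSM-managed instance whose owner chose automated patching.
print(use_ssm_package_upgrade(
    [{"type": "ASSET-SSM-PLATFORM-NAME", "id": "Ubuntu"}],
    "Automated remediation by patching vulnerable software",
))  # True
```
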
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules.yml
index f774bea1e9ac..b48228acd335 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: f53e0e04-0bde-4421-81c2-3408fbb0ad52
+ taskid: 21d5081e-7e2a-4269-86f4-ec16219239e9
type: start
task:
- id: f53e0e04-0bde-4421-81c2-3408fbb0ad52
+ id: 21d5081e-7e2a-4269-86f4-ec16219239e9
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: 0207b014-5eb8-4ef5-8f8c-c614cd144f9a
+ taskid: f5a08734-d17a-4fb1-8463-38fef0c3c540
type: regular
task:
- id: 0207b014-5eb8-4ef5-8f8c-c614cd144f9a
+ id: f5a08734-d17a-4fb1-8463-38fef0c3c540
version: -1
name: List remediation path rules
description: Returns list of remediation path rules.
@@ -82,10 +82,10 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 6665e4f1-2958-4a7e-85f8-95111f0a8024
+ taskid: dd594f41-21ad-4b5b-857d-c581e2fbf153
type: condition
task:
- id: 6665e4f1-2958-4a7e-85f8-95111f0a8024
+ id: dd594f41-21ad-4b5b-857d-c581e2fbf153
version: -1
name: Was there a result?
description: Determines if there was a result from the previous command to continue.
@@ -125,17 +125,17 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: f2a13bb9-2de5-46b3-8b11-f37790e5e7aa
+ taskid: c4f9c21c-5836-414a-822c-51497dc91b01
type: regular
task:
- id: f2a13bb9-2de5-46b3-8b11-f37790e5e7aa
+ id: c4f9c21c-5836-414a-822c-51497dc91b01
version: -1
name: Evaluate remediation path rules
       description: For a given alert and the remediation path rules defined for that alert's attack surface rule, this takes each remediation path rule and checks the rule criteria to see if the rule matches the given alert. If multiple rules match, it returns the most recently created rule. This assumes that the rules passed in are filtered to correlate with the alert's attack surface rule.
- scriptName: RemediationPathRuleEvaluation
type: regular
iscommand: false
brand: ""
+ script: RemediationPathRuleEvaluation
nexttasks:
'#none#':
- "22"
@@ -189,10 +189,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 475baaab-5fc4-404b-83a3-12d3d7ab2fcf
+ taskid: 1a5064d9-30a7-4d7b-8a48-4595d514d0c3
type: title
task:
- id: 475baaab-5fc4-404b-83a3-12d3d7ab2fcf
+ id: 1a5064d9-30a7-4d7b-8a48-4595d514d0c3
version: -1
name: Complete
type: title
@@ -217,10 +217,10 @@ tasks:
isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: d46de1a3-299d-4a2b-8a5d-541d684b5183
+ taskid: 06d9bc06-59ea-4f26-8e72-62158cfaaefa
type: condition
task:
- id: d46de1a3-299d-4a2b-8a5d-541d684b5183
+ id: 06d9bc06-59ea-4f26-8e72-62158cfaaefa
version: -1
name: Meets automated remediation requirements?
description: Determines if the alert meets the criteria for automated remediation.
@@ -234,11 +234,135 @@ tasks:
- "52"
Unclaimed S3 Bucket:
- "33"
- "yes":
+ AWS Systems Manager:
+ - "58"
+ Restrict Open Ports:
- "34"
separatecontext: false
conditions:
- - label: "yes"
+ - label: "AWS Systems Manager"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ complex:
+ root: modules
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.brand
+ operator: isEqualString
+ right:
+ value:
+ simple: AWS - System Manager
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.state
+ operator: isEqualString
+ right:
+ value:
+ simple: active
+ iscontext: true
+ - - operator: inList
+ left:
+ value:
+ complex:
+ root: alert.asmsystemids
+ accessor: id
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmsystemids.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ASSET-SSM-PLATFORM-NAME
+ transformers:
+ - operator: toLowerCase
+ iscontext: true
+ right:
+ value:
+ simple: ubuntu
+ - - operator: inList
+ left:
+ value:
+ complex:
+ root: alert
+ accessor: asmattacksurfaceruleid
+ transformers:
+ - args:
+ chars:
+ value:
+ simple: '[\"]'
+ operator: StripChars
+ iscontext: true
+ right:
+ value:
+ simple: InsecureOpenSSH
+ - - operator: inList
+ left:
+ value:
+ complex:
+ root: alert.asmsystemids
+ accessor: id
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmsystemids.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ASSET-SSM-PLATFORM-TYPE
+ transformers:
+ - operator: toLowerCase
+ iscontext: true
+ right:
+ value:
+ simple: linux
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: id
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmsystemids.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ASSET-SSM-AGENT-STATUS
+ root: alert.asmsystemids
+ transformers:
+ - operator: toLowerCase
+ operator: isEqualString
+ right:
+ value:
+ simple: active
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmdevcheckdetails.result
+ operator: isTrue
+ - left:
+ iscontext: true
+ value:
+ simple: inputs.BypassDevCheck
+ operator: isEqualString
+ right:
+ value:
+ simple: "True"
+ - - left:
+ iscontext: true
+ value:
+ simple: alert.asmserviceowner
+ operator: isNotEmpty
+ - label: "Restrict Open Ports"
condition:
- - operator: inList
left:
@@ -420,10 +544,10 @@ tasks:
isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: ee07dc79-0d84-44b7-855e-c22a0c02245f
+ taskid: 1d34e7f1-89dd-4e52-8e68-aeefba08d1ab
type: condition
task:
- id: ee07dc79-0d84-44b7-855e-c22a0c02245f
+ id: 1d34e7f1-89dd-4e52-8e68-aeefba08d1ab
version: -1
name: Is Cortex ASM enabled?
description: Determines if the "Cortex Attack Surface Management" integration instance is configured to pull Remediation Path Rules.
@@ -481,10 +605,10 @@ tasks:
isautoswitchedtoquietmode: false
"16":
id: "16"
- taskid: dc8ed22e-42a4-4f17-865d-748a6497b01f
+ taskid: 69e5192d-ad2f-4a44-8aac-0a45ad590fbe
type: condition
task:
- id: dc8ed22e-42a4-4f17-865d-748a6497b01f
+ id: 69e5192d-ad2f-4a44-8aac-0a45ad590fbe
version: -1
name: Is ServiceNow v2 enabled?
description: Determines if the "ServiceNow v2" integration instance is enabled in order to send ServiceNow tickets as notifications.
@@ -529,8 +653,8 @@ tasks:
view: |-
{
"position": {
- "x": 240,
- "y": -720
+ "x": -170,
+ "y": -750
}
}
note: false
@@ -542,17 +666,17 @@ tasks:
isautoswitchedtoquietmode: false
"17":
id: "17"
- taskid: 61f1db44-72bc-4ac5-820c-e9cd290ac79c
+ taskid: 32d41c9c-ef4b-43eb-8629-9cad1bfffa4e
type: regular
task:
- id: 61f1db44-72bc-4ac5-820c-e9cd290ac79c
+ id: 32d41c9c-ef4b-43eb-8629-9cad1bfffa4e
version: -1
name: Set ServiceNowv2Enabled to true
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "37"
@@ -566,8 +690,8 @@ tasks:
view: |-
{
"position": {
- "x": 240,
- "y": -520
+ "x": -170,
+ "y": -550
}
}
note: false
@@ -579,17 +703,17 @@ tasks:
isautoswitchedtoquietmode: false
"18":
id: "18"
- taskid: e9b92d4e-6ad9-42ad-8664-73071081b9e0
+ taskid: 66158556-6cc6-4c11-8cb6-af2202cc02a8
type: regular
task:
- id: e9b92d4e-6ad9-42ad-8664-73071081b9e0
+ id: 66158556-6cc6-4c11-8cb6-af2202cc02a8
version: -1
name: Set ServiceNowv2Enabled to false
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "43"
@@ -603,8 +727,8 @@ tasks:
view: |-
{
"position": {
- "x": -170,
- "y": -510
+ "x": -580,
+ "y": -540
}
}
note: false
@@ -616,17 +740,17 @@ tasks:
isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: 3c333ac9-103f-4df3-877c-ef08a0b2ce2c
+ taskid: 08373f55-01ab-487c-8a1a-628d0e094e18
type: regular
task:
- id: 3c333ac9-103f-4df3-877c-ef08a0b2ce2c
+ id: 08373f55-01ab-487c-8a1a-628d0e094e18
version: -1
name: Set AutoRemediationRequirements to true
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "36"
@@ -653,17 +777,17 @@ tasks:
isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 98f3fdfd-a9a7-43ea-8fc1-d10bf21bb126
+ taskid: 400a7a8b-004b-48e6-877f-8db9b91d3bd4
type: regular
task:
- id: 98f3fdfd-a9a7-43ea-8fc1-d10bf21bb126
+ id: 400a7a8b-004b-48e6-877f-8db9b91d3bd4
version: -1
name: Set AutoRemediationRequirements to false
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "36"
@@ -677,8 +801,8 @@ tasks:
view: |-
{
"position": {
- "x": -110,
- "y": -1475
+ "x": -450,
+ "y": -1485
}
}
note: false
@@ -690,10 +814,10 @@ tasks:
isautoswitchedtoquietmode: false
"22":
id: "22"
- taskid: 29f89903-4450-4edb-89a8-4dcb7eefb484
+ taskid: f74a74e6-61b3-4843-8e2b-3713727c3345
type: condition
task:
- id: 29f89903-4450-4edb-89a8-4dcb7eefb484
+ id: f74a74e6-61b3-4843-8e2b-3713727c3345
version: -1
name: Was there a match?
description: Determines if a matching remediation path rule was found based on population of the "ASM - Remediation Path Rule" field.
@@ -735,10 +859,10 @@ tasks:
isautoswitchedtoquietmode: false
"23":
id: "23"
- taskid: 1c420f97-4985-4bba-8cee-d8ed51cce2ff
+ taskid: cbc75f55-18a5-4da5-8367-76d54c82dc35
type: condition
task:
- id: 1c420f97-4985-4bba-8cee-d8ed51cce2ff
+ id: cbc75f55-18a5-4da5-8367-76d54c82dc35
version: -1
name: What is the action of the matched rule?
description: Determines what is returned to the parent playbook based on contents of "ASM - Remediation Path Rule" field and other requirements.
@@ -758,6 +882,8 @@ tasks:
- "26"
return SNOW:
- "25"
+ Slack:
+ - "57"
separatecontext: false
conditions:
- label: return AR
@@ -845,6 +971,24 @@ tasks:
right:
value:
simple: "True"
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.RemediationRule.action
+ operator: isEqualString
+ right:
+ value:
+ simple: slack
+ - left:
+ iscontext: true
+ value:
+ simple: SlackEnabled
+ operator: isEqualString
+ right:
+ value:
+ simple: "True"
+ label: Slack
continueonerrortype: ""
view: |-
{
@@ -862,17 +1006,17 @@ tasks:
isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: 68e6d8ee-e670-4beb-8dce-9e34b64f585f
+ taskid: dcd0c02a-1157-49c9-8afa-d4faeffaa8a5
type: regular
task:
- id: 68e6d8ee-e670-4beb-8dce-9e34b64f585f
+ id: dcd0c02a-1157-49c9-8afa-d4faeffaa8a5
version: -1
name: Return AR
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "31"
@@ -899,17 +1043,17 @@ tasks:
isautoswitchedtoquietmode: false
"25":
id: "25"
- taskid: 09502b4c-8588-4a1b-8530-f854e6c995e2
+ taskid: ac3fd175-bf39-42f7-8874-eb6e183cbc0d
type: regular
task:
- id: 09502b4c-8588-4a1b-8530-f854e6c995e2
+ id: ac3fd175-bf39-42f7-8874-eb6e183cbc0d
version: -1
name: Return SNOW
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "31"
@@ -936,17 +1080,17 @@ tasks:
isautoswitchedtoquietmode: false
"26":
id: "26"
- taskid: 3fda1c2d-d726-4835-8540-1c2aa16f474a
+ taskid: b5c1a6f2-b3a5-4f2d-8f67-eef2b8b83687
type: regular
task:
- id: 3fda1c2d-d726-4835-8540-1c2aa16f474a
+ id: b5c1a6f2-b3a5-4f2d-8f67-eef2b8b83687
version: -1
name: Return Email
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "31"
@@ -973,17 +1117,17 @@ tasks:
isautoswitchedtoquietmode: false
"27":
id: "27"
- taskid: c7a1c4d2-0e40-44ac-8d66-5361aeb0c4ad
+ taskid: 6d4daab5-8668-42ed-8dda-a91fc6d4357d
type: regular
task:
- id: c7a1c4d2-0e40-44ac-8d66-5361aeb0c4ad
+ id: 6d4daab5-8668-42ed-8dda-a91fc6d4357d
version: -1
name: Return Manual
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "31"
@@ -1010,18 +1154,18 @@ tasks:
isautoswitchedtoquietmode: false
"31":
id: "31"
- taskid: 2f03729d-b3a7-4a6c-8f85-71b5f6f15ef5
+ taskid: a907d910-10fc-4d9c-8c99-3efce090b3d2
type: regular
task:
- id: 2f03729d-b3a7-4a6c-8f85-71b5f6f15ef5
+ id: a907d910-10fc-4d9c-8c99-3efce090b3d2
version: -1
name: Get current time
description: |
Retrieves the current date and time.
- scriptName: GetTime
type: regular
iscommand: false
brand: ""
+ script: GetTime
nexttasks:
'#none#':
- "32"
@@ -1043,17 +1187,17 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 26734940-913f-4bd8-8652-a63e5068f0d1
+ taskid: 459223ef-daed-41f1-8114-cccdd677c737
type: regular
task:
- id: 26734940-913f-4bd8-8652-a63e5068f0d1
+ id: 459223ef-daed-41f1-8114-cccdd677c737
version: -1
name: Set timestamp
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "6"
@@ -1083,10 +1227,10 @@ tasks:
isautoswitchedtoquietmode: false
"33":
id: "33"
- taskid: dabc7f51-0ced-4b11-8b38-165e5b9077b5
+ taskid: a8cce078-4e13-499d-8364-7dc9db8a271f
type: title
task:
- id: dabc7f51-0ced-4b11-8b38-165e5b9077b5
+ id: a8cce078-4e13-499d-8364-7dc9db8a271f
version: -1
name: Also True
type: title
@@ -1101,8 +1245,8 @@ tasks:
view: |-
{
"position": {
- "x": 1210,
- "y": -1590
+ "x": 1380,
+ "y": -1600
}
}
note: false
@@ -1114,17 +1258,17 @@ tasks:
isautoswitchedtoquietmode: false
"34":
id: "34"
- taskid: 717271bd-270f-4db9-86c4-22aa8a9535a0
+ taskid: 096532c1-dd74-4367-8d80-bb8c4723de75
type: regular
task:
- id: 717271bd-270f-4db9-86c4-22aa8a9535a0
+ id: 096532c1-dd74-4367-8d80-bb8c4723de75
version: -1
name: Set remediationoptions context (AR ports)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "19"
@@ -1140,8 +1284,8 @@ tasks:
view: |-
{
"position": {
- "x": 300,
- "y": -1475
+ "x": -40,
+ "y": -1485
}
}
note: false
@@ -1153,17 +1297,17 @@ tasks:
isautoswitchedtoquietmode: false
"35":
id: "35"
- taskid: 74551098-9d92-446b-8eb2-30301c0196d4
+ taskid: dfc35f93-b585-4dfc-80ab-0232bf901677
type: regular
task:
- id: 74551098-9d92-446b-8eb2-30301c0196d4
+ id: dfc35f93-b585-4dfc-80ab-0232bf901677
version: -1
name: Set remediationoptions context (AR S3)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "19"
@@ -1179,8 +1323,8 @@ tasks:
view: |-
{
"position": {
- "x": 1210,
- "y": -1475
+ "x": 1380,
+ "y": -1485
}
}
note: false
@@ -1192,10 +1336,10 @@ tasks:
isautoswitchedtoquietmode: false
"36":
id: "36"
- taskid: a78048a8-0b1d-4f50-81d1-ec9a638828fb
+ taskid: 9d23c5f0-606e-4ba1-8c2d-81ac7a4c68db
type: title
task:
- id: a78048a8-0b1d-4f50-81d1-ec9a638828fb
+ id: 9d23c5f0-606e-4ba1-8c2d-81ac7a4c68db
version: -1
name: Notification Check
type: title
@@ -1206,6 +1350,7 @@ tasks:
'#none#':
- "16"
- "49"
+ - "56"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -1224,17 +1369,17 @@ tasks:
isautoswitchedtoquietmode: false
"37":
id: "37"
- taskid: 581af6af-fb23-49b8-84d0-b55c3917143b
+ taskid: 68e55153-7505-4154-8cb6-66dbc63a3c73
type: regular
task:
- id: 581af6af-fb23-49b8-84d0-b55c3917143b
+ id: 68e55153-7505-4154-8cb6-66dbc63a3c73
version: -1
name: Set remediationoptions context (SNOW)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "43"
@@ -1250,8 +1395,8 @@ tasks:
view: |-
{
"position": {
- "x": 240,
- "y": -360
+ "x": -170,
+ "y": -390
}
}
note: false
@@ -1263,17 +1408,17 @@ tasks:
isautoswitchedtoquietmode: false
"39":
id: "39"
- taskid: c9fcfadd-d65c-403c-85b7-3fe6c92ebe26
+ taskid: 3aaee7fd-4302-42eb-8cd1-61a070e172b8
type: regular
task:
- id: c9fcfadd-d65c-403c-85b7-3fe6c92ebe26
+ id: 3aaee7fd-4302-42eb-8cd1-61a070e172b8
version: -1
name: Set JiraEnabled to true
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "42"
@@ -1287,8 +1432,8 @@ tasks:
view: |-
{
"position": {
- "x": 680,
- "y": -520
+ "x": 270,
+ "y": -550
}
}
note: false
@@ -1300,17 +1445,17 @@ tasks:
isautoswitchedtoquietmode: false
"40":
id: "40"
- taskid: 7805e705-95f5-418c-88fb-84fccde4e45d
+ taskid: 148d6abb-00cb-4444-81c4-d63b880e6353
type: regular
task:
- id: 7805e705-95f5-418c-88fb-84fccde4e45d
+ id: 148d6abb-00cb-4444-81c4-d63b880e6353
version: -1
name: Set JiraEnabled to false
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "43"
@@ -1324,8 +1469,8 @@ tasks:
view: |-
{
"position": {
- "x": 1090,
- "y": -520
+ "x": 680,
+ "y": -550
}
}
note: false
@@ -1337,17 +1482,17 @@ tasks:
isautoswitchedtoquietmode: false
"42":
id: "42"
- taskid: 82db1979-3555-498d-8483-c3c12bc9fec3
+ taskid: 31981082-5856-4eba-8278-a6ca48bf2907
type: regular
task:
- id: 82db1979-3555-498d-8483-c3c12bc9fec3
+ id: 31981082-5856-4eba-8278-a6ca48bf2907
version: -1
name: Set remediationoptions context (Jira)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "43"
@@ -1363,8 +1508,8 @@ tasks:
view: |-
{
"position": {
- "x": 680,
- "y": -360
+ "x": 270,
+ "y": -390
}
}
note: false
@@ -1376,17 +1521,17 @@ tasks:
isautoswitchedtoquietmode: false
"43":
id: "43"
- taskid: d3342595-7841-4e8d-829c-2b5fb00d8bbd
+ taskid: 30ce7f6b-5825-4e42-8546-e2248718d6e0
type: regular
task:
- id: d3342595-7841-4e8d-829c-2b5fb00d8bbd
+ id: 30ce7f6b-5825-4e42-8546-e2248718d6e0
version: -1
name: Set remediationoptions context (Email)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "44"
@@ -1415,17 +1560,17 @@ tasks:
isautoswitchedtoquietmode: false
"44":
id: "44"
- taskid: 8ef9ae63-6742-4910-8cad-82b870a3b035
+ taskid: 2e55ccd2-0577-4ac9-8dc9-7bbc9c1f04eb
type: regular
task:
- id: 8ef9ae63-6742-4910-8cad-82b870a3b035
+ id: 2e55ccd2-0577-4ac9-8dc9-7bbc9c1f04eb
version: -1
name: Set remediationoptions context (Manual)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "13"
@@ -1454,10 +1599,10 @@ tasks:
isautoswitchedtoquietmode: false
"45":
id: "45"
- taskid: 3dae14eb-59a7-478d-85ac-a8ef1de6e844
+ taskid: 79eb4fd7-1804-40e1-8a4e-a22b40296203
type: collection
task:
- id: 3dae14eb-59a7-478d-85ac-a8ef1de6e844
+ id: 79eb4fd7-1804-40e1-8a4e-a22b40296203
version: -1
name: Select remediation action (dynamic)
description: Determines the next action (remediation or ticket) based on user input. Options are based on if ServiceNowV2 integration is set up and/or the alert meets the requirement for automated remediation.
@@ -1524,19 +1669,19 @@ tasks:
isautoswitchedtoquietmode: false
"46":
id: "46"
- taskid: 6e6043e3-9638-4940-8c37-e862b439ee6a
+ taskid: 44017a99-2d7b-4cfe-82b8-c5cdfcd093ec
type: regular
task:
- id: 6e6043e3-9638-4940-8c37-e862b439ee6a
+ id: 44017a99-2d7b-4cfe-82b8-c5cdfcd093ec
version: -1
name: Set data collection grid field
description: |-
Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Instead of a value you can enter `TIMESTAMP` to get the current timestamp in ISO format. Example of the command:
`!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfiled="gridfield"`
- scriptName: GridFieldSetup
type: regular
iscommand: false
brand: Builtin
+ script: GridFieldSetup
nexttasks:
'#none#':
- "47"
@@ -1579,19 +1724,19 @@ tasks:
isautoswitchedtoquietmode: false
"47":
id: "47"
- taskid: e44336e3-305e-4d96-8731-f5aef3c67e7a
+ taskid: b9ea6816-55e3-49f3-8260-f9495b32cc19
type: regular
task:
- id: e44336e3-305e-4d96-8731-f5aef3c67e7a
+ id: b9ea6816-55e3-49f3-8260-f9495b32cc19
version: -1
name: Set playbook stage grid field (decision)
description: |-
Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Instead of a value, you can enter `TIMESTAMP` to get the current timestamp in ISO format. Example of the command:
`!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfiled="gridfield"`
- scriptName: GridFieldSetup
type: regular
iscommand: false
brand: ""
+ script: GridFieldSetup
nexttasks:
'#none#':
- "6"
@@ -1622,17 +1767,17 @@ tasks:
isautoswitchedtoquietmode: false
"48":
id: "48"
- taskid: c35cce8e-1c8e-4432-831c-65df7e04b004
+ taskid: 30198f95-cbb7-4bb6-869d-9b6e4f3f0ec3
type: regular
task:
- id: c35cce8e-1c8e-4432-831c-65df7e04b004
+ id: 30198f95-cbb7-4bb6-869d-9b6e4f3f0ec3
version: -1
name: Return Jira
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "31"
@@ -1659,10 +1804,10 @@ tasks:
isautoswitchedtoquietmode: false
"49":
id: "49"
- taskid: fdfbace2-fb0b-48b1-895f-6d17a21b55a1
+ taskid: 12d27d63-68ff-48ed-8042-9b7df6031760
type: condition
task:
- id: fdfbace2-fb0b-48b1-895f-6d17a21b55a1
+ id: 12d27d63-68ff-48ed-8042-9b7df6031760
version: -1
name: Is Atlassian Jira v2 or v3 enabled?
description: Determines if the "Atlassian Jira v2" or "Atlassian Jira v3" integration instance is enabled in order to send Jira tickets as notifications.
@@ -1715,8 +1860,8 @@ tasks:
view: |-
{
"position": {
- "x": 680,
- "y": -720
+ "x": 470,
+ "y": -750
}
}
note: false
@@ -1728,17 +1873,17 @@ tasks:
isautoswitchedtoquietmode: false
"51":
id: "51"
- taskid: 42b54db4-31bd-45e0-8d37-b6e83f853688
+ taskid: 1579a8f3-bf24-4911-81a4-1da8456b3625
type: regular
task:
- id: 42b54db4-31bd-45e0-8d37-b6e83f853688
+ id: 1579a8f3-bf24-4911-81a4-1da8456b3625
version: -1
name: Set Attack Surface Rules list
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "10"
@@ -1765,17 +1910,17 @@ tasks:
isautoswitchedtoquietmode: false
"52":
id: "52"
- taskid: fd96a27b-b711-488a-8bb9-946f95720fea
+ taskid: a24d37a0-b4de-4610-880b-7115b65e02df
type: regular
task:
- id: fd96a27b-b711-488a-8bb9-946f95720fea
+ id: a24d37a0-b4de-4610-880b-7115b65e02df
version: -1
name: Set remediationoptions context (AR Cortex Endpoint)
description: Set a value in context under the key you entered.
- scriptName: Set
type: regular
iscommand: false
brand: ""
+ script: Set
nexttasks:
'#none#':
- "19"
@@ -1791,8 +1936,8 @@ tasks:
view: |-
{
"position": {
- "x": 750,
- "y": -1475
+ "x": 920,
+ "y": -1485
}
}
note: false
@@ -1802,19 +1947,271 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "53":
+ continueonerrortype: ""
+ id: "53"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "55"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ key:
+ simple: SlackEnabled
+ value:
+ simple: "True"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 08b8619c-ab9a-4195-8102-2de2f16a44e0
+ iscommand: false
+ name: Set SlackEnabled to true
+ script: Set
+ type: regular
+ version: -1
+ taskid: 08b8619c-ab9a-4195-8102-2de2f16a44e0
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1100,
+ "y": -550
+ }
+ }
+ "54":
+ continueonerrortype: ""
+ id: "54"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "43"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ key:
+ simple: SlackEnabled
+ value:
+ simple: "False"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: c8b4e73c-bc67-456c-83fe-a2102124e99b
+ iscommand: false
+ name: Set SlackEnabled to false
+ script: Set
+ type: regular
+ version: -1
+ taskid: c8b4e73c-bc67-456c-83fe-a2102124e99b
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1510,
+ "y": -550
+ }
+ }
+ "55":
+ continueonerrortype: ""
+ id: "55"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "43"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: remediationoptions
+ value:
+ simple: Send a Slack message
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 56cb6392-ed52-45b2-8194-b4e05730c350
+ iscommand: false
+ name: Set remediationoptions context (Slack)
+ script: Set
+ type: regular
+ version: -1
+ taskid: 56cb6392-ed52-45b2-8194-b4e05730c350
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1100,
+ "y": -390
+ }
+ }
+ "56":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.brand
+ operator: isEqualString
+ right:
+ value:
+ simple: SlackV3
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.state
+ operator: isEqualString
+ right:
+ value:
+ simple: active
+ root: modules
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "56"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "54"
+ "yes":
+ - "53"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if the Slack integration (v3) instance is enabled in order to send Slack messages as notifications.
+ id: 84e72fc4-8d45-44b2-8c3d-ce4ec92ad200
+ iscommand: false
+ name: Is Slack enabled?
+ type: condition
+ version: -1
+ taskid: 84e72fc4-8d45-44b2-8c3d-ce4ec92ad200
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 1110,
+ "y": -750
+ }
+ }
+ "57":
+ continueonerrortype: ""
+ id: "57"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "31"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ key:
+ simple: RemediationAction
+ value:
+ simple: Slack
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: bfcc975f-d7db-46ad-8a1d-b9be5cfd186c
+ iscommand: false
+ name: Return Slack
+ script: Set
+ type: regular
+ version: -1
+ taskid: bfcc975f-d7db-46ad-8a1d-b9be5cfd186c
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 2130,
+ "y": 1350
+ }
+ }
+ "58":
+ continueonerrortype: ""
+ id: "58"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "19"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: remediationoptions
+ value:
+ simple: Automated remediation by patching vulnerable software
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 5a0f0dc0-2fc1-44a0-8256-339e9db69aea
+ iscommand: false
+      name: Set remediationoptions context (AWS Systems Manager)
+ script: Set
+ type: regular
+ version: -1
+ taskid: 5a0f0dc0-2fc1-44a0-8256-339e9db69aea
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 400,
+ "y": -1485
+ }
+ }
view: |-
{
"linkLabelsPosition": {
"10_33_Unclaimed S3 Bucket": 0.55,
- "10_34_yes": 0.83,
+ "10_34_Restrict Open Ports": 0.83,
"10_52_Cortex Endpoint": 0.66,
- "16_17_yes": 0.55
+ "16_17_yes": 0.55,
+ "23_48_Jira": 0.88,
+ "23_57_Slack": 0.89
},
"paper": {
"dimensions": {
"height": 4070,
- "width": 2470,
- "x": -370,
+ "width": 3090,
+ "x": -580,
"y": -2150
}
}
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules_README.md
index cc8d9178f428..5a0e1a416948 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_Path_Rules_README.md
@@ -6,7 +6,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Cortex ASM - Remediation Objectives
+This playbook does not use any sub-playbooks.
### Integrations
@@ -14,10 +14,10 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Scripts
-* RemediationPathRuleEvaluation
* GetTime
-* Set
* GridFieldSetup
+* Set
+* RemediationPathRuleEvaluation
### Commands
@@ -45,4 +45,4 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
---
-![Cortex ASM - Remediation Path Rules](../doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
+![Cortex ASM - Remediation Path Rules](../doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
\ No newline at end of file
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_README.md
index e40c09570c20..cd5f87a53b96 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Remediation_README.md
@@ -1,4 +1,4 @@
-This playbook contains all the cloud provider sub playbooks for remediation
+This playbook contains all the cloud provider sub playbooks for remediation.
## Dependencies
@@ -6,11 +6,13 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* AWS - Unclaimed S3 Bucket Remediation
+* AWS - Package Upgrade
* AWS - Security Group Remediation v2
+* AWS - Unclaimed S3 Bucket Remediation
+* Azure - Network Security Group Remediation
+* Cortex ASM - Cortex Endpoint Remediation
* Cortex ASM - On Prem Remediation
* GCP - Firewall Remediation
-* Azure - Network Security Group Remediation
### Integrations
diff --git a/Packs/CortexAttackSurfaceManagement/README.md b/Packs/CortexAttackSurfaceManagement/README.md
index 883e26d7ffbf..3a950094e3cc 100644
--- a/Packs/CortexAttackSurfaceManagement/README.md
+++ b/Packs/CortexAttackSurfaceManagement/README.md
@@ -25,7 +25,7 @@ Aditionally, [a list of integrations used for the Active Response playbook can b
### Demo Video
-[![Active Response in Cortex Xpanse](https://raw.githubusercontent.com/demisto/content/98ead849e9e32921f64f7ac07fda2bff1b5f7c0b/Packs/CortexAttackSurfaceManagement/doc_files/Active_Response_in_Cortex_Xpanse.jpg)](https://www.youtube.com/watch?v=rryAQ23uuqw "Active Response in Cortex Xpanse")
+[![Active Response in Cortex Xpanse](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Active_Response_in_Cortex_Xpanse.jpg)](https://www.youtube.com/watch?v=rryAQ23uuqw "Active Response in Cortex Xpanse")
### Automated Remediation Requirements
@@ -47,6 +47,7 @@ Automated remediation is only possible when the right conditions are met. These
- Unclaimed S3 Bucket*
- Asset one of the following:
- AWS EC2 Instance
+   - AWS Systems Manager agent (active) on AWS EC2 Instance*
- Azure Compute Instance
- GCP Compute Engine (VM)
- On-prem asset protected with a Palo Alto Networks Firewall
@@ -55,6 +56,7 @@ Automated remediation is only possible when the right conditions are met. These
- Active Directory
- AWS IAM
- Azure IAM
+ - Venafi
- Cortex Endpoint (XSIAM/XDR)
- Email addresses found in tags
- GCP IAM
@@ -72,6 +74,8 @@ Automated remediation is only possible when the right conditions are met. These
\* The `Unclaimed S3 Bucket` attack surface rule ID only requires `AWS-S3` integration to be enabled.
+\* Patching using AWS Systems Manager requires the agent to be installed on the EC2 instance; currently only the InsecureOpenSSH attack surface rule and Linux Ubuntu OS versions are supported.
+
## What is included in this pack?
The main active response playbook is the `Cortex ASM - ASM Alert` playbook. This playbook contains a set of sub-playbooks and automation scripts, which support many different remediation paths that can be taken depending on the types of configured integrations, the type of alert, and input provided by the analyst. After the final stage, the alert is resolved.
@@ -81,12 +85,14 @@ The main active response playbook is the `Cortex ASM - ASM Alert` playbook. This
- [Cortex ASM - ASM Alert](#cortex-asm---asm-alert)
- [Cortex ASM - AWS Enrichment](#cortex-asm---aws-enrichment)
- [Cortex ASM - Azure Enrichment](#cortex-asm---azure-enrichment)
+ - [Cortex ASM - Certificate Enrichment](#cortex-asm---certificate-enrichment)
- [Cortex ASM - Cortex Endpoint Enrichment](#cortex-asm---cortex-endpoint-enrichment)
- [Cortex ASM - Cortex Endpoint Remediation](#cortex-asm---cortex-endpoint-remediation)
- [Cortex ASM - Detect Service](#cortex-asm---detect-service)
- [Cortex ASM - Email Notification](#cortex-asm---email-notification)
- [Cortex ASM - Enrichment](#cortex-asm---enrichment)
- [Cortex ASM - GCP Enrichment](#cortex-asm---gcp-enrichment)
+ - [Cortex ASM - Instant Message](#cortex-asm---instant-message)
- [Cortex ASM - Jira Notification](#cortex-asm---jira-notification)
- [Cortex ASM - On Prem Enrichment](#cortex-asm---on-prem-enrichment)
- [Cortex ASM - On Prem Remediation](#cortex-asm---on-prem-remediation)
@@ -118,163 +124,177 @@ The main active response playbook is the `Cortex ASM - ASM Alert` playbook. This
A playbook that given the email address enriches Service owner in Azure and On-Prem directory.
-![Cortex ASM - Active Directory Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Active_Directory_Enrichment.png)
+
+![Cortex ASM - Active Directory Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Active_Directory_Enrichment.png)
+
#### Cortex ASM - ASM Alert
A playbook that enriches asset information for ASM alerts and provides the means for remediation.
-![Cortex ASM - ASM Alert](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png)
+![Cortex ASM - ASM Alert](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png)
#### Cortex ASM - AWS Enrichment
A playbook that given the IP address enriches AWS information relevant to ASM alerts.
-![Cortex ASM - AWS Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png)
+![Cortex ASM - AWS Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png)
#### Cortex ASM - Azure Enrichment
A playbook that given the IP address enriches Azure information relevant to ASM alerts.
-![Cortex ASM - Azure Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Azure_Enrichment.png)
+![Cortex ASM - Azure Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Azure_Enrichment.png)
+
+#### Cortex ASM - Certificate Enrichment
+
+A playbook to enrich certificate information.
+
+![Cortex ASM - Certificate Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png)
#### Cortex ASM - Cortex Endpoint Enrichment
This playbook is used to pull information from Cortex Endpoint (XSIAM/XDR) systems for enrichment purposes.
-![Cortex ASM - Cortex Endpoint Enrichment](https://raw.githubusercontent.com/demisto/content/935a77339c2b1ecde3b9ea64992018bd625c61ed/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Enrichment.png)
+![Cortex ASM - Cortex Endpoint Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Enrichment.png)
#### Cortex ASM - Cortex Endpoint Remediation
This playbook is used for remediating a single exposed Cortex Endpoint (XSIAM/XDR) by isolating the endpoint from the network using the "Isolate Endpoint" feature in XSIAM ([see XSIAM details](https://docs-cortex.paloaltonetworks.com/r/Cortex-XSIAM/Cortex-XSIAM-Administrator-Guide/Isolate-an-Endpoint)) and XDR ([see XDR details](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Isolate-an-Endpoint)).
-![Cortex ASM - Cortex Endpoint Remediation](https://raw.githubusercontent.com/demisto/content/c421d6d3de62992a3ac3afbce09e82224e505641/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Remediation.png)
+![Cortex ASM - Cortex Endpoint Remediation](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Remediation.png)
#### Cortex ASM - Detect Service
A playbook that utilizes the Remediation Confirmation Scan service to check for mitigated vulnerabilities.
-![Cortex ASM - Detect Service](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Detect_Service.png)
+![Cortex ASM - Detect Service](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Detect_Service.png)
#### Cortex ASM - Email Notification
A playbook that is used to send email notifications to service owners to notify them of their internet exposures.
-![Cortex ASM - Email Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Email_Notification.png)
+![Cortex ASM - Email Notification](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Email_Notification.png)
#### Cortex ASM - Enrichment
A playbook that is used as a container folder for all enrichments of ASM alerts.
-![Cortex ASM - Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png)
+![Cortex ASM - Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png)
#### Cortex ASM - GCP Enrichment
A playbook that given the IP address enriches GCP information relevant to ASM alerts.
-![Cortex ASM - GCP Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_GCP_Enrichment.png)
+![Cortex ASM - GCP Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_GCP_Enrichment.png)
+
+#### Cortex ASM - Instant Message
+
+A playbook that is used to send instant messages to service owners to notify them of their internet exposures.
+
+![Cortex ASM - Instant Message](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Instant_Message.png)
#### Cortex ASM - Jira Notification
A playbook that is used to create Jira tickets directed toward service owners to notify them of their internet exposures.
-![Cortex ASM - Jira Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Jira_Notification.png)
+![Cortex ASM - Jira Notification](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Jira_Notification.png)
#### Cortex ASM - On Prem Enrichment
A playbook that given an IP address, port, and protocol of a service, enriches using on-prem integrations to find the related firewall rule and other related information.
-![Cortex ASM - On Prem Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Enrichment.png)
+![Cortex ASM - On Prem Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Enrichment.png)
#### Cortex ASM - On Prem Remediation
A playbook that adds new block rule(s) to on-prem firewall vendors in order to block internet access for internet exposures.
-![Cortex ASM - On Prem Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Remediation.png)
+![Cortex ASM - On Prem Remediation](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Remediation.png)
#### Cortex ASM - Prisma Cloud Enrichment
Playbook that given the IP address enriches Prisma Cloud information relevant to ASM alerts.
-![Cortex ASM - Prisma Cloud Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Prisma_Cloud_Enrichment.png)
+![Cortex ASM - Prisma Cloud Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Prisma_Cloud_Enrichment.png)
#### Cortex ASM - Qualys Enrichment
Playbook that given the IP address enriches Qualys information relevant to ASM alerts.
-![Cortex ASM - Qualys Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Qualys_Enrichment.png)
+![Cortex ASM - Qualys Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Qualys_Enrichment.png)
#### Cortex ASM - Rapid7 Enrichment
A playbook that given the IP address enriches Rapid7 information relevant to ASM alerts.
-![Cortex ASM - Rapid7 Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Rapid7_Enrichment.png)
+![Cortex ASM - Rapid7 Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Rapid7_Enrichment.png)
#### Cortex ASM - Remediation Confirmation Scan
A playbook that creates an ASM Remediation Confirmation Scan using an existing service ID, if the scan does not already exist. It then polls for the results of the scan.
-![Cortex ASM - Remediation Confirmation Scan](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Confirmation_Scan.png)
+![Cortex ASM - Remediation Confirmation Scan](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Confirmation_Scan.png)
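
For illustration only, a minimal sketch of the create-if-absent-then-poll flow this playbook follows; the helper names, statuses, and polling interval below are hypothetical and are not the integration's actual commands or fields.

```python
from time import sleep

# Hypothetical stand-ins for the integration commands that start a
# Remediation Confirmation Scan and fetch its status.
def start_scan_if_absent(service_id):
    return {"scan_id": "scan-123", "already_existed": False}

def get_scan_status(scan_id):
    return {"status": "SUCCESS"}  # e.g. IN_PROGRESS / SUCCESS / FAILED

def poll_scan(service_id, interval_seconds=60, max_attempts=30):
    """Create the scan if needed, then poll until it leaves IN_PROGRESS."""
    scan = start_scan_if_absent(service_id)
    for _ in range(max_attempts):
        status = get_scan_status(scan["scan_id"])["status"]
        if status != "IN_PROGRESS":
            return status
        sleep(interval_seconds)
    return "TIMED_OUT"

print(poll_scan("service-id-from-alert"))  # SUCCESS
```
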
#### Cortex ASM - Remediation Guidance
A playbook that pulls remediation guidance off of a list based on ASM RuleID to be used in service owner notifications (email or ticketing system).
-![Cortex ASM - Remediation Guidance](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Guidance.png)
+![Cortex ASM - Remediation Guidance](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Guidance.png)
#### Cortex ASM - Remediation Objectives
A playbook that populates the remediation objectives field that is used to display the remediation actions to the end user.
-![Cortex ASM - Remediation Objectives](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Objectives.png)
+![Cortex ASM - Remediation Objectives](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Objectives.png)
#### Cortex ASM - Remediation Path Rules
A playbook that returns "RemediationAction" options based on the return from the Remediation Path Rules API, or defaults to data collection task options from the "Cortex ADM - Decision" sub-playbook.
-![Cortex ASM - Remediation Path Rules](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
+![Cortex ASM - Remediation Path Rules](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
#### Cortex ASM - Remediation
A playbook that is used as a container folder for all remediation of ASM alerts.
-![Cortex ASM - Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png)
+![Cortex ASM - Remediation](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png)
#### Cortex ASM - Service Ownership
Playbook that identifies and recommends the most likely owners of a given service.
-![Cortex ASM - Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png)
+![Cortex ASM - Remediation](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png)
#### Cortex ASM - ServiceNow CMDB Enrichment
A playbook that given the IP address enriches ServiceNow CMDB information relevant to ASM alerts.
-![Cortex ASM - ServiceNow CMDB Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png)
+![Cortex ASM - ServiceNow CMDB Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png)
#### Cortex ASM - ServiceNow ITSM Enrichment
A playbook that given the search terms enriches ServiceNow ITSM service owner information relevant to ASM alerts.
-![Cortex ASM - ServiceNow ITSM Enrichment](https://raw.githubusercontent.com/demisto/content/0fd2fb4a7240673f3a3fcb1dec5339549f0f2fb8/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png)
+![Cortex ASM - ServiceNow ITSM Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png)
#### Cortex ASM - ServiceNow Notification
A playbook that is used to create ServiceNow tickets directed toward service owners to notify them of their internet exposures.
-![Cortex ASM - ServiceNow Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_Notification.png)
+![Cortex ASM - ServiceNow Notification](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_Notification.png)
#### Cortex ASM - Splunk Enrichment
A playbook that given the IP address enriches Splunk information relevant to ASM alerts.
-![Cortex ASM - Splunk Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Splunk_Enrichment.png)
+![Cortex ASM - Splunk Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Splunk_Enrichment.png)
#### Cortex ASM - Tenable.io Enrichment
A playbook that given the IP address enriches Tenable.io information relevant to ASM alerts.
-![Cortex ASM - Tenable.io Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Tenable_io_Enrichment.png)
+![Cortex ASM - Tenable.io Enrichment](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Tenable_io_Enrichment.png)
### Automation Scripts
@@ -282,13 +302,13 @@ A playbook that given the IP address enriches Tenable.io information relevant to
An automation used to generate an ASM alert summary report with important information found via the playbook run.
-![GenerateASMReport](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/GenerateASMReport.png)
+![GenerateASMReport](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/GenerateASMReport.png)
#### InferWhetherServiceIsDev
An automation that identifies whether the service is a "development" server. Development servers have no external users and run no production workflows. These servers might be named "dev", but they might also be named "qa", "pre-production", "user acceptance testing", or use other non-production terms. This automation uses both public data visible to anyone (`active_classifications` as derived by Xpanse ASM) as well as checking internal data for AI-learned indicators of development systems (`asm_tags` as derived from integrations with non-public systems).
-![InferWhetherServiceIsDev](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/InferWhetherServiceIsDev.png)
+![InferWhetherServiceIsDev](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/InferWhetherServiceIsDev.png)
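
For illustration only, a minimal sketch of the kind of check described above, assuming `active_classifications` and `asm_tags` arrive as lists of strings; the keyword list and matching logic are simplified stand-ins, not the script's actual implementation.

```python
DEV_KEYWORDS = {"dev", "development", "qa", "uat", "staging", "pre-production", "preprod", "test"}

def looks_like_dev(active_classifications, asm_tags):
    """Return True if any public classification or internal tag suggests a non-production server."""
    signals = [s.lower() for s in (active_classifications or []) + (asm_tags or [])]
    return any(keyword in signal for signal in signals for keyword in DEV_KEYWORDS)

print(looks_like_dev(["DevelopmentEnvironment"], []))      # True
print(looks_like_dev([], ["owner:payments", "env:prod"]))  # False
```
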
#### RankServiceOwners
@@ -302,7 +322,7 @@ This automation parses a GCP service account email for the project ID, then look
An automation that is used to find a matching remediation path rule based on criteria. If multiple rules match, it will return the most recently created rule. This assumes that the rules passed in are filtered to correlate with the alert's attack surface rule (Xpanse only).
-![RemediationPathRuleEvaluation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/RemediationPathRuleEvaluation.png)
+![RemediationPathRuleEvaluation](https://github.com/demisto/content/raw/master/Packs/CortexAttackSurfaceManagement/doc_files/RemediationPathRuleEvaluation.png)
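
For illustration only, a minimal sketch of the "most recently created rule wins" tie-break described above, assuming each matched rule carries an ISO-8601 `created_at` value; the field names are hypothetical, not the script's actual schema.

```python
from datetime import datetime

def pick_most_recent(matching_rules):
    """Return the most recently created rule, or None if nothing matched."""
    if not matching_rules:
        return None
    return max(matching_rules, key=lambda rule: datetime.fromisoformat(rule["created_at"]))

rules = [
    {"name": "email-service-owner", "created_at": "2023-10-01T00:00:00"},
    {"name": "open-snow-ticket", "created_at": "2024-02-15T12:30:00"},
]
print(pick_most_recent(rules)["name"])  # open-snow-ticket
```
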
### Layouts
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md
new file mode 100644
index 000000000000..b2286886bb32
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md
@@ -0,0 +1,9 @@
+
+#### Playbooks
+
+##### Cortex ASM - Enrichment
+
+- Updated the playbook to add the Cortex ASM - Certificate Enrichment sub-playbook.
+##### New: Cortex ASM - Certificate Enrichment
+
+- New: Playbook to enrich certificate information.
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md
new file mode 100644
index 000000000000..44030f5d1339
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md
@@ -0,0 +1,3 @@
+## Cortex Attack Surface Management
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_41.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_41.md
new file mode 100644
index 000000000000..6d43a4ac991f
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_41.md
@@ -0,0 +1,14 @@
+
+#### Playbooks
+
+##### Cortex ASM - ASM Alert
+
+Added support for the Slack messaging option.
+
+##### New: Cortex ASM - Instant Message
+
+New: This playbook is used to send instant messages to service owners to notify them of their internet exposures.<~XSIAM> (Available from Cortex XSIAM V2.2).</~XSIAM>
+
+##### Cortex ASM - Remediation Path Rules
+
+Added support for the Slack messaging option.
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_42.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_42.md
new file mode 100644
index 000000000000..7cd014b59002
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_42.md
@@ -0,0 +1,18 @@
+
+#### Playbooks
+
+##### Cortex ASM - ASM Alert
+
+Added support for patching using AWS Systems Manager.
+
+##### Cortex ASM - AWS Enrichment
+
+Added the system IDs used to extract information from AWS Systems Manager.
+
+##### Cortex ASM - Remediation Path Rules
+
+Added support for the AWS Systems Manager patching data collection option.
+
+##### Cortex ASM - Remediation
+
+Added the "AWS - Package Upgrade" remediation playbook for patching.
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png
index 6b63ea7393b1..23943ef2b20e 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png
index a16a04cfb0b1..5149020a496b 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png
new file mode 100644
index 000000000000..a0c3cf723135
Binary files /dev/null and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png
index 9bf29f4de812..18f548ac3bc7 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Instant_Message.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Instant_Message.png
new file mode 100644
index 000000000000..3ed871146ae6
Binary files /dev/null and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Instant_Message.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png
index b38084a7b572..169f4b1db74b 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png
index 8e31d1f8ba92..031a09a03126 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png
index f8c4beaf3d3c..5a16e6a1b029 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png
index 5e2a039bd13f..9f0bbca06401 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/pack_metadata.json b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
index 551ada0fea18..c73bc4124b8c 100644
--- a/Packs/CortexAttackSurfaceManagement/pack_metadata.json
+++ b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Attack Surface Management",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.7.38",
+ "currentVersion": "1.7.42",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -88,6 +88,10 @@
"MicrosoftGraphUser": {
"mandatory": false,
"display_name": "Microsoft Graph User"
+ },
+ "Venafi": {
+ "mandatory": false,
+ "display_name": "Venafi"
}
},
"marketplaces": [
@@ -110,6 +114,7 @@
"PAN-OS",
"Azure-Enrichment-Remediation",
"Jira",
- "Tenable_io"
+ "Tenable_io",
+ "Venafi"
]
}
\ No newline at end of file
diff --git a/Packs/CortexDataLake/.pack-ignore b/Packs/CortexDataLake/.pack-ignore
index bd7d570f4a37..a88ad126c3f8 100644
--- a/Packs/CortexDataLake/.pack-ignore
+++ b/Packs/CortexDataLake/.pack-ignore
@@ -1,5 +1,5 @@
[file:CortexDataLake.yml]
-ignore=IN126,IN136,IN124
+ignore=IN126,IN136
[file:README.md]
ignore=RM102,RM106
diff --git a/Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.yml b/Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.yml
index 24af1b0390f0..e0007b22422c 100644
--- a/Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.yml
+++ b/Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.yml
@@ -1992,7 +1992,7 @@ script:
type: String
- description: Use this command in case your authentication calls fail due to internal call-limit, the command will reset the limit cache.
name: cdl-reset-authentication-timeout
- dockerimage: demisto/python_pancloud_v2:1.0.0.91454
+ dockerimage: demisto/python_pancloud_v2:1.0.0.100429
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/CortexDataLake/ReleaseNotes/1_4_11.md b/Packs/CortexDataLake/ReleaseNotes/1_4_11.md
new file mode 100644
index 000000000000..508cf2a23642
--- /dev/null
+++ b/Packs/CortexDataLake/ReleaseNotes/1_4_11.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Strata Logging Service XSOAR Connector
+
+- Updated the Docker image to: *demisto/python_pancloud_v2:1.0.0.100429*.
diff --git a/Packs/CortexDataLake/pack_metadata.json b/Packs/CortexDataLake/pack_metadata.json
index c27ee6e9e674..b2ef41bc9edb 100644
--- a/Packs/CortexDataLake/pack_metadata.json
+++ b/Packs/CortexDataLake/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Strata Logging Service by Palo Alto Networks",
"description": "Palo Alto Networks Strata Logging Service XSOAR Connector provides cloud-based, centralized log storage and aggregation for your on-premise, virtual (private cloud and public cloud) firewalls, for Prisma Access, and for cloud-delivered services such as Cortex XDR",
"support": "xsoar",
- "currentVersion": "1.4.10",
+ "currentVersion": "1.4.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexXDR/.pack-ignore b/Packs/CortexXDR/.pack-ignore
index 3dd8e5cabfcf..070fc3c0c717 100644
--- a/Packs/CortexXDR/.pack-ignore
+++ b/Packs/CortexXDR/.pack-ignore
@@ -2,7 +2,7 @@
ignore=PA116
[file:CortexXDRIR.yml]
-ignore=IN126,IN136,IN124,IN139
+ignore=IN126,IN136,IN139
[file:XDR_iocs.yml]
ignore=BA108,BA109
diff --git a/Packs/CortexXDR/Dashboards/dashboard-Cortex_XDR_Events_Grouping.json b/Packs/CortexXDR/Dashboards/dashboard-Cortex_XDR_Events_Grouping.json
index 407ee21acf49..ae7e515c5085 100644
--- a/Packs/CortexXDR/Dashboards/dashboard-Cortex_XDR_Events_Grouping.json
+++ b/Packs/CortexXDR/Dashboards/dashboard-Cortex_XDR_Events_Grouping.json
@@ -175,5 +175,8 @@
],
"fromVersion": "6.2.0",
"description": "",
- "isPredefined": true
+ "isPredefined": true,
+ "marketplaces": [
+ "xsoar_on_prem"
+ ]
}
\ No newline at end of file
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
index a250e4c2eff7..57b549eea3ba 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
@@ -130,6 +130,64 @@ def filter_and_save_unseen_incident(incidents: List, limit: int, number_of_alrea
return filtered_incidents
+def get_xsoar_close_reasons():
+ """
+ Get the default XSOAR close-reasons in addition to custom close-reasons from server configuration.
+ """
+ default_xsoar_close_reasons = list(XSOAR_RESOLVED_STATUS_TO_XDR.keys())
+ custom_close_reasons: List[str] = []
+ try:
+ server_config = get_server_config()
+ demisto.debug(f'get_xsoar_close_reasons server-config: {str(server_config)}')
+ if server_config:
+ custom_close_reasons = argToList(server_config.get('incident.closereasons', ''))
+ except Exception as e:
+ demisto.error(f"Could not get server configuration: {e}")
+ return default_xsoar_close_reasons + custom_close_reasons
+
+
+def validate_custom_close_reasons_mapping(mapping: str, direction: str):
+ """ Check validity of provided custom close-reason mappings. """
+
+ xdr_statuses = [status.replace("resolved_", "").replace("_", " ").title() for status in XDR_RESOLVED_STATUS_TO_XSOAR]
+ xsoar_statuses = get_xsoar_close_reasons()
+
+ exception_message = ('Improper custom mapping ({direction}) provided: "{key_or_value}" is not a valid Cortex '
+ '{xsoar_or_xdr} close-reason. Valid Cortex {xsoar_or_xdr} close-reasons are: {statuses}')
+
+ def to_xdr_status(status):
+ return "resolved_" + "_".join(status.lower().split(" "))
+
+ custom_mapping = comma_separated_mapping_to_dict(mapping)
+
+ valid_key = valid_value = True # If no mapping was provided.
+
+ for key, value in custom_mapping.items():
+ if direction == XSOAR_TO_XDR:
+ xdr_close_reason = to_xdr_status(value)
+ valid_key = key in xsoar_statuses
+ valid_value = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
+ elif direction == XDR_TO_XSOAR:
+ xdr_close_reason = to_xdr_status(key)
+ valid_key = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
+ valid_value = value in xsoar_statuses
+
+ if not valid_key:
+ raise DemistoException(
+ exception_message.format(direction=direction,
+ key_or_value=key,
+ xsoar_or_xdr="XSOAR" if direction == XSOAR_TO_XDR else "XDR",
+ statuses=xsoar_statuses
+ if direction == XSOAR_TO_XDR else xdr_statuses))
+ elif not valid_value:
+ raise DemistoException(
+ exception_message.format(direction=direction,
+ key_or_value=value,
+ xsoar_or_xdr="XDR" if direction == XSOAR_TO_XDR else "XSOAR",
+ statuses=xdr_statuses
+ if direction == XSOAR_TO_XDR else xsoar_statuses))
+
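
A hedged sketch of the validation above, using a simplified stand-in for comma_separated_mapping_to_dict and illustrative status lists (only the XSOAR-to-XDR direction is shown):

```python
# Illustrative only: mirrors the XSOAR_TO_XDR branch of validate_custom_close_reasons_mapping.
XDR_STATUSES = ["resolved_false_positive", "resolved_true_positive", "resolved_other"]   # example subset
XSOAR_STATUSES = ["False Positive", "True Positive", "Other", "CustomReason1"]           # defaults + custom

def mapping_to_dict(mapping):
    # Stand-in for comma_separated_mapping_to_dict: "A=B,C=D" -> {"A": "B", "C": "D"}.
    pairs = (item.split("=", 1) for item in mapping.split(",") if "=" in item)
    return {key.strip(): value.strip() for key, value in pairs}

def validate_xsoar_to_xdr(mapping):
    for xsoar_reason, xdr_reason in mapping_to_dict(mapping).items():
        xdr_key = "resolved_" + "_".join(xdr_reason.lower().split(" "))
        if xsoar_reason not in XSOAR_STATUSES:
            raise ValueError("{!r} is not a valid XSOAR close-reason".format(xsoar_reason))
        if xdr_key not in XDR_STATUSES:
            raise ValueError("{!r} is not a valid XDR close-reason".format(xdr_reason))

validate_xsoar_to_xdr("CustomReason1=Other, True Positive=False Positive")  # passes
# validate_xsoar_to_xdr("Nope=Other")  # would raise: 'Nope' is not a valid XSOAR close-reason
```
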
+
class Client(CoreClient):
def __init__(self, base_url, proxy, verify, timeout, params=None):
if not params:
@@ -157,54 +215,12 @@ def test_module(self, first_fetch_time):
raise
# XSOAR -> XDR
- self.validate_custom_mapping(mapping=self._params.get("custom_xsoar_to_xdr_close_reason_mapping"),
- direction=XSOAR_TO_XDR)
+ validate_custom_close_reasons_mapping(mapping=self._params.get("custom_xsoar_to_xdr_close_reason_mapping"),
+ direction=XSOAR_TO_XDR)
# XDR -> XSOAR
- self.validate_custom_mapping(mapping=self._params.get("custom_xdr_to_xsoar_close_reason_mapping"),
- direction=XDR_TO_XSOAR)
-
- def validate_custom_mapping(self, mapping: str, direction: str):
- """ Check validity of provided custom close-reason mappings. """
-
- xdr_statuses_to_xsoar = [status.replace("resolved_", "").replace("_", " ").title()
- for status in XDR_RESOLVED_STATUS_TO_XSOAR]
- xsoar_statuses_to_xdr = list(XSOAR_RESOLVED_STATUS_TO_XDR.keys())
-
- exception_message = ('Improper custom mapping ({direction}) provided: "{key_or_value}" is not a valid Cortex '
- '{xsoar_or_xdr} close-reason. Valid Cortex {xsoar_or_xdr} close-reasons are: {statuses}')
-
- def to_xdr_status(status):
- return "resolved_" + "_".join(status.lower().split(" "))
-
- custom_mapping = comma_separated_mapping_to_dict(mapping)
-
- valid_key = valid_value = True # If no mapping was provided.
-
- for key, value in custom_mapping.items():
- if direction == XSOAR_TO_XDR:
- xdr_close_reason = to_xdr_status(value)
- valid_key = key in XSOAR_RESOLVED_STATUS_TO_XDR
- valid_value = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
- elif direction == XDR_TO_XSOAR:
- xdr_close_reason = to_xdr_status(key)
- valid_key = xdr_close_reason in XDR_RESOLVED_STATUS_TO_XSOAR
- valid_value = value in XSOAR_RESOLVED_STATUS_TO_XDR
-
- if not valid_key:
- raise DemistoException(
- exception_message.format(direction=direction,
- key_or_value=key,
- xsoar_or_xdr="XSOAR" if direction == XSOAR_TO_XDR else "XDR",
- statuses=xsoar_statuses_to_xdr
- if direction == XSOAR_TO_XDR else xdr_statuses_to_xsoar))
- elif not valid_value:
- raise DemistoException(
- exception_message.format(direction=direction,
- key_or_value=value,
- xsoar_or_xdr="XDR" if direction == XSOAR_TO_XDR else "XSOAR",
- statuses=xdr_statuses_to_xsoar
- if direction == XSOAR_TO_XDR else xsoar_statuses_to_xdr))
+ validate_custom_close_reasons_mapping(mapping=self._params.get("custom_xdr_to_xsoar_close_reason_mapping"),
+ direction=XDR_TO_XSOAR)
def handle_fetch_starred_incidents(self, limit: int, page_number: int, request_data: dict) -> List:
"""
@@ -789,6 +805,7 @@ def resolve_xsoar_close_reason(xdr_close_reason: str):
:param xdr_close_reason: XDR raw status/close reason e.g. 'resolved_false_positive'.
:return: XSOAR close reason.
"""
+ possible_xsoar_close_reasons = get_xsoar_close_reasons()
# Check if incoming XDR close-reason has a non-default mapping to XSOAR close-reason.
if demisto.params().get("custom_xdr_to_xsoar_close_reason_mapping"):
@@ -802,7 +819,7 @@ def resolve_xsoar_close_reason(xdr_close_reason: str):
xdr_close_reason.replace("resolved_", "").replace("_", " ").title()
)
xsoar_close_reason = custom_xdr_to_xsoar_close_reason_mapping.get(title_cased_xdr_close_reason)
- if xsoar_close_reason in XSOAR_RESOLVED_STATUS_TO_XDR:
+ if xsoar_close_reason in possible_xsoar_close_reasons:
demisto.debug(
f"XDR->XSOAR custom close-reason exists, using {xdr_close_reason}={xsoar_close_reason}"
)
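
To make the effect of this hunk concrete, here is a small illustrative sketch (the custom mapping, the close-reason list, and the fallback value are hypothetical examples, not the integration's defaults):

```python
# Custom XDR->XSOAR resolution, now checked against the expanded close-reason list
# (built-in reasons plus any server-configured custom reasons).
custom_xdr_to_xsoar = {"Known Issue": "CustomReason1"}                       # hypothetical instance parameter
possible_xsoar_close_reasons = ["False Positive", "Other", "CustomReason1"]  # e.g. get_xsoar_close_reasons()

def resolve(xdr_close_reason):
    # 'resolved_known_issue' -> 'Known Issue' before the custom-mapping lookup.
    title_cased = xdr_close_reason.replace("resolved_", "").replace("_", " ").title()
    mapped = custom_xdr_to_xsoar.get(title_cased)
    if mapped in possible_xsoar_close_reasons:
        return mapped      # custom mapping wins, including custom server-defined reasons
    return "Other"         # placeholder for the integration's default mapping fallback

print(resolve("resolved_known_issue"))  # -> 'CustomReason1'
```
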
@@ -974,7 +991,10 @@ def get_remote_data_command(client, args):
def update_remote_system_command(client, args):
remote_args = UpdateRemoteSystemArgs(args)
incident_id = remote_args.remote_incident_id
+    remote_data = remote_args.data or {}  # the mirrored data may be empty; normalize to a dict
demisto.debug(f"update_remote_system_command {incident_id=} {remote_args=}")
+    demisto.debug(f"update_remote_system_command {incident_id=}, {remote_data.get('closeReason')=}, "
+ f"{remote_data.get('closeNotes')=}")
if remote_args.delta:
demisto.debug(f'Got the following delta keys {str(list(remote_args.delta.keys()))} to update'
@@ -988,8 +1008,11 @@ def update_remote_system_command(client, args):
demisto.debug(f'Sending incident with remote ID [{remote_args.remote_incident_id}]\n')
demisto.debug(f"Before checking status {update_args=}")
current_remote_status = remote_args.data.get('status') if remote_args.data else None
- is_closed = (update_args.get('close_reason') or update_args.get('closeReason') or update_args.get('closeNotes')
- or update_args.get('resolve_comment') or update_args.get('closingUserId'))
+ is_closed_delta = (update_args.get('close_reason') or update_args.get('closeReason') or update_args.get('closeNotes')
+ or update_args.get('resolve_comment') or update_args.get('closingUserId'))
+ is_closed_data = (remote_data.get('closeReason') or remote_data.get('close_reason') or remote_data.get('closeNotes'))
+ demisto.debug(f"update_remote_system_command {is_closed_delta=}, {is_closed_data=}")
+ is_closed = is_closed_delta or is_closed_data
closed_without_status = not update_args.get('close_reason') and not update_args.get('closeReason')
remote_is_already_closed = current_remote_status in XDR_RESOLVED_STATUS_TO_XSOAR
demisto.debug(f"{remote_is_already_closed=}")
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
index 6133ffedc971..bd3efda84455 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
@@ -179,9 +179,9 @@ name: Cortex XDR - IR
script:
commands:
- arguments:
- - description: A date in the format 2019-12-31T23:59:00. Only incidents that were created on or before the specified date/time will be retrieved.
+ - description: A date in the format 2019-12-31T23:59:00 in UTC. Only incidents that were created on or before the specified date/time will be retrieved.
name: lte_creation_time
- - description: A date in the format 2019-12-31T23:59:00. Only incidents that were created on or after the specified date/time will be retrieved.
+ - description: A date in the format 2019-12-31T23:59:00 in UTC. Only incidents that were created on or after the specified date/time will be retrieved.
name: gte_creation_time
- description: Filters returned incidents that were created on or before the specified date/time, in the format 2019-12-31T23:59:00.
name: lte_modification_time
@@ -3079,9 +3079,9 @@ script:
description: The last page from which we bring the alerts.
isArray: true
name: limit
- - description: Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss.000Z).
+ - description: Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss).
name: start_time
- - description: Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss.000Z).
+ - description: Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss).
name: end_time
- auto: PREDEFINED
description: Whether the alert is starred or not.
@@ -3547,7 +3547,7 @@ script:
Update one or more alerts with the provided arguments.
Required license: Cortex XDR Prevent, Cortex XDR Pro per Endpoint, or Cortex XDR Pro per GB.
name: xdr-update-alert
- dockerimage: demisto/python3:3.10.14.96411
+ dockerimage: demisto/python3:3.11.9.101916
isfetch: true
isfetch:xpanse: false
script: ''
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
index 2bc7cccdadee..fe455a085a96 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
@@ -7,8 +7,9 @@
import demistomock as demisto
from CommonServerPython import urljoin, DemistoException
-from CoreIRApiModule import XDR_RESOLVED_STATUS_TO_XSOAR
-from CortexXDRIR import XSOAR_TO_XDR, XDR_TO_XSOAR
+from CoreIRApiModule import XDR_RESOLVED_STATUS_TO_XSOAR, XSOAR_RESOLVED_STATUS_TO_XDR
+from CortexXDRIR import XSOAR_TO_XDR, XDR_TO_XSOAR, get_xsoar_close_reasons
+
XDR_URL = 'https://api.xdrurl.com'
''' HELPER FUNCTIONS '''
@@ -94,7 +95,7 @@ def test_fetch_incidents_filtered_by_status(requests_mock, mocker):
client = Client(
base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False)
- incident_extra_data_under_investigation = load_test_data('./test_data/get_incident_extra_data_host_id_array.json')\
+ incident_extra_data_under_investigation = load_test_data('./test_data/get_incident_extra_data_host_id_array.json') \
.get('reply', {}).get('incidents')
incident_extra_data_new = load_test_data('./test_data/get_incident_extra_data_new_status.json').get('reply').get('incidents')
mocker.patch.object(Client, 'get_multiple_incidents_extra_data', side_effect=[incident_extra_data_under_investigation,
@@ -106,8 +107,8 @@ def test_fetch_incidents_filtered_by_status(requests_mock, mocker):
next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False, statuses=statuses_to_fetch)
assert len(incidents) == 2
- assert incidents[0]['name'] == "XDR Incident 1 - 'Local Analysis Malware' generated by XDR Agent detected on host AAAAAA "\
- "involving user Administrator"
+ assert incidents[0]['name'] == "XDR Incident 1 - 'Local Analysis Malware' generated by XDR Agent detected on host AAAAAA " \
+ "involving user Administrator"
assert incidents[1]['name'] == "XDR Incident 2 - 'Local Analysis Malware' generated by XDR Agent detected on host " \
"BBBBB involving user Administrator"
@@ -216,8 +217,8 @@ def test_fetch_only_starred_incidents(self, mocker):
starred=True,
starred_incidents_fetch_window='3 days')
assert len(incidents) == 2
- assert incidents[0]['name'] == "XDR Incident 3 - 'Local Analysis Malware' generated by XDR Agent detected"\
- " on host AAAAA involving user Administrator"
+ assert incidents[0]['name'] == "XDR Incident 3 - 'Local Analysis Malware' generated by XDR Agent detected" \
+ " on host AAAAA involving user Administrator"
last_run_obj = {'next_run': next_run,
'fetched_starred_incidents': {'3': True, '4': True}
@@ -479,7 +480,7 @@ def test_get_remote_data_command_should_not_update(requests_mock, mocker):
@pytest.mark.parametrize(argnames='incident_status', argvalues=XDR_RESOLVED_STATUS_TO_XSOAR.keys())
-def test_get_remote_data_command_should_close_issue(requests_mock, mocker, incident_status):
+def test_get_remote_data_command_should_close_issue(capfd, requests_mock, mocker, incident_status):
"""
Given:
- an XDR client
@@ -536,7 +537,8 @@ def test_get_remote_data_command_should_close_issue(requests_mock, mocker, incid
mocker.patch("CortexXDRIR.ALERTS_LIMIT_PER_INCIDENTS", new=50)
mocker.patch.object(Client, 'save_modified_incidents_to_integration_context')
mocker.patch.object(Client, 'get_multiple_incidents_extra_data', return_value=raw_incident['reply'])
- response = get_remote_data_command(client, args)
+ with capfd.disabled():
+ response = get_remote_data_command(client, args)
sort_all_list_incident_fields(expected_modified_incident)
assert response.mirrored_object == expected_modified_incident
@@ -759,7 +761,7 @@ def test_get_incident_extra_data(mocker):
"""
from CortexXDRIR import get_incident_extra_data_command, Client
- get_incident_extra_data_response = load_test_data('./test_data/get_incident_extra_data_host_id_array.json')\
+ get_incident_extra_data_response = load_test_data('./test_data/get_incident_extra_data_host_id_array.json') \
.get('reply', {}).get('incidents', [])
mocker.patch.object(Client, 'get_multiple_incidents_extra_data', return_value=get_incident_extra_data_response)
mocker.patch("CortexXDRIR.ALERTS_LIMIT_PER_INCIDENTS", new=2)
@@ -877,14 +879,14 @@ def test_test_module(capfd, custom_mapping, direction, should_raise_error):
Then:
- Ensure no error is raised, and return `ok`
"""
- from CortexXDRIR import Client
+ from CortexXDRIR import Client, validate_custom_close_reasons_mapping
# using two different credentials object as they both fields need to be encrypted
base_url = urljoin("dummy_url", '/public_api/v1')
proxy = demisto.params().get('proxy')
verify_cert = not demisto.params().get('insecure', False)
- client = Client(
+ Client(
base_url=base_url,
proxy=proxy,
verify=verify_cert,
@@ -895,10 +897,10 @@ def test_test_module(capfd, custom_mapping, direction, should_raise_error):
with capfd.disabled():
if should_raise_error:
with pytest.raises(DemistoException):
- client.validate_custom_mapping(mapping=custom_mapping, direction=direction)
+ validate_custom_close_reasons_mapping(mapping=custom_mapping, direction=direction)
else:
try:
- client.validate_custom_mapping(mapping=custom_mapping, direction=direction)
+ validate_custom_close_reasons_mapping(mapping=custom_mapping, direction=direction)
except DemistoException as e:
pytest.fail(f"Unexpected exception raised for input {input}: {e}")
@@ -1034,8 +1036,8 @@ def test_filter_and_save_unseen_incident_limit_test():
"creation_time": 1577836800000
},
{
- "id": "2",
- "creation_time": 1577836800001
+ "id": "2",
+ "creation_time": 1577836800001
}]
assert filter_and_save_unseen_incident(incident, 1, 1) == [{"id": "1", "creation_time": 1577836800000}]
@@ -1285,7 +1287,7 @@ def test_sort_all_incident_data_fields_fetch_case_get_multiple_incidents_extra_d
- Verify that alerts and artifacts are found.
"""
from CortexXDRIR import sort_incident_data, sort_all_list_incident_fields
- incident_case_get_multiple_incidents_extra_data = load_test_data('./test_data/get_multiple_incidents_extra_data.json')\
+ incident_case_get_multiple_incidents_extra_data = load_test_data('./test_data/get_multiple_incidents_extra_data.json') \
.get('reply').get('incidents')[0]
incident_data = sort_incident_data(incident_case_get_multiple_incidents_extra_data)
sort_all_list_incident_fields(incident_data)
@@ -1441,7 +1443,8 @@ def test_update_alerts_in_xdr_request_called_with():
json_data={'request_data':
{'alert_id_list': '1,2,3',
'update_data':
- {'severity': 'High', 'status': 'resolved', 'comment': 'i am a test'}
+ {'severity': 'High', 'status': 'resolved',
+ 'comment': 'i am a test'}
}
},
headers={
@@ -1523,3 +1526,57 @@ def test_main(mocker):
mock_client = mocker.patch('CortexXDRIR.Client', autospec=True)
mock_client.test_module.return_value = 'ok'
main()
+
+
+@freeze_time("1993-06-17 11:00:00 GMT")
+def test_core_http_request_xpanse_tenant(mocker):
+ """
+    Unit test to verify behavior for Xpanse tenants on the XSIAM platform with the XSOAR Marketplace.
+
+    This test ensures that when working with Xpanse tenants on the XSIAM platform integrated with the
+    XSOAR Marketplace, the http_request function from CommonServerPython is used instead of _apiCall,
+    as required for XSIAM tenants (CIAC-10878).
+
+    Given:
+    - Only the required params in the configuration.
+    When:
+    - Running get_incidents to exercise the http_request function in CoreIRApiModule.
+    Then:
+    - The request goes through BaseClient._http_request (since '_apiCall' is not available via engine) and the incidents are returned.
+ """
+ from CortexXDRIR import Client
+ from CommonServerPython import BaseClient
+ base_url = urljoin("dummy_url", '/public_api/v1')
+ client = Client(
+ base_url=base_url,
+ proxy=False,
+ verify=False,
+ timeout=120,
+ params=False
+ )
+ mocker.patch("CoreIRApiModule.FORWARD_USER_RUN_RBAC", new=False)
+ mocker.patch.object(demisto, "_apiCall", return_value=Exception("command '_apiCall' is not available via engine (85)"))
+ mocker.patch.object(BaseClient, "_http_request", return_value={'reply': {"incidents": [{"incident": {"incident_id": "1"}}]}})
+ res = client.get_incidents(incident_id_list=['1'])
+ assert res == [{'incident': {'incident_id': '1'}}]
+
+
+def test_get_xsoar_close_reasons(mocker):
+ mock_response = {
+ 'body': '{"sysConf":{"incident.closereasons":"CustomReason1, CustomReason 2, Foo","versn":40},"defaultMap":{}}\n',
+ 'headers': {
+ 'Content-Length': ['104'],
+ 'X-Xss-Protection': ['1; mode=block'],
+ 'X-Content-Type-Options': ['nosniff'],
+ 'Strict-Transport-Security': ['max-age=10886400000000000; includeSubDomains'],
+ 'Vary': ['Accept-Encoding'],
+ 'Server-Timing': ['7'],
+ 'Date': ['Wed, 03 Jul 2010 09:11:35 GMT'],
+ 'X-Frame-Options': ['DENY'],
+ 'Content-Type': ['application/json']
+ },
+ 'status': '200 OK',
+ 'statusCode': 200
+ }
+ mocker.patch.object(demisto, 'internalHttpRequest', return_value=mock_response)
+ assert get_xsoar_close_reasons() == list(XSOAR_RESOLVED_STATUS_TO_XDR.keys()) + ['CustomReason1', 'CustomReason 2', 'Foo']
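
The new test_core_http_request_xpanse_tenant test pins FORWARD_USER_RUN_RBAC to False and expects the generic HTTP client to serve the request. A rough, hypothetical sketch of the decision it exercises (not CoreIRApiModule's actual control flow; helper names below are made up for illustration):

```python
def unavailable_api_call(**kwargs):
    # What an engine would report if _apiCall were attempted there.
    raise RuntimeError("command '_apiCall' is not available via engine (85)")

def generic_http_request(**kwargs):
    # Stand-in for BaseClient._http_request.
    return {"reply": {"incidents": [{"incident": {"incident_id": "1"}}]}}

def dispatch(forward_user_run_rbac, **kwargs):
    # Route through _apiCall only when user-RBAC forwarding is on; otherwise use plain HTTPS,
    # which also works when the integration runs on an engine.
    if forward_user_run_rbac:
        return unavailable_api_call(**kwargs)
    return generic_http_request(**kwargs)

print(dispatch(forward_user_run_rbac=False)["reply"]["incidents"])
# -> [{'incident': {'incident_id': '1'}}]
```
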
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
index ccc9733595ad..84fae3ed0c51 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
@@ -231,22 +231,22 @@ Builtin Roles with this permission includes: "Investigator", "Responder", "Privi
#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| lte_creation_time | A date in the format 2019-12-31T23:59:00. Only incidents that were created on or before the specified date/time will be retrieved. | Optional |
-| gte_creation_time | A date in the format 2019-12-31T23:59:00. Only incidents that were created on or after the specified date/time will be retrieved. | Optional |
-| lte_modification_time | Filters returned incidents that were created on or before the specified date/time, in the format 2019-12-31T23:59:00. | Optional |
-| gte_modification_time | Filters returned incidents that were modified on or after the specified date/time, in the format 2019-12-31T23:59:00. | Optional |
-| incident_id_list | An array or CSV string of incident IDs. | Optional |
-| since_creation_time | Filters returned incidents that were created on or after the specified date/time range, for example, 1 month, 2 days, 1 hour, and so on. | Optional |
-| since_modification_time | Filters returned incidents that were modified on or after the specified date/time range, for example, 1 month, 2 days, 1 hour, and so on. | Optional |
-| sort_by_modification_time | Sorts returned incidents by the date/time that the incident was last modified ("asc" - ascending, "desc" - descending). Possible values are: asc, desc. | Optional |
-| sort_by_creation_time | Sorts returned incidents by the date/time that the incident was created ("asc" - ascending, "desc" - descending). Possible values are: asc, desc. | Optional |
-| page | Page number (for pagination). The default is 0 (the first page). Default is 0. | Optional |
-| limit | Maximum number of incidents to return per page. The default and maximum is 100. Default is 100. | Optional |
+| **Argument Name** | **Description** | **Required** |
+| --- |----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| --- |
+| lte_creation_time | A date in the format 2019-12-31T23:59:00 in UTC. Only incidents that were created on or before the specified date/time will be retrieved. | Optional |
+| gte_creation_time | A date in the format 2019-12-31T23:59:00 in UTC. Only incidents that were created on or after the specified date/time will be retrieved. | Optional |
+| lte_modification_time | Filters returned incidents that were created on or before the specified date/time, in the format 2019-12-31T23:59:00. | Optional |
+| gte_modification_time | Filters returned incidents that were modified on or after the specified date/time, in the format 2019-12-31T23:59:00. | Optional |
+| incident_id_list | An array or CSV string of incident IDs. | Optional |
+| since_creation_time | Filters returned incidents that were created on or after the specified date/time range, for example, 1 month, 2 days, 1 hour, and so on. | Optional |
+| since_modification_time | Filters returned incidents that were modified on or after the specified date/time range, for example, 1 month, 2 days, 1 hour, and so on. | Optional |
+| sort_by_modification_time | Sorts returned incidents by the date/time that the incident was last modified ("asc" - ascending, "desc" - descending). Possible values are: asc, desc. | Optional |
+| sort_by_creation_time | Sorts returned incidents by the date/time that the incident was created ("asc" - ascending, "desc" - descending). Possible values are: asc, desc. | Optional |
+| page | Page number (for pagination). The default is 0 (the first page). Default is 0. | Optional |
+| limit | Maximum number of incidents to return per page. The default and maximum is 100. Default is 100. | Optional |
| status | Filters only incidents in the specified status. The options are: new, under_investigation, resolved_known_issue, resolved_false_positive, resolved_true_positive resolved_security_testing, resolved_other, resolved_auto. | Optional |
-| starred | Whether the incident is starred (Boolean value: true or false). Possible values are: true, false. | Optional |
-| starred_incidents_fetch_window | Starred fetch window timestamp (<number> <time unit>, e.g., 12 hours, 7 days). Default is 3 days. | Optional |
+| starred | Whether the incident is starred (Boolean value: true or false). Possible values are: true, false. | Optional |
+| starred_incidents_fetch_window | Starred fetch window timestamp (<number> <time unit>, e.g., 12 hours, 7 days). Default is 3 days. | Optional |
#### Context Output
@@ -314,13 +314,13 @@ Builtin Roles with this permission includes: "Investigator", "Responder", "Privi
"description": "'test 1' generated by Virus Total - Firewall",
"severity": "medium",
"modification_time": 1579237974014,
- "assigned_user_pretty_name": "woo@demisto.com",
+ "assigned_user_pretty_name": "woo@test.com",
"notes": null,
"creation_time": 1576100096594,
"alert_count": 1,
"med_severity_alert_count": 0,
"detection_time": null,
- "assigned_user_mail": "woo@demisto.com",
+ "assigned_user_mail": "woo@test.com",
"resolve_comment": null,
"status": "new",
"user_count": 1,
@@ -361,11 +361,11 @@ Builtin Roles with this permission includes: "Investigator", "Responder", "Privi
>### Incidents
->|alert_count|assigned_user_mail|assigned_user_pretty_name|creation_time|description|detection_time|high_severity_alert_count|host_count|incident_id|low_severity_alert_count|manual_description|manual_severity|med_severity_alert_count|modification_time|notes|resolve_comment|severity|starred|status|user_count|xdr_url|
->|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
->| 5 | | | 1577276587937 | 5 'This alert from content TestXDRPlaybook' alerts detected by Checkpoint - SandBlast | | 4 | 1 | 4 | 0 | | medium | 1 | 1579290004178 | | This issue was solved in Incident number 192304 | medium | false | new | 1 | `https://some.xdr.url.com/incident-view/4` |
->| 1 | woo@demisto.com | woo@demisto.com | 1576100096594 | 'test 1' generated by Virus Total - Firewall | | 1 | 1 | 3 | 0 | | medium | 0 | 1579237974014 | | | medium | false | new | 1 | `https://some.xdr.url.com/incident-view/3` |
->| 2 | | | 1576062816474 | 'Alert Name Example 333' along with 1 other alert generated by Virus Total - VPN & Firewall-3 and Checkpoint - SandBlast | | 2 | 1 | 2 | 0 | | high | 0 | 1579288790259 | | | high | false | under_investigation | 1 | `https://some.xdr.url.com/incident-view/2` |
+>|alert_count| assigned_user_mail | assigned_user_pretty_name |creation_time|description|detection_time|high_severity_alert_count|host_count|incident_id|low_severity_alert_count|manual_description|manual_severity|med_severity_alert_count|modification_time|notes|resolve_comment|severity|starred|status|user_count|xdr_url|
+>|---|--------------------|---------------------------|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| 5 | | | 1577276587937 | 5 'This alert from content TestXDRPlaybook' alerts detected by Checkpoint - SandBlast | | 4 | 1 | 4 | 0 | | medium | 1 | 1579290004178 | | This issue was solved in Incident number 192304 | medium | false | new | 1 | `https://some.xdr.url.com/incident-view/4` |
+>| 1 | woo@test.com | woo@test.com | 1576100096594 | 'test 1' generated by Virus Total - Firewall | | 1 | 1 | 3 | 0 | | medium | 0 | 1579237974014 | | | medium | false | new | 1 | `https://some.xdr.url.com/incident-view/3` |
+>| 2 | | | 1576062816474 | 'Alert Name Example 333' along with 1 other alert generated by Virus Total - VPN & Firewall-3 and Checkpoint - SandBlast | | 2 | 1 | 2 | 0 | | high | 0 | 1579288790259 | | | high | false | under_investigation | 1 | `https://some.xdr.url.com/incident-view/2` |
### xdr-get-incident-extra-data
@@ -2447,8 +2447,8 @@ Builtin Roles with this permission includes: "Investigator", "Responder", "Privi
| sort_order | The order in which we sort the results. Possible values are: DESC, ASC. | Optional |
| offset | The first page from which we bring the alerts. Default is 0. | Optional |
| limit | The last page from which we bring the alerts. Default is 50. | Optional |
-| start_time | Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss.000Z). | Optional |
-| end_time | Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss.000Z). | Optional |
+| start_time | Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss). | Optional |
+| end_time | Relevant when "time_frame" argument is "custom". Supports Epoch timestamp and simplified extended ISO format (YYYY-MM-DDThh:mm:ss). | Optional |
| starred | Whether the alert is starred or not. Possible values are: true, false. | Optional |
| mitre_technique_id_and_name | The MITRE attack technique. | Optional |
diff --git a/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.py b/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.py
index 94723b8e05c3..9369ccab5ebf 100644
--- a/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.py
+++ b/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.py
@@ -532,19 +532,6 @@ def get_nonce() -> str:
return "".join([secrets.choice(string.ascii_letters + string.digits) for _ in range(64)])
-def remove_query_id_from_integration_context(query_id: str):
- """
- Remove the given query_id from the integration context.
-
- Args:
- query_id (str): The query ID to remove.
- """
- if query_id:
- integration_context = get_integration_context()
- integration_context.pop(query_id, None)
- set_integration_context(integration_context)
-
-
# ========================================== Generic Query ===============================================#
@@ -585,26 +572,15 @@ def start_xql_query_polling_command(client: Client, args: dict) -> Union[Command
Returns:
CommandResults: The command results.
"""
- if not (query_name := args.get('query_name')):
+ if not args.get('query_name'):
raise DemistoException('Please provide a query name')
execution_id = start_xql_query(client, args)
if not execution_id:
raise DemistoException('Failed to start query\n')
args['query_id'] = execution_id
- # the query data is being saved in the integration context for the next scheduled command command.
- try:
- set_to_integration_context_with_retries({
- execution_id: {
- 'query': args.get('query'),
- 'time_frame': args.get('time_frame'),
- 'command_name': demisto.command(),
- 'query_name': query_name,
- }
- })
- return get_xql_query_results_polling_command(client, args)
- except Exception:
- remove_query_id_from_integration_context(execution_id)
- raise
+ args['command_name'] = demisto.command()
+
+ return get_xql_query_results_polling_command(client, args)
def get_xql_query_results_polling_command(client: Client, args: dict) -> Union[CommandResults, list]:
@@ -618,19 +594,14 @@ def get_xql_query_results_polling_command(client: Client, args: dict) -> Union[C
Union[CommandResults, dict]: The command results.
"""
# get the query data either from the integration context (if its not the first run) or from the given args.
- query_id = args.get('query_id', '')
parse_result_file_to_context = argToBoolean(args.get('parse_result_file_to_context', 'false'))
- integration_context, _ = get_integration_context_with_version()
- command_data_raw = integration_context.get(query_id, args)
- command_data = json.loads(command_data_raw) if isinstance(command_data_raw, str)\
- else integration_context.get(query_id, args)
- command_name = command_data.get('command_name', demisto.command())
+ command_name = args.get('command_name', demisto.command())
interval_in_secs = int(args.get('interval_in_seconds', 10))
max_fields = arg_to_number(args.get('max_fields', 20))
if max_fields is None:
raise DemistoException('Please provide a valid number for max_fields argument.')
outputs, file_data = get_xql_query_results(client, args) # get query results with query_id
- outputs.update({'query_name': command_data.get('query_name', '')})
+ outputs.update({'query_name': args.get('query_name', '')})
outputs_prefix = get_outputs_prefix(command_name)
command_results = CommandResults(outputs_prefix=outputs_prefix, outputs_key_field='execution_id', outputs=outputs,
raw_response=copy.deepcopy(outputs))
@@ -640,18 +611,14 @@ def get_xql_query_results_polling_command(client: Client, args: dict) -> Union[C
# Extracts the results into a file only
file = fileResult(filename="results.gz", data=file_data)
command_results.readable_output = 'More than 1000 results were retrieved, see the compressed gzipped file below.'
- remove_query_id_from_integration_context(query_id)
return [file, command_results]
else:
# Parse the results to context:
data = gzip.decompress(file_data).decode()
outputs['results'] = [json.loads(line) for line in data.split("\n") if len(line) > 0]
- # if status is pending, in versions above 6.2.0, the command will be called again in the next run until success.
+ # if status is pending, the command will be called again in the next run until success.
if outputs.get('status') == 'PENDING':
- if not is_demisto_version_ge('6.2.0'): # only 6.2.0 version and above support polling command.
- remove_query_id_from_integration_context(query_id)
- return command_results
scheduled_command = ScheduledCommand(command='xdr-xql-get-query-results', next_run_in_seconds=interval_in_secs,
args=args, timeout_in_seconds=600)
command_results.scheduled_command = scheduled_command
@@ -660,8 +627,8 @@ def get_xql_query_results_polling_command(client: Client, args: dict) -> Union[C
results_to_format = outputs.pop('results')
# create Human Readable output
- query = command_data.get('query', '')
- time_frame = command_data.get('time_frame')
+ query = args.get('query', '')
+ time_frame = args.get('time_frame')
extra_for_human_readable = ({'query': query, 'time_frame': time_frame})
outputs.update(extra_for_human_readable)
command_results.readable_output = tableToMarkdown('General Information', outputs,
@@ -682,7 +649,7 @@ def get_xql_query_results_polling_command(client: Client, args: dict) -> Union[C
command_results.readable_output += tableToMarkdown('Data Results', outputs.get('results'),
headerTransform=string_to_table_header)
- remove_query_id_from_integration_context(query_id)
+
return command_results
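
The net effect of the XQL changes above: polling state now rides along in the scheduled command's arguments (see the hidden query, time_frame, command_name, and query_name arguments added to the YAML below) instead of being persisted in the integration context. A simplified, illustrative sketch of that flow:

```python
# Rough sketch (not the integration's code): query metadata travels inside the polling args.
scheduled = []

def fake_schedule(command, args, next_run_in_seconds):
    # Stand-in for ScheduledCommand: record what would be re-run and with which args.
    scheduled.append((command, args))
    return "polling scheduled"

def poll(args, get_results, schedule):
    outputs = get_results(args["query_id"])
    outputs["query_name"] = args.get("query_name", "")
    if outputs.get("status") == "PENDING":
        # Re-schedule with the same args; no integration-context read/write is needed anymore.
        return schedule("xdr-xql-get-query-results", args, next_run_in_seconds=10)
    return outputs

def start_polling(args, start_query, schedule):
    execution_id = start_query(args)          # returns the XQL execution id
    args = dict(args, query_id=execution_id)  # the real code also records demisto.command() as command_name
    return poll(args, get_results=lambda _id: {"status": "PENDING"}, schedule=schedule)

print(start_polling({"query": "dataset = xdr_data | limit 1", "query_name": "demo"},
                    start_query=lambda a: "exec-123", schedule=fake_schedule))
print(scheduled[0][1]["query_name"])          # -> 'demo' (carried via args, not context)
```
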
diff --git a/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.yml b/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.yml
index 77385eb8fcd2..e9199ebc095d 100644
--- a/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.yml
+++ b/Packs/CortexXDR/Integrations/XQLQueryingEngine/XQLQueryingEngine.yml
@@ -207,6 +207,18 @@ script:
- "true"
- "false"
defaultValue: "false"
+ - description: The XQL query.
+ name: query
+ hidden: true
+ - description: Time in relative date or range format.
+ name: time_frame
+ hidden: true
+ - description: The name of the command.
+ name: command_name
+ hidden: true
+ - description: The name of the query.
+ name: query_name
+ hidden: true
description: |-
Retrieve results of an executed XQL query API. The command will be executed every 10 seconds until results are retrieved or until a timeout error is raised.
When more than 1000 results are retrieved, the command will return a compressed gzipped JSON format file,
@@ -1553,7 +1565,7 @@ script:
- contextPath: PaloAltoNetworksXQL.ProcessCausalityNetworkActivity.results._product
description: The result product.
type: String
- dockerimage: demisto/python3:3.10.13.80014
+ dockerimage: demisto/python3:3.10.14.99144
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
index 39519fabe269..3d57754c56a4 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2.yml
@@ -3,7 +3,7 @@ version: -1
contentitemexportablefields:
contentitemfields: {}
name: Cortex XDR Alerts Handling v2
-description: "This playbook is used to loop over every alert in a Cortex XDR incident. \nSupported alert categories:\n- Malware\n- Port Scan\n- Cloud Cryptojacking\n- Cloud Token Theft\n- RDP Brute-Force\n- First SSO Access\n- Cloud IAM User Access Investigation\n- Identity Analytics"
+description: "This playbook is used to loop over every alert in a Cortex XDR incident. \nSupported alert categories:\n- Malware\n- Port Scan\n- Cloud Cryptojacking\n- Cloud Token Theft\n- RDP Brute-Force\n- First SSO Access\n- Cloud IAM User Access Investigation\n- Identity Analytics\n- Malicious Pod."
starttaskid: "0"
tasks:
"0":
@@ -149,6 +149,15 @@ tasks:
right:
value:
simple: Cloud Audit Log
+ - operator: isEqualString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.module_id
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection
+ ignorecase: true
- label: RDP Brute-Force
condition:
- - operator: containsGeneral
@@ -764,6 +773,8 @@ tasks:
- "17"
IAM User Access:
- "15"
+ Malicious Pod:
+ - "23"
Token Theft:
- "16"
separatecontext: false
@@ -980,6 +991,134 @@ tasks:
right:
value:
simple: An identity performed a suspicious download of multiple cloud storage object
+ - label: Malicious Pod
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 2736470759
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 3395838097
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 4056473708
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 3906076747
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 1696441024
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 15999229
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 624304616
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 1099928083
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 779228750
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 807956875
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 3223457282
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 17641322
+ ignorecase: true
+ - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection - 2736470759
+ ignorecase: true
+ - - operator: containsString
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.agent_device_domain
+ iscontext: true
+ right:
+ value:
+ simple: compute
+ ignorecase: true
continueonerrortype: ""
view: |-
{
@@ -1825,11 +1964,84 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: ce90ccfb-1742-4360-8ec4-90365ea2c191
+ type: playbook
+ task:
+ id: ce90ccfb-1742-4360-8ec4-90365ea2c191
+ version: -1
+ name: Cortex XDR - Malicious Pod Response - Agent
+ description: "This playbook ensures a swift and effective response to malicious activities within Kubernetes environments, leveraging cloud-native tools to maintain cluster security and integrity.\n\nThe playbook is designed to handle agent-generated alerts due to malicious activities within Kubernetes (K8S) pods, such as mining activities, which requires immediate action. The playbook also addresses scenarios where the malicious pod is killed, but the malicious K8S workload repeatedly creates new pods.\n\nKey Features:\n\n1. Trigger: The playbook is activated when an agent-based mining alert is detected within a Kubernetes pod.\n2. AWS Function Integration: Utilizes an AWS Lambda function to facilitate rapid response actions.\n3. K8S Environment Remediation: \n - Pod Termination: The playbook includes steps to terminate the affected pod within the K8S environment safely.\n - Workload Suspension: If necessary, the playbook can be escalated to suspend the entire workload associated with the mining activity.\n\nWorkflow:\n\n1. Alert Detection: The playbook begins with the monitoring agent detecting a mining alert within a Kubernetes pod.\n2. Alert Validation: Validates the alert to ensure it is not a false positive.\n3. Response Decision: \n - Pod Termination: If the mining activity is isolated to a single pod, the AWS Lambda function is invoked to terminate the affected pod within the K8S environment.\n - Workload Suspension: If the mining activity is widespread or poses a significant threat, the AWS Lambda function suspends the entire workload within the K8S environment.\n4. Cleanup: Initiates a complete removal of all objects created for the Lambda execution for security and hardening purposes."
+ playbookName: Cortex XDR - Malicious Pod Response - Agent
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ AlerID:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Incident.alerts.name
+ iscontext: true
+ right:
+ value:
+ simple: Cryptominers Protection
+ ignorecase: true
+ accessor: alert_id
+ ClusterName:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: cluster_name
+ transformers:
+ - operator: uniq
+ Region:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: agent_device_domain
+ transformers:
+ - operator: Cut
+ args:
+ delimiter:
+ value:
+ simple: .
+ fields:
+ value:
+ simple: "1"
+ - operator: uniq
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": -3260,
+ "y": 750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
- "12_15_IAM User Access": 0.74,
- "12_16_Token Theft": 0.58,
+ "12_15_IAM User Access": 0.84,
+ "12_16_Token Theft": 0.65,
+ "12_17_Data Exfiltration": 0.9,
"1_11_Cloud": 0.9,
"1_14_First SSO Access": 0.86,
"1_18_ Remote PsExec with LOLBIN command": 0.67,
@@ -1841,8 +2053,8 @@ view: |-
"paper": {
"dimensions": {
"height": 925,
- "width": 5450,
- "x": -2840,
+ "width": 5870,
+ "x": -3260,
"y": 70
}
}
diff --git a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
index a726cfee582a..759d2a338559 100644
--- a/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
+++ b/Packs/CortexXDR/Playbooks/Cortex_XDR_Alerts_Handling_v2_README.md
@@ -8,6 +8,7 @@ Supported alert categories:
- First SSO Access
- Cloud IAM User Access Investigation
- Identity Analytics
+- Malicious Pod.
## Dependencies
@@ -15,18 +16,19 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* GenericPolling
-* Cortex XDR - Possible External RDP Brute-Force
-* Cortex XDR - XCloud Cryptojacking
+* Cortex XDR - Large Upload
* Cortex XDR - Port Scan - Adjusted
* Cortex XDR - First SSO Access
-* Cortex XDR - XCloud Token Theft Response
-* Cortex XDR Remote PsExec with LOLBIN command execution alert
* Cortex XDR - Cloud IAM User Access Investigation
-* Cortex XDR - Large Upload
+* Cortex XDR - Identity Analytics
+* Cortex XDR - XCloud Token Theft Response
* Cortex XDR - Cloud Data Exfiltration Response
+* Cortex XDR - Malicious Pod Response - Agent
+* Cortex XDR - Possible External RDP Brute-Force
+* Cortex XDR - XCloud Cryptojacking
* Cortex XDR - Malware Investigation
-* Cortex XDR - Identity Analytics
+* Cortex XDR Remote PsExec with LOLBIN command execution alert
+* GenericPolling
### Integrations
@@ -34,7 +36,7 @@ This playbook does not use any integrations.
### Scripts
-This playbook does not use any scripts.
+* Set
### Commands
diff --git a/Packs/CortexXDR/Playbooks/PaloAltoNetworks_Cortex_XDR_Incident_Sync_README.md b/Packs/CortexXDR/Playbooks/PaloAltoNetworks_Cortex_XDR_Incident_Sync_README.md
index b6c4a949609e..370503d3384d 100644
--- a/Packs/CortexXDR/Playbooks/PaloAltoNetworks_Cortex_XDR_Incident_Sync_README.md
+++ b/Packs/CortexXDR/Playbooks/PaloAltoNetworks_Cortex_XDR_Incident_Sync_README.md
@@ -1,4 +1,8 @@
-Deprecated. No available replacement. Compares incidents in Palo Alto Networks Cortex XDR and Cortex XSOAR, and updates the incidents appropriately. When an incident is updated in Cortex XSOAR, the XDRSyncScript will update the incident in XDR. When an incident is updated in XDR, the XDRSyncScript will update the incident fields in Cortex XSOAR and rerun the current playbook. Do not use this playbook when enabling the incident mirroring feature added in XSOAR version 6.0.0.
+Deprecated. No available replacement. The playbook has been deprecated because the "xdr sync script" is no longer necessary due to the availability of the mirroring feature. The mirroring feature offers improved performance, and we strongly recommend using it instead.
+
+Note: This playbook no longer works after migrating from an on-prem Cortex XSOAR instance to the cloud-hosted version of Cortex XSOAR.
+
+Compares incidents in Palo Alto Networks Cortex XDR and Cortex XSOAR, and updates the incidents appropriately. When an incident is updated in Cortex XSOAR, the XDRSyncScript will update the incident in XDR. When an incident is updated in XDR, the XDRSyncScript will update the incident fields in Cortex XSOAR and rerun the current playbook. Do not use this playbook when enabling the incident mirroring feature added in XSOAR version 6.0.0.
## Dependencies
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_44.md b/Packs/CortexXDR/ReleaseNotes/6_1_44.md
new file mode 100644
index 000000000000..9a7f9af12682
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_44.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed the description of the *start_time* and *end_time* arguments to the ***xdr-get-alerts*** command.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_45.md b/Packs/CortexXDR/ReleaseNotes/6_1_45.md
new file mode 100644
index 000000000000..894cdff7c346
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_45.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+Fixed an issue in CoreIRApiModule regarding close reason resolution.
\ No newline at end of file
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_46.md b/Packs/CortexXDR/ReleaseNotes/6_1_46.md
new file mode 100644
index 000000000000..7439d51dfe39
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_46.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+Fixed an issue in CoreIRApiModule where it was failing to parse a response.
\ No newline at end of file
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_47.md b/Packs/CortexXDR/ReleaseNotes/6_1_47.md
new file mode 100644
index 000000000000..bda60e0e764f
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_47.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue in mirroring when incident were not closing in XDR.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_48.md b/Packs/CortexXDR/ReleaseNotes/6_1_48.md
new file mode 100644
index 000000000000..c8a6e312c539
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_48.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Cortex XDR - XQL Query Engine
+
+- Updated all commands to stop using the integration context, improving performance.
+- Updated the Docker image to: *demisto/python3:3.10.14.99144*.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_49.md b/Packs/CortexXDR/ReleaseNotes/6_1_49.md
new file mode 100644
index 000000000000..a38aea16cc39
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_49.md
@@ -0,0 +1,10 @@
+<~XPANSE>
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+
+</~XPANSE>
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_50.md b/Packs/CortexXDR/ReleaseNotes/6_1_50.md
new file mode 100644
index 000000000000..660ee87503ea
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_50.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Updated the descriptions for the **lte_creation_time** and **gte_creation_time** arguments in the ***xdr-get-incidents*** command to specify that they must be in UTC.
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_51.md b/Packs/CortexXDR/ReleaseNotes/6_1_51.md
new file mode 100644
index 000000000000..3c0cdd774392
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_51.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue where the integration commands failed due to a change in the API request process.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_52.md b/Packs/CortexXDR/ReleaseNotes/6_1_52.md
new file mode 100644
index 000000000000..b2b7b65a892a
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_52.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Cortex XDR Alerts Handling v2
+
+- Added the 'Cortex XDR - Malicious Pod Response - Agent' sub-playbook.
\ No newline at end of file
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_53.md b/Packs/CortexXDR/ReleaseNotes/6_1_53.md
new file mode 100644
index 000000000000..12d7e272c2d5
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_53.md
@@ -0,0 +1,27 @@
+
+#### Dashboards
+
+##### Cortex XDR Events Grouping
+
+The Cortex XDR Events Grouping dashboard is now supported only for Cortex XSOAR on-prem.
+
+#### Scripts
+
+##### DBotGroupXDRIncidents
+
+- The DBotGroupXDRIncidents script is now supported only for Cortex XSOAR on-prem.
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+
+#### Widgets
+
+##### Cortex XDR Grouping - Summary
+
+The Cortex XDR Grouping - Summary widget is now supported only for Cortex XSOAR on-prem.
+
+##### Cortex XDR Grouping - Incidents
+
+The Cortex XDR Grouping - Incidents widget is now supported only for Cortex XSOAR on-prem.
+
+##### Cortex XDR Groups - Scatter
+
+The Cortex XDR Groups - Scatter widget is now supported only for Cortex XSOAR on-prem.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_54.md b/Packs/CortexXDR/ReleaseNotes/6_1_54.md
new file mode 100644
index 000000000000..d40fc4231544
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_54.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+Added support for custom XSOAR close-reasons in XSOAR-XDR close-reason mapping.
diff --git a/Packs/CortexXDR/Scripts/DBotGroupXDRIncidents/DBotGroupXDRIncidents.yml b/Packs/CortexXDR/Scripts/DBotGroupXDRIncidents/DBotGroupXDRIncidents.yml
index 3ed249b9d0ff..0f029437939d 100644
--- a/Packs/CortexXDR/Scripts/DBotGroupXDRIncidents/DBotGroupXDRIncidents.yml
+++ b/Packs/CortexXDR/Scripts/DBotGroupXDRIncidents/DBotGroupXDRIncidents.yml
@@ -47,6 +47,7 @@ type: python
tests:
- No tests (auto formatted)
fromversion: 6.2.0
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.99865
marketplaces:
- xsoar
+- xsoar_on_prem
diff --git a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
index d7605a42e700..32eb17d863d9 100644
--- a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
+++ b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Endpoint_Investigation.yml
@@ -12,10 +12,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: ce53f9de-f436-41e7-8503-462a2f1d335b
+ taskid: 0e0b54d6-b8b0-423b-8a5a-999ec893b300
type: start
task:
- id: ce53f9de-f436-41e7-8503-462a2f1d335b
+ id: 0e0b54d6-b8b0-423b-8a5a-999ec893b300
version: -1
name: ""
iscommand: false
@@ -30,7 +30,7 @@ tasks:
{
"position": {
"x": 540,
- "y": -1465
+ "y": -1625
}
}
note: false
@@ -42,10 +42,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 41d60b6d-e2e8-46db-8c86-5dd132deea9d
+ taskid: 070df723-d62c-4a76-89c0-321a781ac5ba
type: regular
task:
- id: 41d60b6d-e2e8-46db-8c86-5dd132deea9d
+ id: 070df723-d62c-4a76-89c0-321a781ac5ba
version: -1
name: Delete Context
description: The task deletes all of the context data. Having a clean beginning to a test playbook ensures that a test can be sterile and that unrelated issues can be eliminated.
@@ -65,7 +65,7 @@ tasks:
{
"position": {
"x": 540,
- "y": -1335
+ "y": -1495
}
}
note: false
@@ -77,10 +77,10 @@ tasks:
isautoswitchedtoquietmode: false
"46":
id: "46"
- taskid: 8def3518-b0ee-4303-8458-98ce43cddb39
+ taskid: 141c6a09-7365-441a-868b-dd4988276a19
type: condition
task:
- id: 8def3518-b0ee-4303-8458-98ce43cddb39
+ id: 141c6a09-7365-441a-868b-dd4988276a19
version: -1
name: Verify Internal ID
description: |
@@ -110,8 +110,8 @@ tasks:
view: |-
{
"position": {
- "x": 2170,
- "y": -80
+ "x": 2380,
+ "y": -410
}
}
note: false
@@ -123,10 +123,10 @@ tasks:
isautoswitchedtoquietmode: false
"73":
id: "73"
- taskid: b20e81ae-be64-463e-83a5-da93f6d26bdd
+ taskid: bc8b907e-283f-4076-8112-b94536277cc9
type: condition
task:
- id: b20e81ae-be64-463e-83a5-da93f6d26bdd
+ id: bc8b907e-283f-4076-8112-b94536277cc9
version: -1
name: Verify Alerts
description: Verify that the ‘PaloAltoNetworksXDR.Incident.Alerts’ context key was extracted correctly.
@@ -153,8 +153,8 @@ tasks:
view: |-
{
"position": {
- "x": 1765,
- "y": -80
+ "x": 1975,
+ "y": -410
}
}
note: false
@@ -166,10 +166,10 @@ tasks:
isautoswitchedtoquietmode: false
"74":
id: "74"
- taskid: 6747ac48-90af-4c51-83d1-480b79458b8d
+ taskid: 7953ac22-a003-4d48-8bd9-c2caba6011a7
type: condition
task:
- id: 6747ac48-90af-4c51-83d1-480b79458b8d
+ id: 7953ac22-a003-4d48-8bd9-c2caba6011a7
version: -1
name: Verify Alert Name
description: |
@@ -195,8 +195,8 @@ tasks:
view: |-
{
"position": {
- "x": 3040,
- "y": -80
+ "x": 3170,
+ "y": -410
}
}
note: false
@@ -208,10 +208,10 @@ tasks:
isautoswitchedtoquietmode: false
"75":
id: "75"
- taskid: 3138e111-701b-4704-880e-6370bb8c48b7
+ taskid: 35b84fc9-006e-46cb-8d3d-3b67dbcfb215
type: condition
task:
- id: 3138e111-701b-4704-880e-6370bb8c48b7
+ id: 35b84fc9-006e-46cb-8d3d-3b67dbcfb215
version: -1
name: Verify Detection Timestamp
description: |
@@ -239,8 +239,8 @@ tasks:
view: |-
{
"position": {
- "x": 2605,
- "y": -80
+ "x": 2775,
+ "y": -410
}
}
note: false
@@ -252,10 +252,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 2e4e3fde-110c-4ce4-8f40-00180e9ff00e
+ taskid: ef8e188d-b50e-4b58-8544-abc0f2e0944e
type: title
task:
- id: 2e4e3fde-110c-4ce4-8f40-00180e9ff00e
+ id: ef8e188d-b50e-4b58-8544-abc0f2e0944e
version: -1
name: Start Tests
type: title
@@ -283,56 +283,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "112":
- id: "112"
- taskid: 40f54a83-2c2d-4da1-8225-171b5051dd5d
- type: title
- task:
- id: 40f54a83-2c2d-4da1-8225-171b5051dd5d
- version: -1
- name: '''PaloAltoNetworksXDR'' Context Data'
- type: title
- iscommand: false
- brand: ""
- description: ''
- nexttasks:
- '#none#':
- - "46"
- - "73"
- - "74"
- - "75"
- - "294"
- - "296"
- - "298"
- - "300"
- - "302"
- - "312"
- - "304"
- - "306"
- - "308"
- - "310"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 1765,
- "y": -225
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"113":
id: "113"
- taskid: ca13c35f-a6fe-43c0-841b-23463d19f793
+ taskid: 9edb91b9-451d-42a8-8fe3-5c1d57f1a196
type: title
task:
- id: ca13c35f-a6fe-43c0-841b-23463d19f793
+ id: 9edb91b9-451d-42a8-8fe3-5c1d57f1a196
version: -1
name: Done verifying 'PaloAltoNetworksXDR'
type: title
@@ -347,8 +303,8 @@ tasks:
view: |-
{
"position": {
- "x": 1360,
- "y": 110
+ "x": 1550,
+ "y": -60
}
}
note: false
@@ -360,10 +316,10 @@ tasks:
isautoswitchedtoquietmode: false
"114":
id: "114"
- taskid: 156b6f7f-eb66-45b2-8335-cfc9e2802fa2
+ taskid: 08a76e10-b981-4ba5-8c74-3a546e67325d
type: regular
task:
- id: 156b6f7f-eb66-45b2-8335-cfc9e2802fa2
+ id: 08a76e10-b981-4ba5-8c74-3a546e67325d
version: -1
name: Verify Context Error - Detection Timestamp
description: Prints an error entry with a given message
@@ -379,8 +335,8 @@ tasks:
view: |-
{
"position": {
- "x": 2605,
- "y": 165
+ "x": 2775,
+ "y": -25
}
}
note: false
@@ -392,10 +348,10 @@ tasks:
isautoswitchedtoquietmode: false
"115":
id: "115"
- taskid: 419426c3-62ef-4fd5-8e6c-d18e06a51e6e
+ taskid: 9b252a84-7125-42b0-88c1-3220caa14b73
type: regular
task:
- id: 419426c3-62ef-4fd5-8e6c-d18e06a51e6e
+ id: 9b252a84-7125-42b0-88c1-3220caa14b73
version: -1
name: Verify Context Error - Alert Name
description: Prints an error entry with a given message
@@ -411,8 +367,8 @@ tasks:
view: |-
{
"position": {
- "x": 3040,
- "y": 165
+ "x": 3170,
+ "y": -25
}
}
note: false
@@ -424,10 +380,10 @@ tasks:
isautoswitchedtoquietmode: false
"116":
id: "116"
- taskid: 2916273e-6b3b-4d10-844a-d273b741e083
+ taskid: e2cab238-6eb1-449c-805f-9be64c3cc9fe
type: regular
task:
- id: 2916273e-6b3b-4d10-844a-d273b741e083
+ id: e2cab238-6eb1-449c-805f-9be64c3cc9fe
version: -1
name: Verify Context Data Error - Alerts
description: Prints an error entry with a given message
@@ -443,8 +399,8 @@ tasks:
view: |-
{
"position": {
- "x": 1770,
- "y": 165
+ "x": 1975,
+ "y": -25
}
}
note: false
@@ -456,10 +412,10 @@ tasks:
isautoswitchedtoquietmode: false
"117":
id: "117"
- taskid: 40b8cdea-3c15-4490-8a49-9870b0ae7236
+ taskid: 0447489a-c244-4529-80d1-23a69aebaf2a
type: regular
task:
- id: 40b8cdea-3c15-4490-8a49-9870b0ae7236
+ id: 0447489a-c244-4529-80d1-23a69aebaf2a
version: -1
name: Verify Context Data Error - Internal ID
description: Prints an error entry with a given message
@@ -475,8 +431,8 @@ tasks:
view: |-
{
"position": {
- "x": 2170,
- "y": 165
+ "x": 2380,
+ "y": -25
}
}
note: false
@@ -488,10 +444,10 @@ tasks:
isautoswitchedtoquietmode: false
"132":
id: "132"
- taskid: e27e7e00-5718-43e8-8db8-5594b5f61c1c
+ taskid: 96dd1fb4-87c9-4d69-8dec-d79ab4550f23
type: title
task:
- id: e27e7e00-5718-43e8-8db8-5594b5f61c1c
+ id: 96dd1fb4-87c9-4d69-8dec-d79ab4550f23
version: -1
name: Check Incident Evidences
type: title
@@ -506,7 +462,7 @@ tasks:
view: |-
{
"position": {
- "x": 930,
+ "x": 1140,
"y": -550
}
}
@@ -519,10 +475,10 @@ tasks:
isautoswitchedtoquietmode: false
"149":
id: "149"
- taskid: 48258f6f-0ef8-4dc7-8e06-be71dbe77cde
+ taskid: 7cee0cc7-486c-4e0d-8991-13cb066ee82e
type: regular
task:
- id: 48258f6f-0ef8-4dc7-8e06-be71dbe77cde
+ id: 7cee0cc7-486c-4e0d-8991-13cb066ee82e
version: -1
name: Set Incident Fields To Context
description: Add incident fields required for testing to the mock incident.
@@ -544,7 +500,7 @@ tasks:
{
"position": {
"x": 540,
- "y": -1170
+ "y": -1330
}
}
note: false
@@ -556,10 +512,10 @@ tasks:
isautoswitchedtoquietmode: false
"151":
id: "151"
- taskid: 4f384f59-8318-4dd1-8c98-475e10ce4ea7
+ taskid: fbf2684c-25be-48f3-8392-04305f32da7a
type: title
task:
- id: 4f384f59-8318-4dd1-8c98-475e10ce4ea7
+ id: fbf2684c-25be-48f3-8392-04305f32da7a
version: -1
name: Check Context Data
type: title
@@ -601,10 +557,10 @@ tasks:
isautoswitchedtoquietmode: false
"220":
id: "220"
- taskid: 903cd374-83be-4dc5-86c6-386782ce0ab9
+ taskid: 46b7ab88-40ce-48bb-8fa3-a9ec36d330ca
type: condition
task:
- id: 903cd374-83be-4dc5-86c6-386782ce0ab9
+ id: 46b7ab88-40ce-48bb-8fa3-a9ec36d330ca
version: -1
name: Verify MITRE Techniques Specific ID
description: Verify that the 'MITRE_Techniques_Specific_ID’ context key was extracted.
@@ -646,10 +602,10 @@ tasks:
isautoswitchedtoquietmode: false
"221":
id: "221"
- taskid: 978e6936-9afd-45b7-83cd-1ffc5300057b
+ taskid: d98e35b0-b5b5-4250-8fe4-4fc15e53025a
type: regular
task:
- id: 978e6936-9afd-45b7-83cd-1ffc5300057b
+ id: d98e35b0-b5b5-4250-8fe4-4fc15e53025a
version: -1
name: Verify Context Error - MITRE Techniques Specific ID
description: Prints an error entry with a given message
@@ -682,10 +638,10 @@ tasks:
isautoswitchedtoquietmode: false
"292":
id: "292"
- taskid: fdb71645-23b4-457c-8138-9d67dc867ed8
+ taskid: c1216f7b-2ff9-43e6-80ea-6f46e3007150
type: playbook
task:
- id: fdb71645-23b4-457c-8138-9d67dc867ed8
+ id: c1216f7b-2ff9-43e6-80ea-6f46e3007150
version: -1
name: Cortex XDR - Endpoint Investigation
description: "This playbook is part of the 'Malware Investigation And Response' pack. For more information, refer to https://xsoar.pan.dev/docs/reference/packs/malware-investigation-and-response. This playbook handles all the endpoint investigation actions available with Cortex XSOAR, including the following tasks:\n * Pre-defined MITRE Tactics\n * Host fields (Host ID)\n * Attacker fields (Attacker IP, External host)\n * MITRE techniques\n * File hash (currently, the playbook supports only SHA256) \n\n Note: The playbook inputs enable manipulating the execution flow; read the input descriptions for details."
@@ -695,7 +651,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "84"
+ - "366"
scriptarguments:
FileSHA256:
complex:
@@ -756,7 +712,7 @@ tasks:
{
"position": {
"x": 540,
- "y": -850
+ "y": -1010
}
}
note: false
@@ -768,10 +724,10 @@ tasks:
isautoswitchedtoquietmode: false
"293":
id: "293"
- taskid: ab891216-a18d-4141-8eb3-2365de8abca0
+ taskid: c6f8e20b-bc2d-4e56-870b-bf026ce122b0
type: regular
task:
- id: ab891216-a18d-4141-8eb3-2365de8abca0
+ id: c6f8e20b-bc2d-4e56-870b-bf026ce122b0
version: -1
name: Set 'File.SHA256' to context
description: |-
@@ -799,7 +755,7 @@ tasks:
{
"position": {
"x": 540,
- "y": -1010
+ "y": -1170
}
}
note: false
@@ -811,10 +767,10 @@ tasks:
isautoswitchedtoquietmode: false
"294":
id: "294"
- taskid: 86594eb7-342a-49ee-81d7-4d5c1f449812
+ taskid: 89e126a4-862d-4639-8e11-2f557a9459ee
type: condition
task:
- id: 86594eb7-342a-49ee-81d7-4d5c1f449812
+ id: 89e126a4-862d-4639-8e11-2f557a9459ee
version: -1
name: Verify Alert Severity
description: |
@@ -842,8 +798,8 @@ tasks:
view: |-
{
"position": {
- "x": 3440,
- "y": -80
+ "x": 3570,
+ "y": -410
}
}
note: false
@@ -855,10 +811,10 @@ tasks:
isautoswitchedtoquietmode: false
"295":
id: "295"
- taskid: 1edc95c3-8ed8-4ec5-8124-f8b7117ba3e9
+ taskid: c3c02afd-627c-4e26-8fdf-4820339ccf5f
type: regular
task:
- id: 1edc95c3-8ed8-4ec5-8124-f8b7117ba3e9
+ id: c3c02afd-627c-4e26-8fdf-4820339ccf5f
version: -1
name: Verify Context Error - Alert Severity
description: Prints an error entry with a given message
@@ -874,8 +830,8 @@ tasks:
view: |-
{
"position": {
- "x": 3440,
- "y": 165
+ "x": 3570,
+ "y": -25
}
}
note: false
@@ -887,10 +843,10 @@ tasks:
isautoswitchedtoquietmode: false
"296":
id: "296"
- taskid: d4703163-c594-401d-88c3-704743a34a8d
+ taskid: b0f6293b-29db-4aaa-8fa8-0ba85027effc
type: condition
task:
- id: d4703163-c594-401d-88c3-704743a34a8d
+ id: b0f6293b-29db-4aaa-8fa8-0ba85027effc
version: -1
name: Verify Alert Category
description: |
@@ -916,8 +872,8 @@ tasks:
view: |-
{
"position": {
- "x": 3840,
- "y": -80
+ "x": 3970,
+ "y": -410
}
}
note: false
@@ -929,10 +885,10 @@ tasks:
isautoswitchedtoquietmode: false
"297":
id: "297"
- taskid: 4d366b1d-a7cf-4b29-8b6f-1c107b764e27
+ taskid: 6f7f8088-61dd-45db-845c-b754501fd4e0
type: regular
task:
- id: 4d366b1d-a7cf-4b29-8b6f-1c107b764e27
+ id: 6f7f8088-61dd-45db-845c-b754501fd4e0
version: -1
name: Verify Context Error - Alert Category
description: Prints an error entry with a given message
@@ -948,8 +904,8 @@ tasks:
view: |-
{
"position": {
- "x": 3840,
- "y": 165
+ "x": 3970,
+ "y": -25
}
}
note: false
@@ -961,10 +917,10 @@ tasks:
isautoswitchedtoquietmode: false
"298":
id: "298"
- taskid: 22aac2a3-e231-4f58-8da5-60f667cc7448
+ taskid: 48b6a0cd-0b8f-4b13-8f06-b58dff61ed1d
type: condition
task:
- id: 22aac2a3-e231-4f58-8da5-60f667cc7448
+ id: 48b6a0cd-0b8f-4b13-8f06-b58dff61ed1d
version: -1
name: Verify Alert Action Status
description: |
@@ -990,8 +946,8 @@ tasks:
view: |-
{
"position": {
- "x": 4240,
- "y": -80
+ "x": 4370,
+ "y": -410
}
}
note: false
@@ -1003,10 +959,10 @@ tasks:
isautoswitchedtoquietmode: false
"299":
id: "299"
- taskid: 52e87342-aee8-4add-8f21-955b42f75493
+ taskid: 7b0c6575-f09b-4196-8bb0-e9665a3d82d3
type: regular
task:
- id: 52e87342-aee8-4add-8f21-955b42f75493
+ id: 7b0c6575-f09b-4196-8bb0-e9665a3d82d3
version: -1
name: Verify Context Error - Alert Action Status
description: Prints an error entry with a given message
@@ -1022,8 +978,8 @@ tasks:
view: |-
{
"position": {
- "x": 4240,
- "y": 165
+ "x": 4370,
+ "y": -25
}
}
note: false
@@ -1035,10 +991,10 @@ tasks:
isautoswitchedtoquietmode: false
"300":
id: "300"
- taskid: ee17f12f-92ce-44c8-8f5e-62dbb3641349
+ taskid: 0ca5ec2e-563e-4e99-8542-dd7ce225c178
type: condition
task:
- id: ee17f12f-92ce-44c8-8f5e-62dbb3641349
+ id: 0ca5ec2e-563e-4e99-8542-dd7ce225c178
version: -1
name: Verify Alert Action Status Readable
description: |
@@ -1064,8 +1020,8 @@ tasks:
view: |-
{
"position": {
- "x": 4640,
- "y": -80
+ "x": 4770,
+ "y": -410
}
}
note: false
@@ -1077,10 +1033,10 @@ tasks:
isautoswitchedtoquietmode: false
"301":
id: "301"
- taskid: e2fa6463-cce7-456b-8ad5-a4a4c2ecf2af
+ taskid: 311ae564-3b0b-4e5a-8dce-70cad5d85ea3
type: regular
task:
- id: e2fa6463-cce7-456b-8ad5-a4a4c2ecf2af
+ id: 311ae564-3b0b-4e5a-8dce-70cad5d85ea3
version: -1
name: Verify Context Error - Alert Action Status Readable
description: Prints an error entry with a given message
@@ -1096,8 +1052,8 @@ tasks:
view: |-
{
"position": {
- "x": 4640,
- "y": 165
+ "x": 4770,
+ "y": -25
}
}
note: false
@@ -1109,10 +1065,10 @@ tasks:
isautoswitchedtoquietmode: false
"302":
id: "302"
- taskid: a660ad44-f759-435f-842e-56953037c6f1
+ taskid: e1017c16-fda4-40a2-8c95-a497d3d2140f
type: condition
task:
- id: a660ad44-f759-435f-842e-56953037c6f1
+ id: e1017c16-fda4-40a2-8c95-a497d3d2140f
version: -1
name: Verify Alert Description
description: |
@@ -1138,8 +1094,8 @@ tasks:
view: |-
{
"position": {
- "x": 5040,
- "y": -80
+ "x": 5170,
+ "y": -410
}
}
note: false
@@ -1151,10 +1107,10 @@ tasks:
isautoswitchedtoquietmode: false
"303":
id: "303"
- taskid: 011d36b8-b6f5-4cfd-88a5-890e71415f95
+ taskid: a0135392-68d7-4fdc-82b4-ddbede0a1220
type: regular
task:
- id: 011d36b8-b6f5-4cfd-88a5-890e71415f95
+ id: a0135392-68d7-4fdc-82b4-ddbede0a1220
version: -1
name: Verify Context Error - Alert Description
description: Prints an error entry with a given message
@@ -1170,8 +1126,8 @@ tasks:
view: |-
{
"position": {
- "x": 5040,
- "y": 165
+ "x": 5170,
+ "y": -25
}
}
note: false
@@ -1183,10 +1139,10 @@ tasks:
isautoswitchedtoquietmode: false
"304":
id: "304"
- taskid: a473c374-8522-426c-8527-d4d66f830eee
+ taskid: fe34c69e-4f46-40e1-83ee-b02bbc39bf32
type: condition
task:
- id: a473c374-8522-426c-8527-d4d66f830eee
+ id: fe34c69e-4f46-40e1-83ee-b02bbc39bf32
version: -1
name: Verify Agent IP Address
description: |
@@ -1212,8 +1168,8 @@ tasks:
view: |-
{
"position": {
- "x": 5440,
- "y": -80
+ "x": 5570,
+ "y": -410
}
}
note: false
@@ -1225,10 +1181,10 @@ tasks:
isautoswitchedtoquietmode: false
"305":
id: "305"
- taskid: b4458d27-4232-4ce8-8909-115dde0cef4c
+ taskid: 77258ec7-42a2-4bec-88bd-f29f7f233402
type: regular
task:
- id: b4458d27-4232-4ce8-8909-115dde0cef4c
+ id: 77258ec7-42a2-4bec-88bd-f29f7f233402
version: -1
name: Verify Context Error - Agent IP Address
description: Prints an error entry with a given message
@@ -1244,8 +1200,8 @@ tasks:
view: |-
{
"position": {
- "x": 5440,
- "y": 165
+ "x": 5570,
+ "y": -25
}
}
note: false
@@ -1257,10 +1213,10 @@ tasks:
isautoswitchedtoquietmode: false
"306":
id: "306"
- taskid: 28bcbd65-8a84-4e18-8d61-83c08ea61d8a
+ taskid: 826727e1-83c1-44a9-835e-4cb5f15489ef
type: condition
task:
- id: 28bcbd65-8a84-4e18-8d61-83c08ea61d8a
+ id: 826727e1-83c1-44a9-835e-4cb5f15489ef
version: -1
name: Verify Agent Hostname
description: |
@@ -1286,8 +1242,8 @@ tasks:
view: |-
{
"position": {
- "x": 5840,
- "y": -80
+ "x": 5970,
+ "y": -410
}
}
note: false
@@ -1299,10 +1255,10 @@ tasks:
isautoswitchedtoquietmode: false
"307":
id: "307"
- taskid: c1af8603-9e62-43ce-86a6-954553abf1bb
+ taskid: d6f84916-ae61-4c52-80a5-d450e2da259a
type: regular
task:
- id: c1af8603-9e62-43ce-86a6-954553abf1bb
+ id: d6f84916-ae61-4c52-80a5-d450e2da259a
version: -1
name: Verify Context Error - Agent Hostname
description: Prints an error entry with a given message
@@ -1318,156 +1274,8 @@ tasks:
view: |-
{
"position": {
- "x": 5840,
- "y": 165
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "308":
- id: "308"
- taskid: 0092afc4-d804-454c-8c07-c7acc97d8d64
- type: condition
- task:
- id: 0092afc4-d804-454c-8c07-c7acc97d8d64
- version: -1
- name: Verify Mitre Tactic ID & Name
- description: |
- Verify that the ‘PaloAltoNetworksXDR.Alert.mitre_tactic_id_and_name’ context key was extracted correctly.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- ' Verified':
- - "113"
- '#default#':
- - "309"
- separatecontext: false
- conditions:
- - label: ' Verified'
- condition:
- - - operator: isNotEmpty
- left:
- value:
- simple: PaloAltoNetworksXDR.Alert.mitre_tactic_id_and_name
- iscontext: true
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 6250,
- "y": -80
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "309":
- id: "309"
- taskid: 597c3ef8-ba18-43d8-8adc-57c92d54d6af
- type: regular
- task:
- id: 597c3ef8-ba18-43d8-8adc-57c92d54d6af
- version: -1
- name: Verify Context Error - Mitre Tactic ID & Name
- description: Prints an error entry with a given message
- scriptName: PrintErrorEntry
- type: regular
- iscommand: false
- brand: ""
- scriptarguments:
- message:
- simple: "The 'PaloAltoNetworksXDR.Alert.mitre_tactic_id_and_name' context key not extracted properly. This may indicate that one or more of the following changes have been made to the 'Cortex XDR Malware - Incident Enrichment' playbook:\n1- The 'xdr-get-incident-extra-data' automation outputs have been modified and no longer contain the 'PaloAltoNetworksXDR.Incident.network_artifacts' context key. \n2- The 'incident_id' input configuration was changed for the 'xdr-get-incident-extra-data' automation used in the 'Get Full Incident Details' task."
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 6250,
- "y": 165
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "310":
- id: "310"
- taskid: dcca840c-427a-49a0-889d-87bf1c6a5816
- type: condition
- task:
- id: dcca840c-427a-49a0-889d-87bf1c6a5816
- version: -1
- name: Verify Mitre Technique ID & Name
- description: |
- Verify that the ‘PaloAltoNetworksXDR.Alert.mitre_technique_id_and_name’ context key was extracted correctly.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- ' Verified':
- - "113"
- '#default#':
- - "311"
- separatecontext: false
- conditions:
- - label: ' Verified'
- condition:
- - - operator: isNotEmpty
- left:
- value:
- simple: PaloAltoNetworksXDR.Alert.mitre_technique_id_and_name
- iscontext: true
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 6660,
- "y": -80
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "311":
- id: "311"
- taskid: 4f19afda-9228-4c14-8b86-aecd1d3046c2
- type: regular
- task:
- id: 4f19afda-9228-4c14-8b86-aecd1d3046c2
- version: -1
- name: Verify Context Error - Mitre Technique ID & Name
- description: Prints an error entry with a given message
- scriptName: PrintErrorEntry
- type: regular
- iscommand: false
- brand: ""
- scriptarguments:
- message:
- simple: "The 'PaloAltoNetworksXDR.Alert.mitre_technique_id_and_name' context key not extracted properly. This may indicate that one or more of the following changes have been made to the 'Cortex XDR Malware - Incident Enrichment' playbook:\n1- The 'xdr-get-incident-extra-data' automation outputs have been modified and no longer contain the 'PaloAltoNetworksXDR.Incident.network_artifacts' context key. \n2- The 'incident_id' input configuration was changed for the 'xdr-get-incident-extra-data' automation used in the 'Get Full Incident Details' task."
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 6660,
- "y": 165
+ "x": 5970,
+ "y": -25
}
}
note: false
@@ -1479,10 +1287,10 @@ tasks:
isautoswitchedtoquietmode: false
"312":
id: "312"
- taskid: bd716226-74dc-4d37-8ba1-b6b68e76fa4f
+ taskid: eb17e1b4-26d2-4c11-83be-fda79c65507a
type: condition
task:
- id: bd716226-74dc-4d37-8ba1-b6b68e76fa4f
+ id: eb17e1b4-26d2-4c11-83be-fda79c65507a
version: -1
name: Verify Starred
description: |
@@ -1508,8 +1316,8 @@ tasks:
view: |-
{
"position": {
- "x": 7060,
- "y": -80
+ "x": 6370,
+ "y": -410
}
}
note: false
@@ -1521,10 +1329,10 @@ tasks:
isautoswitchedtoquietmode: false
"313":
id: "313"
- taskid: bdadc694-c5fa-423d-872c-2e7422c58e2d
+ taskid: ac9a8406-0e52-4237-8f57-dfeb97f4f5d5
type: regular
task:
- id: bdadc694-c5fa-423d-872c-2e7422c58e2d
+ id: ac9a8406-0e52-4237-8f57-dfeb97f4f5d5
version: -1
name: Verify Context Error - Starred
description: Prints an error entry with a given message
@@ -1540,8 +1348,8 @@ tasks:
view: |-
{
"position": {
- "x": 7060,
- "y": 165
+ "x": 6370,
+ "y": -25
}
}
note: false
@@ -1553,10 +1361,10 @@ tasks:
isautoswitchedtoquietmode: false
"331":
id: "331"
- taskid: 638b2658-f51f-4a16-8d82-c2cd5f8c42a3
+ taskid: f45150ff-cc9f-43bb-8fb7-04cd6f21c515
type: regular
task:
- id: 638b2658-f51f-4a16-8d82-c2cd5f8c42a3
+ id: f45150ff-cc9f-43bb-8fb7-04cd6f21c515
version: -1
name: Get Evidence Entries
description: Collect evidence entries.
@@ -1575,7 +1383,7 @@ tasks:
view: |-
{
"position": {
- "x": 930,
+ "x": 1140,
"y": -420
}
}
@@ -1588,10 +1396,10 @@ tasks:
isautoswitchedtoquietmode: false
"332":
id: "332"
- taskid: 92731dd3-bc06-4650-8c2e-1deb72c1e481
+ taskid: 75e7e92b-3c5c-4418-834a-8a62c7f91bcd
type: condition
task:
- id: 92731dd3-bc06-4650-8c2e-1deb72c1e481
+ id: 75e7e92b-3c5c-4418-834a-8a62c7f91bcd
version: -1
name: Verify Evidence
description: 'Verify that the hunting results have been marked as evidence. Considering that each of the hunting tasks are the only that marks items as evidence, we can assume that the hunting results were classified as evidence if the ''Entry.ID'' context key is present and contains 21 items. '
@@ -1761,7 +1569,7 @@ tasks:
view: |-
{
"position": {
- "x": 930,
+ "x": 1140,
"y": -255
}
}
@@ -1774,10 +1582,10 @@ tasks:
isautoswitchedtoquietmode: false
"333":
id: "333"
- taskid: 752a12f7-a72e-46d8-8f34-a8766d3a3fdd
+ taskid: 57029e43-5fa2-4fb4-830a-0f0e68cc6b74
type: regular
task:
- id: 752a12f7-a72e-46d8-8f34-a8766d3a3fdd
+ id: 57029e43-5fa2-4fb4-830a-0f0e68cc6b74
version: -1
name: Verify Incident Evidence Error
description: Prints an error entry with a given message
@@ -1793,7 +1601,7 @@ tasks:
view: |-
{
"position": {
- "x": 930,
+ "x": 1140,
"y": -25
}
}
@@ -1806,10 +1614,10 @@ tasks:
isautoswitchedtoquietmode: false
"334":
id: "334"
- taskid: 783db079-ce9b-4dd0-86f3-c078230fd49d
+ taskid: ccb861f9-6a00-493c-8205-deffa9afee96
type: title
task:
- id: 783db079-ce9b-4dd0-86f3-c078230fd49d
+ id: ccb861f9-6a00-493c-8205-deffa9afee96
version: -1
name: Done verifying Incident Evidence
type: title
@@ -1824,8 +1632,8 @@ tasks:
view: |-
{
"position": {
- "x": 540,
- "y": -80
+ "x": 750,
+ "y": -60
}
}
note: false
@@ -1837,10 +1645,10 @@ tasks:
isautoswitchedtoquietmode: false
"335":
id: "335"
- taskid: 74d276cf-61e1-4c80-8919-d7515b2c528c
+ taskid: 7e491fa8-343d-4f60-8afc-5eb5a897ca6e
type: title
task:
- id: 74d276cf-61e1-4c80-8919-d7515b2c528c
+ id: 7e491fa8-343d-4f60-8afc-5eb5a897ca6e
version: -1
name: Check Playbook Outputs
type: title
@@ -1849,13 +1657,24 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "366"
+ - "73"
+ - "46"
+ - "75"
+ - "294"
+ - "74"
+ - "296"
+ - "298"
+ - "300"
+ - "302"
+ - "304"
+ - "306"
+ - "312"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 1765,
+ "x": 1975,
"y": -550
}
}
@@ -1868,10 +1687,10 @@ tasks:
isautoswitchedtoquietmode: false
"336":
id: "336"
- taskid: f9400d92-fca4-4e15-8c71-35a24fc9ee8b
+ taskid: 28b150cb-8b59-4b37-8739-09caabe9a9ff
type: condition
task:
- id: f9400d92-fca4-4e15-8c71-35a24fc9ee8b
+ id: 28b150cb-8b59-4b37-8739-09caabe9a9ff
version: -1
name: Verify Host Activity
description: Verify that the 'Host_Activity’ context key was extracted.
@@ -1913,10 +1732,10 @@ tasks:
isautoswitchedtoquietmode: false
"337":
id: "337"
- taskid: ce70374b-9f58-4c59-8c76-43c911c2dc36
+ taskid: cd25053e-c04a-41b3-857d-0d2cf779b02a
type: regular
task:
- id: ce70374b-9f58-4c59-8c76-43c911c2dc36
+ id: cd25053e-c04a-41b3-857d-0d2cf779b02a
version: -1
name: Verify Context Error - Host Activity
description: Prints an error entry with a given message
@@ -1949,10 +1768,10 @@ tasks:
isautoswitchedtoquietmode: false
"338":
id: "338"
- taskid: 75a44135-71ba-4e53-818e-5e4de376218d
+ taskid: 5971b384-b09c-4980-8ec7-1ceb4cbb6f78
type: title
task:
- id: 75a44135-71ba-4e53-818e-5e4de376218d
+ id: 5971b384-b09c-4980-8ec7-1ceb4cbb6f78
version: -1
name: Done verifying context data
type: title
@@ -1968,7 +1787,7 @@ tasks:
{
"position": {
"x": 70,
- "y": -80
+ "y": -60
}
}
note: false
@@ -1980,10 +1799,10 @@ tasks:
isautoswitchedtoquietmode: false
"339":
id: "339"
- taskid: 63efae26-6bfd-4c2d-8882-222d465ebae7
+ taskid: 99ec7208-eb2b-40c1-8e52-c778c4666823
type: condition
task:
- id: 63efae26-6bfd-4c2d-8882-222d465ebae7
+ id: 99ec7208-eb2b-40c1-8e52-c778c4666823
version: -1
name: Verify Attacker Network Activity
description: Verify that the 'Attacker_Network_Activity’ context key was extracted.
@@ -2025,10 +1844,10 @@ tasks:
isautoswitchedtoquietmode: false
"340":
id: "340"
- taskid: e681400e-b63c-4198-8699-e8214ea2de33
+ taskid: a718190c-ed16-4371-821d-8cd5ec70b60f
type: regular
task:
- id: e681400e-b63c-4198-8699-e8214ea2de33
+ id: a718190c-ed16-4371-821d-8cd5ec70b60f
version: -1
name: Verify Context Error - Attacker Network Activity
description: Prints an error entry with a given message
@@ -2061,10 +1880,10 @@ tasks:
isautoswitchedtoquietmode: false
"341":
id: "341"
- taskid: 1746362c-9ca6-458d-8301-a99883f91e3c
+ taskid: 4ec267ac-6971-4506-8780-5119a9a22dc8
type: condition
task:
- id: 1746362c-9ca6-458d-8301-a99883f91e3c
+ id: 4ec267ac-6971-4506-8780-5119a9a22dc8
version: -1
name: Verify Hunt File Hash
description: Verify that the 'Hunt_File_Hash’ context key was extracted.
@@ -2106,10 +1925,10 @@ tasks:
isautoswitchedtoquietmode: false
"342":
id: "342"
- taskid: 42bd6f22-f589-49bb-82d1-3dbfafb44830
+ taskid: 3d1983b0-83c8-457a-8651-dc5bcddda789
type: regular
task:
- id: 42bd6f22-f589-49bb-82d1-3dbfafb44830
+ id: 3d1983b0-83c8-457a-8651-dc5bcddda789
version: -1
name: Verify Context Error - Hunt File Hash
description: Prints an error entry with a given message
@@ -2142,10 +1961,10 @@ tasks:
isautoswitchedtoquietmode: false
"343":
id: "343"
- taskid: 64686c6d-7d1c-4614-8492-bbbf00bc4e01
+ taskid: 143cdd57-6ca1-47e6-883f-6843c52f864a
type: condition
task:
- id: 64686c6d-7d1c-4614-8492-bbbf00bc4e01
+ id: 143cdd57-6ca1-47e6-883f-6843c52f864a
version: -1
name: Verify Reconnaissance
description: Verify that the 'Reconnaissance’ context key was extracted.
@@ -2187,10 +2006,10 @@ tasks:
isautoswitchedtoquietmode: false
"344":
id: "344"
- taskid: 63577ae2-435c-4073-88c7-dc85930eb66f
+ taskid: 5b18790b-9c45-4e93-81bb-57c9f549017f
type: regular
task:
- id: 63577ae2-435c-4073-88c7-dc85930eb66f
+ id: 5b18790b-9c45-4e93-81bb-57c9f549017f
version: -1
name: Verify Context Error - Reconnaissance
description: Prints an error entry with a given message
@@ -2223,10 +2042,10 @@ tasks:
isautoswitchedtoquietmode: false
"345":
id: "345"
- taskid: f1d9af51-59c8-481d-808d-3be964b0a4a2
+ taskid: 9e6908ef-47f4-4377-8d82-35422c1ecbf4
type: condition
task:
- id: f1d9af51-59c8-481d-808d-3be964b0a4a2
+ id: 9e6908ef-47f4-4377-8d82-35422c1ecbf4
version: -1
name: Verify Initial Access
description: Verify that the 'Initial_Access’ context key was extracted.
@@ -2268,10 +2087,10 @@ tasks:
isautoswitchedtoquietmode: false
"346":
id: "346"
- taskid: 42b24657-45f8-466c-8b9b-c46a5f02e1a3
+ taskid: 83ba8ff5-f87c-4b67-8a26-9e9538da8caf
type: regular
task:
- id: 42b24657-45f8-466c-8b9b-c46a5f02e1a3
+ id: 83ba8ff5-f87c-4b67-8a26-9e9538da8caf
version: -1
name: Verify Context Error - Initial Access
description: Prints an error entry with a given message
@@ -2304,10 +2123,10 @@ tasks:
isautoswitchedtoquietmode: false
"347":
id: "347"
- taskid: 433016ae-08b3-4b6f-8e10-6c63f5a6018a
+ taskid: 342a2178-ea03-4c9b-8c4b-a8e69475b84b
type: condition
task:
- id: 433016ae-08b3-4b6f-8e10-6c63f5a6018a
+ id: 342a2178-ea03-4c9b-8c4b-a8e69475b84b
version: -1
name: Verify Execution
description: Verify that the 'Execution’ context key was extracted.
@@ -2349,10 +2168,10 @@ tasks:
isautoswitchedtoquietmode: false
"348":
id: "348"
- taskid: 4db87b53-c5e9-4ec6-8e45-20551021c500
+ taskid: 4ff0104d-a943-4c5f-84dc-b9fe556304a9
type: regular
task:
- id: 4db87b53-c5e9-4ec6-8e45-20551021c500
+ id: 4ff0104d-a943-4c5f-84dc-b9fe556304a9
version: -1
name: Verify Context Error - Execution
description: Prints an error entry with a given message
@@ -2385,10 +2204,10 @@ tasks:
isautoswitchedtoquietmode: false
"349":
id: "349"
- taskid: 1f0f8309-e58c-4e3b-880c-285c6b23542e
+ taskid: 43659daf-28d8-4c0b-8c49-4569e16953d5
type: condition
task:
- id: 1f0f8309-e58c-4e3b-880c-285c6b23542e
+ id: 43659daf-28d8-4c0b-8c49-4569e16953d5
version: -1
name: Verify Persistence
description: Verify that the 'Persistence’ context key was extracted.
@@ -2430,10 +2249,10 @@ tasks:
isautoswitchedtoquietmode: false
"350":
id: "350"
- taskid: 744078e9-826c-4c2b-8f6b-0224dbab0ea5
+ taskid: c12f07b1-95d6-41e3-8098-9d6343472f8f
type: regular
task:
- id: 744078e9-826c-4c2b-8f6b-0224dbab0ea5
+ id: c12f07b1-95d6-41e3-8098-9d6343472f8f
version: -1
name: Verify Context Error - Persistence
description: Prints an error entry with a given message
@@ -2466,10 +2285,10 @@ tasks:
isautoswitchedtoquietmode: false
"351":
id: "351"
- taskid: b114d53b-8d27-4592-8607-40791100430c
+ taskid: 398fd920-d7f5-45ef-8072-7696baf1499a
type: condition
task:
- id: b114d53b-8d27-4592-8607-40791100430c
+ id: 398fd920-d7f5-45ef-8072-7696baf1499a
version: -1
name: Verify Privilege Escalation
description: Verify that the 'Privilege_Escalation’ context key was extracted.
@@ -2511,10 +2330,10 @@ tasks:
isautoswitchedtoquietmode: false
"352":
id: "352"
- taskid: 68e0921b-3283-4c1d-8124-cdc9e15dea57
+ taskid: 9d660e74-d217-415a-863e-87b37a113adf
type: regular
task:
- id: 68e0921b-3283-4c1d-8124-cdc9e15dea57
+ id: 9d660e74-d217-415a-863e-87b37a113adf
version: -1
name: Verify Context Error - Privilege Escalation
description: Prints an error entry with a given message
@@ -2547,10 +2366,10 @@ tasks:
isautoswitchedtoquietmode: false
"353":
id: "353"
- taskid: 2df7361c-bf5c-4e53-8a04-70c23406a98c
+ taskid: 03c432f1-4ee0-4b83-8cb6-d779b932ef36
type: condition
task:
- id: 2df7361c-bf5c-4e53-8a04-70c23406a98c
+ id: 03c432f1-4ee0-4b83-8cb6-d779b932ef36
version: -1
name: Verify Defense Evasion
description: Verify that the 'Defense_Evasion’ context key was extracted.
@@ -2592,10 +2411,10 @@ tasks:
isautoswitchedtoquietmode: false
"354":
id: "354"
- taskid: a5ebcc21-9847-4559-8405-a16ee1e7c39f
+ taskid: f9c27970-a7ad-44af-867a-d08e61429c35
type: regular
task:
- id: a5ebcc21-9847-4559-8405-a16ee1e7c39f
+ id: f9c27970-a7ad-44af-867a-d08e61429c35
version: -1
name: Verify Context Error - Defense Evasion
description: Prints an error entry with a given message
@@ -2628,10 +2447,10 @@ tasks:
isautoswitchedtoquietmode: false
"355":
id: "355"
- taskid: 82eadf53-9db8-45c6-8aa8-f9e1e0d9615e
+ taskid: 7fddcae5-c480-436e-8691-d57dee621725
type: condition
task:
- id: 82eadf53-9db8-45c6-8aa8-f9e1e0d9615e
+ id: 7fddcae5-c480-436e-8691-d57dee621725
version: -1
name: Verify Discovery
description: Verify that the 'Discovery’ context key was extracted.
@@ -2673,10 +2492,10 @@ tasks:
isautoswitchedtoquietmode: false
"356":
id: "356"
- taskid: 4e51b10d-b9d9-4098-860a-30b6fd98762d
+ taskid: ae6d6c06-9d83-404f-809a-8c95e478b4ab
type: regular
task:
- id: 4e51b10d-b9d9-4098-860a-30b6fd98762d
+ id: ae6d6c06-9d83-404f-809a-8c95e478b4ab
version: -1
name: Verify Context Error - Discovery
description: Prints an error entry with a given message
@@ -2709,10 +2528,10 @@ tasks:
isautoswitchedtoquietmode: false
"357":
id: "357"
- taskid: a667191a-b973-4c29-849b-5cbf448c5400
+ taskid: 71e6f2ea-012b-40f3-8eb1-b2a75339f83b
type: condition
task:
- id: a667191a-b973-4c29-849b-5cbf448c5400
+ id: 71e6f2ea-012b-40f3-8eb1-b2a75339f83b
version: -1
name: Verify Lateral Movement
description: Verify that the 'Lateral_Movement’ context key was extracted.
@@ -2754,10 +2573,10 @@ tasks:
isautoswitchedtoquietmode: false
"358":
id: "358"
- taskid: 679b56b4-118c-4ad1-8f28-b6d793afccdd
+ taskid: 88562859-81c6-45f6-8fee-762811cb8090
type: regular
task:
- id: 679b56b4-118c-4ad1-8f28-b6d793afccdd
+ id: 88562859-81c6-45f6-8fee-762811cb8090
version: -1
name: Verify Context Error - Lateral Movement
description: Prints an error entry with a given message
@@ -2790,10 +2609,10 @@ tasks:
isautoswitchedtoquietmode: false
"359":
id: "359"
- taskid: 39f79842-bd00-44b8-80ac-f9bf146d8c84
+ taskid: 7063a409-47b8-46e5-8201-260a2e738faa
type: condition
task:
- id: 39f79842-bd00-44b8-80ac-f9bf146d8c84
+ id: 7063a409-47b8-46e5-8201-260a2e738faa
version: -1
name: Verify Collection
description: Verify that the 'Collection’ context key was extracted.
@@ -2835,10 +2654,10 @@ tasks:
isautoswitchedtoquietmode: false
"360":
id: "360"
- taskid: 6d8b48be-9035-4cef-8dd1-afb24542a4d1
+ taskid: 22e3c82e-1ca2-42b5-8541-ff4ae9953522
type: regular
task:
- id: 6d8b48be-9035-4cef-8dd1-afb24542a4d1
+ id: 22e3c82e-1ca2-42b5-8541-ff4ae9953522
version: -1
name: Verify Context Error - Collection
description: Prints an error entry with a given message
@@ -2871,10 +2690,10 @@ tasks:
isautoswitchedtoquietmode: false
"361":
id: "361"
- taskid: eac34397-328d-4c36-83e2-669c4a82539b
+ taskid: 1de1d3c4-61de-4ff6-86b9-b3f836e16945
type: condition
task:
- id: eac34397-328d-4c36-83e2-669c4a82539b
+ id: 1de1d3c4-61de-4ff6-86b9-b3f836e16945
version: -1
name: Verify Command And Control
description: Verify that the 'Command_And_Control’ context key was extracted.
@@ -2916,10 +2735,10 @@ tasks:
isautoswitchedtoquietmode: false
"362":
id: "362"
- taskid: a09f83f6-5d73-4e49-85ef-7dbe3d5b299b
+ taskid: be37ade9-246c-4354-82b5-43b45d31ce99
type: regular
task:
- id: a09f83f6-5d73-4e49-85ef-7dbe3d5b299b
+ id: be37ade9-246c-4354-82b5-43b45d31ce99
version: -1
name: Verify Context Error - Command And Control
description: Prints an error entry with a given message
@@ -2952,10 +2771,10 @@ tasks:
isautoswitchedtoquietmode: false
"363":
id: "363"
- taskid: b224c7bb-9460-4b5b-8154-7dbc4ab21d76
+ taskid: f9b03b7b-fab8-4864-8a29-f81443b1f84e
type: condition
task:
- id: b224c7bb-9460-4b5b-8154-7dbc4ab21d76
+ id: f9b03b7b-fab8-4864-8a29-f81443b1f84e
version: -1
name: Verify Impact Techniques
description: Verify that the 'Impact_techniques’ context key was extracted.
@@ -2997,10 +2816,10 @@ tasks:
isautoswitchedtoquietmode: false
"364":
id: "364"
- taskid: 41f55b8b-0ae9-4226-8873-cc565286d4c9
+ taskid: e52d2539-d961-4f22-8dad-e206e833fb0d
type: regular
task:
- id: 41f55b8b-0ae9-4226-8873-cc565286d4c9
+ id: e52d2539-d961-4f22-8dad-e206e833fb0d
version: -1
name: Verify Context Error - Impact Techniques
description: Prints an error entry with a given message
@@ -3033,10 +2852,10 @@ tasks:
isautoswitchedtoquietmode: false
"365":
id: "365"
- taskid: 2d34789e-851a-441d-8f36-0709b085819a
+ taskid: d7726820-8235-4fff-8a7c-2f56d08c9e26
type: title
task:
- id: 2d34789e-851a-441d-8f36-0709b085819a
+ id: d7726820-8235-4fff-8a7c-2f56d08c9e26
version: -1
name: Done
type: title
@@ -3049,7 +2868,7 @@ tasks:
{
"position": {
"x": 540,
- "y": 250
+ "y": 140
}
}
note: false
@@ -3061,20 +2880,20 @@ tasks:
isautoswitchedtoquietmode: false
"366":
id: "366"
- taskid: a6713fd8-827f-4339-845b-def430819831
+ taskid: 6513f3c7-2ebb-433b-8d24-e65d67ff1f17
type: condition
task:
- id: a6713fd8-827f-4339-845b-def430819831
+ id: 6513f3c7-2ebb-433b-8d24-e65d67ff1f17
version: -1
- name: Is the PaloAltoNetworksXDR Alert key defined?
+ name: Are the PaloAltoNetworksXDR Alert and Mitre keys defined?
type: condition
iscommand: false
brand: ""
nexttasks:
'#default#':
- - "113"
+ - "365"
"yes":
- - "112"
+ - "84"
separatecontext: false
conditions:
- label: "yes"
@@ -3084,12 +2903,24 @@ tasks:
value:
simple: PaloAltoNetworksXDR.Alert
iscontext: true
+ right:
+ value: {}
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Alert.mitre_technique_id_and_name
+ iscontext: true
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: PaloAltoNetworksXDR.Alert.mitre_tactic_id_and_name
+ iscontext: true
continueonerrortype: ""
view: |-
{
"position": {
- "x": 1765,
- "y": -420
+ "x": 540,
+ "y": -850
}
}
note: false
@@ -3103,16 +2934,8 @@ view: |-
{
"linkLabelsPosition": {
"220_338_Verified": 0.16,
- "294_113_ Verified": 0.11,
- "296_113_ Verified": 0.1,
- "298_113_ Verified": 0.1,
- "300_113_ Verified": 0.1,
- "302_113_ Verified": 0.1,
- "304_113_ Verified": 0.1,
- "306_113_ Verified": 0.1,
- "308_113_ Verified": 0.1,
- "310_113_ Verified": 0.1,
- "312_113_ Verified": 0.1,
+ "306_113_ Verified": 0.73,
+ "312_113_ Verified": 0.72,
"336_338_Verified": 0.2,
"339_338_Verified": 0.1,
"341_338_Verified": 0.1,
@@ -3127,19 +2950,15 @@ view: |-
"359_338_Verified": 0.1,
"361_338_Verified": 0.1,
"363_338_Verified": 0.1,
- "366_112_yes": 0.55,
- "46_113_Verified": 0.1,
- "73_113_Verified": 0.3,
- "73_116_#default#": 0.7,
- "74_113_ Verified": 0.1,
- "75_113_ Verified": 0.1
+ "366_365_#default#": 0.19,
+ "73_116_#default#": 0.7
},
"paper": {
"dimensions": {
- "height": 1780,
- "width": 13370,
+ "height": 1830,
+ "width": 12680,
"x": -5930,
- "y": -1465
+ "y": -1625
}
}
}
diff --git a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Incidents.json b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Incidents.json
index 864aef65a60b..dbd2ff5400c0 100644
--- a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Incidents.json
+++ b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Incidents.json
@@ -21,5 +21,8 @@
"id": "c89bc765-50a9-479f-88e5-d256e09788c5",
"widgetType": "text",
"fromVersion": "6.2.0",
- "description": ""
+ "description": "",
+ "marketplaces": [
+ "xsoar_on_prem"
+ ]
}
\ No newline at end of file
diff --git a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Scatter.json b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Scatter.json
index 5d7df2e4f7b0..e0a24c91c922 100644
--- a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Scatter.json
+++ b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Scatter.json
@@ -16,5 +16,8 @@
"id": "5827f988-df26-42a5-8c46-18b3af444702",
"widgetType": "scatter",
"fromVersion": "6.2.0",
- "description": ""
+ "description": "",
+ "marketplaces": [
+ "xsoar_on_prem"
+ ]
}
\ No newline at end of file
diff --git a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Summary.json b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Summary.json
index f5e3bece8fc1..3186a53a2222 100644
--- a/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Summary.json
+++ b/Packs/CortexXDR/Widgets/Widget-Cortex_XDR_Groups_Summary.json
@@ -21,5 +21,8 @@
"id": "ccf4d070-47f3-4559-8563-a63240f17f74",
"widgetType": "text",
"fromVersion": "6.2.0",
- "description": ""
+ "description": "",
+ "marketplaces": [
+ "xsoar_on_prem"
+ ]
}
\ No newline at end of file
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png b/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png
index c39c3ef84187..bf926a793da3 100644
Binary files a/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png and b/Packs/CortexXDR/doc_files/Cortex_XDR_Alerts_Handling_v2.png differ
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_disconnected_endpoints.png b/Packs/CortexXDR/doc_files/Cortex_XDR_disconnected_endpoints.png
index 2af2b9bd2f76..54e1d82a6251 100644
Binary files a/Packs/CortexXDR/doc_files/Cortex_XDR_disconnected_endpoints.png and b/Packs/CortexXDR/doc_files/Cortex_XDR_disconnected_endpoints.png differ
diff --git a/Packs/CortexXDR/pack_metadata.json b/Packs/CortexXDR/pack_metadata.json
index d437a14ee015..5f2aeb91f77a 100644
--- a/Packs/CortexXDR/pack_metadata.json
+++ b/Packs/CortexXDR/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex XDR by Palo Alto Networks",
"description": "Automates Cortex XDR incident response, and includes custom Cortex XDR incident views and layouts to aid analyst investigations.",
"support": "xsoar",
- "currentVersion": "6.1.43",
+ "currentVersion": "6.1.54",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexXpanse/.secrets-ignore b/Packs/CortexXpanse/.secrets-ignore
index edab5b1fb3b2..6781d45c158f 100644
--- a/Packs/CortexXpanse/.secrets-ignore
+++ b/Packs/CortexXpanse/.secrets-ignore
@@ -3,4 +3,7 @@ inferredCveMatchType
noc@acme.com
cs@acme.com
2600:1900:4000:9664:0:7
-00:11:22:33:44:55
\ No newline at end of file
+00:11:22:33:44:55
+192.168.1.1
+192.168.1.2
+https://test.com
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Author_image.png b/Packs/CortexXpanse/Author_image.png
deleted file mode 100644
index 249fc6f403d6..000000000000
Binary files a/Packs/CortexXpanse/Author_image.png and /dev/null differ
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json
index 4ddfdcae4279..17db8f1aad23 100644
--- a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json
@@ -46,6 +46,19 @@
"type": "shortText",
"width": 150
},
+ {
+ "displayName": "tags",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "tags",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
{
"displayName": "explainers",
"fieldCalcScript": "",
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json
index 3774ebc5a366..3a59fcdc830b 100644
--- a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json
@@ -46,6 +46,19 @@
"type": "shortText",
"width": 150
},
+ {
+ "displayName": "tags",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "tags",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
{
"displayName": "explainers",
"fieldCalcScript": "",
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json
index 822546907b0b..85366233f0fe 100644
--- a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json
@@ -46,6 +46,19 @@
"type": "shortText",
"width": 150
},
+ {
+ "displayName": "tags",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "tags",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
{
"displayName": "explainers",
"fieldCalcScript": "",
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json
index c54197dc4145..2ab7850cbbd2 100644
--- a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json
@@ -46,6 +46,19 @@
"type": "shortText",
"width": 150
},
+ {
+ "displayName": "tags",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "tags",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
{
"displayName": "explainers",
"fieldCalcScript": "",
diff --git a/Packs/CortexXpanse/IncidentTypes/incidenttype-Xpanse_Alert6.8.json b/Packs/CortexXpanse/IncidentTypes/incidenttype-Xpanse_Alert6.8.json
index 4999f977a3f9..27a4aa81be5c 100644
--- a/Packs/CortexXpanse/IncidentTypes/incidenttype-Xpanse_Alert6.8.json
+++ b/Packs/CortexXpanse/IncidentTypes/incidenttype-Xpanse_Alert6.8.json
@@ -1,5 +1,5 @@
{
- "autorun": false,
+ "autorun": true,
"color": "#3C64C5",
"days": 0,
"daysR": 0,
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
index e3a319fa52dd..4bec1bda637f 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
@@ -3,6 +3,8 @@
from typing import Any, cast
+from datetime import datetime, timedelta
+import ipaddress
import urllib3
@@ -14,6 +16,8 @@
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
V1_URL_SUFFIX = "/public_api/v1"
V2_URL_SUFFIX = "/public_api/v2"
+PACK_VERSION = get_pack_version()
+DEMISTO_VERSION = demisto.demistoVersion()
SEVERITY_DICT = {
'informational': IncidentSeverity.INFO,
'low': IncidentSeverity.LOW,
@@ -288,10 +292,66 @@ def get_external_websites(self, request_data: Dict[str, Any]) -> Dict[str, Any]:
return response
+ def add_note_to_asset(self, asm_asset_id: str, entity_type: str, annotation_note: str, should_append: bool) -> dict[str, Any]:
+ """Adds an annotation (also called a note) to an asset or IP range
+ using the /assets/assets_internet_exposure/annotation endpoint.
+
+ Args:
+ asm_asset_id (str): The Xpanse asset ID.
+            entity_type (str): The type of Xpanse asset. Allowed values: 'asset' or 'ip_range'.
+            annotation_note (str): The custom note to be added to the notes section of the asset in Xpanse.
+            should_append (bool): Whether to append the note to the asset's existing annotations instead of replacing them.
+
+ Returns:
+ dict[str, Any]: a response that indicates if adding the note succeeded.
+ """
+ data = {
+ "request_data":
+ {"assets":
+ [{"entity_id": asm_asset_id,
+ "entity_type": entity_type,
+ "annotation": annotation_note
+ }],
+ "should_append": should_append
+ }
+ }
+
+ response = self._http_request('POST', f'{V1_URL_SUFFIX}/assets/assets_internet_exposure/annotation', json_data=data)
+
+ return response
+
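A minimal usage sketch of the new annotation helper above, not part of the diff itself; `client` is assumed to be an already-configured CortexXpanse Client instance, and the asset ID and note text are placeholders:

    # Hypothetical call to Client.add_note_to_asset (illustrative values only).
    reply = client.add_note_to_asset(
        asm_asset_id="example-asset-id",    # placeholder Xpanse asset ID
        entity_type="asset",                # 'asset' or 'ip_range'
        annotation_note="Reviewed by SOC",  # note text to attach in Xpanse
        should_append=True,                 # keep any existing annotations
    )
    # The endpoint reports success under the 'reply' key of the response.
    demisto.debug(str(reply.get("reply", {})))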
''' HELPER FUNCTIONS '''
+def is_timestamp_within_days(timestamp, days: int):
+    """Checks whether the given timestamp falls within the last `days` days.
+
+    Args:
+        timestamp (str): An ISO-8601 style timestamp,
+            e.g. '2024-01-01T12:00:00.123456Z'.
+        days (int): The size of the freshness window, in days.
+
+    Returns:
+        bool: True if the timestamp is within the last `days` days, False otherwise.
+    """
+ timestamp = timestamp.replace(" ", "").replace("Z", "")
+ date_part, time_part = timestamp.split('T')
+ main_time, fractional_seconds = time_part.split('.')
+ fractional_seconds = fractional_seconds[:6]
+ timestamp = f"{date_part}T{main_time}.{fractional_seconds}"
+ target_time = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f')
+
+ current_time = datetime.now()
+ time_difference = current_time - target_time
+
+ if time_difference >= timedelta(days=days):
+ demisto.debug(f"The timestamp was not within the last {days} days.")
+ return False
+ else:
+ demisto.debug(f"The timestamp was within the last {days} days.")
+ return True
+
+
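A brief illustrative check of the freshness helper above; the timestamp is a placeholder in the same format as the `insightCache.modified` field of an XSOAR indicator:

    # Hypothetical check (placeholder timestamp); returns True only if the given
    # time is within the last 3 days relative to datetime.now().
    is_fresh = is_timestamp_within_days("2024-01-01T12:00:00.123456Z", days=3)
    demisto.debug("indicator considered fresh: {}".format(is_fresh))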
def append_search_param(search_params, field, operator, value):
"""
Appends a search parameter to the given list of search parameters.
@@ -666,7 +726,7 @@ def list_asset_internet_exposure_command(client: Client, args: dict[str, Any]) -
append_search_param(search_params, "business_units_list", "in", str(business_units_list).split(","))
if has_bu_overrides:
- append_search_param(search_params, "has_bu_overrides", "eq", False if has_bu_overrides.lower() == 'false' else True)
+ append_search_param(search_params, "has_bu_overrides", "eq", has_bu_overrides.lower() != 'false')
if mac_addresses:
append_search_param(search_params, "mac_addresses", "contains", mac_addresses)
@@ -952,6 +1012,8 @@ def list_incidents_command(client: Client, args: dict[str, Any]) -> CommandResul
incident_id_list = argToList(args.get('incident_id_list'))
description = args.get('description')
status = args.get('status')
+ starred = args.get('starred')
+ cloud_management_status = args.get('cloud_management_status')
lte_creation_time = args.get('lte_creation_time')
gte_creation_time = args.get('gte_creation_time')
sort_by_creation_time = args.get('sort_by_creation_time')
@@ -976,6 +1038,10 @@ def list_incidents_command(client: Client, args: dict[str, Any]) -> CommandResul
search_params.append({"field": "description", "operator": "contains", "value": description})
if status:
search_params.append({"field": "status", "operator": "eq", "value": status})
+ if starred:
+ search_params.append({"field": "starred", "operator": "eq", "value": starred})
+ if cloud_management_status:
+ search_params.append({"field": "cloud_management_status", "operator": "eq", "value": cloud_management_status})
if lte_creation_time:
search_params.append({
'field': 'creation_time',
@@ -1181,6 +1247,7 @@ def update_alert_command(client: Client, args: dict[str, Any]) -> CommandResults
alert_id_list = argToList(args.get('alert_id_list'))
severity = args.get('severity')
status = args.get('status')
+    comment = args.get('comment')
update_params = {"update_data": {}} # type: ignore
if alert_id_list:
@@ -1195,6 +1262,8 @@ def update_alert_command(client: Client, args: dict[str, Any]) -> CommandResults
update_params["update_data"]["status"] = status
else:
raise ValueError(f'status must be one of {ALERT_STATUSES}')
+ if comment:
+ update_params["update_data"]["comment"] = comment
response = client.update_alert_request(request_data=update_params)
@@ -1209,6 +1278,46 @@ def update_alert_command(client: Client, args: dict[str, Any]) -> CommandResults
return command_results
+def add_note_to_asset_command(client: Client, args: dict[str, Any]) -> CommandResults:
+ """Adds an annotation (also called a note) to an asset or IP range
+ using the /assets/assets_internet_exposure/annotation endpoint.
+
+ Args:
+ client (Client): CortexXpanse client to use.
+ args (dict): all command arguments, usually passed from demisto.args().
+ args['asset_id'] (str): The Xpanse asset ID.
+        args['entity_type'] (str): The type of Xpanse asset. Allowed values: 'asset' or 'ip_range'.
+        args['note_to_add'] (str): The custom note to be added to the notes section of the asset in Xpanse.
+        args['should_append'] (str): Whether to append the note to the asset's existing annotations instead of replacing them.
+
+    Returns:
+        CommandResults: A CommandResults demisto object that is then passed to return_results.
+ """
+ asset_id = str(args.get('asset_id'))
+ entity_type = str(args.get('entity_type'))
+ note_to_add = str(args.get('note_to_add'))
+ should_append = argToBoolean(args.get('should_append'))
+
+ response = client.add_note_to_asset(asm_asset_id=asset_id,
+ entity_type=entity_type,
+ annotation_note=note_to_add,
+ should_append=should_append)
+ response_message = {"status": response.get('reply', {})}
+ response_message['asset'] = asset_id
+ markdown = tableToMarkdown('Add Note to Asset Command Results:',
+ response_message.get('status'),
+ headers=['Status'],
+ removeNull=True)
+ command_results = CommandResults(
+ outputs_prefix='ASM.AssetAnnotation',
+ outputs_key_field='',
+ outputs=response_message,
+ raw_response=response,
+ readable_output=markdown
+ )
+
+ return command_results
+
+
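For context, the command wrapper above is expected to be driven by `demisto.args()`; a sketch with placeholder argument values, assuming a configured `client`:

    # Hypothetical invocation; keys mirror the args the command reads via args.get().
    results = add_note_to_asset_command(client, {
        "asset_id": "example-asset-id",
        "entity_type": "ip_range",
        "note_to_add": "Range owned by IT - reviewed",
        "should_append": "true",   # argToBoolean also accepts string booleans
    })
    return_results(results)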
def ip_command(client: Client, args: dict[str, Any]) -> list[CommandResults]:
"""
ip command returns enrichment for an IP address.
@@ -1225,50 +1334,137 @@ def ip_command(client: Client, args: dict[str, Any]) -> list[CommandResults]:
if len(ips) == 0:
raise ValueError('ip(s) not specified')
- # trim down the list to the max number of supported results
if len(ips) > DEFAULT_SEARCH_LIMIT:
ips = ips[:DEFAULT_SEARCH_LIMIT]
- ip_data_list: list[dict[str, Any]] = []
+ xpanse_ip_list_command_output: list[dict[str, Any]] = []
+ xsoar_xpanse_indicator_list_command_output: list[dict[str, Any]] = []
+ xsoar_indicator_list_command_output: list[dict[str, Any]] = []
command_results = []
+ ips_not_found = []
+
for ip in ips:
- search_params = [{"field": "ip_address", "operator": "eq", "value": ip}]
- ip_data = client.list_asset_internet_exposure_request(search_params=search_params)
- formatted_response = ip_data.get("reply", {}).get("assets_internet_exposure", {})
- if len(formatted_response) > 0:
- formatted_response = formatted_response[0]
+ is_xsoar_timestamp_within_three_days = None
+ xsoar_ips_of_indicators = []
+ xsoar_indicators = []
+
+ ip_version_type = ipaddress.ip_address(ip).version
+
+ if ip_version_type == 4:
+ search_xsoar_indicator_results = demisto.searchIndicators(query=f"{ip} type:IP")
+ search_params = [{"field": "ip_address", "operator": "eq", "value": ip}]
+ elif ip_version_type == 6:
+ search_xsoar_indicator_results = demisto.searchIndicators(query=f"{ip} type:IPv6")
+ search_params = [{"field": "ipv6_address", "operator": "eq", "value": ip}]
else:
+ ips_not_found.append(ip)
continue
- formatted_response['ip'] = ip
- ip_standard_context = Common.IP(
- ip=ip,
- dbot_score=Common.DBotScore(
- indicator=ip,
- indicator_type=DBotScoreType.IP,
- integration_name="CortexXpanse",
- score=Common.DBotScore.NONE,
- reliability=demisto.params().get('integration_reliability')
- ),
- hostname=formatted_response.get("domain", "N/A")
- )
+ if "total" in search_xsoar_indicator_results and search_xsoar_indicator_results.get('total') != 0:
+ xsoar_indicators = search_xsoar_indicator_results.get('iocs')
+ if not isinstance(xsoar_indicators, list):
+ xsoar_indicators = [xsoar_indicators]
+ xsoar_ips_of_indicators = [entry['value'] for entry in xsoar_indicators if 'value' in entry]
+
+ if ip in xsoar_ips_of_indicators:
+ xsoar_indicators = [entry for entry in xsoar_indicators if entry.get('value') == ip]
+ if len(xsoar_indicators) == 1 and "insightCache" in xsoar_indicators[0]:
+ indicator_timestamp = xsoar_indicators[0].get('insightCache').get('modified')
+ is_xsoar_timestamp_within_three_days = is_timestamp_within_days(timestamp=indicator_timestamp, days=3)
+
+ if xsoar_indicators and is_xsoar_timestamp_within_three_days and "insightCache" in xsoar_indicators[0]:
+ insight_cache = xsoar_indicators[0].get('insightCache')
+ score_data = insight_cache.get('scores')
+ if score_data:
+ cortex_xpanse_score = score_data.get('Cortex Xpanse')
+ if cortex_xpanse_score:
+ xpanse_indicator_data_subset = {
+ 'name': xsoar_indicators[0].get('value'),
+ 'indicator_type': xsoar_indicators[0].get('indicator_type'),
+ 'score': xsoar_indicators[0].get('score'),
+ 'reliability': cortex_xpanse_score.get('reliability'),
+ 'id': xsoar_indicators[0].get('id')
+ }
+ xsoar_xpanse_indicator_list_command_output.append(xpanse_indicator_data_subset)
+ elif insight_cache and not cortex_xpanse_score:
+ indicator_sources: list = xsoar_indicators[0].get('sourceBrands')
+ non_xpanse_indicator_data_subset = {
+ 'name': xsoar_indicators[0].get('value'),
+ 'integrations': indicator_sources
+ }
+ xsoar_indicator_list_command_output.append(non_xpanse_indicator_data_subset)
+ elif not is_xsoar_timestamp_within_three_days:
+ ip_data = client.list_asset_internet_exposure_request(search_params=search_params)
+ formatted_response = ip_data.get("reply", {}).get("assets_internet_exposure", {})
+ if len(formatted_response) > 0:
+ formatted_response = formatted_response[0]
+ else:
+ ips_not_found.append(ip)
+ continue
+ formatted_response['ip'] = ip
+
+ xpanse_ip_list_command_output.append({
+ k: formatted_response.get(k) for k in formatted_response if k in ASSET_HEADER_HEADER_LIST
+ })
+ else:
+ ips_not_found.append(ip)
+
+ xpanse_api_response_ip_list = [entry['ip'] for entry in xpanse_ip_list_command_output if 'ip' in entry]
+
+ if ip in xpanse_api_response_ip_list:
+ ip_standard_context = Common.IP(
+ ip=ip,
+ dbot_score=Common.DBotScore(
+ indicator=ip,
+ indicator_type=DBotScoreType.IP,
+ integration_name="CortexXpanse",
+ score=Common.DBotScore.NONE,
+ reliability=demisto.params().get('integration_reliability')
+ )
+ )
+ command_results.append(CommandResults(
+ readable_output=tableToMarkdown("IP indicator was found from Xpanse API", {"IP": ip}),
+ indicator=ip_standard_context
+ ))
+
+ if len(xpanse_ip_list_command_output) > 0:
+ readable_output = tableToMarkdown('Xpanse Discovered IP List', xpanse_ip_list_command_output)
command_results.append(CommandResults(
- readable_output=tableToMarkdown("New IP indicator was found", {"IP": ip}),
- indicator=ip_standard_context
+ readable_output=readable_output,
+ outputs_prefix='ASM.IP',
+ outputs_key_field=['name', 'asset_type'],
+ outputs=xpanse_ip_list_command_output,
+ raw_response=xpanse_ip_list_command_output
))
- ip_data_list.append({
- k: formatted_response.get(k) for k in formatted_response if k in ASSET_HEADER_HEADER_LIST
- })
+ if len(xsoar_indicator_list_command_output) > 0:
+        markdown_body = ("This IP list is from existing records found in XSOAR within the last 3 days.\n"
+                         "These IPs have not been found to be attributed to Xpanse.")
+ readable_output = tableToMarkdown("XSOAR Indicator Discovered IP List (Not Related to Xpanse)\n" + markdown_body,
+ xsoar_indicator_list_command_output)
+ command_results.append(CommandResults(
+ readable_output=readable_output
+ ))
+
+ if len(xsoar_xpanse_indicator_list_command_output) > 0:
+        markdown_body = ("This IP list is from existing records found in XSOAR within the last 3 days.\n"
+                         "If you would like additional Xpanse-specific information about these, please use "
+ "`asm-list-asset-internet-exposure`.")
+ readable_output = tableToMarkdown(name="Xpanse Discovered IP List (Existing Indicators)\n" + markdown_body,
+ t=xsoar_xpanse_indicator_list_command_output)
+ command_results.append(CommandResults(
+ readable_output=readable_output,
+ outputs_prefix='ASM.TIM.IP',
+ outputs_key_field='name',
+ outputs=xsoar_xpanse_indicator_list_command_output,
+ raw_response=xsoar_xpanse_indicator_list_command_output
+ ))
+
+ if ips_not_found:
+ command_results.append(CommandResults(
+ readable_output=tableToMarkdown(name="IPs Not Found", t={"ip": ips_not_found})
+ ))
- readable_output = tableToMarkdown(
- 'Xpanse IP List', ip_data_list) if len(ip_data_list) > 0 else "## No IPs found"
- command_results.append(CommandResults(
- readable_output=readable_output,
- outputs_prefix='ASM.IP',
- outputs_key_field=['ip', 'asset_type'],
- outputs=ip_data_list if len(ip_data_list) > 0 else None,
- ))
return command_results
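The reworked ip command above (and the domain command below) now prefers indicator data already in XSOAR over a fresh API call; a condensed sketch of that decision, using the same `searchIndicators` response fields the code relies on and a placeholder IP:

    # Condensed reuse-or-refetch sketch (placeholder IP value).
    res = demisto.searchIndicators(query="192.0.2.1 type:IP")
    iocs = res.get("iocs") or []
    is_fresh = bool(iocs) and "insightCache" in iocs[0] and is_timestamp_within_days(
        iocs[0]["insightCache"]["modified"], days=3)
    # is_fresh -> summarize the existing indicator; otherwise -> query the Xpanse
    # assets_internet_exposure API via client.list_asset_internet_exposure_request().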
@@ -1288,55 +1484,143 @@ def domain_command(client: Client, args: dict[str, Any]) -> list[CommandResults]
if len(domains) == 0:
raise ValueError('domains(s) not specified')
- # trim down the list to the max number of supported results
if len(domains) > DEFAULT_SEARCH_LIMIT:
domains = domains[:DEFAULT_SEARCH_LIMIT]
- domain_data_list: list[dict[str, Any]] = []
+ xpanse_domain_list_command_output: list[dict[str, Any]] = []
+ xsoar_xpanse_indicator_list_command_output: list[dict[str, Any]] = []
+ xsoar_indicator_list_command_output: list[dict[str, Any]] = []
command_results = []
- for domain in domains:
- search_params = [{"field": "name", "operator": "eq", "value": domain}]
- domain_data = client.list_asset_internet_exposure_request(search_params=search_params)
- formatted_response = domain_data.get("reply", {}).get("assets_internet_exposure", {})
- if len(formatted_response) > 0:
- formatted_response = formatted_response[0]
- else:
- continue
+ is_xsoar_timestamp_within_three_days = None
+ domains_not_found = []
- formatted_response['domain'] = domain
+ for domain in domains:
+ xsoar_indicators = []
+ xsoar_domains_of_indicators = []
+ is_xsoar_timestamp_within_three_days = False
if domain.startswith('*.'):
- indicator_type = DBotScoreType.DOMAINGLOB
+ search_xsoar_indicator_results = demisto.searchIndicators(query=f"{domain} type:DomainGlob")
else:
- indicator_type = DBotScoreType.DOMAIN
-
- domain_standard_context = Common.Domain(
- domain=domain,
- dbot_score=Common.DBotScore(
- indicator=domain,
- indicator_type=indicator_type,
- integration_name="CortexXpanse",
- score=Common.DBotScore.NONE,
- reliability=demisto.params().get('integration_reliability')
+ search_xsoar_indicator_results = demisto.searchIndicators(query=f"{domain} type:Domain")
+
+ if "total" in search_xsoar_indicator_results and search_xsoar_indicator_results.get('total') != 0:
+ xsoar_indicators = search_xsoar_indicator_results.get('iocs')
+ if not isinstance(xsoar_indicators, list):
+ xsoar_indicators = [xsoar_indicators]
+ xsoar_domains_of_indicators = [entry['value'] for entry in xsoar_indicators if 'value' in entry]
+
+ if domain in xsoar_domains_of_indicators:
+ xsoar_indicators = [entry for entry in xsoar_indicators if entry.get('value') == domain]
+ if len(xsoar_indicators) == 1 and "insightCache" in xsoar_indicators[0]:
+ indicator_timestamp = xsoar_indicators[0].get('insightCache').get('modified')
+ is_xsoar_timestamp_within_three_days = is_timestamp_within_days(timestamp=indicator_timestamp, days=3)
+
+ if xsoar_indicators and is_xsoar_timestamp_within_three_days and "insightCache" in xsoar_indicators[0]:
+ insight_cache = xsoar_indicators[0].get('insightCache')
+ score_data = insight_cache.get('scores')
+ if score_data:
+ cortex_xpanse_score = score_data.get('Cortex Xpanse')
+ if cortex_xpanse_score:
+ indicator_data_subset = {
+ 'name': xsoar_indicators[0].get('value'),
+ 'indicator_type': xsoar_indicators[0].get('indicator_type'),
+ 'score': xsoar_indicators[0].get('score'),
+ 'reliability': cortex_xpanse_score.get('reliability'),
+ 'id': xsoar_indicators[0].get('id')
+ }
+ xpanse_indicator_data = indicator_data_subset
+ xsoar_xpanse_indicator_list_command_output.append(xpanse_indicator_data)
+ elif insight_cache and not cortex_xpanse_score:
+ indicator_sources: list = xsoar_indicators[0].get('sourceBrands')
+ indicator_data_subset = {
+ 'name': xsoar_indicators[0].get('value'),
+ 'integrations': indicator_sources
+ }
+ non_xpanse_indicator_data = indicator_data_subset
+ xsoar_indicator_list_command_output.append(non_xpanse_indicator_data)
+ elif not is_xsoar_timestamp_within_three_days:
+ search_params = [
+ {"field": "name", "operator": "eq", "value": domain},
+ {"field": "type", "operator": "in", "value": ['domain']}
+ ]
+ domain_data = client.list_asset_internet_exposure_request(search_params=search_params)
+ formatted_response = domain_data.get("reply", {}).get("assets_internet_exposure", {})
+ if len(formatted_response) > 0:
+ formatted_response = formatted_response[0]
+ else:
+ domains_not_found.append(domain)
+ continue
+ formatted_response['domain'] = domain
+
+ xpanse_domain_list_command_output.append({
+ k: formatted_response.get(k) for k in formatted_response if k in ASSET_HEADER_HEADER_LIST
+ })
+ else:
+ domains_not_found.append(domain)
+
+ xpanse_api_response_domain_list = [entry['domain'] for entry in xpanse_domain_list_command_output if 'domain' in entry]
+
+ if domain in xpanse_api_response_domain_list:
+ if domain.startswith('*.'):
+ indicator_type = DBotScoreType.DOMAINGLOB
+ else:
+ indicator_type = DBotScoreType.DOMAIN
+
+ domain_standard_context = Common.Domain(
+ domain=domain,
+ dbot_score=Common.DBotScore(
+ indicator=domain,
+ indicator_type=indicator_type,
+ integration_name="CortexXpanse",
+ score=Common.DBotScore.NONE,
+ reliability=demisto.params().get('integration_reliability')
+ )
)
- )
+
+ command_results.append(CommandResults(
+ readable_output=tableToMarkdown("Domain indicator was found from Xpanse API", {"domain": domain}),
+ indicator=domain_standard_context
+ ))
+
+ if len(xpanse_domain_list_command_output) > 0:
+ readable_output = tableToMarkdown('Xpanse Discovered Domain List', xpanse_domain_list_command_output)
command_results.append(CommandResults(
- readable_output=tableToMarkdown("New Domain indicator was found", {"Domain": domain}),
- indicator=domain_standard_context
+ readable_output=readable_output,
+ outputs_prefix='ASM.Domain',
+ outputs_key_field=['name', 'asset_type'],
+ outputs=xpanse_domain_list_command_output,
+ raw_response=xpanse_domain_list_command_output
))
- domain_data_list.append({
- k: formatted_response.get(k) for k in formatted_response if k in ASSET_HEADER_HEADER_LIST
- })
+ if len(xsoar_indicator_list_command_output) > 0:
+ markdown_body = ("This domain list is from existing records found in XSOAR within the last 3 days.\n"
+ "These domains have not been found to be attributed to Xpanse`.")
+ readable_output = tableToMarkdown("XSOAR Indicator Discovered Domain List (Not Related to Xpanse)\n" + markdown_body,
+ xsoar_indicator_list_command_output)
+ command_results.append(CommandResults(
+ readable_output=readable_output
+ ))
+
+ if len(xsoar_xpanse_indicator_list_command_output) > 0:
+ markdown_body = ("This domain list is from existing records found in XSOAR within the last 3 days.\n"
+ "If you would additional Xpanse specific information about these please use"
+ " `asm-list-asset-internet-exposure`.")
+ readable_output = tableToMarkdown(name="Xpanse Discovered Domain List (Existing Indicators)\n" + markdown_body,
+ t=xsoar_xpanse_indicator_list_command_output)
+ command_results.append(CommandResults(
+ readable_output=readable_output,
+ outputs_prefix='ASM.TIM.Domain',
+ outputs_key_field='name',
+ outputs=xsoar_xpanse_indicator_list_command_output,
+ raw_response=xsoar_xpanse_indicator_list_command_output
+ ))
+
+ if domains_not_found:
+ command_results.append(CommandResults(
+ readable_output=tableToMarkdown(name="Domains Not Found", t={"domain": domains_not_found})
+ ))
- readable_output = tableToMarkdown(
- 'Xpanse Domain List', domain_data_list) if len(domain_data_list) > 0 else "## No Domains found"
- command_results.append(CommandResults(
- readable_output=readable_output,
- outputs_prefix='ASM.Domain',
- outputs_key_field=['name', 'asset_type'],
- outputs=domain_data_list if len(domain_data_list) > 0 else None,
- ))
return command_results
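
The freshness gate in the rewritten `domain_command` above relies on an `is_timestamp_within_days` helper that this hunk calls but does not define. A minimal sketch of such a helper, assuming `insightCache.modified` is an ISO-8601 string with a trailing `Z` (as the tests later in this diff construct it); the helper that actually ships in CortexXpanse.py may differ:

```python
from datetime import datetime, timedelta, timezone


def is_timestamp_within_days(timestamp, days=3):
    """Return True if the ISO-8601 timestamp falls within the last `days` days (sketch only)."""
    # Normalize a trailing 'Z' so datetime.fromisoformat() accepts the UTC offset.
    parsed = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
    cutoff = datetime.now(timezone.utc) - timedelta(days=days)
    return parsed >= cutoff
```
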
@@ -1474,7 +1758,8 @@ def main() -> None:
headers = {
'Authorization': f'{api}',
'x-xdr-auth-id': f'{auth_id}',
- 'Content-Type': 'application/json'
+ 'Content-Type': 'application/json',
+ "User-Agent": f"Cortex Xpanse Integration Pack/{PACK_VERSION} XSOAR/{DEMISTO_VERSION}"
}
proxy = params.get('proxy', False)
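
The new `User-Agent` header interpolates `PACK_VERSION` and `DEMISTO_VERSION`, which are defined outside this hunk. A plausible way to populate them, assuming the CommonServerPython helpers `get_pack_version()` and `get_demisto_version_as_str()` are used; the integration may derive these constants differently:

```python
from CommonServerPython import get_demisto_version_as_str, get_pack_version

# Module-level constants consumed by the User-Agent header built in main().
# get_pack_version() returns an empty string when the pack version cannot be
# resolved, so fall back to a readable placeholder instead of an empty token.
PACK_VERSION = get_pack_version() or 'unknown'
DEMISTO_VERSION = get_demisto_version_as_str()
```
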
@@ -1499,25 +1784,26 @@ def main() -> None:
demisto.debug(demisto.debug(f"CortexXpanse - Integration Severity: {severity}"))
commands = {
- 'asm-list-external-service': list_external_service_command,
- 'asm-get-external-service': get_external_service_command,
- 'asm-list-external-ip-address-range': list_external_ip_address_range_command,
- 'asm-get-external-ip-address-range': get_external_ip_address_range_command,
- 'asm-list-asset-internet-exposure': list_asset_internet_exposure_command,
+ 'asm-add-note-to-asset': add_note_to_asset_command,
'asm-get-asset-internet-exposure': get_asset_internet_exposure_command,
- 'asm-list-alerts': list_alerts_command,
'asm-get-attack-surface-rule': list_attack_surface_rules_command,
+ 'asm-get-external-ip-address-range': get_external_ip_address_range_command,
+ 'asm-get-external-service': get_external_service_command,
+ 'asm-get-incident': get_incident_command,
+ 'asm-list-alerts': list_alerts_command,
+ 'asm-list-asset-internet-exposure': list_asset_internet_exposure_command,
+ 'asm-list-external-ip-address-range': list_external_ip_address_range_command,
+ 'asm-list-external-service': list_external_service_command,
+ 'asm-list-external-websites': list_external_websites_command,
+ 'asm-list-incidents': list_incidents_command,
'asm-tag-asset-assign': assign_tag_to_assets_command,
'asm-tag-asset-remove': remove_tag_to_assets_command,
'asm-tag-range-assign': assign_tag_to_ranges_command,
'asm-tag-range-remove': remove_tag_to_ranges_command,
- 'asm-list-incidents': list_incidents_command,
- 'asm-get-incident': get_incident_command,
- 'asm-update-incident': update_incident_command,
'asm-update-alerts': update_alert_command,
- 'asm-list-external-websites': list_external_websites_command,
+ 'asm-update-incident': update_incident_command,
+ 'domain': domain_command,
'ip': ip_command,
- 'domain': domain_command
}
if command == 'test-module':
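
The hunk ends just as command dispatch begins. For context, a `commands` table like the one reordered above is conventionally consumed by a tail such as the sketch below; the helper names and error handling are assumptions, not the verbatim CortexXpanse code:

```python
# Conventional XSOAR dispatch tail (sketch only).
if command == 'test-module':
    return_results(test_module(client))                      # test_module is assumed to exist
elif command in commands:
    return_results(commands[command](client, args))          # each command returns CommandResults
else:
    raise NotImplementedError(f'Command {command} is not implemented.')
```
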
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
index 60a00c441d7f..37207612902e 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
@@ -553,6 +553,34 @@ script:
required: true
description: The name of the tags to apply to supplied assets.
description: Assigns tags to a list of assets.
+ - name: asm-add-note-to-asset
+ arguments:
+ - name: asset_id
+ description: The asset ID to add a note to.
+ required: true
+ - name: entity_type
+ description: "The type of Xpanse asset. Allowed values are: 'asset' or 'ip_range'."
+ auto: PREDEFINED
+ predefined:
+ - ip_range
+ - asset
+ required: true
+ - name: note_to_add
+ required: true
+ description: The custom note to be added to the notes section of the asset in Cortex Xpanse.
+ - name: should_append
+ required: false
+ auto: PREDEFINED
+ predefined:
+ - 'True'
+ - 'False'
+ defaultValue: true
+ description: Set to 'false' to overwrite the current note on the asset. Set to 'true' to append to the current note.
+ description: Adds a note to an asset in Xpanse.
+ outputs:
+ - contextPath: ASM.AssetAnnotation.status
+ description: Status of the note being added to the asset in Cortex Xpanse.
+ type: String
- name: asm-tag-asset-remove
arguments:
- name: asm_id_list
@@ -570,7 +598,7 @@ script:
- name: asm-tag-range-remove
arguments:
- name: range_id_list
- description: Comma-separated list of range IDs to remove tags from.
+ description: 'Comma-separated list of range IDs to remove tags from.'
- name: tags
description: The name of the tags to remove from supplied IP ranges.
description: Removes tags from a list of IP ranges.
@@ -581,12 +609,12 @@ script:
- name: description
description: String to search for within the incident description field.
- name: status
+ description: Status to search incidents for.
auto: PREDEFINED
predefined:
- new
- under_investigation
- resolved
- description: Status to search incidents for.
- name: lte_creation_time
description: A date in the format 2019-12-31T23:59:00. Only incidents that were created on or before the specified date/time will be retrieved.
- name: gte_creation_time
@@ -679,8 +707,8 @@ script:
- name: asm-update-incident
arguments:
- name: incident_id
- required: true
description: ID of the incident to modify.
+ required: true
- name: alert_id
description: Used for scoping updates such as comments to the alert level.
- name: assigned_user_mail
@@ -741,8 +769,8 @@ script:
- name: ip
description: IP address to enrich.
required: true
- default: true
isArray: true
+ default: true
outputs:
- contextPath: ASM.IP.ip
description: The IP address of the asset.
@@ -790,12 +818,27 @@ script:
- contextPath: IP.Address
description: IP address.
type: String
+ - contextPath: ASM.TIM.IP.name
+ description: The existing Cortex Xpanse IP address recently updated in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.IP.indicator_type
+ description: The existing Cortex Xpanse indicator type in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.IP.id
+ description: The existing indicator ID in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.IP.reliability
+ description: The existing indicator reliability recently updated in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.IP.score
+ description: The existing indicator score recently updated in the Cortex XSOAR indicators.
+ type: Number
description: Returns enrichment for an IP address.
- name: domain
arguments:
- name: domain
- description: Domain to enrich.
required: true
+ description: Domain to enrich.
isArray: true
default: true
outputs:
@@ -842,12 +885,27 @@ script:
- contextPath: Domain.Name
description: 'The domain name, for example: "google.com".'
type: String
+ - contextPath: ASM.TIM.Domain.name
+ description: The existing Cortex Xpanse domain recently updated in XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.Domain.indicator_type
+ description: The existing Cortex Xpanse indicator type in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.Domain.id
+ description: The existing indicator ID in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.Domain.reliability
+ description: The existing indicator reliability recently updated in the Cortex XSOAR indicators.
+ type: String
+ - contextPath: ASM.TIM.Domain.score
+ description: The existing indicator score recently updated in the Cortex XSOAR indicators.
+ type: Number
description: Returns enrichment for a domain.
- name: asm-get-incident
arguments:
- name: incident_id
- required: true
description: The ID of the incident to be fetched.
+ required: true
outputs:
- contextPath: ASM.Incident.incident_id
description: The ID of the incident.
@@ -883,7 +941,7 @@ script:
- contextPath: ASM.ExternalWebsite
description: A list of the websites results assets.
description: Get external websites assets.
- dockerimage: demisto/python3:3.10.13.89009
+ dockerimage: demisto/python3:3.10.14.99474
isFetchSamples: true
isfetch: true
script: ''
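
For reference, a hypothetical War Room invocation of the newly added `asm-add-note-to-asset` command, in the same style as the README command examples later in this diff (the asset ID is a placeholder):

```
!asm-add-note-to-asset asset_id="11111111-1111-1111-1111-111111111111" entity_type="asset" note_to_add="Reviewed by SOC" should_append="true"
```
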
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
index 0ba00dfd7474..094434be64a1 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
@@ -1,9 +1,28 @@
"""
Tests module for Cortex Xpanse integration.
"""
+import pytest
+# Helper Functions
-def test_format_asm_id_func(requests_mock):
+
+def new_client():
+ from CortexXpanse import Client
+
+ client = Client(
+ base_url='https://test.com',
+ verify=True,
+ headers={
+ "HOST": "test.com",
+ "Authorization": "THISISAFAKEKEY",
+ "Content-Type": "application/json"
+ },
+ proxy=False)
+
+ return client
+
+
+def test_format_asm_id_func():
"""Tests format_asm_id helper function.
Given:
@@ -14,7 +33,6 @@ def test_format_asm_id_func(requests_mock):
- Checks the output of the helper function with the expected output.
"""
from CortexXpanse import format_asm_id
-
from test_data.raw_response import INTERNET_EXPOSURE_PRE_FORMAT
from test_data.expected_results import INTERNET_EXPOSURE_POST_FORMAT
@@ -34,22 +52,14 @@ def test_list_external_service_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_external_service_command
+ from CortexXpanse import list_external_service_command
from test_data.raw_response import EXTERNAL_SERVICES_RESPONSE
from test_data.expected_results import EXTERNAL_SERVICES_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_external_services/',
json=EXTERNAL_SERVICES_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'domain': 'testdomain.com',
@@ -73,22 +83,14 @@ def test_get_external_service_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, get_external_service_command
+ from CortexXpanse import get_external_service_command
from test_data.raw_response import EXTERNAL_SERVICE_RESPONSE
from test_data.expected_results import EXTERNAL_SERVICE_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_external_service',
json=EXTERNAL_SERVICE_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'service_id': '94232f8a-f001-3292-aa65-63fa9d981427'
@@ -112,22 +114,14 @@ def test_list_external_ip_address_range_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_external_ip_address_range_command
+ from CortexXpanse import list_external_ip_address_range_command
from test_data.raw_response import EXTERNAL_RANGES_RESPONSE
from test_data.expected_results import EXTERNAL_RANGES_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_external_ip_address_ranges/',
json=EXTERNAL_RANGES_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {}
response = list_external_ip_address_range_command(client, args)
@@ -148,22 +142,14 @@ def test_get_external_ip_address_range_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, get_external_ip_address_range_command
+ from CortexXpanse import get_external_ip_address_range_command
from test_data.raw_response import EXTERNAL_RANGE_RESPONSE
from test_data.expected_results import EXTERNAL_RANGE_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_external_ip_address_range/',
json=EXTERNAL_RANGE_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'range_id': '1093124c-ce26-33ba-8fb8-937fecb4c7b6'
}
@@ -175,7 +161,21 @@ def test_get_external_ip_address_range_command(requests_mock):
assert response.outputs_key_field == 'range_id'
-def test_list_asset_internet_exposure_command(requests_mock):
+@pytest.mark.parametrize("args", [
+ ({'name': 'testdomain.com'}),
+ ({"externally_inferred_cves": ["CVE-2020-15778"]}),
+ ({"ipv6s": ["2600:1900:4000:9664:0:7::"]}),
+ ({"asm_ids": ["3c176460-8735-333c-b618-8262e2fb660c"]}),
+ ({"aws_cloud_tags": ["Name:AD Lab"]}),
+ ({"gcp_cloud_tags": ["Name:gcp Lab"]}),
+ ({"azure_cloud_tags": ["Name:azure Lab"]}),
+ ({"has_xdr_agent": "NO"}),
+ ({"externally_detected_providers": ["Amazon Web Services"]}),
+ ({"has_bu_overrides": False}),
+ ({"business_units": ["Acme"]}),
+ ({"mac_address": ["00:11:22:33:44:55"]}),
+])
+def test_list_asset_internet_exposure_command(requests_mock, args):
"""Tests list_asset_internet_exposure_command function.
Given:
@@ -186,97 +186,21 @@ def test_list_asset_internet_exposure_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_asset_internet_exposure_command
+ from CortexXpanse import list_asset_internet_exposure_command
from test_data.raw_response import EXTERNAL_EXPOSURES_RESPONSE
from test_data.expected_results import EXTERNAL_EXPOSURES_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_assets_internet_exposure/',
json=EXTERNAL_EXPOSURES_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
- args = {'name': 'testdomain.com'}
- args_externally_inferred_cves = {"externally_inferred_cves": ["CVE-2020-15778"]}
- args_ipv6s = {"ipv6s": ["2600:1900:4000:9664:0:7::"]}
- args_asm_id_list = {"asm_ids": ["3c176460-8735-333c-b618-8262e2fb660c"]}
- args_aws_cloud_tags = {"aws_cloud_tags": ["Name:AD Lab"]}
- args_gcp_cloud_tags = {"gcp_cloud_tags": ["Name:gcp Lab"]}
- args_azure_cloud_tags = {"azure_cloud_tags": ["Name:azure Lab"]}
- args_has_xdr_agent = {"has_xdr_agent": "NO"}
- args_externally_detected_providers = {"externally_detected_providers": ["Amazon Web Services"]}
- args_has_bu_overrides = {"has_bu_overrides": False}
- args_business_units_list = {"business_units": ["Acme"]}
- args_mac_address = {"mac_address": ["00:11:22:33:44:55"]}
+ client = new_client()
response = list_asset_internet_exposure_command(client, args)
- response_externally_inferred_cves = list_asset_internet_exposure_command(args=args_externally_inferred_cves, client=client)
- response_ipv6s = list_asset_internet_exposure_command(args=args_ipv6s, client=client)
- response_asm_id_list = list_asset_internet_exposure_command(args=args_asm_id_list, client=client)
- response_aws_cloud_tags = list_asset_internet_exposure_command(args=args_aws_cloud_tags, client=client)
- response_gcp_cloud_tags = list_asset_internet_exposure_command(args=args_gcp_cloud_tags, client=client)
- response_azure_cloud_tags = list_asset_internet_exposure_command(args=args_azure_cloud_tags, client=client)
- response_has_xdr_agent = list_asset_internet_exposure_command(args=args_has_xdr_agent, client=client)
- response_externally_detected_providers = list_asset_internet_exposure_command(
- args=args_externally_detected_providers, client=client)
- response_has_bu_overrides = list_asset_internet_exposure_command(args=args_has_bu_overrides, client=client)
- response_business_units_list = list_asset_internet_exposure_command(args=args_business_units_list, client=client)
- response_mac_address = list_asset_internet_exposure_command(args=args_mac_address, client=client)
assert response.outputs == EXTERNAL_EXPOSURES_RESULTS
assert response.outputs_prefix == 'ASM.AssetInternetExposure'
assert response.outputs_key_field == 'asm_ids'
- assert response_externally_inferred_cves.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_externally_inferred_cves.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_externally_inferred_cves.outputs_key_field == "asm_ids"
-
- assert response_ipv6s.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_ipv6s.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_ipv6s.outputs_key_field == "asm_ids"
-
- assert response_asm_id_list.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_asm_id_list.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_asm_id_list.outputs_key_field == "asm_ids"
-
- assert response_aws_cloud_tags.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_aws_cloud_tags.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_aws_cloud_tags.outputs_key_field == "asm_ids"
-
- assert response_gcp_cloud_tags.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_gcp_cloud_tags.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_gcp_cloud_tags.outputs_key_field == "asm_ids"
-
- assert response_azure_cloud_tags.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_azure_cloud_tags.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_azure_cloud_tags.outputs_key_field == "asm_ids"
-
- assert response_has_xdr_agent.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_has_xdr_agent.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_has_xdr_agent.outputs_key_field == "asm_ids"
-
- assert response_externally_detected_providers.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_externally_detected_providers.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_externally_detected_providers.outputs_key_field == "asm_ids"
-
- assert response_has_bu_overrides.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_has_bu_overrides.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_has_bu_overrides.outputs_key_field == "asm_ids"
-
- assert response_business_units_list.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_business_units_list.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_business_units_list.outputs_key_field == "asm_ids"
-
- assert response_mac_address.outputs == EXTERNAL_EXPOSURES_RESULTS
- assert response_mac_address.outputs_prefix == "ASM.AssetInternetExposure"
- assert response_mac_address.outputs_key_field == "asm_ids"
-
def test_get_asset_internet_exposure_command(requests_mock):
"""Tests get_asset_internet_exposure_command function.
@@ -289,22 +213,14 @@ def test_get_asset_internet_exposure_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, get_asset_internet_exposure_command
+ from CortexXpanse import get_asset_internet_exposure_command
from test_data.raw_response import EXTERNAL_EXPOSURE_RESPONSE
from test_data.expected_results import EXTERNAL_EXPOSURE_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_asset_internet_exposure/',
json=EXTERNAL_EXPOSURE_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'asm_id': 'testdomain.com'
}
@@ -327,22 +243,14 @@ def test_list_alerts_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_alerts_command
+ from CortexXpanse import list_alerts_command
from test_data.raw_response import LIST_ALERTS_RESPONSE
from test_data.expected_results import LIST_ALERTS_RESULTS
requests_mock.post('https://test.com/public_api/v2/alerts/get_alerts_multi_events/',
json=LIST_ALERTS_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'limit': '3',
'sort_by_creation_time': 'asc'
@@ -371,22 +279,14 @@ def test_list_attack_surface_rules_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_attack_surface_rules_command
+ from CortexXpanse import list_attack_surface_rules_command
from test_data.raw_response import ATTACK_SURFACE_RULES_RAW
from test_data.expected_results import ATTACK_SURFACE_RULES_RESULTS
requests_mock.post('https://test.com/public_api/v1/get_attack_surface_rules/',
json=ATTACK_SURFACE_RULES_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'enabled_status': 'on',
'severity': 'high',
@@ -411,22 +311,14 @@ def test_assign_tag_to_assets_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, assign_tag_to_assets_command
+ from CortexXpanse import assign_tag_to_assets_command
from test_data.raw_response import TAG_APPLY_RAW
from test_data.expected_results import TAG_APPLY_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/tags/assets_internet_exposure/assign/',
json=TAG_APPLY_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'asm_id_list': '11111111-1111-1111-1111-111111111111',
'tags': 'Test'
@@ -449,22 +341,14 @@ def test_remove_tag_to_assets_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, remove_tag_to_assets_command
+ from CortexXpanse import remove_tag_to_assets_command
from test_data.raw_response import TAG_REMOVE_RAW
from test_data.expected_results import TAG_REMOVE_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/tags/assets_internet_exposure/remove/',
json=TAG_REMOVE_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'asm_id_list': '11111111-1111-1111-1111-111111111111',
'tags': 'Test'
@@ -487,22 +371,14 @@ def test_assign_tag_to_ranges_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, assign_tag_to_ranges_command
+ from CortexXpanse import assign_tag_to_ranges_command
from test_data.raw_response import TAG_APPLY_RAW
from test_data.expected_results import TAG_APPLY_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/tags/external_ip_address_ranges/assign/',
json=TAG_APPLY_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'range_id_list': '11111111-1111-1111-1111-111111111111',
'tags': 'Test'
@@ -525,22 +401,14 @@ def test_remove_tag_to_ranges_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, remove_tag_to_ranges_command
+ from CortexXpanse import remove_tag_to_ranges_command
from test_data.raw_response import TAG_REMOVE_RAW
from test_data.expected_results import TAG_REMOVE_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/tags/external_ip_address_ranges/remove/',
json=TAG_REMOVE_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'range_id_list': '11111111-1111-1111-1111-111111111111',
'tags': 'Test'
@@ -563,22 +431,14 @@ def test_list_incidents_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_incidents_command
+ from CortexXpanse import list_incidents_command
from test_data.raw_response import LIST_INCIDENTS_RAW
from test_data.expected_results import LIST_INCIDENTS_RESULTS
requests_mock.post('https://test.com/public_api/v1/incidents/get_incidents/',
json=LIST_INCIDENTS_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'limit': 1,
'status': 'new'
@@ -602,22 +462,14 @@ def test_get_incident_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, get_incident_command
+ from CortexXpanse import get_incident_command
from test_data.raw_response import INCIDENT_GET_RAW
from test_data.expected_results import INCIDENT_GET_RESULTS
requests_mock.post('https://test.com/public_api/v1/incidents/get_incident_extra_data/',
json=INCIDENT_GET_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'incident_id': 1
}
@@ -639,22 +491,14 @@ def test_update_incident_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, update_incident_command
+ from CortexXpanse import update_incident_command
from test_data.raw_response import INCIDENT_UPDATE_RAW
from test_data.expected_results import INCIDENT_UPDATE_RESULTS
requests_mock.post('https://test.com/public_api/v1/incidents/update_incident/',
json=INCIDENT_UPDATE_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'incident_id': 1,
'status': 'new'
@@ -677,25 +521,18 @@ def test_update_alert_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, update_alert_command
+ from CortexXpanse import update_alert_command
from test_data.raw_response import ALERT_UPDATE_RAW
from test_data.expected_results import ALERT_UPDATE_RESULTS
requests_mock.post('https://test.com/public_api/v1/alerts/update_alerts/',
json=ALERT_UPDATE_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'alert_id_list': 602,
- 'status': 'new'
+ 'status': 'new',
+ 'comment': 'test updating'
}
response = update_alert_command(client, args)
@@ -704,7 +541,38 @@ def test_update_alert_command(requests_mock):
assert response.outputs_prefix == 'ASM.UpdatedAlerts'
-def test_ip_command(requests_mock):
+def test_successfully_add_note_to_asset_command(requests_mock):
+ """Tests update_alert_command function.
+
+ Given:
+ - requests_mock instance to generate the appropriate update_alert_command( API response,
+ loaded from a local JSON file.
+ When:
+ - Running the 'update_alert_command'.
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from CortexXpanse import add_note_to_asset_command
+
+ json_data = {"reply": "succeeded"}
+ requests_mock.post('https://test.com/public_api/v1/assets/assets_internet_exposure/annotation',
+ json=json_data)
+
+ client = new_client()
+ args = {
+ 'asset_id': "abcd1234-a1b2-a1b2-a1b2-abcdefg12345",
+ 'entity_type': 'ip_range',
+ 'note_to_add': 'Test note adding to asset in Ev2',
+ 'should_append': 'true'
+ }
+
+ response = add_note_to_asset_command(client, args)
+
+ assert response.outputs.get('status') == "succeeded"
+ assert response.outputs_prefix == 'ASM.AssetAnnotation'
+
+
+def test_ip_command_xpanse_asset_object(requests_mock):
"""Tests ip_command function.
Given:
@@ -715,22 +583,14 @@ def test_ip_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, ip_command
+ from CortexXpanse import ip_command
from test_data.raw_response import IP_DOMAIN_RAW
from test_data.expected_results import IP_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_assets_internet_exposure/',
json=IP_DOMAIN_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'ip': "1.1.1.1"
}
@@ -740,11 +600,13 @@ def test_ip_command(requests_mock):
for response in responses:
if response.outputs_prefix == 'ASM.IP':
assert response.outputs == IP_RESULTS
- elif response.outputs_prefix == 'DBotScore':
- assert response.outputs.get("Score") == 0
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator == '1.1.1.1'
+ else:
+ pytest.fail()
-def test_domain_command(requests_mock):
+def test_ip_command_xsoar_indicator(mocker):
"""Tests domain_command function.
Given:
@@ -755,37 +617,176 @@ def test_domain_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, domain_command
+ from CortexXpanse import ip_command
+ import demistomock as demisto
+ from test_data.raw_response import XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW
+ from test_data.expected_results import XSOAR_SEARCH_INDICATOR_IP_RESULTS
+ from datetime import datetime
+ insight_cache = XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW['iocs'][0]['insightCache']
+ insight_cache['modified'] = datetime.now().isoformat(timespec='milliseconds') + 'Z'
+
+ mocker.patch.object(demisto, 'searchIndicators', return_value=XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW)
+
+ client = new_client()
+
+ responses = ip_command(client, {'ip': '1.1.1.2'})
+
+ assert len(responses) == 1
+ for response in responses:
+ if response.outputs_prefix == 'ASM.TIM.IP':
+ assert response.outputs[0] == XSOAR_SEARCH_INDICATOR_IP_RESULTS
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator == '1.1.1.2'
+ else:
+ pytest.fail()
+
+def test_ip_command_xsoar_and_xpanse(mocker, requests_mock):
+ """Tests domain_command function.
+
+ Given:
+ - requests_mock instance to generate the appropriate domain_command( API response,
+ loaded from a local JSON file.
+ When:
+ - Running the 'domain_command'.
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from CortexXpanse import ip_command
+ import demistomock as demisto
+ from datetime import datetime
+ from test_data.raw_response import IP_DOMAIN_RAW, XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW
+ from test_data.expected_results import IP_RESULTS, XSOAR_SEARCH_INDICATOR_IP_RESULTS
+ insight_cache = XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW['iocs'][0]['insightCache']
+ insight_cache['modified'] = datetime.now().isoformat(timespec='milliseconds') + 'Z'
+
+ requests_mock.post('https://test.com/public_api/v1/assets/get_assets_internet_exposure/',
+ json=IP_DOMAIN_RAW)
+ mocker.patch.object(demisto, 'searchIndicators', return_value=XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW)
+
+ client = new_client()
+
+ responses = ip_command(client, {'ip': '1.1.1.2, 1.1.1.1'})
+
+ assert len(responses) == 3
+ for response in responses:
+ if response.outputs_prefix == 'ASM.IP':
+ assert response.outputs == IP_RESULTS
+ elif response.outputs_prefix == 'ASM.TIM.IP':
+ assert response.outputs[0] == XSOAR_SEARCH_INDICATOR_IP_RESULTS
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator_type == 'ip'
+ else:
+ pytest.fail()
+
+
+def test_domain_command_xpanse_asset_object(requests_mock):
+ """Tests domain_command function.
+
+ Given:
+        - requests_mock instance to generate the appropriate domain_command API response,
+ loaded from a local JSON file.
+ When:
+ - Running the 'domain_command'.
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from CortexXpanse import domain_command
from test_data.raw_response import IP_DOMAIN_RAW
from test_data.expected_results import IP_RESULTS
+
requests_mock.post('https://test.com/public_api/v1/assets/get_assets_internet_exposure/',
json=IP_DOMAIN_RAW)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
- args = {
- 'domain': "*.acme.com"
- }
- del IP_RESULTS[0]['ip']
- responses = domain_command(client, args)
+ client = new_client()
+
+ responses = domain_command(client, {'domain': '*.acme.com'})
+ del IP_RESULTS[0]['ip']
assert len(responses) == 2
for response in responses:
if response.outputs_prefix == 'ASM.Domain':
assert response.outputs == IP_RESULTS
- elif response.outputs_prefix == 'DBotScore':
- assert response.outputs.get("Score") == 0
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator == '*.acme.com'
+ else:
+ pytest.fail()
+
+
+def test_domain_command_xsoar_indicator(mocker):
+ """Tests domain_command function.
+
+ Given:
+        - mocker patching 'demisto.searchIndicators' to return the appropriate domain indicator response,
+          loaded from local test data.
+ When:
+ - Running the 'domain_command'.
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from CortexXpanse import domain_command
+ import demistomock as demisto
+ from datetime import datetime
+ from test_data.raw_response import XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW
+ from test_data.expected_results import XSOAR_SEARCH_INDICATOR_DOMAIN_RESULTS
+ insight_cache = XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW['iocs'][0]['insightCache']
+ insight_cache['modified'] = datetime.now().isoformat(timespec='milliseconds') + 'Z'
+ mocker.patch.object(demisto, 'searchIndicators', return_value=XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW)
+
+ client = new_client()
+
+ responses = domain_command(client, {'domain': 'www.toysrus.com'})
+
+ assert len(responses) == 1
+ for response in responses:
+ if response.outputs_prefix == 'ASM.TIM.Domain':
+ assert response.outputs[0] == XSOAR_SEARCH_INDICATOR_DOMAIN_RESULTS
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator == 'www.toysrus.com'
+ else:
+ pytest.fail()
-def test_fetch_incidents(requests_mock, mocker):
+def test_domain_command_xsoar_and_xpanse(mocker, requests_mock):
+ """Tests domain_command function.
+
+ Given:
+        - requests_mock and mocker instances to generate the appropriate domain_command API and
+          searchIndicators responses, loaded from local test data.
+ When:
+ - Running the 'domain_command'.
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from CortexXpanse import domain_command
+ import demistomock as demisto
+ from datetime import datetime
+ from test_data.raw_response import IP_DOMAIN_RAW, XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW
+ from test_data.expected_results import DOMAIN_RESULTS, XSOAR_SEARCH_INDICATOR_DOMAIN_RESULTS
+ insight_cache = XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW['iocs'][0]['insightCache']
+ insight_cache['modified'] = datetime.now().isoformat(timespec='milliseconds') + 'Z'
+
+ requests_mock.post('https://test.com/public_api/v1/assets/get_assets_internet_exposure/',
+ json=IP_DOMAIN_RAW)
+ mocker.patch.object(demisto, 'searchIndicators', return_value=XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW)
+
+ client = new_client()
+
+ responses = domain_command(client, {'domain': 'www.toysrus.com, *.acme.com'})
+
+ assert len(responses) == 3
+ for response in responses:
+ if response.outputs_prefix == 'ASM.TIM.Domain':
+ assert response.outputs[0] == XSOAR_SEARCH_INDICATOR_DOMAIN_RESULTS
+ elif response.outputs_prefix == 'ASM.Domain':
+ assert response.outputs[0] == DOMAIN_RESULTS
+ elif response.indicator:
+ assert response.indicator.dbot_score.indicator_type in ['domain', 'domainglob']
+ else:
+ pytest.fail()
+
+
+def test_fetch_incidents(requests_mock):
"""Tests fetch_incidents function.
Given:
@@ -796,22 +797,14 @@ def test_fetch_incidents(requests_mock, mocker):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, fetch_incidents
+ from CortexXpanse import fetch_incidents
import json
from test_data.raw_response import LIST_ALERTS_RESPONSE
requests_mock.post('https://test.com/public_api/v2/alerts/get_alerts_multi_events/',
json=LIST_ALERTS_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
last_run = {'last_fetch': 1659452708759}
next_run, incidents = fetch_incidents(
@@ -840,22 +833,14 @@ def test_list_external_websites_command(requests_mock):
Then:
- Checks the output of the command function with the expected output.
"""
- from CortexXpanse import Client, list_external_websites_command
+ from CortexXpanse import list_external_websites_command
from test_data.raw_response import EXTERNAL_WEBSITES_RESPONSE
from test_data.expected_results import EXTERNAL_WEBSITES_RESULTS
requests_mock.post('https://test.com/public_api/v1/assets/get_external_websites/',
json=EXTERNAL_WEBSITES_RESPONSE)
- client = Client(
- base_url='https://test.com',
- verify=True,
- headers={
- "HOST": "test.com",
- "Authorizatio": "THISISAFAKEKEY",
- "Content-Type": "application/json"
- },
- proxy=False)
+ client = new_client()
args = {
'authentication': 'Form',
@@ -866,4 +851,3 @@ def test_list_external_websites_command(requests_mock):
assert response.outputs == EXTERNAL_WEBSITES_RESULTS.get('ExternalWebsite', {}).get('websites')
assert response.outputs_prefix == 'ASM.ExternalWebsite'
- # assert response.outputs_key_field == ''
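
The `add_note_to_asset_command` implementation itself is not part of the hunks shown here. Based on the endpoint, arguments, and outputs exercised by `test_successfully_add_note_to_asset_command` above, a minimal sketch could look like the following; the Client method name and request-body shape are assumptions:

```python
from CommonServerPython import CommandResults, argToBoolean


def add_note_to_asset_command(client, args):
    """Sketch of the command wrapper exercised by test_successfully_add_note_to_asset_command."""
    raw = client.add_note_to_asset_request(        # assumed Client method name
        asset_id=args.get('asset_id'),
        entity_type=args.get('entity_type'),
        note_to_add=args.get('note_to_add'),
        should_append=argToBoolean(args.get('should_append', 'true')),
    )
    outputs = {'status': raw.get('reply')}         # the API replies with {"reply": "succeeded"}
    return CommandResults(
        outputs_prefix='ASM.AssetAnnotation',
        outputs=outputs,
        readable_output='Note addition status: {}'.format(outputs['status']),
        raw_response=raw,
    )
```
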
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/README.md b/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
index 15995be47e6f..2fe893a830e0 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
@@ -23,7 +23,9 @@ This integration was integrated and tested with version 2.0 of Cortex Expander.
| Source Reliability | Reliability of the source providing the intelligence data. Used for !ip and !domain commands. | False |
4. Click **Test** to validate the URLs, token, and connection.
+
## Commands
+
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
After you successfully execute a command, a DBot message appears in the War Room with the command details.
@@ -65,8 +67,11 @@ Get a list of all your external services filtered by business units, externally
| ASM.ExternalService.externally_inferred_vulnerability_score | Unknown | External service vulnerability score. |
#### Command example
+
```!asm-list-external-service domain=acme.com is_active=yes discovery_type=directly_discovery```
+
#### Context Example
+
```json
{
"ASM": {
@@ -187,6 +192,7 @@ Get a list of all your external services filtered by business units, externally
#### Human Readable Output
>### External Services
+>
>|Active Classifications|Business Units|Discovery Type|Domain|Externally Detected Providers|First Observed|Ip Address|Is Active|Last Observed|Port|Protocol|Service Id|Service Name|Service Type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| HttpServer,
MicrosoftOWAServer,
ServerSoftware,
MicrosoftIisWebServer,
ApplicationServerSoftware | Acme,
VanDelay Industries | DirectlyDiscovered | autodiscover.acme.com | Microsoft Azure | 1659395040000 | 1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1,
1.1.1.1 | Active | 1663024320000 | 80 | TCP | 4c755fea-59e8-3719-8829-9f6adde65068 | HTTP Server at autodiscover.acme.com:80 | HttpServer |
@@ -229,8 +235,11 @@ Get service details according to the service ID.
| ASM.ExternalService.details | String | Additional details. |
#### Command example
+
```!asm-get-external-service service_id=94232f8a-f001-3292-aa65-63fa9d981427```
+
#### Context Example
+
```json
{
"ASM": {
@@ -573,12 +582,14 @@ Get service details according to the service ID.
#### Human Readable Output
>### External Service
+>
>|Active Classifications|Business Units|Details|Discovery Type|Externally Detected Providers|Externally Inferred Cves|Externally Inferred Vulnerability Score|First Observed|Ip Address|Is Active|Last Observed|Port|Protocol|Service Id|Service Name|Service Type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| SSHWeakMACAlgorithmsEnabled,
SshServer,
OpenSSH | Acme | serviceKey: 1.1.1.1:22
serviceKeyType: IP
businessUnits: {'name': 'Acme'}
providerDetails: {'name': 'AWS', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000}
certificates:
domains:
ips: {'ip': 873887795, 'protocol': 'TCP', 'provider': 'AWS', 'geolocation': {'latitude': 39.0438, 'longitude': -77.4879, 'countryCode': 'US', 'city': 'ASHBURN', 'regionCode': 'VA', 'timeZone': None}, 'activityStatus': 'Active', 'lastObserved': 1663026500000, 'firstObserved': 1662774169000}
classifications: {'name': 'SshServer', 'activityStatus': 'Active', 'values': [{'jsonValue': '{"version":"2.0","serverVersion":"OpenSSH_7.6p1","extraInfo":"Ubuntu-4ubuntu0.7"}', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000}], 'firstObserved': 1662774120000, 'lastObserved': 1663026480000},
{'name': 'SSHWeakMACAlgorithmsEnabled', 'activityStatus': 'Active', 'values': [{'jsonValue': '{}', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000}], 'firstObserved': 1662774120000, 'lastObserved': 1663026480000},
{'name': 'OpenSSH', 'activityStatus': 'Active', 'values': [{'jsonValue': '{"version":"7.6"}', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000}], 'firstObserved': 1662774120000, 'lastObserved': 1663026480000}
tlsVersions:
inferredCvesObserved: {'inferredCve': {'cveId': 'CVE-2020-15778', 'cvssScoreV2': 6.8, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 7.8, 'cveSeverityV3': 'HIGH', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2021-41617', 'cvssScoreV2': 4.4, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 7.0, 'cveSeverityV3': 'HIGH', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2019-6110', 'cvssScoreV2': 4.0, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 6.8, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2019-6109', 'cvssScoreV2': 4.0, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 6.8, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2020-14145', 'cvssScoreV2': 4.3, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 5.9, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2019-6111', 'cvssScoreV2': 5.8, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 5.9, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2018-20685', 'cvssScoreV2': 2.6, 'cveSeverityV2': 'LOW', 'cvssScoreV3': 5.3, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2018-15919', 'cvssScoreV2': 5.0, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 5.3, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2016-20012', 'cvssScoreV2': 4.3, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 5.3, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2018-15473', 'cvssScoreV2': 5.0, 'cveSeverityV2': 'MEDIUM', 'cvssScoreV3': 5.3, 'cveSeverityV3': 'MEDIUM', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000},
{'inferredCve': {'cveId': 'CVE-2021-36368', 'cvssScoreV2': 2.6, 'cveSeverityV2': 'LOW', 'cvssScoreV3': 3.7, 'cveSeverityV3': 'LOW', 'inferredCveMatchMetadata': {'inferredCveMatchType': 'ExactVersionMatch', 'product': 'openssh', 'confidence': 'High', 'vendor': 'openbsd', 'version': '7.6'}}, 'activityStatus': 'Active', 'firstObserved': 1662774169000, 'lastObserved': 1663026500000}
enrichedObservationSource: CLOUD
ip_ranges: {} | ColocatedOnIp | Amazon Web Services | CVE-2020-15778,
CVE-2021-41617,
CVE-2019-6110,
CVE-2019-6109,
CVE-2020-14145,
CVE-2019-6111,
CVE-2018-20685,
CVE-2018-15919,
CVE-2016-20012,
CVE-2018-15473,
CVE-2021-36368 | 7.8 | 1662774120000 | 1.1.1.1 | Active | 1663026480000 | 22 | TCP | 94232f8a-f001-3292-aa65-63fa9d981427 | SSH Server at 1.1.1.1:22 | SshServer |
### asm-list-external-ip-address-range
+
***
Get a list of all your internet exposures filtered by business units and organization handles. Maximum result limit is 100 ranges.
@@ -586,6 +597,7 @@ Get a list of all your internet exposures filtered by business units and organiz
#### Base Command
`asm-list-external-ip-address-range`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -606,8 +618,11 @@ Get a list of all your internet exposures filtered by business units and organiz
| ASM.ExternalIpAddressRange.organization_handles | String | External IP address range associated organization handles. |
#### Command example
+
```!asm-list-external-ip-address-range```
+
#### Context Example
+
```json
{
"ASM": {
@@ -653,6 +668,7 @@ Get a list of all your internet exposures filtered by business units and organiz
#### Human Readable Output
>### External IP Address Ranges
+>
>|Active Responsive Ips Count|Business Units|Date Added|First Ip|Ips Count|Last Ip|Organization Handles|Range Id|
>|---|---|---|---|---|---|---|---|
>| 0 | VanDelay Industries | 1663031000145 | 1.1.1.1 | 64 | 1.1.1.1 | MAINT-HK-PCCW-BIA-CS,
BNA2-AP,
TA66-AP | 4da29b7f-3086-3b52-981b-aa8ee5da1e60 |
@@ -660,6 +676,7 @@ Get a list of all your internet exposures filtered by business units and organiz
### asm-get-external-ip-address-range
+
***
Get external IP address range details according to the range IDs.
@@ -667,6 +684,7 @@ Get external IP address range details according to the range IDs.
#### Base Command
`asm-get-external-ip-address-range`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -689,8 +707,11 @@ Get external IP address range details according to the range IDs.
| ASM.ExternalIpAddressRange.details | String | Additional information. |
#### Command example
+
```!asm-get-external-ip-address-range range_id=4da29b7f-3086-3b52-981b-aa8ee5da1e60```
+
#### Context Example
+
```json
{
"ASM": {
@@ -780,18 +801,21 @@ Get external IP address range details according to the range IDs.
#### Human Readable Output
>### External IP Address Range
+
>|Active Responsive Ips Count|Business Units|Date Added|Details|First Ip|Ips Count|Last Ip|Organization Handles|Range Id|
>|---|---|---|---|---|---|---|---|---|
>| 0 | VanDelay Industries | 1663031000145 | networkRecords: {'handle': '1.1.1.1 - 1.1.1.1', 'firstIp': '1.1.1.1', 'lastIp': '1.1.1.1', 'name': 'SEARS-HK', 'whoIsServer': 'whois.apnic.net', 'lastChanged': 1663030241931, 'organizationRecords': [{'handle': 'MAINT-HK-PCCW-BIA-CS', 'dateAdded': 1663029346957, 'address': '', 'email': 'noc@acme.com', 'phone': '', 'org': '', 'formattedName': '', 'kind': 'group', 'roles': ['registrant'], 'lastChanged': None, 'firstRegistered': None, 'remarks': ''}, {'handle': 'BNA2-AP', 'dateAdded': 1663029346957, 'address': "27/F, PCCW Tower, Taikoo Place,\n979 King's Road, Quarry Bay, HK ", 'email': 'cs@acme.com', 'phone': '+852-2888-6932', 'org': '', 'formattedName': 'BIZ NETVIGATOR ADMINISTRATORS', 'kind': 'group', 'roles': ['administrative'], 'lastChanged': 1514892767000, 'firstRegistered': 1220514857000, 'remarks': ''}, {'handle': 'TA66-AP', 'dateAdded': 1663029346957, 'address': 'HKT Limited\nPO Box 9896 GPO ', 'email': 'noc@acme.com', 'phone': '+852-2883-5151', 'org': '', 'formattedName': 'TECHNICAL ADMINISTRATORS', 'kind': 'group', 'roles': ['technical'], 'lastChanged': 1468555410000, 'firstRegistered': 1220514856000, 'remarks': ''}], 'remarks': 'Sears Holdings Global Sourcing Ltd'} | 1.1.1.1 | 64 | 1.1.1.1 | MAINT-HK-PCCW-BIA-CS,
BNA2-AP,
TA66-AP | 4da29b7f-3086-3b52-981b-aa8ee5da1e60 |
### asm-list-asset-internet-exposure
+
***
Get a list of all your internet exposures filtered by IP address, domain, type, asm id, IPv6 address, AWS/GCP/Azure tags, has XDR agent, Externally detected providers, Externally inferred cves, Business units list, has BU overrides and/or if there is an active external service. Maximum result limit is 100 assets.
#### Base Command
`asm-list-asset-internet-exposure`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -838,8 +862,11 @@ Get a list of all your internet exposures filtered by IP address, domain, type,
#### Command example
+
```!asm-list-asset-internet-exposure name="acme.com" type=certificate has_active_external_services=no```
+
#### Context Example
+
```json
{
"ASM": {
@@ -936,6 +963,7 @@ Get a list of all your internet exposures filtered by IP address, domain, type,
#### Human Readable Output
>### Asset Internet Exposures
+>
>|Asm Ids|Asset Type|Business Units|Certificate Algorithm|Certificate Classifications|Certificate Issuer|Domain Resolves|Has Active Externally Services|Has Xdr Agent|Name|Sensor|
>|---|---|---|---|---|---|---|---|---|---|---|
>| cfa1cd5a-77f1-3963-8557-7f652309a143 | CERTIFICATE | Acme,
VanDelay Industries | SHA256withRSA | LongExpiration,
Wildcard,
Expired | DigiCert | false | false | NA | *.digital-dev.acme.com | XPANSE |
@@ -943,6 +971,7 @@ Get a list of all your internet exposures filtered by IP address, domain, type,
### asm-get-asset-internet-exposure
+
***
Get internet exposure asset details according to the asset ID.
@@ -950,6 +979,7 @@ Get internet exposure asset details according to the asset ID.
#### Base Command
`asm-get-asset-internet-exposure`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -977,8 +1007,11 @@ Get internet exposure asset details according to the asset ID.
| ASM.AssetInternetExposure.externally_inferred_vulnerability_score | Unknown | Asset vulnerability score. |
#### Command example
+
```!asm-get-asset-internet-exposure asm_id=3c176460-8735-333c-b618-8262e2fb660c```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1071,6 +1104,7 @@ Get internet exposure asset details according to the asset ID.
#### Human Readable Output
>### Asset Internet Exposure
+>
>|Asm Ids|Business Units|Certificate Algorithm|Certificate Classifications|Certificate Issuer|Created|Details|Name|Resolves|Type|
>|---|---|---|---|---|---|---|---|---|---|
>| 3c176460-8735-333c-b618-8262e2fb660c | Acme | SHA1withRSA | Wildcard,
Expired,
InsecureSignature | Thawte | 1663030146931 | providerDetails:
domain: null
topLevelAssetMapperDomain: null
domainAssetType: null
isPaidLevelDomain: false
domainDetails: null
dnsZone: null
latestSampledIp: null
subdomainMetadata: null
recentIps:
businessUnits: {'name': 'Acme'}
certificateDetails: {"issuer": "C=US,O=Thawte\\, Inc.,CN=Thawte SSL CA", "issuerAlternativeNames": "", "issuerCountry": "US", "issuerEmail": null, "issuerLocality": null, "issuerName": "Thawte SSL CA", "issuerOrg": "Thawte\\\\, Inc.", "formattedIssuerOrg": "Thawte", "issuerOrgUnit": null, "issuerState": null, "publicKey": "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp21W/QVHuo0Nyy9l6Qp6Ye7yniuCccplWLdkL34pB0roNWBiklLJFftFTXJLtUuYEBhEbUtOPtNr5QRZFo+LQSj+JMQsGajEgNvIIMDms2xtc+vYkuJeNRsN/0zRm8iBjCNEZ0zBbWdupO6xee+Lngq5RiyRzAN2+Q5HlmHmVOcc7NtY5VIQhajp3a5Gc7tmLXa7ZxwQb+afdlpmE0iv4ZxmXFyHwlPXUlIxfETDDjtv2EzAgrnpZ5juo7TEFZA7AjsT0lO6cC2qPE9x9kC02PeC1Heg4hWf70CsXcKQBsprLqusrPYM9+OYfZnj+Dq9j6FjZD314Nz4qTGwmZrwDQIDAQAB", "publicKeyAlgorithm": "RSA", "publicKeyRsaExponent": 65537, "signatureAlgorithm": "SHA1withRSA", "subject": "C=US,ST=New Jersey,L=Wayne,O=Acme,OU=MIS,CN=*.acme.com", "subjectAlternativeNames": "*.acme.com", "subjectCountry": "US", "subjectEmail": null, "subjectLocality": "Wayne", "subjectName": "*.acme.com", "subjectOrg": "Acme", "subjectOrgUnit": "MIS", "subjectState": "New Jersey", "serialNumber": "91384582774546160650506315451812470612", "validNotBefore": 1413158400000, "validNotAfter": 1444780799000, "version": "3", "publicKeyBits": 2048, "publicKeyModulus": "a76d56fd0547ba8d0dcb2f65e90a7a61eef29e2b8271ca6558b7642f7e29074ae83560629252c915fb454d724bb54b981018446d4b4e3ed36be50459168f8b4128fe24c42c19a8c480dbc820c0e6b36c6d73ebd892e25e351b0dff4cd19bc8818c2344674cc16d676ea4eeb179ef8b9e0ab9462c91cc0376f90e479661e654e71cecdb58e5521085a8e9ddae4673bb662d76bb671c106fe69f765a661348afe19c665c5c87c253d75252317c44c30e3b6fd84cc082b9e96798eea3b4c415903b023b13d253ba702daa3c4f71f640b4d8f782d477a0e2159fef40ac5dc29006ca6b2eabacacf60cf7e3987d99e3f83abd8fa163643df5e0dcf8a931b0999af00d", "publicKeySpki": "Up3fHwOddA9cXEeO4XBOgn63bfnvkXsOrOv6AycwQAk=", "sha1Fingerprint": "77d025c36f055e254063ae2ac3625fd4bf4507fb", "sha256Fingerprint": "9a37c952ee1169cfa6e91efb57fe6d405d1ca48b26a714e9a46f008c15ea62e8", "md5Fingerprint": "498ec19ebd6c6883ecd43d064e713002"}
inferredCvesObserved:
ip_ranges: {} | *.acme.com | false | Certificate |
@@ -1117,8 +1151,11 @@ Get a list of all your ASM alerts filtered by alert IDs, severity and/or creatio
| ASM.Alert.detection_timestamp | Date | Date the alert was created. |
#### Command example
+
```!asm-list-alerts limit=2 severity=high sort_by_creation_time=asc```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1443,6 +1480,7 @@ Get a list of all your ASM alerts filtered by alert IDs, severity and/or creatio
#### Human Readable Output
>### ASM Alerts
+>
>|Action|Action Pretty|Agent Os Type|Alert Id|Alert Type|Description|Detection Timestamp|Events|External Id|Is Pcap|Is Whitelisted|Last Modified Ts|Local Insert Ts|Matching Status|Name|Resolution Comment|Resolution Status|Severity|Source|Starred|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| NOT_AVAILABLE | N/A | NO_HOST | 231 | Unclassified | Networking and security infrastructure, such as firewalls and routers, generally should not have their administration panels open to public Internet. Compromise of these devices, often though password guessing or vulnerability exploitation, provides privileged access to an enterprise network. | 1659452808759 | {'agent_install_type': 'NA', 'agent_host_boot_time': None, 'event_sub_type': None, 'module_id': None, 'association_strength': None, 'dst_association_strength': None, 'story_id': None, 'event_id': None, 'event_type': None, 'event_timestamp': 1659452808759, 'actor_process_instance_id': None, 'actor_process_image_path': None, 'actor_process_image_name': None, 'actor_process_command_line': None, 'actor_process_signature_status': 'N/A', 'actor_process_signature_vendor': None, 'actor_process_image_sha256': None, 'actor_process_image_md5': None, 'actor_process_causality_id': None, 'actor_causality_id': None, 'actor_process_os_pid': None, 'actor_thread_thread_id': None, 'causality_actor_process_image_name': None, 'causality_actor_process_command_line': None, 'causality_actor_process_image_path': None, 'causality_actor_process_signature_vendor': None, 'causality_actor_process_signature_status': 'N/A', 'causality_actor_causality_id': None, 'causality_actor_process_execution_time': None, 'causality_actor_process_image_md5': None, 'causality_actor_process_image_sha256': None, 'action_file_path': None, 'action_file_name': None, 'action_file_md5': None, 'action_file_sha256': None, 'action_file_macro_sha256': None, 'action_registry_data': None, 'action_registry_key_name': None, 'action_registry_value_name': None, 'action_registry_full_key': None, 'action_local_ip': None, 'action_local_ip_v6': None, 'action_local_port': None, 'action_remote_ip': None, 'action_remote_ip_v6': None, 'action_remote_port': 80, 'action_external_hostname': None, 'action_country': 'UNKNOWN', 'action_process_instance_id': None, 'action_process_causality_id': None, 'action_process_image_name': None, 'action_process_image_sha256': None, 'action_process_image_command_line': None, 'action_process_signature_status': 'N/A', 'action_process_signature_vendor': None, 'os_actor_effective_username': None, 'os_actor_process_instance_id': None, 'os_actor_process_image_path': None, 'os_actor_process_image_name': None, 'os_actor_process_command_line': None, 'os_actor_process_signature_status': 'N/A', 'os_actor_process_signature_vendor': None, 'os_actor_process_image_sha256': None, 'os_actor_process_causality_id': None, 'os_actor_causality_id': None, 'os_actor_process_os_pid': None, 'os_actor_thread_thread_id': None, 'fw_app_id': None, 'fw_interface_from': None, 'fw_interface_to': None, 'fw_rule': None, 'fw_rule_id': None, 'fw_device_name': None, 'fw_serial_number': None, 'fw_url_domain': None, 'fw_email_subject': None, 'fw_email_sender': None, 'fw_email_recipient': None, 'fw_app_subcategory': None, 'fw_app_category': None, 'fw_app_technology': None, 'fw_vsys': None, 'fw_xff': None, 'fw_misc': None, 'fw_is_phishing': 'N/A', 'dst_agent_id': None, 'dst_causality_actor_process_execution_time': None, 'dns_query_name': None, 'dst_action_external_hostname': None, 'dst_action_country': None, 'dst_action_external_port': None, 'contains_featured_host': 'NO', 'contains_featured_user': 'NO', 'contains_featured_ip': 'NO', 'image_name': None, 'container_id': None, 'cluster_name': None, 'referenced_resource': None, 'operation_name': None, 'identity_sub_type': None, 'identity_type': None, 'project': 
None, 'cloud_provider': None, 'resource_type': None, 'resource_sub_type': None, 'user_agent': None, 'user_name': None} | FAKE-GUID | false | false | 1660240725450 | 1659455267908 | MATCHED | Networking Infrastructure | ASM alert resolution | STATUS_070_RESOLVED_OTHER | high | ASM | false |
@@ -1483,8 +1521,11 @@ Fetches attack surface rules related to how Cortex Xpanse does assessment.
| ASM.AttackSurfaceRules.modified | unknown | Last modification of the attack surface rule. |
#### Command example
+
```!asm-get-attack-surface-rule enabled_status=On limit=1```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1508,6 +1549,7 @@ Fetches attack surface rules related to how Cortex Xpanse does assessment.
#### Human Readable Output
>### Attack Surface Rules
+>
>|Attack Surface Rule Id|Attack Surface Rule Name|Category|Created|Description|Enabled Status|Modified|Priority|Remediation Guidance|
>|---|---|---|---|---|---|---|---|---|
>| VMwareVRealizeAutomationAppliance | VMware vRealize Automation Appliance | Attack Surface Reduction | 1688836450000 | VMware vRealize Automation, formerly vCloud Automation Center, is a software product that offers multivendor and multicloud support. It allows for IT infrastructure personalization and resource provisioning and configuration, and it automates application delivery and container management. This issue identifies the web login interface for VMware vRealize Automation Appliance. | On | 1688074708000 | High | Due to the network access provided by VMware vRealize Automation, it is recommended for instances of VMware vRealize Automation to not be accessible to the public Internet unless there is a business need.
Xpanse recommends working to identify the asset owner and collaborating with them to remove the asset from the internet. |
@@ -1533,8 +1575,11 @@ Assigns tags to a list of assets.
There is no context output for this command.
#### Command example
+
```!asm-tag-asset-assign tags="Test" asm_id_list="76fb0c06-52cf-33b5-8166-3a130bb25eb6"```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1544,6 +1589,7 @@ There is no context output for this command.
```
#### Human Readable Output
+
```Assignment operation: succeeded```
### asm-tag-asset-remove
@@ -1567,8 +1613,11 @@ Removes tags from a list of assets.
There is no context output for this command.
#### Command example
+
```!asm-tag-asset-remove tags="Test" asm_id_list="76fb0c06-52cf-33b5-8166-3a130bb25eb6"```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1578,6 +1627,7 @@ There is no context output for this command.
```
#### Human Readable Output
+
```Removal operation: succeeded```
### asm-tag-range-assign
@@ -1601,8 +1651,11 @@ Assigns tags to a list of IP ranges.
There is no context output for this command.
#### Command example
+
```!asm-tag-range-assign range_id_list="ba8d8f59-6445-37c0-a145-2233f9e5a9bd" tags="Test"```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1612,6 +1665,7 @@ There is no context output for this command.
```
#### Human Readable Output
+
```Assignment operation: succeeded```
### asm-tag-range-remove
@@ -1635,8 +1689,11 @@ Removes tags from a list of IP ranges.
There is no context output for this command.
#### Command example
+
```!asm-tag-range-remove range_id_list="ba8d8f59-6445-37c0-a145-2233f9e5a9bd" tags="Test"```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1646,6 +1703,7 @@ There is no context output for this command.
```
#### Human Readable Output
+
```Removal operation: succeeded```
### asm-list-incidents
@@ -1702,8 +1760,11 @@ Fetches ASM incidents that match provided filters. Incidents are an aggregation
| ASM.Incident.xpanse_risk_score | Unknown | Risk score of the incident. |
#### Command example
+
```!asm-list-incidents limit=1 status=new```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1755,7 +1816,9 @@ Fetches ASM incidents that match provided filters. Incidents are an aggregation
```
#### Human Readable Output
+>
>### ASM Incidents
+>
>|Alert Count|Alerts Grouping Status|Creation Time|Critical Severity Alert Count|Description|High Severity Alert Count|Host Count| Hosts |Incident Id|Incident Sources|Low Severity Alert Count|Med Severity Alert Count|Modification Time|Severity|Starred|Status|Tags|User Count|Xdr Url|
>|---|---|---|---|---|---|---|--------------|---|---|---|---|---|---|---|---|---|---|---|
>| 1 | Enabled | 1688837015292 | 0 | 'Insecure Communication Protocol at example.com:443' | 0 | 1 | 1.1.1.1:null | 5508 | ASM | 0 | 1 | 1688837015292 | medium | false | new | AR:Registered to You,
IPR:Test IP | 0 | https://exp-test.crtx.eu.paloaltonetworks.com/incident-view?caseId=5508 |
@@ -1776,6 +1839,7 @@ Returns additional details about a specific incident. Note: Incident IDs may als
| incident_id | The ID of the incident to be fetched. | Required |
#### Context Output
+
| **Path** | **Type** | **Description** |
| --- |----------| --- |
| ASM.Incident.incident_id | String | The ID of the incident. |
@@ -1788,8 +1852,11 @@ Returns additional details about a specific incident. Note: Incident IDs may als
| ASM.Incident.notes | String | User-provided notes related to the incident. |
#### Command example
+
```!asm-get-incident incident_id=71```
+
#### Context Example
+
```json
{
"aggregated_score": 825,
@@ -1922,8 +1989,11 @@ Returns additional details about a specific incident. Note: Incident IDs may als
"xpanse_risk_score": 825
}
```
+
#### Human Readable Output
+>
>### ASM Incident
+>
>|Aggregated Score| Alert Count | Alerts |Alerts Grouping Status|Assigned User Mail| Assigned User Pretty Name |Creation Time|Critical Severity Alert Count| Description |High Severity Alert Count|Host Count| Hosts |Incident Id|Incident Sources|Is Blocked|Low Severity Alert Count|Med Severity Alert Count|Modification Time|Original Tags|Rule Based Score|Severity|Starred|Status|Tags|User Count|Xdr Url|Xpanse Risk Explainer|Xpanse Risk Score|
>|---|-------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---|---|---------------------------|---|---|----------------------------------------------------------------------------------------------|---|---|--------------|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| 825 | 2 | {'alert_id': '113716', 'name': 'Insecure Microsoft Exchange Server (15.0.1497.36) at 1.1.1.1:443', 'description': 'This issue flags on-premises Microsoft Exchange Servers that are known to be below the current up-to-date secured versions suggested by Microsoft.' , 'resolution_status': 'STATUS_010_NEW'},
{'alert_id': '89896', 'name': 'Missing X-Xss-Protection at 1.1.1.1:443', 'description': 'The X-XSS-Protection header is used to reduce the risk of cross-site scripting attacks. Not including it could make your website less secure.', 'resolution_status': 'STATUS_010_NEW'} | Disabled | cs@acme.com | User One | 1671912678672 | 0 | 'Insecure Microsoft Exchange Server (15.0.1497.36) at 1.1.1.1:443' along with 1 other alerts | 4 | 1 | 1.1.1.1:null | 71 | ASM | false | 0 | 2 | 1696275576460 | BU:Xpanse VanDelay Demo 3 | 825 | high | true | under_investigation | AR:Registered to You | 0 | https://exp-test.crtx.eu.paloaltonetworks.com/incident-view?caseId=71 | cves: {'cveId': 'CVE-2021-26855', 'cvssScore': 9.800000190734863, 'epssScore': 0.9749900102615356, 'matchType': 'ExactVersionMatch', 'confidence': 'High', 'exploitMaturity': 'Weaponized', 'reportedExploitInTheWild': True, 'mostRecentReportedExploitDate': '2023-10-12'},
{'cveId': 'CVE-2021-34473', 'cvssScore': 9.800000190734863, 'epssScore': 0.9732999801635742, 'matchType': 'ExactVersionMatch', 'confidence': 'High', 'exploitMaturity': 'Weaponized', 'reportedExploitInTheWild': True, 'mostRecentReportedExploitDate': '2023-10-12'},
{'cveId': 'CVE-2021-34523', 'cvssScore': 9.800000190734863, 'epssScore': 0.9726300239562988, 'matchType': 'ExactVersionMatch', 'confidence': 'High', 'exploitMaturity': 'Weaponized', 'reportedExploitInTheWild': True, 'mostRecentReportedExploitDate': '2023-10-12'}
riskFactors: {'attributeId': 'misconfiguration', 'attributeName': 'Misconfiguration', 'issueTypes': [{'displayName': 'Insecure Microsoft Exchange Server', 'issueTypeId': 'InsecureMicrosoftExchangeServer'}, {'displayName': 'Missing X-XSS-Protection Header', 'issueTypeId': 'MissingXXssProtectionHeader'}]},
{'attributeId': 'critical_system', 'attributeName': 'Critical System', 'issueTypes': [{'displayName': 'Insecure Microsoft Exchange Server', 'issueTypeId': 'InsecureMicrosoftExchangeServer'}]},
{'attributeId': 'potential_data_loss', 'attributeName': 'Potential Data Loss', 'issueTypes': [{'displayName': 'Insecure Microsoft Exchange Server', 'issueTypeId': 'InsecureMicrosoftExchangeServer'}]}
versionMatched: true | 825 |
@@ -1957,8 +2027,11 @@ Updates a given incident. Can be used to modify the status, severity, assignee,
#### Command example
+
```!asm-update-incident incident_id="3674" alert_id="4372" comment="this is an xsoar test"```
+
#### Context Example
+
```json
{
"ASM": {
@@ -1968,6 +2041,7 @@ Updates a given incident. Can be used to modify the status, severity, assignee,
```
#### Human Readable Output
+
```Update operation successful: true```
@@ -1995,8 +2069,11 @@ Updates the state of one or more alerts.
| ASM.UpdatedAlerts | unknown | IDs of the updated alerts. |
#### Command example
+
```!asm-update-alerts alert_id_list=602 status=new```
+
#### Context Example
+
```json
{
"ASM": {
@@ -2006,12 +2083,13 @@ Updates the state of one or more alerts.
```
#### Human Readable Output
+
```Updated alerts: [602]```
### ip
***
-Returns enrichment for an IP address.
+Returns a reputation lookup for an IP address found in Cortex Xpanse.
#### Base Command
@@ -2025,31 +2103,41 @@ Returns enrichment for an IP address.
#### Context Output
-| **Path** | **Type** | **Description** |
-|-------------------------|----------|------------------------------------------------------------|
-| ASM.IP.ip | String | The IP address of the asset. |
-| ASM.IP.domain | String | The domain affiliated with an asset. |
-| ASM.IP.name | String | The asset name. |
-| ASM.IP.asset_type | String | The asset type. |
-| ASM.IP.first_observed | unknown | When the asset was first observed. |
-| ASM.IP.last_observed | unknown | When the asset was last observed. |
-| ASM.IP.asm_ids | unknown | The ID of the asset. |
-| ASM.IP.service_type | unknown | Affiliated service types for the asset. |
-| ASM.IP.tags | unknown | A list of tags that have been assigned to the asset. |
-| ASM.IP.asset_explainers | unknown | The asset explanation details. |
-| ASM.IP.domain_details | unknown | Additional domain details. |
-| ASM.IP.recent_ips | unknown | Details about the recent IP observations. |
-| DBotScore.Vendor | String | The vendor reporting the score of the indicator. |
-| DBotScore.Score | Number | An integer regarding the status of the indicator. |
-| DBotScore.Indicator | String | The indicator value. |
-| DBotScore.Type | String | The vendor used to calculate the score. |
-| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
-| IP.Address | String | IP address. |
+| **Path** | **Type** | **Description** |
+|---------------------------|----------|----------------------------------------------------------------------------|
+| ASM.IP.ip | String | The IP address of the asset. |
+| ASM.IP.domain | String | The domain affiliated with an asset. |
+| ASM.IP.name | String | The asset name. |
+| ASM.IP.asset_type | String | The asset type. |
+| ASM.IP.first_observed | unknown | When the asset was first observed. |
+| ASM.IP.last_observed | unknown | When the asset was last observed. |
+| ASM.IP.asm_ids | unknown | The ID of the asset. |
+| ASM.IP.service_type | unknown | Affiliated service types for the asset. |
+| ASM.IP.tags | unknown | A list of tags that have been assigned to the asset. |
+| ASM.IP.asset_explainers | unknown | The asset explanation details. |
+| ASM.IP.domain_details | unknown | Additional domain details. |
+| ASM.IP.recent_ips | unknown | Details about the recent IP observations. |
+| DBotScore.Vendor | String | The vendor reporting the score of the indicator. |
+| DBotScore.Score | Integer | An integer regarding the status of the indicator. |
+| DBotScore.Indicator | String | The indicator value. |
+| DBotScore.Type | String | The vendor used to calculate the score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| IP.Address | String | IP address. |
+| ASM.TIM.IP.name | String | The existing Cortex Xpanse IP address recently updated in the Cortex XSOAR indicators. |
+| ASM.TIM.IP.indicator_type | String | The existing Cortex Xpanse indicator type in the Cortex XSOAR indicators. |
+| ASM.TIM.IP.id | String | The existing indicator ID in the Cortex XSOAR indicators. |
+| ASM.TIM.IP.reliability | String | The existing indicator reliability recently updated in the Cortex XSOAR indicators. |
+| ASM.TIM.IP.score | Integer | The existing indicator score recently updated in the Cortex XSOAR indicators. |
#### Command example
-```!ip ip=1.1.1.1```
+
+```!ip ip="1.1.1.1, 1.1.1.2, 8.8.8.8"```
+
#### Context Example
+
+If the indicator was **not** updated in Cortex XSOAR in the last 3 days:
+
```json
{
"ASM": {
@@ -2124,16 +2212,59 @@ Returns enrichment for an IP address.
}
```
+If the indicator is **related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+```json
+{
+ "ASM": {
+ "TIM": {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "indicator_type": "IP",
+ "name": "1.1.1.2",
+ "reliability": "A+ - 3rd party enrichment",
+ "score": 0
+ }
+ }
+}
+```
+
#### Human Readable Output
->### Xpanse IP List
->|asm_ids|asset_explainers|asset_type|domain|domain_details|first_observed|ip|last_observed|name|recent_ips|service_type|tags|
->|---|---|---|---|---|---|---|---|---|---|---|---|
->| 4b1f3765-de40-3a1a-8535-667420408fd9 | | DOMAIN | *.acme.com | admin: {"city": "", "country": "us", "emailAddress": "", "faxExtension": null, "faxNumber": "", "name": "", "organization": "Acme, Inc.", "phoneExtension": null, "phoneNumber": "", "postalCode": "", "province": "AZ", "registryId": null, "street": ""}| 1679457579382 | 1.1.1.1 | 1697361335282 | *.acme.com | {'id': '218b3cc9-2d26-3a17-aadd-9eac08cc30ec', 'ip': 52529952, 'ipv6': None, 'source': {'name': 'DOMAIN_RESOLUTION'}, 'provider': {'name': 'AWS', 'additionalProviderInfo': None, 'isCdn': False, 'legacyName': 'AWS', 'displayName': 'Amazon Web Services', 'cdn': False}, 'firstObserved': 1692418207732, 'lastObserved': 1697361335282} | HttpServer | BU:Xpanse VanDelay Demo 3 |
+
+If the indicator was **not** updated in Cortex XSOAR in the last 3 days:
+
+> ### Xpanse Discovered IP List
+>
+> |asm_ids|asset_explainers|asset_type|domain|domain_details|first_observed|ip|last_observed|name|recent_ips|service_type|tags|
+> |---|---|---|---|---|---|---|---|---|---|---|---|
+> | 4b1f3765-de40-3a1a-8535-667420408fd9 | | DOMAIN | *.acme.com | admin: {"city": "", "country": "us", "emailAddress": "", "faxExtension": null, "faxNumber": "", "name": "", "organization": "Acme, Inc.", "phoneExtension": null, "phoneNumber": "", "postalCode": "", "province": "AZ", "registryId": null, "street": ""}| 1679457579382 | 1.1.1.1 | 1697361335282 | *.acme.com | {'id': '218b3cc9-2d26-3a17-aadd-9eac08cc30ec', 'ip': 52529952, 'ipv6': None, 'source': {'name': 'DOMAIN_RESOLUTION'}, 'provider': {'name': 'AWS', 'additionalProviderInfo': None, 'isCdn': False, 'legacyName': 'AWS', 'displayName': 'Amazon Web Services', 'cdn': False}, 'firstObserved': 1692418207732, 'lastObserved': 1697361335282} | HttpServer | BU:Xpanse VanDelay Demo 3 |
+
+If the indicator is **related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+> ### Xpanse Discovered IP List (Existing Indicators)
+>
+> This IP list is from existing records found in Cortex XSOAR within the last 3 days.
+> If you would like additional Cortex Xpanse specific information about these, use asm-list-asset-internet-exposure.
+>
+> |id|indicator_type|name|reliability|score|
+> |---|---|---|---|---|
+> | abcd1b2abcd1a0b20c7a8bc5d67e8eea | IP | 1.1.1.2 | A+ - 3rd party enrichment | 0 |
+
+If the indicator is **not related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+> ### XSOAR Indicator Discovered IP List (Not Related to Cortex Xpanse)
+>
+> This IP list is from existing records found in Cortex XSOAR within the last 3 days.
+> These IPs have not been found to be attributed to Cortex Xpanse.
+>
+> |integrations|name|
+> |---|---|
+> | VirusTotal (API v3) | 8.8.8.8 |
+
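+The three outputs above depend on whether a matching indicator already exists in Cortex XSOAR and whether that indicator is attributed to Cortex Xpanse. The sketch below is illustrative only and is not the integration's implementation; the helper name and the indicator query string are assumptions, while the 3-day window mirrors the behavior described above.
+
+```python
+# Illustrative sketch (not the integration's code): look up a recently updated XSOAR
+# indicator for a value and report whether it is attributed to Cortex Xpanse.
+import demistomock as demisto  # noqa: F401
+
+
+def find_recent_xpanse_indicator(value):
+    # Query existing XSOAR indicators; the modified-time filter syntax here is an assumption.
+    res = demisto.searchIndicators(query='value:"{}" and modified:>="now-3d"'.format(value), size=10)
+    for ioc in res.get('iocs') or []:
+        # sourceBrands lists the integrations that created or updated the indicator.
+        if 'Cortex Xpanse' in (ioc.get('sourceBrands') or []):
+            return {'id': ioc.get('id'), 'indicator_type': ioc.get('indicator_type'),
+                    'name': ioc.get('value'), 'score': ioc.get('score')}
+    return None
+```
+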
### domain
***
-Returns enrichment for a domain.
+Returns a reputation lookup for a domain found in Cortex Xpanse.
#### Base Command
@@ -2147,29 +2278,39 @@ Returns enrichment for a domain.
#### Context Output
-| **Path** | **Type** | **Description** |
-|-----------------------------|----------|------------------------------------------------------------|
-| ASM.Domain.domain | String | The domain affiliated with an asset. |
-| ASM.Domain.name | String | The asset name. |
-| ASM.Domain.asset_type | String | The asset type. |
-| ASM.Domain.first_observed | unknown | When the asset was first observed. |
-| ASM.Domain.last_observed | unknown | When the asset was last observed. |
-| ASM.Domain.asm_ids | unknown | The ID of the asset. |
-| ASM.Domain.service_type | unknown | Affiliated service types for the asset. |
-| ASM.Domain.tags | unknown | A list of tags that have been assigned to the asset. |
-| ASM.Domain.asset_explainers | unknown | The asset explanation details. |
-| ASM.Domain.domain_details | unknown | Additional domain details. |
-| ASM.Domain.recent_ips | unknown | Details about the recent IP observations. |
-| DBotScore.Vendor | String | The vendor reporting the score of the indicator. |
-| DBotScore.Score | Number | An integer regarding the status of the indicator. |
-| DBotScore.Indicator | String | The indicator value. |
-| DBotScore.Type | String | The vendor used to calculate the score. |
-| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
-| Domain.Name | String | Name of the domain. |
+| **Path** | **Type** | **Description** |
+|-------------------------------|----------|----------------------------------------------------------------------------|
+| ASM.Domain.domain | String | The domain affiliated with an asset. |
+| ASM.Domain.name | String | The asset name. |
+| ASM.Domain.asset_type | String | The asset type. |
+| ASM.Domain.first_observed | unknown | When the asset was first observed. |
+| ASM.Domain.last_observed | unknown | When the asset was last observed. |
+| ASM.Domain.asm_ids | unknown | The ID of the asset. |
+| ASM.Domain.service_type | unknown | Affiliated service types for the asset. |
+| ASM.Domain.tags | unknown | A list of tags that have been assigned to the asset. |
+| ASM.Domain.asset_explainers | unknown | The asset explanation details. |
+| ASM.Domain.domain_details | unknown | Additional domain details. |
+| ASM.Domain.recent_ips | unknown | Details about the recent IP observations. |
+| DBotScore.Vendor | String | The vendor reporting the score of the indicator. |
+| DBotScore.Score | Number | An integer regarding the status of the indicator. |
+| DBotScore.Indicator | String | The indicator value. |
+| DBotScore.Type | String | The vendor used to calculate the score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| Domain.Name | String | Name of the domain. |
+| ASM.TIM.Domain.name | String | The existing Cortex Xpanse domain recently updated in the Cortex XSOAR indicators. |
+| ASM.TIM.Domain.indicator_type | String | The existing Cortex Xpanse indicator type in the Cortex XSOAR indicators. |
+| ASM.TIM.Domain.id | String | The existing indicator ID in the Cortex XSOAR indicators. |
+| ASM.TIM.Domain.reliability | String | The existing indicator reliability recently updated in the Cortex XSOAR indicators. |
+| ASM.TIM.Domain.score | Integer | The existing indicator score recently updated in the Cortex XSOAR indicators. |
#### Command example
-```!domain domain="*.acme.com"```
+
+```!domain domain="*.acme.com, www.example.com, www.fakedomain.com"```
+
#### Context Example
+
+If the indicator was **not** updated in Cortex XSOAR in the last 3 days:
+
```json
{
"ASM": {
@@ -2243,11 +2384,53 @@ Returns enrichment for a domain.
}
```
+If the indicator is **related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+```json
+{
+ "ASM": {
+ "TIM": {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "indicator_type": "Domain",
+ "name": "www.example.com",
+ "reliability": "A+ - 3rd party enrichment",
+ "score": 0
+ }
+ }
+}
+```
+
#### Human Readable Output
->### Xpanse Domain List
->|asm_ids|asset_explainers|asset_type|domain|domain_details|first_observed|last_observed|name|recent_ips|service_type|tags|
->|---|---|---|---|---|---|---|---|---|---|---|---|
->| 4b1f3765-de40-3a1a-8535-667420408fd9 | | DOMAIN | *.acme.com | admin: {"city": "", "country": "us", "emailAddress": "", "faxExtension": null, "faxNumber": "", "name": "", "organization": "Acme, Inc.", "phoneExtension": null, "phoneNumber": "", "postalCode": "", "province": "AZ", "registryId": null, "street": ""}| 1679457579382 | 1697361335282 | *.acme.com | {'id': '218b3cc9-2d26-3a17-aadd-9eac08cc30ec', 'ip': 52529952, 'ipv6': None, 'source': {'name': 'DOMAIN_RESOLUTION'}, 'provider': {'name': 'AWS', 'additionalProviderInfo': None, 'isCdn': False, 'legacyName': 'AWS', 'displayName': 'Amazon Web Services', 'cdn': False}, 'firstObserved': 1692418207732, 'lastObserved': 1697361335282} | HttpServer | BU:Xpanse VanDelay Demo 3 |
+
+If the indicator was **not** updated in Cortex XSOAR in the last 3 days:
+
+> ### Xpanse Discovered Domain List
+>
+> |asm_ids|asset_explainers|asset_type|domain|domain_details|first_observed|last_observed|name|recent_ips|service_type|tags|
+> |---|---|---|---|---|---|---|---|---|---|---|---|
+> | 4b1f3765-de40-3a1a-8535-667420408fd9 | | DOMAIN | *.acme.com | admin: {"city": "", "country": "us", "emailAddress": "", "faxExtension": null, "faxNumber": "", "name": "", "organization": "Acme, Inc.", "phoneExtension": null, "phoneNumber": "", "postalCode": "", "province": "AZ", "registryId": null, "street": ""}| 1679457579382 | 1697361335282 | *.acme.com | {'id': '218b3cc9-2d26-3a17-aadd-9eac08cc30ec', 'ip': 52529952, 'ipv6': None, 'source': {'name': 'DOMAIN_RESOLUTION'}, 'provider': {'name': 'AWS', 'additionalProviderInfo': None, 'isCdn': False, 'legacyName': 'AWS', 'displayName': 'Amazon Web Services', 'cdn': False}, 'firstObserved': 1692418207732, 'lastObserved': 1697361335282} | HttpServer | BU:Xpanse VanDelay Demo 3 |
+
+If the indicator is **related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+> ### Xpanse Discovered Domain List (Existing Indicators)
+>
+> This domain list is from existing records found in Cortex XSOAR within the last 3 days.
+> If you would like additional Cortex Xpanse specific information about these, use asm-list-asset-internet-exposure.
+>
+> |id|indicator_type|name|reliability|score|
+> |---|---|---|---|---|
+> | abcd1b2abcd1a0b20c7a8bc5d67e8eea | Domain | www.example.com | A+ - 3rd party enrichment | 0 |
+
+If the indicator is **not related to Xpanse** and was updated in Cortex XSOAR in the last 3 days:
+
+> ### XSOAR Indicator Discovered Domain List (Not Related to Xpanse)
+>
+> This domain list is from existing records found in Cortex XSOAR within the last 3 days.
+> These domains have not been found to be attributed to Cortex Xpanse.
+>
+> |integrations|name|
+> |---|---|
+> | VirusTotal (API v3) | www.fakedomain.com |
+
### asm-list-external-websites
@@ -2285,8 +2468,11 @@ Get a list of all your external websites filtered by authentication type. Maximu
| ASM.Externalwebsite.externally_inferred_vulnerability_score | Unknown | External website vulnerability score. |
#### Command example
+
```!asm-list-external-website authentication=Form limit=5```
+
#### Context Example
+
```json
{
"ASM":{
@@ -2714,7 +2900,53 @@ Get a list of all your external websites filtered by authentication type. Maximu
#### Human Readable Output
>### External websites
+>
>|Host|Authentication Type|
>|---|---|
>| example.com | Form based authentication |
+
+### asm-add-note-to-asset
+
+***
+Adds a custom note to the notes section of an asset in Cortex Xpanse.
+
+#### Base Command
+
+`asm-add-note-to-asset`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| asset_id | The ID of the asset to which the note is added. | Required |
+| entity_type | The type of entity the note is added to (for example, asset). | Required |
+| note_to_add | The custom note to be added to the notes section of the asset in Cortex Xpanse. | Required |
+| should_append | Set to 'false' to overwrite the current note on the asset. Set to 'true' to append to the current note. Default is 'true'. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| ASM.AssetAnnotation.status | String | Status of the note being added to the asset in Xpanse. |
+
+#### Command example
+
+```!asm-add-note-to-asset asset_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx entity_type=asset note_to_add="Test adding note to asset."```
+
+#### Context Example
+
+```json
+{
+ "status": "succeeded"
+}
+```
+
+#### Human Readable Output
+
+> ### Status
+>
+>|Status|
+>|---|
+>| succeeded |
+
+
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples b/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
index 1c9fa1dee9f5..c24b23cb318e 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
@@ -1,8 +1,20 @@
+!asm-add-note-to-asset asset_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx entity_type=asset note_to_add="Test adding note to asset."
!asm-get-asset-internet-exposure asm_id=3c176460-8735-333c-b618-8262e2fb660c
+!asm-get-attack-surface-rule enabled_status=On limit=1
!asm-get-external-ip-address-range range_id=4da29b7f-3086-3b52-981b-aa8ee5da1e60
!asm-get-external-service service_id=94232f8a-f001-3292-aa65-63fa9d981427
-!asm-list-external-ip-address-range
+!asm-get-incident incident_id=71
+!asm-list-alerts limit=2 severity=high sort_by_creation_time=asc status=reopened
!asm-list-asset-internet-exposure name="acme.com" type=certificate has_active_external_services=no
+!asm-list-external-ip-address-range
!asm-list-external-service domain=acme.com is_active=yes discovery_type=directly_discovery
-!asm-list-alerts limit=2 severity=high sort_by_creation_time=asc status=reopened
-!asm-list-external-websites authentication="Form" limit=5
\ No newline at end of file
+!asm-list-external-websites authentication="Form" limit=5
+!asm-list-incidents limit=1 status=new
+!asm-tag-asset-assign tags="Test" asm_id_list="76fb0c06-52cf-33b5-8166-3a130bb25eb6"
+!asm-tag-asset-remove tags="Test" asm_id_list="76fb0c06-52cf-33b5-8166-3a130bb25eb6"
+!asm-tag-range-assign range_id_list="ba8d8f59-6445-37c0-a145-2233f9e5a9bd" tags="Test"
+!asm-tag-range-remove range_id_list="ba8d8f59-6445-37c0-a145-2233f9e5a9bd" tags="Test"
+!asm-update-alerts alert_id_list=602 status=new
+!asm-update-incident incident_id="3674" alert_id="4372" comment="this is an xsoar test"
+!domain domain="*.acme.com, www.example.com, www.fakedomain.com"
+!ip ip="1.1.1.1, 1.1.1.2, 8.8.8.8"
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
index ac7deb480f46..b15e5ec9afa8 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
@@ -1484,6 +1484,67 @@
ALERT_UPDATE_RESULTS = "Updated alerts: [602]"
+DOMAIN_RESULTS = {
+ "asm_ids": [
+ "4b1f3765-de40-3a1a-8535-667420408fd9"
+ ],
+ "asset_explainers": [],
+ "asset_type": "DOMAIN",
+ "domain": "*.acme.com",
+ "domain_details": {
+ "admin": {},
+ "alignedRegistrar": "MarkMonitor",
+ "collectionTime": 1695942091000,
+ "creationDate": 785376000000,
+        "dnssec": None,
+ "domainName": "acme.com",
+ "domainStatuses": [
+ "clientUpdateProhibited",
+ "clientTransferProhibited",
+ "clientDeleteProhibited"
+ ],
+        "dropped": False,
+ "nameServers": [],
+ "registrant": {},
+ "registrar": {},
+        "registryDomainId": None,
+ "registryExpiryDate": 1732060800000,
+        "reseller": None,
+ "retrievedDate": 1696075229360,
+ "tech": {},
+ "updatedDate": 1666137600000
+ },
+ "first_observed": 1679457579382,
+ "last_observed": 1697361335282,
+ "name": "*.acme.com",
+ "recent_ips": [
+ {
+ "firstObserved": 1692418207732,
+ "id": "218b3cc9-2d26-3a17-aadd-9eac08cc30ec",
+ "ip": "1.1.1.1",
+            "ipv6": None,
+ "lastObserved": 1697361335282,
+ "provider": {
+                "additionalProviderInfo": None,
+                "cdn": False,
+ "displayName": "Amazon Web Services",
+                "isCdn": False,
+ "legacyName": "AWS",
+ "name": "AWS"
+ },
+ "source": {
+ "name": "DOMAIN_RESOLUTION"
+ }
+ }
+ ],
+ "service_type": [
+ "HttpServer"
+ ],
+ "tags": [
+ "BU:Xpanse VanDelay Demo 3"
+ ]
+}
+
IP_RESULTS = [{
"asm_ids": [
"4b1f3765-de40-3a1a-8535-667420408fd9"
@@ -1546,6 +1607,22 @@
]
}]
+XSOAR_SEARCH_INDICATOR_DOMAIN_RESULTS = {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "indicator_type": "Domain",
+ "name": "www.toysrus.com",
+ "reliability": "A+ - 3rd party enrichment",
+ "score": 0
+}
+
+XSOAR_SEARCH_INDICATOR_IP_RESULTS = {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "indicator_type": "IP",
+ "name": "1.1.1.2",
+ "reliability": "A+ - 3rd party enrichment",
+ "score": 0
+}
+
EXTERNAL_WEBSITES_RESULTS = {
"ExternalWebsite": {
"total_count": 3343,
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
index bcb7af27ba38..b6e89502bfdc 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
@@ -1755,6 +1755,294 @@
}
}
+XSOAR_SEARCH_INDICATOR_DOMAIN_RESPONSE_RAW = {
+ "total": 1,
+ "iocs": [
+ {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "version": 26,
+ "cacheVersn": 0,
+ "sequenceNumber": 5460,
+ "primaryTerm": 1,
+ "modified": "2099-12-31T23:59:59.9999999Z",
+ "sizeInBytes": 2920,
+ "sortValues": [
+ "1718702883637",
+ "abcd1b2abcd1a0b20c7a8bc5d67e8eea"
+ ],
+ "comments": [
+ {
+ "id": "2556bab0-4390-4383-8b61-bc8e4698e11c",
+ "version": 0,
+ "cacheVersn": 0,
+ "modified": "0001-01-01T00:00:00Z",
+ "sizeInBytes": 0,
+ "content": "Created",
+ "user": "DBot",
+ "created": "2099-12-31T23:59:59.9999999Z",
+ "type": "IndicatorCommentTimeLine",
+ "source": "DBot",
+ "entryId": "",
+ "category": "Sighting"
+ }
+ ],
+ "timestamp": "2099-12-31T23:59:59.9999999Z",
+ "indicator_type": "Domain",
+ "value": "www.toysrus.com",
+ "sourceInstances": [
+ "Cortex Xpanse"
+ ],
+ "sourceBrands": [
+ "Cortex Xpanse"
+ ],
+ "lastSeen": "2099-12-31T23:59:59.9999999Z",
+ "firstSeen": "2099-12-31T23:59:59.9999999Z",
+ "lastSeenEntryID": "f9e8d7c6-5b4a-3210-9876-543210fedcba@fedcba98-7654-3210-9876-543210fedcba",
+ "firstSeenEntryID": "f9e8d7c6-5b4a-3210-9876-543210fedcba@fedcba98-7654-3210-9876-543210fedcba",
+ "score": 0,
+ "insightCache": {
+ "id": "www.toysrus.com",
+ "version": 18,
+ "cacheVersn": 0,
+ "sequenceNumber": 652451,
+ "primaryTerm": 1,
+ "modified": "2024-06-18T09:00:00.9999999Z",
+ "sizeInBytes": 829,
+ "scores": {
+ "Cortex Xpanse": {
+ "score": 0,
+ "content": "null",
+ "contentFormat": "json",
+ "timestamp": "2099-12-31T23:59:59.9999999Z",
+ "scoreChangeTimestamp": "2099-12-31T23:59:59.9999999Z",
+                        "isTypedIndicator": False,
+ "type": "domain",
+ "context": {
+ "DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)": [
+ {
+ "Indicator": "www.toysrus.com",
+ "Reliability": "A+ - 3rd party enrichment",
+ "Score": 0,
+ "Type": "domain",
+ "Vendor": "Cortex Xpanse"
+ }
+ ],
+ "Domain(val.Name && val.Name == obj.Name)": [
+ {
+ "Name": "www.toysrus.com"
+ }
+ ]
+ },
+ "reliability": "A+ - 3rd party enrichment"
+ }
+ }
+ },
+ "moduleToFeedMap": {
+ "Cortex Xpanse": {
+ "reliability": "A+ - 3rd party enrichment",
+ "fetchTime": "2099-12-31T23:59:59.9999999Z",
+ "sourceBrand": "Cortex Xpanse",
+ "sourceInstance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 0,
+                    "bypassExclusionList": False,
+ "score": 0,
+ "classifierVersion": 0,
+ "classifierId": "",
+ "mapperVersion": 0,
+ "mapperId": "",
+ "type": "Domain",
+ "value": "www.toysrus.com",
+ "timestamp": "0001-01-01T00:00:00Z",
+                    "fields": None,
+ "modifiedTime": "0001-01-01T00:00:00Z",
+ "ExpirationSource": {
+ "setTime": "2099-12-31T23:59:59.9999999Z",
+ "source": "indicatorType",
+ "user": "",
+ "brand": "Cortex Xpanse",
+ "instance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 20160
+ },
+                    "rawJSON": None,
+                    "isEnrichment": True
+ }
+ },
+ "expiration": "2099-12-31T23:59:59.9999999Z",
+ "expirationStatus": "active",
+ "expirationSource": {
+ "setTime": "2099-12-31T23:59:59.9999999Z",
+ "source": "indicatorType",
+ "user": "",
+ "brand": "Cortex Xpanse",
+ "instance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 20160
+ },
+ "calculatedTime": "2099-12-31T23:59:59.9999999Z",
+ "lastReputationRun": "2099-12-31T23:59:59.9999999Z",
+ "modifiedTime": "2099-12-31T23:59:59.9999999Z",
+            "isDetectable": False,
+            "isPreventable": False
+ }
+ ],
+ "searchAfter": [
+ "1718702883637",
+ "abcd1b2abcd1a0b20c7a8bc5d67e8eea"
+ ],
+    "accountErrors": None,
+ "totalAccounts": 0
+}
+
+XSOAR_SEARCH_INDICATOR_IP_RESPONSE_RAW = {
+ "total": 1,
+ "iocs": [
+ {
+ "id": "abcd1b2abcd1a0b20c7a8bc5d67e8eea",
+ "version": 26,
+ "cacheVersn": 0,
+ "sequenceNumber": 5460,
+ "primaryTerm": 1,
+ "modified": "2099-12-31T23:59:59.9999999Z",
+ "sizeInBytes": 2920,
+ "sortValues": [
+ "1718702883637",
+ "abcd1b2abcd1a0b20c7a8bc5d67e8eea"
+ ],
+ "comments": [
+ {
+ "id": "2556bab0-4390-4383-8b61-bc8e4698e11c",
+ "version": 0,
+ "cacheVersn": 0,
+ "modified": "0001-01-01T00:00:00Z",
+ "sizeInBytes": 0,
+ "content": "Created",
+ "user": "DBot",
+ "created": "2099-12-31T23:59:59.9999999Z",
+ "type": "IndicatorCommentTimeLine",
+ "source": "DBot",
+ "entryId": "",
+ "category": "Sighting"
+ }
+ ],
+ "timestamp": "2099-12-31T23:59:59.9999999Z",
+ "indicator_type": "IP",
+ "value": "1.1.1.2",
+ "sourceInstances": [
+ "Cortex Xpanse"
+ ],
+ "sourceBrands": [
+ "Cortex Xpanse"
+ ],
+ "lastSeen": "2099-12-31T23:59:59.9999999Z",
+ "firstSeen": "2099-12-31T23:59:59.9999999Z",
+ "lastSeenEntryID": "f9e8d7c6-5b4a-3210-9876-543210fedcba@fedcba98-7654-3210-9876-543210fedcba",
+ "firstSeenEntryID": "f9e8d7c6-5b4a-3210-9876-543210fedcba@fedcba98-7654-3210-9876-543210fedcba",
+ "score": 0,
+ "insightCache": {
+ "id": "www.toysrus.com",
+ "version": 18,
+ "cacheVersn": 0,
+ "sequenceNumber": 652451,
+ "primaryTerm": 1,
+ "modified": "2024-06-18T09:00:00.9999999Z",
+ "sizeInBytes": 829,
+ "scores": {
+ "Cortex Xpanse": {
+ "score": 0,
+ "content": "null",
+ "contentFormat": "json",
+ "timestamp": "2099-12-31T23:59:59.9999999Z",
+ "scoreChangeTimestamp": "2099-12-31T23:59:59.9999999Z",
+                        "isTypedIndicator": False,
+ "type": "domain",
+ "context": {
+ "DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)": [
+ {
+ "Indicator": "1.1.1.2",
+ "Reliability": "A+ - 3rd party enrichment",
+ "Score": 0,
+ "Type": "IP",
+ "Vendor": "Cortex Xpanse"
+ }
+ ],
+ "Domain(val.Name && val.Name == obj.Name)": [
+ {
+ "Name": "1.1.1.2"
+ }
+ ]
+ },
+ "reliability": "A+ - 3rd party enrichment"
+ }
+ }
+ },
+ "moduleToFeedMap": {
+ "Cortex Xpanse": {
+ "reliability": "A+ - 3rd party enrichment",
+ "fetchTime": "2099-12-31T23:59:59.9999999Z",
+ "sourceBrand": "Cortex Xpanse",
+ "sourceInstance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 0,
+                    "bypassExclusionList": False,
+ "score": 0,
+ "classifierVersion": 0,
+ "classifierId": "",
+ "mapperVersion": 0,
+ "mapperId": "",
+ "type": "IP",
+ "value": "1.1.1.2",
+ "timestamp": "0001-01-01T00:00:00Z",
+                    "fields": None,
+ "modifiedTime": "0001-01-01T00:00:00Z",
+ "ExpirationSource": {
+ "setTime": "2099-12-31T23:59:59.9999999Z",
+ "source": "indicatorType",
+ "user": "",
+ "brand": "Cortex Xpanse",
+ "instance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 20160
+ },
+                    "rawJSON": None,
+                    "isEnrichment": True
+ }
+ },
+ "expiration": "2099-12-31T23:59:59.9999999Z",
+ "expirationStatus": "active",
+ "expirationSource": {
+ "setTime": "2099-12-31T23:59:59.9999999Z",
+ "source": "indicatorType",
+ "user": "",
+ "brand": "Cortex Xpanse",
+ "instance": "Cortex Xpanse",
+ "moduleId": "Cortex Xpanse",
+ "expirationPolicy": "indicatorType",
+ "expirationInterval": 20160
+ },
+ "calculatedTime": "2099-12-31T23:59:59.9999999Z",
+ "lastReputationRun": "2099-12-31T23:59:59.9999999Z",
+ "modifiedTime": "2099-12-31T23:59:59.9999999Z",
+            "isDetectable": False,
+            "isPreventable": False
+ }
+ ],
+ "searchAfter": [
+ "1718702883637",
+ "abcd1b2abcd1a0b20c7a8bc5d67e8eea"
+ ],
+    "accountErrors": None,
+ "totalAccounts": 0
+}
+
IP_DOMAIN_RAW = {
"reply": {
"total_count": 1,
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py
new file mode 100644
index 000000000000..d8c73f012d09
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py
@@ -0,0 +1,374 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
+
+import urllib3
+from typing import Any
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+''' CONSTANTS '''
+
+DEFAULT_FEED_TAGS = {'XPANSE'}
+DEFAULT_ASSET_SEARCH_LIMIT = 5000
+V1_URL_SUFFIX = "/public_api/v1"
+
+''' CLIENT CLASS '''
+
+
+class Client(BaseClient):
+ """
+ Client class to interact with the service API
+ """
+
+ def __init__(self, base_url: str, verify: bool, proxy: bool, feed_tags: list[str], tlp_color: str, headers: dict):
+ """
+ Class initialization.
+ """
+ super().__init__(base_url=base_url, verify=verify, proxy=proxy, headers=headers)
+ self.feed_tags = feed_tags
+ self.tlp_color = tlp_color
+ self.verify = verify
+ self.proxy = proxy
+ self.headers = headers
+
+ def list_asset_internet_exposure_request(self, search_params: list[dict] = [], search_from: int = 0,
+ search_to: int = DEFAULT_ASSET_SEARCH_LIMIT,
+ use_paging: bool = True) -> list:
+ """Get a list of all your internet exposure assets using the '/assets/get_assets_internet_exposure/' endpoint.
+
+ Args:
+ search_params (list): list of search parameters to add to the API call body.
+ search_from (int): Starting search index.
+ search_to (int): Ending search index.
+ use_paging (bool): whether to use paging or not (default is True)
+
+ Returns:
+ List: list containing dictionaries of internet exposure assets.
+ """
+ body = {"request_data": {"filters": search_params, "search_to": int(
+ search_to), "search_from": int(search_from), "use_page_token": True}}
+ full_response = []
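+        # Page through the API using the next_page_token from each reply, accumulating
+        # results in full_response until no token remains (or paging is disabled).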
+ while True:
+ result = self._http_request(
+ method='POST',
+ url_suffix=f'{V1_URL_SUFFIX}/assets/get_assets_internet_exposure/',
+ json_data=body
+ )
+
+ data = result.get('reply', {}).get('assets_internet_exposure')
+ if data:
+ full_response.extend(data)
+ if not use_paging:
+ break
+ pagination = result.get('reply', {}).get("next_page_token")
+ if pagination is None:
+ break
+ body["request_data"]["next_page_token"] = pagination
+
+ return full_response
+
+
+''' HELPER FUNCTIONS '''
+
+
+def create_x509_certificate_grids(string_object: Optional[str]) -> list:
+ """
+ Creates a grid field related to the subject and issuer field of the x509 certificate object.
+
+ Args:
+ string_object (Optional[str]): A str in format of C=ZA, Inc.,ST=Western Cape,L=Cape Town,O=Thawte.
+ Returns:
+ list: The return value. A list of dict [{"title": "C", "data": "ZA"}].
+ """
+ result_grid_list = []
+ if string_object:
+ key_value_pairs = string_object.split(',')
+ for pair in key_value_pairs:
+ result_grid = {}
+ # '=' in pair means we extracted the right entries for k/v
+ if '=' in pair:
+ key, value = pair.split('=', 1)
+ result_grid['title'] = key
+ result_grid['data'] = value
+ result_grid_list.append(result_grid)
+ # If no '=' that means we had a ',' within the value we need to append
+ else:
+ result_grid_list[-1]['data'] = (result_grid_list[-1]['data'] + ", " + pair).replace("\\", "")
+ return result_grid_list
+
+
+def map_indicator_fields(raw_indicator: dict[str, Any], asset_type: str) -> dict[str, Any]:
+ """
+ Create indicator field mapping based on asset_type
+
+ Args:
+ raw_indicator (Dict[str, Any]): raw indicator as JSON.
+ asset_type (str): indicator type
+
+ Returns:
+ Dict: dictionary of indicator field mappings.
+ """
+ # name is a required API return parameter
+ description = raw_indicator['name'] + " indicator of asset type " + asset_type + " from Cortex Xpanse"
+ indicator_fields = {"internal": True, "description": description}
+ if asset_type == 'Domain':
+ if domain_details := raw_indicator.get("domain_details"):
+ domain_fields_mapping: dict = {
+ "creationDate": "creationdate",
+ "registryExpiryDate": "expirationdate",
+ }
+
+ for key, mapped_key in domain_fields_mapping.items():
+ if detail_value := domain_details.get(key):
+ indicator_fields[mapped_key] = timestamp_to_datestring(detail_value)
+
+ elif asset_type == 'X509 Certificate' and (cert_details := raw_indicator.get("certificate_details")):
+ cert_fields_mapping: dict = {
+ "signatureAlgorithm": ("signaturealgorithm", None),
+ "serialNumber": ("serialnumber", None),
+ "validNotAfter": ("validitynotafter", timestamp_to_datestring),
+ "validNotBefore": ("validitynotbefore", timestamp_to_datestring),
+ "issuer": ("issuer", create_x509_certificate_grids),
+ "subject": ("subject", create_x509_certificate_grids),
+ }
+
+ for key, (mapped_key, processing_func) in cert_fields_mapping.items():
+ if detail_value := cert_details.get(key):
+ # Apply processing function if one is defined
+ if processing_func:
+ indicator_fields[mapped_key] = processing_func(detail_value)
+ else:
+ indicator_fields[mapped_key] = detail_value
+ return indicator_fields
+
+
+def map_indicator_type(asset_type: str) -> str:
+ """
+ Correlates asset_type to indicator type or returns "None"
+
+ Args:
+ asset_type (str): Xpanse asset type.
+
+ Returns:
+ str: indicator type or "None".
+ """
+ asset_types_mapping = {
+ 'UNASSOCIATED_RESPONSIVE_IP': 'IP',
+ "DOMAIN": 'Domain',
+ "CERTIFICATE": "X509 Certificate",
+ 'CIDR': 'CIDR'
+ }
+ return asset_types_mapping.get(asset_type, "None")
+
+
+def build_asset_indicators(client: Client, raw_indicators: list[dict[str, Any]]) -> list:
+ """
+ Builds indicators JSON data in XSOAR expected format from the raw response.
+
+ Args:
+ client (Client): Xpanse client.
+ raw_indicators (List[Dict[str, Any]]): raw indicators as JSON.
+
+ Returns:
+        List: list of indicators to be sent to XSOAR.
+ """
+ demisto.debug(f'Creating {len(raw_indicators)} asset indicators.')
+ indicators: list = []
+
+ for raw_indicator in raw_indicators:
+ asset_type = raw_indicator.get("asset_type", 'None')
+ indicator_type = map_indicator_type(asset_type)
+
+ # Skip IPv6 responsive or not found type
+ if raw_indicator.get("ipv6s") or indicator_type == 'None':
+ continue
+
+ # name is a required API return parameter
+ name = raw_indicator['name']
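+        # Wildcard domains (e.g., *.acme.com) are represented as DomainGlob indicators in XSOAR.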
+ indicator_type = 'DomainGlob' if '*' in name and indicator_type == 'Domain' else indicator_type
+ fields = map_indicator_fields(raw_indicator, indicator_type)
+
+ # Add TLP color and feed tags if they exist
+ if client.tlp_color:
+ fields['trafficlightprotocol'] = client.tlp_color
+ if client.feed_tags:
+ fields['tags'] = client.feed_tags
+
+ indicator = {
+ 'value': name,
+ 'type': indicator_type,
+ 'fields': fields,
+ 'rawJSON': raw_indicator
+ }
+
+ indicators.append(indicator)
+
+ return indicators
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def test_module(client: Client): # pragma: no cover
+ """
+    Tests API connectivity and authentication.
+
+ Returning 'ok' indicates that the integration works like it is supposed to.
+ Connection to the service is successful.
+ Raises exceptions if something goes wrong.
+
+ :type client: ``Client``
+    :param client: client to use
+
+ :return: 'ok' if test passed, anything else will fail the test.
+ :rtype: ``str``
+ """
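+    # A minimal single-asset query is enough to validate the URL, credentials, and connectivity;
+    # any raised exception fails the test.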
+ client.list_asset_internet_exposure_request(search_to=1, use_paging=False)
+ return_results('ok')
+
+
+def fetch_indicators(client: Client, limit: Optional[int] = None,
+ asset_type: str = 'all') -> tuple | list:
+ """
+ Fetch indicators from Xpanse API and create indicators in XSOAR.
+
+ Args:
+ client (Client): Xpanse client.
+        limit (int): limit the number of indicators to return.
+ asset_type (str): which asset_types to pull from API.
+
+ Returns:
+        List: list of indicators to be sent to XSOAR.
+ List: raw response from API.
+ """
+ asset_list, asset_response = [], []
+ if asset_type == 'all':
+ asset_list = ["CERTIFICATE", "DOMAIN", "UNASSOCIATED_RESPONSIVE_IP"]
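+    # asset_type may be a concatenated string such as 'ipv4domaincertificate' (built in get_indicators),
+    # so substring checks below select which Xpanse asset types to request.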
+ if 'domain' in asset_type:
+ asset_list.append("DOMAIN")
+ if 'certificate' in asset_type:
+ asset_list.append("CERTIFICATE")
+ if 'ipv4' in asset_type:
+ asset_list.append("UNASSOCIATED_RESPONSIVE_IP")
+ if limit:
+ # Had to add 1 to the limit to get the right return.
+ asset_response = client.list_asset_internet_exposure_request(
+ search_params=[{"field": "type", "operator": "in", "value": asset_list}], search_to=limit + 1, use_paging=False)
+ else:
+ asset_response = client.list_asset_internet_exposure_request(
+ search_params=[{"field": "type", "operator": "in", "value": asset_list}])
+
+    asset_indicators = build_asset_indicators(client, asset_response)
+
+    return asset_indicators, asset_response
+
+
+''' MAIN FUNCTION '''
+
+
+def get_indicators(client: Client, args: dict[str, Any]) -> CommandResults:
+ """
+    Get indicators from the Xpanse API, mainly for debugging.
+
+ Args:
+ client (Client): Xpanse client.
+ args (dict): all command arguments, usually passed from ``demisto.args()``.
+
+ Returns:
+ CommandResults: A ``CommandResults`` object that is then passed to ``return_results``,
+ that contains Xpanse indicators.
+ """
+ hr_list = []
+
+ asset_type = ''
+ if argToBoolean(args.get('ip', 'yes')):
+ asset_type += 'ipv4'
+ if argToBoolean(args.get('domain', 'yes')):
+ asset_type += 'domain'
+ if argToBoolean(args.get('certificate', 'yes')):
+ asset_type += 'certificate'
+
+ limit = arg_to_number(args.get('limit', None))
+
+ if limit and limit <= 0:
+ raise ValueError('Limit must be a positive number.')
+ if limit and limit > DEFAULT_ASSET_SEARCH_LIMIT:
+        raise ValueError('Limit must be less than the API limit of ' + str(DEFAULT_ASSET_SEARCH_LIMIT) + '.')
+    if asset_type == '':
+        raise ValueError('Need to specify at least one asset type.')
+
+ indicators, raw_res = fetch_indicators(client=client, limit=limit, asset_type=asset_type)
+
+ indicators = indicators[:limit] if isinstance(indicators, list) \
+ else [indicators] if indicators else []
+ for record in indicators:
+ hr = {'Name': record.get('value'), 'Type': record.get('type'), 'Description': record['fields']['description']}
+ hr_list.append(hr)
+ return CommandResults(outputs=hr_list, outputs_prefix='ASM.Indicators', raw_response=raw_res,
+ readable_output=tableToMarkdown("Xpanse indicators", hr_list, headers=['Name', 'Type', 'Description']),
+ outputs_key_field='Name')
+
+
+def main() -> None: # pragma: no cover
+ """
+ main function
+ """
+ params = demisto.params()
+ base_url = params.get('url')
+ verify_certificate = not params.get('insecure', False)
+ proxy = params.get('proxy', False)
+ # Append default tags.
+ feed_tags = list(set(argToList(params.get('feedTags', []))) | DEFAULT_FEED_TAGS)
+ tlp_color = params.get('tlp_color', '')
+ creds = params.get('credentials', {})
+ api = creds.get('password', '')
+ add_sensitive_log_strs(api)
+ auth_id = creds.get('identifier', '')
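+    # Standard API key authentication headers for the Cortex Xpanse public API.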
+ headers = {
+ 'Authorization': f'{api}',
+ 'x-xdr-auth-id': f'{auth_id}',
+ 'Content-Type': 'application/json'
+ }
+ command = demisto.command()
+
+ demisto.info(f'Command being called is {command}')
+ try:
+ client = Client(
+ base_url=base_url,
+ verify=verify_certificate,
+ proxy=proxy,
+ feed_tags=feed_tags,
+ tlp_color=tlp_color,
+ headers=headers
+ )
+
+ if command == 'test-module':
+ test_module(client)
+ elif command == 'fetch-indicators':
+ indicators, _ = fetch_indicators(client)
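+            # Create indicators in batches; if a batch fails, retry indicators one at a time
+            # to surface the problematic indicator before re-raising.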
+ for iter_ in batch(indicators, batch_size=2000):
+ try:
+ demisto.createIndicators(iter_)
+ except Exception:
+ # find problematic indicator
+ for indicator in iter_:
+ try:
+ demisto.createIndicators([indicator])
+ except Exception as err:
+ demisto.debug(f'createIndicators Error: failed to create the following indicator:'
+ f' {indicator}\n {err}')
+ raise
+ elif command == 'xpanse-get-indicators':
+ return_results(get_indicators(client, demisto.args()))
+ else:
+ raise NotImplementedError(f'Command "{command}" is not implemented.')
+
+ except Exception as e:
+ return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
+
+
+''' ENTRY POINT '''
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml
new file mode 100644
index 000000000000..748e74857dda
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml
@@ -0,0 +1,153 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Xpanse Feed
+ version: -1
+sectionOrder:
+- Connect
+- Collect
+configuration:
+- additionalinfo: The web UI URL with `api-` added to the front (e.g., https://api-xsiam.paloaltonetworks.com). For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis.
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- additionalinfo: For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. Only standard API key type is supported.
+ display: API Key ID
+ displaypassword: API Key
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'true'
+ display: Fetch indicators
+ name: feed
+ type: 8
+ section: Collect
+ required: false
+- additionalinfo: Indicators from this integration instance will be marked with this reputation.
+ section: Collect
+ display: Indicator Reputation
+ name: feedReputation
+ defaultvalue: Good
+ options:
+ - None
+ - Good
+ - Suspicious
+ - Bad
+ type: 18
+ required: false
+- additionalinfo: Reliability of the source providing the intelligence data.
+ defaultvalue: A - Completely reliable
+ display: Source Reliability
+ name: feedReliability
+ options:
+ - A - Completely reliable
+ - B - Usually reliable
+ - C - Fairly reliable
+ - D - Not usually reliable
+ - E - Unreliable
+ - F - Reliability cannot be judged
+ required: true
+ type: 15
+ section: Collect
+- additionalinfo: The Traffic Light Protocol (TLP) designation to apply to indicators fetched from the feed.
+ defaultvalue: CLEAR
+ display: Traffic Light Protocol Color
+ name: tlp_color
+ options:
+ - RED
+ - AMBER+STRICT
+ - AMBER
+ - GREEN
+ - CLEAR
+ type: 15
+ section: Collect
+ required: false
+- defaultvalue: '20160'
+ display: ''
+ name: feedExpirationInterval
+ type: 1
+ required: false
+- additionalinfo: When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system.
+ defaultvalue: 'false'
+ display: Bypass exclusion list
+ name: feedBypassExclusionList
+ type: 8
+ section: Collect
+ advanced: true
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- additionalinfo: Supports CSV values.
+ display: Tags
+ name: feedTags
+ type: 0
+ section: Collect
+ advanced: true
+ required: false
+- name: feedExpirationPolicy
+ defaultvalue: suddenDeath
+ display: ''
+ type: 17
+ options:
+ - never
+ - interval
+ - indicatorType
+ - suddenDeath
+ section: Collect
+ required: false
+- defaultvalue: '1440'
+ name: feedFetchInterval
+ display: Feed Fetch Interval
+ type: 19
+ section: Collect
+ advanced: true
+ required: false
+description: Use this feed to retrieve the discovered IPs/Domains/Certificates from the Cortex Xpanse asset database.
+display: 'Xpanse Feed'
+name: Xpanse Feed
+script:
+ commands:
+ - arguments:
+ - description: The maximum number of indicators to return.
+ name: limit
+ required: true
+ - defaultValue: yes
+ description: Retrieve discovered IPs.
+ name: ip
+ - defaultValue: yes
+ description: Retrieve discovered domains.
+ name: domain
+ - defaultValue: yes
+ description: Retrieve discovered certificates.
+ name: certificate
+ description: Retrieves a limited number of indicators.
+ name: xpanse-get-indicators
+ outputs:
+ - contextPath: ASM.Indicators.Name
+ description: The name of the indicator.
+ type: String
+ - contextPath: ASM.Indicators.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ASM.Indicators.Type
+ description: The type of the indicator.
+ type: String
+ feed: true
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.96411
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md
new file mode 100644
index 000000000000..52ba1c5c08cb
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md
@@ -0,0 +1,13 @@
+## Configure Cortex Xpanse
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Xpanse Feed.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+   | Server URL | The web UI URL with \`api-\` prepended (e.g., https://api-xsiam.paloaltonetworks.com). For more information, see [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). | True |
+   | API Key ID | See [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). | True |
+   | API Key | See [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). **Only standard API key type is supported**. | True |
+
+4. Click **Test** to validate the URLs, token, and connection.
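For context on how these parameters are typically consumed inside the integration code, here is a minimal sketch. Reading a type 9 credential via `identifier`/`password` is standard XSOAR behavior; the exact header layout is not shown in this diff, so the `Authorization`/`x-xdr-auth-id` pair below is an assumption based on the linked Cortex XDR API documentation:

```python
import demistomock as demisto  # provided by the XSOAR runtime / test harness

params = demisto.params()
base_url = params.get('url')                  # e.g. https://api-<tenant>.paloaltonetworks.com
credentials = params.get('credentials', {})
api_key_id = credentials.get('identifier')    # "API Key ID"
api_key = credentials.get('password')         # "API Key"

# Assumed header layout for a standard (non-advanced) Cortex API key:
headers = {
    'x-xdr-auth-id': str(api_key_id),
    'Authorization': api_key,
    'Content-Type': 'application/json',
}
```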
diff --git a/Packs/AWS-Enrichment-Remediation/Author_image.png b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_image.png
similarity index 100%
rename from Packs/AWS-Enrichment-Remediation/Author_image.png
rename to Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_image.png
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py
new file mode 100644
index 000000000000..4adef942e144
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py
@@ -0,0 +1,209 @@
+"""
+Tests module for Xpanse Feed integration.
+"""
+
+# Client for multiple tests
+from FeedXpanse import Client
+client = Client(
+ base_url='https://test.com', tlp_color="GREEN",
+ verify=True, feed_tags=["test_tag"],
+ headers={
+ "HOST": "test.com",
+        "Authorization": "THISISAFAKEKEY",
+ "Content-Type": "application/json"
+ },
+ proxy=False)
+
+
+def test_map_indicator_type():
+ """Tests map_indicator_type helper function.
+
+ Given:
+ - Indicator type input
+ When:
+ - Getting output from map_indicator_type helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import map_indicator_type
+    # Test known types
+ assert map_indicator_type('UNASSOCIATED_RESPONSIVE_IP') == 'IP'
+ assert map_indicator_type('DOMAIN') == 'Domain'
+ assert map_indicator_type('CERTIFICATE') == 'X509 Certificate'
+ assert map_indicator_type('CIDR') == 'CIDR'
+ # test_map_unknown_type
+ assert map_indicator_type('UNKNOWN_TYPE') == 'None'
+ # test_map_empty_string
+ assert map_indicator_type('') == 'None'
+    # test_map_lowercase_input (mapping is case-sensitive)
+ assert map_indicator_type('domain') == 'None'
+
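A table-driven mapping like the following would satisfy the assertions above; it is a sketch inferred from the tests, not necessarily the shipped implementation:

```python
# Sketch only, inferred from test_map_indicator_type.
ASSET_TYPE_MAP = {
    'UNASSOCIATED_RESPONSIVE_IP': 'IP',
    'DOMAIN': 'Domain',
    'CERTIFICATE': 'X509 Certificate',
    'CIDR': 'CIDR',
}


def map_indicator_type(asset_type):
    # Unknown, empty, or lowercase inputs fall back to the string 'None'.
    return ASSET_TYPE_MAP.get(asset_type, 'None')
```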
+
+def test_create_x509_certificate_grids():
+ """Tests create_x509_certificate_grids helper function.
+
+ Given:
+        - A certificate subject or issuer string input
+ When:
+ - Getting output from create_x509_certificate_grids helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import create_x509_certificate_grids
+ # test_with_valid_string
+ input_str = "C=ZA,ST=Western Cape,L=Cape Town,O=Thawte"
+ expected_output = [
+ {"title": "C", "data": "ZA"},
+ {"title": "ST", "data": "Western Cape"},
+ {"title": "L", "data": "Cape Town"},
+ {"title": "O", "data": "Thawte"}
+ ]
+ assert create_x509_certificate_grids(input_str) == expected_output
+
+ # test_with_none_input
+ assert create_x509_certificate_grids(None) == []
+
+ # test_with_empty_string
+ assert create_x509_certificate_grids('') == []
+
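The assertions above pin down the helper's contract: split a comma-separated X.509 subject/issuer string into grid rows, and return an empty list for `None` or an empty string. A minimal sketch consistent with that contract (assumed, not the integration's exact code):

```python
def create_x509_certificate_grids(subject_string):
    """Sketch: turn 'C=ZA,ST=Western Cape' into [{'title': 'C', 'data': 'ZA'}, ...]."""
    grids = []
    if subject_string:
        for pair in subject_string.split(','):
            if '=' in pair:
                key, value = pair.split('=', 1)
                grids.append({'title': key, 'data': value})
    return grids
```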
+
+def test_map_indicator_fields():
+ """Tests map_indicator_fields helper function.
+
+ Given:
+        - A raw indicator dictionary and its mapped asset type
+ When:
+ - Getting output from map_indicator_fields helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import map_indicator_fields
+ # test_map_indicator_fields_domain
+ raw_indicator = {
+ "name": "example.com",
+ "domain_details": {
+ "creationDate": 1609459200,
+ "registryExpiryDate": 1609459200,
+ }
+ }
+ asset_type = 'Domain'
+ expected_output = {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "creationdate": '1970-01-19T15:04:19.000Z',
+ "expirationdate": '1970-01-19T15:04:19.000Z'
+ }
+ assert map_indicator_fields(raw_indicator, asset_type) == expected_output
+
+ # test_map_indicator_fields_x509_certificate
+ raw_indicator = {
+ "name": "certificate",
+ "certificate_details": {
+ "signatureAlgorithm": "SHA256WithRSAEncryption",
+ "serialNumber": "1234567890",
+ "validNotAfter": 1609459200,
+ "validNotBefore": 1609459200,
+ "issuer": "C=US,ST=California",
+ "subject": "C=US,ST=California",
+ }
+ }
+ asset_type = 'X509 Certificate'
+ expected_output = {
+ "internal": True,
+ "description": "certificate indicator of asset type X509 Certificate from Cortex Xpanse",
+ "signaturealgorithm": "SHA256WithRSAEncryption",
+ "serialnumber": "1234567890",
+ "validitynotafter": "1970-01-19T15:04:19.000Z",
+ "validitynotbefore": "1970-01-19T15:04:19.000Z",
+ "issuer": [{"title": "C", "data": "US"}, {"title": "ST", "data": "California"}],
+ "subject": [{"title": "C", "data": "US"}, {"title": "ST", "data": "California"}]
+ }
+ assert map_indicator_fields(raw_indicator, asset_type) == expected_output
+
+
+def test_build_asset_indicators():
+ """Tests build_asset_indicators helper function.
+
+ Given:
+        - A list of raw asset dictionaries from the Xpanse API
+ When:
+ - Getting output from build_asset_indicators helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import build_asset_indicators
+ # test_build_asset_indicators
+ raw_indicators = [
+ {"name": "example.com", "asset_type": "DOMAIN"},
+ {"name": "example.net", "asset_type": "DOMAIN", "ipv6s": ["::1"]}, # This should be skipped
+ {"name": "*.example.org", "asset_type": "DOMAIN"}, # This should become a DomainGlob
+ {"name": "nonexistent", "asset_type": "CLOUD_SERVER"}, # This should be skipped
+ ]
+ expected_output = [
+ {
+ 'value': "example.com",
+ 'type': "Domain",
+ 'fields': {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"]
+ },
+ 'rawJSON': {"name": "example.com", "asset_type": "DOMAIN"}
+ },
+ {
+ 'value': "*.example.org",
+ 'type': "DomainGlob",
+ 'fields': {
+ "internal": True,
+ "description": "*.example.org indicator of asset type DomainGlob from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"]
+ },
+ 'rawJSON': {"name": "*.example.org", "asset_type": "DOMAIN"}
+ }
+ ]
+ assert build_asset_indicators(client, raw_indicators) == expected_output
+
+
+def test_fetch_indicators(mocker):
+ """Tests fetch_indicators command function.
+
+ Given:
+        - A mocked list_asset_internet_exposure_request API response,
+        loaded from the local test_data/raw_response.py file.
+ When:
+ - Getting output from fetch_indicators command function
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from FeedXpanse import fetch_indicators
+ from test_data.raw_response import EXTERNAL_EXPOSURES_RESPONSE
+ mocker.patch.object(client, 'list_asset_internet_exposure_request', return_value=EXTERNAL_EXPOSURES_RESPONSE)
+ indicators, _ = fetch_indicators(client, limit=1, asset_type='domain')
+ expected_indicators_fields = {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"],
+ }
+ assert indicators[0]['fields'] == expected_indicators_fields
+
+
+def test_get_indicators(mocker):
+ """Tests get_indicators command function.
+
+ Given:
+        - A mocked list_asset_internet_exposure_request API response,
+        loaded from the local test_data/raw_response.py file.
+ When:
+ - Getting output from get_indicators command function
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from FeedXpanse import get_indicators
+ from test_data.raw_response import EXTERNAL_EXPOSURES_RESPONSE
+ mocker.patch.object(client, 'list_asset_internet_exposure_request', return_value=EXTERNAL_EXPOSURES_RESPONSE)
+    args = {"limit": "1", "domain": "yes", "certificate": "no", "ip": "no"}
+ response = get_indicators(client, args)
+ assert response.outputs[0]['Type'] == 'Domain'
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/README.md b/Packs/CortexXpanse/Integrations/FeedXpanse/README.md
new file mode 100644
index 000000000000..0a193d2459e5
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/README.md
@@ -0,0 +1,79 @@
+Use this feed to retrieve the discovered IPs/Domains/Certificates from the Cortex Xpanse asset database.
+This integration was integrated and tested with version 2.5 of Cortex Xpanse.
+
+## Configure Xpanse Feed on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Xpanse Feed.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+    | Server URL | The web UI URL with \`api-\` prepended \(e.g., https://api-xsiam.paloaltonetworks.com\). For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. | True |
+ | API Key ID | For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. Only standard API key type is supported. | True |
+ | API Key | | True |
+ | Fetch indicators | | False |
+ | Indicator Reputation | Indicators from this integration instance will be marked with this reputation. | False |
+ | Source Reliability | Reliability of the source providing the intelligence data. | True |
+ | Traffic Light Protocol Color | The Traffic Light Protocol \(TLP\) designation to apply to indicators fetched from the feed. | False |
+ | Bypass exclusion list | When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system. | False |
+ | Use system proxy settings | | False |
+ | Trust any certificate (not secure) | | False |
+ | Tags | Supports CSV values. | False |
+ | Feed Fetch Interval | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### xpanse-get-indicators
+
+***
+Retrieves a limited number of indicators.
+
+#### Base Command
+
+`xpanse-get-indicators`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| limit | The maximum number of indicators to return. | Required |
+| ip | Retrieve discovered IPs. Default is yes. | Optional |
+| domain | Retrieve discovered domains. Default is yes. | Optional |
+| certificate | Retrieve discovered certificates. Default is yes. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| ASM.Indicators.Name | String | The name of the indicator. |
+| ASM.Indicators.Description | String | The description of the indicator. |
+| ASM.Indicators.Type | String | The type of the indicator. |
+
+#### Command example
+```!xpanse-get-indicators limit=1 ip=yes certificate=no domain=no```
+#### Context Example
+```json
+{
+ "ASM": {
+ "Indicators": {
+ "Description": "1.1.1.1 indicator of asset type IP from Cortex Xpanse",
+ "Name": "1.1.1.1",
+ "Type": "IP"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Xpanse indicators
+>|Name|Type|Description|
+>|---|---|---|
+>| 1.1.1.1 | IP | 1.1.1.1 indicator of asset type IP from Cortex Xpanse |
+
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples b/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples
new file mode 100644
index 000000000000..515cdf78bda3
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples
@@ -0,0 +1 @@
+!xpanse-get-indicators limit=1 ip=yes certificate=no domain=no
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py b/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py
new file mode 100644
index 000000000000..9273fc414136
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py
@@ -0,0 +1,28 @@
+# Aliases so JSON literals (null/false/true) pasted below are valid Python names.
+null = None
+false = False
+true = True
+
+EXTERNAL_EXPOSURES_RESPONSE = [
+ {
+ "asm_ids": [
+ "1111-1111-1111-1111"
+ ],
+ "name": "example.com",
+ "asset_type": "DOMAIN",
+ },
+ {
+ "asm_ids": [
+ "2222-2222-2222-2222"
+ ],
+ "name": "192.168.1.1",
+ "asset_type": "UNASSOCIATED_RESPONSIVE_IP",
+ },
+ {
+ "asm_ids": [
+ "3333-3333-3333-3333"
+ ],
+ "name": "192.168.1.2",
+ "asset_type": "UNASSOCIATED_RESPONSIVE_IP",
+ },
+]
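The three aliases at the top of this file let raw API responses be pasted in verbatim; a quick illustration of why they are needed (the `optional_field` key below is made up for the example):

```python
null = None    # as defined at the top of raw_response.py
false = False
true = True

# Without the aliases, pasting JSON like this into a .py file raises NameError,
# because null/true/false are JSON literals, not Python names.
PASTED_RESPONSE = {
    "name": "example.com",
    "optional_field": null,   # hypothetical field, for illustration only
    "is_active": true,
}
```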
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
index 12a67501c63d..04bec91f1c71 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 214bf91b-ba2c-4bc0-8f79-b8fa030361a1
+ taskid: 51e81457-177e-4950-8197-6bfcaabd0be7
type: start
task:
- id: 214bf91b-ba2c-4bc0-8f79-b8fa030361a1
+ id: 51e81457-177e-4950-8197-6bfcaabd0be7
version: -1
name: ""
iscommand: false
@@ -36,17 +36,17 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: ae2ce6f3-734a-47ff-84f6-61606557ffea
+ taskid: 2dbfd762-668c-4085-89db-75eeae61e6fe
type: playbook
task:
- id: ae2ce6f3-734a-47ff-84f6-61606557ffea
+ id: 2dbfd762-668c-4085-89db-75eeae61e6fe
version: -1
name: Xpanse - NMap - Detect Service
description: Looks at what ASM sub-type the alert is and uses NMap to do a validation scan.
- playbookName: Xpanse - NMap - Detect Service
type: playbook
iscommand: false
brand: ""
+ playbookId: Xpanse - NMap - Detect Service
nexttasks:
'#none#':
- "6"
@@ -72,7 +72,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 250
+ "y": 180
}
}
note: false
@@ -84,10 +84,10 @@ tasks:
isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: b216af93-2946-42a9-87b8-be99ddc48659
+ taskid: 30d2124a-8ddf-4901-8bd7-089476652a10
type: title
task:
- id: b216af93-2946-42a9-87b8-be99ddc48659
+ id: 30d2124a-8ddf-4901-8bd7-089476652a10
version: -1
name: Complete
type: title
@@ -100,7 +100,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 980
+ "y": 1370
}
}
note: false
@@ -112,17 +112,17 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 0a1ed7f5-fc19-4362-87c5-686806c40cbd
+ taskid: c47edf3e-f390-4c0c-8e46-666c4429f94d
type: playbook
task:
- id: 0a1ed7f5-fc19-4362-87c5-686806c40cbd
+ id: c47edf3e-f390-4c0c-8e46-666c4429f94d
version: -1
name: Xpanse - Alert Self-Enrichment
description: Enrichment on the alert itself using Cortex Xpanse APIs.
- playbookName: Xpanse - Alert Self-Enrichment
type: playbook
iscommand: false
brand: ""
+ playbookId: Xpanse - Alert Self-Enrichment
nexttasks:
'#none#':
- "4"
@@ -137,7 +137,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 580
+ "y": 470
}
}
note: false
@@ -149,27 +149,27 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: eb478d2a-e73f-4840-8682-b02f063b587a
+ taskid: e55bd85b-30af-42a7-8556-2d5bf31ddfa5
type: playbook
task:
- id: eb478d2a-e73f-4840-8682-b02f063b587a
+ id: e55bd85b-30af-42a7-8556-2d5bf31ddfa5
version: -1
name: Xpanse - Alert Enrichment
- playbookName: Xpanse - Alert Enrichment
type: playbook
iscommand: false
brand: ""
+ playbookId: Xpanse - Alert Enrichment
description: ''
nexttasks:
'#none#':
- - "2"
+ - "10"
separatecontext: true
continueonerrortype: ""
view: |-
{
"position": {
"x": 450,
- "y": 780
+ "y": 630
}
}
note: false
@@ -181,10 +181,10 @@ tasks:
isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: 6f78d9b9-f90e-4c99-833d-742e6697d336
+ taskid: e7947bb6-bd91-43f4-8b58-9ea6e402aa4d
type: title
task:
- id: 6f78d9b9-f90e-4c99-833d-742e6697d336
+ id: e7947bb6-bd91-43f4-8b58-9ea6e402aa4d
version: -1
name: Is the service still exposed?
type: title
@@ -200,7 +200,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 80
+ "y": 50
}
}
note: false
@@ -212,10 +212,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 9d7740e1-a0b5-4fc5-8c01-e5e0b876bee4
+ taskid: 8db016d6-0000-4d73-8b2e-d6d5d1ab0e2f
type: title
task:
- id: 9d7740e1-a0b5-4fc5-8c01-e5e0b876bee4
+ id: 8db016d6-0000-4d73-8b2e-d6d5d1ab0e2f
version: -1
name: Asset and Alert Enrichment
type: title
@@ -231,7 +231,7 @@ tasks:
{
"position": {
"x": 450,
- "y": 420
+ "y": 340
}
}
note: false
@@ -241,20 +241,216 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "7":
+ continueonerrortype: ""
+ form:
+      description: Add one or more tags to the asset(s) associated with the current Cortex XSOAR incident.
+ expired: false
+ questions:
+ - defaultrows: []
+ fieldassociated: ""
+ gridcolumns: []
+ id: "0"
+ label: ""
+ labelarg:
+ simple: What tag(s) do you want to add to asset(s)?
+ options: []
+ optionsarg: []
+ placeholder: ""
+ readonly: false
+ required: true
+        tooltip: Specify a comma-separated list of tags you want to add to the asset(s) associated with the current Cortex XSOAR incident.
+ type: shortText
+ sender: Your SOC team
+ title: Determine tags to add to asset
+ totalanswers: 0
+ id: "7"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ message:
+ bcc:
+ body:
+ cc:
+ format: ""
+ methods: []
+ subject:
+ timings:
+ completeafterreplies: 1
+ completeaftersla: false
+ completeafterv2: true
+ retriescount: 0
+ retriesinterval: 0
+ to:
+ nexttasks:
+ '#none#':
+ - "9"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 54995d00-c6c0-4549-8ded-319d5bc01e0e
+ iscommand: false
+ name: Determine tags to add to asset(s)
+ type: collection
+ version: -1
+      description: Add one or more tags to the asset(s) associated with the current Cortex XSOAR incident.
+ taskid: 54995d00-c6c0-4549-8ded-319d5bc01e0e
+ timertriggers: []
+ type: collection
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1020
+ }
+ }
+ "9":
+ continueonerrortype: ""
+ id: "9"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "2"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ asm_id_list:
+ complex:
+ accessor: xpanseassetids
+ root: incident
+ transformers:
+ - args:
+ chars:
+ value:
+ simple: '[]'
+ operator: StripChars
+ - args:
+ limit: {}
+ replaceWith: {}
+ toReplace:
+ value:
+ simple: '"'
+ operator: replace
+ tags:
+ complex:
+ accessor: Answers
+ root: Determine tags to add to asset
+ transformers:
+ - operator: LastArrayElement
+ - args:
+ field:
+ value:
+ simple: "0"
+ operator: getField
+ - args:
+ delimiter:
+ value:
+ simple: ','
+ operator: split
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Cortex Xpanse
+ description: Assigns tags to a list of assets.
+ id: 3279b4b2-f6ca-4f10-8bcf-c199409bcf5f
+ iscommand: true
+ name: Assign Tag to Asset(s)
+ script: Cortex Xpanse|||asm-tag-asset-assign
+ type: regular
+ version: -1
+ taskid: 3279b4b2-f6ca-4f10-8bcf-c199409bcf5f
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1180
+ }
+ }
+ "10":
+ conditions:
+ - condition:
+ - - ignorecase: true
+ left:
+ iscontext: true
+ value:
+ simple: inputs.WriteToXpanse
+ operator: isEqualString
+ right:
+ value:
+ simple: "True"
+ label: "yes"
+ continueonerrortype: ""
+ id: "10"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "2"
+ "yes":
+ - "7"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determine whether the playbook should write data back to Cortex Xpanse.
+ id: 62fb488b-6b5a-469d-800e-80a6e253d8b1
+ iscommand: false
+ name: Are we writing to Cortex Xpanse?
+ type: condition
+ version: -1
+ taskid: 62fb488b-6b5a-469d-800e-80a6e253d8b1
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 790
+ }
+ }
view: |-
{
- "linkLabelsPosition": {},
+ "linkLabelsPosition": {
+ "10_7_yes": 0.49
+ },
"paper": {
"dimensions": {
- "height": 1125,
+ "height": 1515,
"width": 380,
"x": 450,
"y": -80
}
}
}
-inputs: []
+inputs:
+- description: Whether to write data back to Cortex Xpanse.
+ key: WriteToXpanse
+ playbookInputQuery:
+ required: false
+ value:
+ simple: "False"
outputs: []
tests:
- No tests (auto formatted)
fromversion: 6.8.0
+contentitemexportablefields:
+ contentitemfields: {}
+inputSections:
+- description: Generic group for inputs.
+ inputs:
+ - WriteToXpanse
+ name: General (Inputs group)
+outputSections:
+- description: Generic group for outputs.
+ name: General (Outputs group)
+ outputs: []
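To make the transformer chains in task "9" above easier to follow, here is a rough Python equivalent of what the server-side transformers do (the input values are illustrative; XSOAR evaluates these transformers itself):

```python
# asm_id_list: incident.xpanseassetids -> StripChars('[]') -> replace('"', '')
raw_asset_ids = '["1111-1111-1111-1111","2222-2222-2222-2222"]'   # example field value
asm_id_list = raw_asset_ids.strip('[]').replace('"', '')
# -> '1111-1111-1111-1111,2222-2222-2222-2222'

# tags: "Determine tags to add to asset".Answers -> LastArrayElement -> getField("0") -> split(',')
answers = [{'0': 'reviewed,externally-owned'}]                     # example answer shape
tags = answers[-1]['0'].split(',')
# -> ['reviewed', 'externally-owned']
```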
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler_README.md b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler_README.md
index 52fc3f85fd7f..b606c342f999 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler_README.md
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler_README.md
@@ -1,4 +1,4 @@
-This playbook is the default handler for Cortex Xpanse alerts that focuses primarily on enrichment.
+Default alert handling for Cortex Xpanse alerts.
## Dependencies
@@ -10,10 +10,25 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
* Xpanse - Alert Self-Enrichment
* Xpanse - Alert Enrichment
+### Integrations
+
+Cortex Xpanse
+
+### Scripts
+
+This playbook does not use any scripts.
+
+### Commands
+
+asm-tag-asset-assign
+
## Playbook Inputs
---
-There are no inputs for this playbook
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| WriteToXpanse | Whether to write data back to Cortex Xpanse. | False | Optional |
## Playbook Outputs
@@ -24,4 +39,4 @@ There are no outputs for this playbook.
---
-![Xpanse - Alert Handler](../doc_files/Xpanse_-_Alert_Handler.png)
\ No newline at end of file
+![Xpanse - Alert Handler](../doc_files/Xpanse_-_Alert_Handler.png)
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
index 0d98cb9f69f2..46f837f9d4cb 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
@@ -575,7 +575,7 @@ tasks:
gridfield:
simple: xpansecertificateasset
keys:
- simple: name,type,date_added,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
+ simple: name,type,date_added,tags,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
val1:
complex:
accessor: name
@@ -632,7 +632,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -647,9 +647,28 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ val6:
complex:
accessor: details.certificateDetails.subject
filters:
@@ -669,7 +688,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val6:
+ val7:
complex:
accessor: details.certificateDetails.subjectAlternativeNames
filters:
@@ -689,7 +708,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val7:
+ val8:
complex:
accessor: certificate_issuer
filters:
@@ -709,7 +728,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val8:
+ val9:
complex:
accessor: details.certificateDetails.issuerEmail
filters:
@@ -723,13 +742,13 @@ tasks:
simple: Certificate
root: ASM.AssetInternetExposure
transformers:
- - args:
+ - operator: SetIfEmpty
+ args:
applyIfEmpty: {}
defaultValue:
value:
simple: n/a
- operator: SetIfEmpty
- val9:
+ val10:
complex:
accessor: details.certificateDetails.validNotAfter
filters:
@@ -744,7 +763,7 @@ tasks:
root: ASM.AssetInternetExposure
transformers:
- operator: TimeStampToDate
- val10:
+ val11:
complex:
accessor: certificate_algorithm
filters:
@@ -848,7 +867,7 @@ tasks:
gridfield:
simple: xpanseresponsiveipasset
keys:
- simple: name,type,date_added,explainers,ip_version,range,asn_number,asn_country
+ simple: name,type,date_added,tags,explainers,ip_version,range,asn_number,asn_country
val1:
complex:
accessor: name
@@ -905,7 +924,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -920,20 +939,39 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
+ complex:
+ accessor: explainers
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ val6:
complex:
accessor: IP_VERSION
root: range_info
transformers:
- - args:
+ - operator: SetIfEmpty
+ args:
applyIfEmpty: {}
defaultValue:
value:
simple: n/a
- operator: SetIfEmpty
- val6:
+ val7:
complex:
accessor: handle
root: range_info.DETAILS.networkRecords
@@ -942,9 +980,9 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
- val7:
+ val8:
complex:
accessor: ASN_HANDLES
root: range_info
@@ -952,9 +990,9 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
- val8:
+ val9:
complex:
accessor: ASN_COUNTRIES
root: range_info
@@ -1492,7 +1530,7 @@ tasks:
gridfield:
simple: xpansecertificateasset
keys:
- simple: name,type,date_added,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
+ simple: name,type,date_added,tags,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
val1:
complex:
accessor: name
@@ -1549,7 +1587,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -1564,9 +1602,28 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ val6:
complex:
accessor: details.certificateDetails.subject
filters:
@@ -1586,7 +1643,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val6:
+ val7:
complex:
accessor: details.certificateDetails.subjectAlternativeNames
filters:
@@ -1606,7 +1663,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val7:
+ val8:
complex:
accessor: certificate_issuer
filters:
@@ -1626,7 +1683,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val8:
+ val9:
complex:
accessor: details.certificateDetails.issuerEmail
filters:
@@ -1646,9 +1703,9 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val9:
- simple: n/a
val10:
+ simple: n/a
+ val11:
complex:
accessor: certificate_algorithm
filters:
@@ -1706,7 +1763,7 @@ tasks:
gridfield:
simple: xpansedomainasset
keys:
- simple: name,type,date_added,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
+ simple: name,type,date_added,tags,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
val1:
complex:
accessor: name
@@ -1763,7 +1820,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -1778,11 +1835,11 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
complex:
- accessor: details.domainDetails.registrar.name
+ accessor: explainers
filters:
- - left:
iscontext: true
@@ -1795,12 +1852,31 @@ tasks:
root: ASM.AssetInternetExposure
transformers:
- args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ val6:
+ complex:
+ accessor: details.domainDetails.registrar.name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: SetIfEmpty
+ args:
applyIfEmpty: {}
defaultValue:
value:
simple: n/a
- operator: SetIfEmpty
- val6:
+ val7:
complex:
accessor: details.domainDetails.registryExpiryDate
filters:
@@ -1815,7 +1891,7 @@ tasks:
root: ASM.AssetInternetExposure
transformers:
- operator: TimeStampToDate
- val7:
+ val8:
complex:
accessor: details.domainDetails.domainStatuses
filters:
@@ -1832,9 +1908,9 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
- val8:
+ val9:
complex:
accessor: details.domainDetails.registrant.name
filters:
@@ -1854,7 +1930,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val9:
+ val10:
complex:
accessor: details.domainDetails.registrant.organization
filters:
@@ -1874,7 +1950,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val10:
+ val11:
complex:
accessor: details.domainDetails.registrant.emailAddress
filters:
@@ -1932,7 +2008,7 @@ tasks:
gridfield:
simple: xpansecloudasset
keys:
- simple: name,type,date_added,explainers,externally_detected_providers,ips,domain
+ simple: name,type,date_added,tags,explainers,externally_detected_providers,ips,domain
val1:
complex:
accessor: name
@@ -1989,7 +2065,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -2004,11 +2080,11 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
complex:
- accessor: externally_detected_providers
+ accessor: explainers
filters:
- - left:
iscontext: true
@@ -2023,11 +2099,11 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val6:
complex:
- accessor: ips
+ accessor: externally_detected_providers
filters:
- - left:
iscontext: true
@@ -2042,9 +2118,28 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val7:
+ complex:
+ accessor: ips
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ val8:
complex:
accessor: domain
filters:
@@ -2156,7 +2251,7 @@ tasks:
gridfield:
simple: xpansedomainasset
keys:
- simple: name,type,date_added,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
+ simple: name,type,date_added,tags,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
val1:
complex:
accessor: name
@@ -2213,7 +2308,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -2228,9 +2323,28 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
+ val6:
complex:
accessor: details.domainDetails.registrar.name
filters:
@@ -2250,9 +2364,9 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val6:
- simple: n/a
val7:
+ simple: n/a
+ val8:
complex:
accessor: details.domainDetails.domainStatuses
filters:
@@ -2269,9 +2383,9 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
- val8:
+ val9:
complex:
accessor: details.domainDetails.registrant.name
filters:
@@ -2291,7 +2405,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val9:
+ val10:
complex:
accessor: details.domainDetails.registrant.organization
filters:
@@ -2311,7 +2425,7 @@ tasks:
value:
simple: n/a
operator: SetIfEmpty
- val10:
+ val11:
complex:
accessor: details.domainDetails.registrant.emailAddress
filters:
@@ -2520,7 +2634,7 @@ tasks:
gridfield:
simple: xpanseresponsiveipasset
keys:
- simple: name,type,date_added,explainers,ip_version,range,asn_number,asn_country
+ simple: name,type,date_added,tags,explainers,ip_version,range,asn_number,asn_country
val1:
complex:
accessor: name
@@ -2577,7 +2691,7 @@ tasks:
- operator: TimeStampToDate
val4:
complex:
- accessor: explainers
+ accessor: tags
filters:
- - left:
iscontext: true
@@ -2592,16 +2706,35 @@ tasks:
- args:
separator:
value:
- simple: ','
+ simple: ', '
operator: join
val5:
- simple: n/a
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
val6:
simple: n/a
val7:
simple: n/a
val8:
simple: n/a
+ val9:
+ simple: n/a
separatecontext: false
skipunavailable: false
task:
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_2_0.md b/Packs/CortexXpanse/ReleaseNotes/1_2_0.md
new file mode 100644
index 000000000000..e25fe00ecbd2
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_2_0.md
@@ -0,0 +1,5 @@
+#### Integrations
+
+##### Xpanse Feed
+
+Added the *Xpanse Feed* integration to retrieve the discovered IPs/Domains/Certificates from the Cortex Xpanse asset database.
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_2_1.md b/Packs/CortexXpanse/ReleaseNotes/1_2_1.md
new file mode 100644
index 000000000000..7e5db33e6cd3
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_2_1.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+
+##### Cortex Xpanse
+
+- Updated the following Cortex Xpanse integration commands:
+  - ***domain*** - Supports pulling existing Cortex Xpanse-related domains in Cortex XSOAR that were recently updated.
+  - ***ip*** - Supports pulling existing Cortex Xpanse-related IPs in Cortex XSOAR that were recently updated.
+  - ***asm-list-incidents*** - Fetches ASM incidents that match the provided filters. Incidents are an aggregation of related alerts.
+
+- Added the ***asm-add-note-to-asset*** Cortex Xpanse integration command.
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_2_2.md b/Packs/CortexXpanse/ReleaseNotes/1_2_2.md
new file mode 100644
index 000000000000..9748ac0d68a8
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_2_2.md
@@ -0,0 +1,34 @@
+
+#### Incident Fields
+
+##### Xpanse Certificate Asset
+
+Added the tags column.
+
+##### Xpanse Cloud Asset
+
+Added the tags column.
+
+##### Xpanse Responsive IP Asset
+
+Added the tags column.
+
+##### Xpanse Domain Asset
+
+Added the tags column.
+
+#### Incident Types
+
+##### Xpanse Alert
+
+Updated the incident type to run its default playbook automatically.
+
+#### Playbooks
+
+##### Xpanse - Alert Self-Enrichment
+
+Updated the playbook to populate the tags column of the asset grid fields.
+
+##### Xpanse - Alert Handler
+
+Updated the playbook to include the *WriteToXpanse* input, which determines whether the analyst is prompted to manually add tags to the assets associated with the incident.
diff --git a/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Handler.png b/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Handler.png
index ddd5485ac7e0..a40afaa33837 100644
Binary files a/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Handler.png and b/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Handler.png differ
diff --git a/Packs/CortexXpanse/pack_metadata.json b/Packs/CortexXpanse/pack_metadata.json
index ed1e204bac6b..a8a34ad56f86 100644
--- a/Packs/CortexXpanse/pack_metadata.json
+++ b/Packs/CortexXpanse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Xpanse",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.1.0",
+ "currentVersion": "1.2.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CovalenceForSecurityProviders/.pack-ignore b/Packs/CovalenceForSecurityProviders/.pack-ignore
index b8f18c02464d..e69de29bb2d1 100644
--- a/Packs/CovalenceForSecurityProviders/.pack-ignore
+++ b/Packs/CovalenceForSecurityProviders/.pack-ignore
@@ -1,2 +0,0 @@
-[file:CovalenceForSecurityProviders.yml]
-ignore=IN124
\ No newline at end of file
diff --git a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.py b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.py
index e06ec4980733..766dcfa537a7 100644
--- a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.py
+++ b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.py
@@ -7,7 +7,6 @@
import dateparser
from datetime import datetime, timedelta
from bs4 import BeautifulSoup
-from typing import List
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S'
HOST = demisto.params().get('host')
@@ -79,7 +78,7 @@ def login(host=HOST, cov_id=None, username=USERNAME, password=PASSWORD, verify_s
p = {'username': username, 'password': password}
r = s.post(host + '/rest/login', data=p, verify=verify_ssl)
- if 200 != r.status_code:
+ if r.status_code != 200:
raise Exception("Failed to login to %s - %d" % (host, r.status_code))
if not s.cookies:
@@ -626,14 +625,13 @@ def list_org():
url = f'https://{HOST}/index'
r = requests.get(url, verify=VERIFY_SSL)
- org_names: List[dict] = []
+ org_names: list[dict] = []
soup = BeautifulSoup(r.text, 'html.parser')
for link in soup.find_all('a'):
org_name = link.contents[0]
- if org_name:
- if org_name not in [i['org_name'] for i in org_names]:
- org_names.append({'org_name': org_name})
+ if org_name and org_name not in [i['org_name'] for i in org_names]:
+ org_names.append({'org_name': org_name})
return org_names
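The refactor above keeps the original list-membership check, which is quadratic in the number of links; a set-based variant would be linear. This is only an optional alternative sketch, reusing the module-level `HOST`, `VERIFY_SSL`, `requests`, and `BeautifulSoup` already imported in this file:

```python
def list_org():
    url = f'https://{HOST}/index'
    r = requests.get(url, verify=VERIFY_SSL)

    soup = BeautifulSoup(r.text, 'html.parser')
    seen = set()
    org_names: list[dict] = []
    for link in soup.find_all('a'):
        org_name = link.contents[0]
        if org_name and org_name not in seen:   # O(1) membership check
            seen.add(org_name)
            org_names.append({'org_name': org_name})
    return org_names
```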
diff --git a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.yml b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.yml
index b5ad75ac9add..c1b35d569000 100644
--- a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.yml
+++ b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders.yml
@@ -72,913 +72,913 @@ name: Covalence For Security Providers
script:
commands:
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Maximum number of alerts to be returned, if none provided will be set to 1000
+  - description: Maximum number of alerts to be returned. If none is provided, it will be set to 1000.
name: max_count
- - description: Initial index where to start listing alerts
+ - description: Initial index where to start listing alerts.
name: initial_index
- - description: Alert type to be listed
+ - description: Alert type to be listed.
name: alert_type
- - description: Minimal alert time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Minimal alert time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: alert_time_min
- - description: Maximal alert time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Maximal alert time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: alert_time_max
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+  - description: If details=true, the complete response from the Covalence API will be returned.
name: details
- description: Lists Covalence alerts
+ description: Lists Covalence alerts.
name: cov-secpr-list-alerts
outputs:
- contextPath: Covalence.Alert.acknowledgedStatus
- description: Acknowledged Status
+ description: Acknowledged Status.
type: String
- contextPath: Covalence.Alert.alertCount
- description: Alert Count
+ description: Alert Count.
type: Number
- contextPath: Covalence.Alert.alertHash
- description: Alert Hash
+ description: Alert Hash.
type: String
- contextPath: Covalence.Alert.analystDescription
- description: Analyst Description
+ description: Analyst Description.
type: String
- contextPath: Covalence.Alert.analystTitle
- description: Analyst Title
+ description: Analyst Title.
type: String
- contextPath: Covalence.Alert.assignee
- description: Assignee
+ description: Assignee.
type: String
- contextPath: Covalence.Alert.blacklistDetails.blacklistedEntity
- description: Blacklisted Entity
+ description: Blacklisted Entity.
type: String
- contextPath: Covalence.Alert.blacklistDetails.bytesIn
- description: Bytes In
+ description: Bytes In.
type: Number
- contextPath: Covalence.Alert.blacklistDetails.bytesOut
- description: Bytes Out
+ description: Bytes Out.
type: Number
- contextPath: Covalence.Alert.blacklistDetails.listLabels
- description: List Labels
+ description: List Labels.
type: String
- contextPath: Covalence.Alert.blacklistDetails.listUuids
- description: List Uuids
+ description: List Uuids.
type: String
- contextPath: Covalence.Alert.createdTime
- description: Created Time
+ description: Created Time.
type: Number
- contextPath: Covalence.Alert.destCiscoUmbrellaRanking
- description: Dest Cisco Umbrella Ranking
+ description: Dest Cisco Umbrella Ranking.
type: Number
- contextPath: Covalence.Alert.destCiscoUmbrellaTopLevelDomainRanking
- description: Dest Cisco Umbrella Top Level Domain Ranking
+ description: Dest Cisco Umbrella Top Level Domain Ranking.
type: Number
- contextPath: Covalence.Alert.destCityName
- description: Dest City Name
+ description: Dest City Name.
type: String
- contextPath: Covalence.Alert.destCountryName
- description: Dest Country Name
+ description: Dest Country Name.
- contextPath: Covalence.Alert.destDomainName
- description: Dest Domain Name
+ description: Dest Domain Name.
type: String
- contextPath: Covalence.Alert.destGeoX
- description: Dest Geo X
+ description: Dest Geo X.
type: Number
- contextPath: Covalence.Alert.destGeoY
- description: Dest Geo Y
+ description: Dest Geo Y.
type: Number
- contextPath: Covalence.Alert.destIp
- description: Dest Ip
+ description: Dest Ip.
type: String
- contextPath: Covalence.Alert.destIpAttributes.k
- description: Key
+ description: Key.
type: String
- contextPath: Covalence.Alert.destIpAttributes.t
- description: Type
+ description: Type.
type: Number
- contextPath: Covalence.Alert.destIpAttributes.v
- description: Value
+ description: Value.
type: String
- contextPath: Covalence.Alert.destMajesticMillionRanking
- description: Dest Majestic Million Ranking
+ description: Dest Majestic Million Ranking.
type: Number
- contextPath: Covalence.Alert.destMajesticMillionTopLevelDomainRanking
- description: Dest Majestic Million Top Level Domain Ranking
+ description: Dest Majestic Million Top Level Domain Ranking.
type: Number
- contextPath: Covalence.Alert.destPort
- description: Dest Port
+ description: Dest Port.
type: String
- contextPath: Covalence.Alert.endpointAgentUuid
- description: Endpoint Agent Uuid
+ description: Endpoint Agent Uuid.
type: String
- contextPath: Covalence.Alert.facility
- description: Facility
+ description: Facility.
type: String
- contextPath: Covalence.Alert.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.Alert.isFavorite
- description: Is Favorite
+ description: Is Favorite.
type: Boolean
- contextPath: Covalence.Alert.lastAlertedTime
- description: Last Alerted Time
+ description: Last Alerted Time.
type: Number
- contextPath: Covalence.Alert.notes
- description: Notes
+ description: Notes.
type: String
- contextPath: Covalence.Alert.organizationId
- description: Organization Id
+ description: Organization Id.
type: String
- contextPath: Covalence.Alert.pcapResourceUuid
- description: Pcap Resource Uuid
+ description: Pcap Resource Uuid.
type: String
- contextPath: Covalence.Alert.priority
- description: Priority
+ description: Priority.
- contextPath: Covalence.Alert.protocol
- description: Protocol
+ description: Protocol.
type: String
- contextPath: Covalence.Alert.sensorId
- description: Sensor Id
+ description: Sensor Id.
type: String
- contextPath: Covalence.Alert.severity
- description: Severity
+ description: Severity.
type: String
- contextPath: Covalence.Alert.sigEvalDetails.id
- description: Id
+ description: Id.
type: Number
- contextPath: Covalence.Alert.sigEvalDetails.message
- description: Message
+ description: Message.
type: String
- contextPath: Covalence.Alert.sourceCiscoUmbrellaRanking
- description: Source Cisco Umbrella Ranking
+ description: Source Cisco Umbrella Ranking.
type: Number
- contextPath: Covalence.Alert.sourceCiscoUmbrellaTopLevelDomainRanking
- description: Source Cisco Umbrella Top Level Domain Ranking
+ description: Source Cisco Umbrella Top Level Domain Ranking.
type: Number
- contextPath: Covalence.Alert.sourceCityName
- description: Source City Name
+ description: Source City Name.
type: String
- contextPath: Covalence.Alert.sourceCountryName
- description: Source Country Name
+ description: Source Country Name.
type: String
- contextPath: Covalence.Alert.sourceDomainName
- description: Source Domain Name
+ description: Source Domain Name.
type: String
- contextPath: Covalence.Alert.sourceGeoX
- description: Source Geo X
+ description: Source Geo X.
type: Number
- contextPath: Covalence.Alert.sourceGeoY
- description: Source Geo Y
+ description: Source Geo Y.
type: Number
- contextPath: Covalence.Alert.sourceIp
- description: Source Ip
+ description: Source Ip.
type: String
- contextPath: Covalence.Alert.sourceIpAttributes.k
- description: Key
+ description: Key.
type: String
- contextPath: Covalence.Alert.sourceIpAttributes.t
- description: Type
+ description: Type.
type: Number
- contextPath: Covalence.Alert.sourceIpAttributes.v
- description: Value
+ description: Value.
type: String
- contextPath: Covalence.Alert.sourceMajesticMillionRanking
- description: Source Majestic Million Ranking
+ description: Source Majestic Million Ranking.
type: Number
- contextPath: Covalence.Alert.sourceMajesticMillionTopLevelDomainRanking
- description: Source Majestic Million Top Level Domain Ranking
+ description: Source Majestic Million Top Level Domain Ranking.
type: Number
- contextPath: Covalence.Alert.sourcePort
- description: Source Port
+ description: Source Port.
type: String
- contextPath: Covalence.Alert.subType
- description: Sub Type
+ description: Sub Type.
type: String
- contextPath: Covalence.Alert.title
- description: Title
+ description: Title.
type: String
- contextPath: Covalence.Alert.type
- description: Type
+ description: Type.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: if details=true, will return the complete response from Covalence API
+  - description: If details=true, the complete response from the Covalence API will be returned.
name: details
- description: Lists Covalence sensors
+ description: Lists Covalence sensors.
name: cov-secpr-list-sensors
outputs:
- contextPath: Covalence.Sensors.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.Sensors.name
- description: Name
+ description: Name.
type: String
- contextPath: Covalence.Sensors.isAuthorized
- description: Is Authorized
+ description: Is Authorized.
type: Boolean
- contextPath: Covalence.Sensors.isNetflowGenerator
- description: Is Netflow Generator
+ description: Is Netflow Generator.
type: Boolean
- contextPath: Covalence.Sensors.bytesIn
- description: Bytes In
+ description: Bytes In.
type: Number
- contextPath: Covalence.Sensors.bytesOut
- description: Bytes Out
+ description: Bytes Out.
type: Number
- contextPath: Covalence.Sensors.lastActive
- description: Last Active
+ description: Last Active.
type: String
- contextPath: Covalence.Sensors.listeningInterfaces
- description: Listening Interfaces
+ description: Listening Interfaces.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Sensor id
+ - description: Sensor id.
name: sensor_id
required: true
- description: Get sensor details when provided with the sensor id
+ description: Get sensor details when provided with the sensor id.
name: cov-secpr-get-sensor
outputs:
- contextPath: Covalence.Sensor.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.Sensor.name
- description: Name
+ description: Name.
type: String
- contextPath: Covalence.Sensor.isAuthorized
- description: Is Authorized
+ description: Is Authorized.
type: Boolean
- contextPath: Covalence.Sensor.listeningInterfaces
- description: Listening Interfaces
+ description: Listening Interfaces.
type: String
- contextPath: Covalence.Sensor.isNetflowGenerator
- description: Is Netflow Generator
+ description: Is Netflow Generator.
type: Boolean
- contextPath: Covalence.Sensor.bytesIn
- description: Bytes In
+ description: Bytes In.
type: Number
- contextPath: Covalence.Sensor.bytesOut
- description: Bytes Out
+ description: Bytes Out.
type: Number
- contextPath: Covalence.Sensor.lastActive
- description: Last Active
+ description: Last Active.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Maximum number of connection summary by ip to be returned, if none provided will be set to 100
+  - description: Maximum number of connection summaries by IP to be returned. If none is provided, it will be set to 100.
name: max_count
- - description: Initial index where to start listing connection summaries
+ - description: Initial index where to start listing connection summaries.
name: initial_index
- - description: source ip filter, if used only connections related to the specified source ip will be returned
+  - description: Source IP filter. If used, only connections related to the specified source IP will be returned.
name: source_ip
- - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: start_time
- - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: end_time
- - description: if "clients_only=true", only connections labeled as client connections will be returned
+  - description: If "clients_only=true", only connections labeled as client connections will be returned.
name: clients_only
- - description: if "internal_only=true", only internal connections will be returned
+  - description: If "internal_only=true", only internal connections will be returned.
name: internal_only
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+  - description: If details=true, the complete response from the Covalence API will be returned.
name: details
- description: List summarized connections details by IP Address
+  description: Lists summarized connection details by IP address.
name: cov-secpr-connections-summary-ip
outputs:
- contextPath: Covalence.Connections.averageDuration
- description: Average Duration
+ description: Average Duration.
type: Number
- contextPath: Covalence.Connections.bytesIn
- description: Bytes In
+ description: Bytes In.
type: Number
- contextPath: Covalence.Connections.bytesOut
- description: Bytes Out
+ description: Bytes Out.
type: Number
- contextPath: Covalence.Connections.clientServerRelationship
- description: Client Server Relationship
+ description: Client Server Relationship.
type: String
- contextPath: Covalence.Connections.continuingConnectionCount
- description: Continuing Connection Count
+ description: Continuing Connection Count.
type: Number
- contextPath: Covalence.Connections.destinationCity
- description: Destination City
+ description: Destination City.
type: String
- contextPath: Covalence.Connections.destinationCountry
- description: Destination Country
+ description: Destination Country.
type: String
- contextPath: Covalence.Connections.destinationId
- description: Destination Id
+ description: Destination Id.
type: String
- contextPath: Covalence.Connections.destinationIpAddress
- description: Destination Ip Address
+ description: Destination Ip Address.
type: String
- contextPath: Covalence.Connections.destinationMacAddress
- description: Destination Mac Address
+ description: Destination Mac Address.
type: String
- contextPath: Covalence.Connections.dstDomainName
- description: Dst Domain Name
+ description: Dst Domain Name.
type: String
- contextPath: Covalence.Connections.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.Connections.packetsIn
- description: Packets In
+ description: Packets In.
type: Number
- contextPath: Covalence.Connections.packetsOut
- description: Packets Out
+ description: Packets Out.
type: Number
- contextPath: Covalence.Connections.serverPortCount
- description: Server Port Count
+ description: Server Port Count.
type: Number
- contextPath: Covalence.Connections.serverPorts
- description: Server Ports
+ description: Server Ports.
type: String
- contextPath: Covalence.Connections.sourceCity
- description: Source City
+ description: Source City.
type: String
- contextPath: Covalence.Connections.sourceCountry
- description: Source Country
+ description: Source Country.
type: String
- contextPath: Covalence.Connections.sourceDomainName
- description: Source Domain Name
+ description: Source Domain Name.
type: String
- contextPath: Covalence.Connections.sourceId
- description: Source Id
+ description: Source Id.
type: String
- contextPath: Covalence.Connections.sourceIpAddress
- description: Source Ip Address
+ description: Source Ip Address.
type: String
- contextPath: Covalence.Connections.sourceMacAddress
- description: Source Mac Address
+ description: Source Mac Address.
type: String
- contextPath: Covalence.Connections.terminatedConnectionCount
- description: Terminated Connection Count
+ description: Terminated Connection Count.
type: Number
- contextPath: Covalence.Connections.totalDuration
- description: Total Duration
+ description: Total Duration.
type: Number
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Maximum number of connection summary by port to be returned, if none provided will be set to 100
+ - description: Maximum number of connection summaries by port to be returned; if not provided, defaults to 100.
name: max_count
- - description: Initial index where to start listing connection summaries
+ - description: Initial index where to start listing connection summaries.
name: initial_index
- - description: source ip filter, only connections related to the specified source ip will be returned
+ - description: Source IP filter; only connections related to the specified source IP will be returned.
name: source_ip
required: true
- - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: start_time
- - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: end_time
- - description: if "clients_only=true", only connections labeled as client connections will be returned
+ - description: if "clients_only=true", only connections labeled as client connections will be returned.
name: clients_only
- - description: if "internal_only=true", only internal connections will be returned
+ - description: if "internal_only=true", only internal connections will be returned.
name: internal_only
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+ - description: if details=true, will return the complete response from Covalence API.
name: details
- description: List summarized connections details by Port
+ description: List summarized connections details by Port.
name: cov-secpr-connections-summary-port
outputs:
- contextPath: Covalence.Connections.averageDuration
- description: Average Duration
+ description: Average Duration.
type: Number
- contextPath: Covalence.Connections.bytesIn
- description: Bytes In
+ description: Bytes In.
type: Number
- contextPath: Covalence.Connections.bytesOut
- description: Bytes Out
+ description: Bytes Out.
type: Number
- contextPath: Covalence.Connections.continuingConnectionCount
- description: Continuing Connection Count
+ description: Continuing Connection Count.
type: Number
- contextPath: Covalence.Connections.destinationCity
- description: Destination City
+ description: Destination City.
type: String
- contextPath: Covalence.Connections.destinationCountry
- description: Destination Country
+ description: Destination Country.
type: String
- contextPath: Covalence.Connections.destinationId
- description: Destination Id
+ description: Destination Id.
type: String
- contextPath: Covalence.Connections.destinationIpAddress
- description: Destination Ip Address
+ description: Destination Ip Address.
type: String
- contextPath: Covalence.Connections.destinationMacAddress
- description: Destination Mac Address
+ description: Destination Mac Address.
type: String
- contextPath: Covalence.Connections.dstDomainName
- description: Dst Domain Name
+ description: Dst Domain Name.
type: String
- contextPath: Covalence.Connections.endTime
- description: End Time
+ description: End Time.
type: Date
- contextPath: Covalence.Connections.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.Connections.packetsIn
- description: Packets In
+ description: Packets In.
type: Number
- contextPath: Covalence.Connections.packetsOut
- description: Packets Out
+ description: Packets Out.
type: Number
- contextPath: Covalence.Connections.protocol
- description: Protocol
+ description: Protocol.
type: String
- contextPath: Covalence.Connections.serverPort
- description: Server Port
+ description: Server Port.
type: Number
- contextPath: Covalence.Connections.sourceCity
- description: Source City
+ description: Source City.
type: String
- contextPath: Covalence.Connections.sourceCountry
- description: Source Country
+ description: Source Country.
type: String
- contextPath: Covalence.Connections.sourceDomainName
- description: Source Domain Name
+ description: Source Domain Name.
type: String
- contextPath: Covalence.Connections.sourceId
- description: Source Id
+ description: Source Id.
type: String
- contextPath: Covalence.Connections.sourceIpAddress
- description: Source Ip Address
+ description: Source Ip Address.
type: String
- contextPath: Covalence.Connections.sourceMacAddress
- description: Source Mac Address
+ description: Source Mac Address.
type: String
- contextPath: Covalence.Connections.startTime
- description: Start Time
+ description: Start Time.
type: Date
- contextPath: Covalence.Connections.terminatedConnectionCount
- description: Terminated Connection Count
+ description: Terminated Connection Count.
type: Number
- contextPath: Covalence.Connections.totalDuration
- description: Total Duration
+ description: Total Duration.
type: Number
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Maximum number of DNS resolutions to be returned, if none provided will be set to 100
+ - description: Maximum number of DNS resolutions to be returned; if not provided, defaults to 100.
name: max_count
- - description: Initial index where to start listing DNS resolutions
+ - description: Initial index where to start listing DNS resolutions.
name: initial_index
- - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Minimal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: request_time_after
- - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone
+ - description: Maximal time in %Y-%m-%dT%H:%M:%S format and UTC time zone.
name: request_time_before
- - description: Domain name filter, if used will only return DNS resolutions from the specified domain name
+ - description: Domain name filter, if used will only return DNS resolutions from the specified domain name.
name: domain_name
- - description: IP filter, if used will only return DNS resolutions to the specified IP
+ - description: IP filter, if used will only return DNS resolutions to the specified IP.
name: resolved_ip
- - description: Source IP filter, if used will only return DNS resolutions originating from the specified IP
+ - description: Source IP filter, if used will only return DNS resolutions originating from the specified IP.
name: request_origin_ip
- - description: Nameserver IP filter, if used will only return DNS resolutions involving the specified nameserver IP
+ - description: Nameserver IP filter, if used will only return DNS resolutions involving the specified nameserver IP.
name: nameserver_ip
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+ - description: if details=true, will return the complete response from Covalence API.
name: details
- description: List summarized connections details by Port
+ description: List DNS resolutions.
name: cov-secpr-list-dns-resolutions
outputs:
- contextPath: Covalence.DNSResolutions.id
- description: Id
+ description: Id.
type: String
- contextPath: Covalence.DNSResolutions.domainName
- description: Domain Name
+ description: Domain Name.
type: String
- contextPath: Covalence.DNSResolutions.resolvedIp
- description: Resolved Ip
+ description: Resolved Ip.
type: String
- contextPath: Covalence.DNSResolutions.requestOriginIp
- description: Request Origin Ip
+ description: Request Origin Ip.
type: String
- contextPath: Covalence.DNSResolutions.nameserverIp
- description: Nameserver Ip
+ description: Nameserver Ip.
type: String
- contextPath: Covalence.DNSResolutions.nodeLabel
- description: Node Label
+ description: Node Label.
type: String
- contextPath: Covalence.DNSResolutions.requestTime
- description: Request Time
+ description: Request Time.
type: Number
- contextPath: Covalence.DNSResolutions.byteCount
- description: Byte Count
+ description: Byte Count.
type: Number
- contextPath: Covalence.DNSResolutions.pktCount
- description: Pkt Count
+ description: Pkt Count.
type: Number
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- description: List internal networks
+ description: List internal networks.
name: cov-secpr-list-internal-networks
outputs:
- contextPath: Covalence.InternalNetworks.cidr
- description: Cidr
+ description: Cidr.
type: String
- contextPath: Covalence.InternalNetworks.notes
- description: Notes
+ description: Notes.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: The network to be set as internal in CIDR notation
+ - description: The network to be set as internal in CIDR notation.
name: cidr
required: true
- - description: Comment notes associated with the network, notes must be inside quotes
+ - description: Comment notes associated with the network; notes must be inside quotes.
name: notes
required: true
- description: Set internal networks
+ description: Set internal networks.
name: cov-secpr-set-internal-networks
outputs:
- contextPath: Covalence.InternalNetworks.cidr
- description: Cidr
+ description: Cidr.
type: String
- contextPath: Covalence.InternalNetworks.notes
- description: Notes
+ description: Notes.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Advanced filter query, if used any other parameters provided to the command will be ignored
+ - description: Advanced filter query; if used, any other parameters provided to the command will be ignored.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+ - description: if details=true, will return the complete response from Covalence API.
name: details
- description: List endpoint agents
+ description: List endpoint agents.
name: cov-secpr-list-endpoint-agents
outputs:
- contextPath: Covalence.EndpointAgents.agentUuid
- description: Agent Uuid
+ description: Agent Uuid.
type: String
- contextPath: Covalence.EndpointAgents.agentVersion
- description: Agent Version
+ description: Agent Version.
type: String
- contextPath: Covalence.EndpointAgents.firstSeenTime
- description: First Seen Time
+ description: First Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSeenTime
- description: Last Seen Time
+ description: Last Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSessionUser
- description: Last Session User
+ description: Last Session User.
type: String
- contextPath: Covalence.EndpointAgents.isMobile
- description: Is Mobile
+ description: Is Mobile.
type: Boolean
- contextPath: Covalence.EndpointAgents.isConnected
- description: Is Connected
+ description: Is Connected.
type: Boolean
- contextPath: Covalence.EndpointAgents.coreVersion
- description: Core Version
+ description: Core Version.
type: String
- contextPath: Covalence.EndpointAgents.coreArchitecture
- description: Core Architecture
+ description: Core Architecture.
type: String
- contextPath: Covalence.EndpointAgents.coreOs
- description: Core Os
+ description: Core Os.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystem
- description: Operating System
+ description: Operating System.
type: String
- contextPath: Covalence.EndpointAgents.hostName
- description: Host Name
+ description: Host Name.
type: String
- contextPath: Covalence.EndpointAgents.hardwareVendor
- description: Hardware Vendor
+ description: Hardware Vendor.
type: String
- contextPath: Covalence.EndpointAgents.hardwareModel
- description: Hardware Model
+ description: Hardware Model.
type: String
- contextPath: Covalence.EndpointAgents.arch
- description: Arch
+ description: Arch.
type: String
- contextPath: Covalence.EndpointAgents.osDistro
- description: Os Distro
+ description: Os Distro.
type: String
- contextPath: Covalence.EndpointAgents.osVersion
- description: Os Version
+ description: Os Version.
type: String
- contextPath: Covalence.EndpointAgents.kernelVersion
- description: Kernel Version
+ description: Kernel Version.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystemReleaseId
- description: Operating System Release Id
+ description: Operating System Release Id.
type: String
- contextPath: Covalence.EndpointAgents.ipAddress
- description: Ip Address
+ description: Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.secondaryIpAddress
- description: Secondary Ip Address
+ description: Secondary Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.ipAddresses
- description: Ip Addresses
+ description: Ip Addresses.
type: String
- contextPath: Covalence.EndpointAgents.serialNumber
- description: Serial Number
+ description: Serial Number.
type: String
- contextPath: Covalence.EndpointAgents.deviceIdentifier
- description: Device Identifier
+ description: Device Identifier.
type: String
- contextPath: Covalence.EndpointAgents.cpuArchitectureEnum
- description: Cpu Architecture Enum
+ description: Cpu Architecture Enum.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: User filter
+ - description: User filter.
name: user
required: true
- description: List endpoint agents where the last session user is the one provided as parameter
+ description: List endpoint agents where the last session user is the one provided as parameter.
name: cov-secpr-find-endpoint-agents-by-user
outputs:
- contextPath: Covalence.EndpointAgents.agentUuid
- description: Agent Uuid
+ description: Agent Uuid.
type: String
- contextPath: Covalence.EndpointAgents.agentVersion
- description: Agent Version
+ description: Agent Version.
type: String
- contextPath: Covalence.EndpointAgents.firstSeenTime
- description: First Seen Time
+ description: First Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSeenTime
- description: Last Seen Time
+ description: Last Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSessionUser
- description: Last Session User
+ description: Last Session User.
type: String
- contextPath: Covalence.EndpointAgents.isMobile
- description: Is Mobile
+ description: Is Mobile.
type: Boolean
- contextPath: Covalence.EndpointAgents.isConnected
- description: Is Connected
+ description: Is Connected.
type: Boolean
- contextPath: Covalence.EndpointAgents.coreVersion
- description: Core Version
+ description: Core Version.
type: String
- contextPath: Covalence.EndpointAgents.coreArchitecture
- description: Core Architecture
+ description: Core Architecture.
type: String
- contextPath: Covalence.EndpointAgents.coreOs
- description: Core Os
+ description: Core Os.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystem
- description: Operating System
+ description: Operating System.
type: String
- contextPath: Covalence.EndpointAgents.hostName
- description: Host Name
+ description: Host Name.
type: String
- contextPath: Covalence.EndpointAgents.hardwareVendor
- description: Hardware Vendor
+ description: Hardware Vendor.
type: String
- contextPath: Covalence.EndpointAgents.hardwareModel
- description: Hardware Model
+ description: Hardware Model.
type: String
- contextPath: Covalence.EndpointAgents.arch
- description: Arch
+ description: Arch.
type: String
- contextPath: Covalence.EndpointAgents.osDistro
- description: Os Distro
+ description: Os Distro.
type: String
- contextPath: Covalence.EndpointAgents.osVersion
- description: Os Version
+ description: Os Version.
type: String
- contextPath: Covalence.EndpointAgents.kernelVersion
- description: Kernel Version
+ description: Kernel Version.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystemReleaseId
- description: Operating System Release Id
+ description: Operating System Release Id.
type: String
- contextPath: Covalence.EndpointAgents.ipAddress
- description: Ip Address
+ description: Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.secondaryIpAddress
- description: Secondary Ip Address
+ description: Secondary Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.ipAddresses
- description: Ip Addresses
+ description: Ip Addresses.
type: String
- contextPath: Covalence.EndpointAgents.serialNumber
- description: Serial Number
+ description: Serial Number.
type: String
- contextPath: Covalence.EndpointAgents.deviceIdentifier
- description: Device Identifier
+ description: Device Identifier.
type: String
- contextPath: Covalence.EndpointAgents.cpuArchitectureEnum
- description: Cpu Architecture Enum
+ description: Cpu Architecture Enum.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Endpoint agent UUID
+ - description: Endpoint agent UUID.
name: uuid
required: true
- description: Find the endpoint agent with the UUID provided as parameter
+ description: Find the endpoint agent with the UUID provided as parameter.
name: cov-secpr-find-endpoint-agents-by-uuid
outputs:
- contextPath: Covalence.EndpointAgents.agentUuid
- description: Agent Uuid
+ description: Agent Uuid.
type: String
- contextPath: Covalence.EndpointAgents.agentVersion
- description: Agent Version
+ description: Agent Version.
type: String
- contextPath: Covalence.EndpointAgents.firstSeenTime
- description: First Seen Time
+ description: First Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSeenTime
- description: Last Seen Time
+ description: Last Seen Time.
type: Date
- contextPath: Covalence.EndpointAgents.lastSessionUser
- description: Last Session User
+ description: Last Session User.
type: String
- contextPath: Covalence.EndpointAgents.isMobile
- description: Is Mobile
+ description: Is Mobile.
type: Boolean
- contextPath: Covalence.EndpointAgents.isConnected
- description: Is Connected
+ description: Is Connected.
type: Boolean
- contextPath: Covalence.EndpointAgents.coreVersion
- description: Core Version
+ description: Core Version.
type: String
- contextPath: Covalence.EndpointAgents.coreArchitecture
- description: Core Architecture
+ description: Core Architecture.
type: String
- contextPath: Covalence.EndpointAgents.coreOs
- description: Core Os
+ description: Core Os.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystem
- description: Operating System
+ description: Operating System.
type: String
- contextPath: Covalence.EndpointAgents.hostName
- description: Host Name
+ description: Host Name.
type: String
- contextPath: Covalence.EndpointAgents.hardwareVendor
- description: Hardware Vendor
+ description: Hardware Vendor.
type: String
- contextPath: Covalence.EndpointAgents.hardwareModel
- description: Hardware Model
+ description: Hardware Model.
type: String
- contextPath: Covalence.EndpointAgents.arch
- description: Arch
+ description: Arch.
type: String
- contextPath: Covalence.EndpointAgents.osDistro
- description: Os Distro
+ description: Os Distro.
type: String
- contextPath: Covalence.EndpointAgents.osVersion
- description: Os Version
+ description: Os Version.
type: String
- contextPath: Covalence.EndpointAgents.kernelVersion
- description: Kernel Version
+ description: Kernel Version.
type: String
- contextPath: Covalence.EndpointAgents.operatingSystemReleaseId
- description: Operating System Release Id
+ description: Operating System Release Id.
type: String
- contextPath: Covalence.EndpointAgents.ipAddress
- description: Ip Address
+ description: Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.secondaryIpAddress
- description: Secondary Ip Address
+ description: Secondary Ip Address.
type: String
- contextPath: Covalence.EndpointAgents.ipAddresses
- description: Ip Addresses
+ description: Ip Addresses.
type: String
- contextPath: Covalence.EndpointAgents.serialNumber
- description: Serial Number
+ description: Serial Number.
type: String
- contextPath: Covalence.EndpointAgents.deviceIdentifier
- description: Device Identifier
+ description: Device Identifier.
type: String
- contextPath: Covalence.EndpointAgents.cpuArchitectureEnum
- description: Cpu Architecture Enum
+ description: Cpu Architecture Enum.
type: String
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- - description: Process name
+ - description: Process name.
name: name
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+ - description: if details=true, will return the complete response from Covalence API.
name: details
- description: Search processes by name or advanced filter, at least one parameter is required
+ description: Search processes by name or advanced filter. At least one parameter is required.
name: cov-secpr-search-endpoint-process
outputs:
- contextPath: Covalence.EndpointProcess.id
- description: Id
+ description: Id.
type: Number
- contextPath: Covalence.EndpointProcess.agentUuid
- description: Agent Uuid
+ description: Agent Uuid.
type: String
- contextPath: Covalence.EndpointProcess.processName
- description: Process Name
+ description: Process Name.
type: String
- contextPath: Covalence.EndpointProcess.processPath
- description: Process Path
+ description: Process Path.
type: String
- contextPath: Covalence.EndpointProcess.parentProcessName
- description: Parent Process Name
+ description: Parent Process Name.
type: String
- contextPath: Covalence.EndpointProcess.parentProcessPath
- description: Parent Process Path
+ description: Parent Process Path.
type: String
- contextPath: Covalence.EndpointProcess.commandLine
- description: Command Line
+ description: Command Line.
type: String
- contextPath: Covalence.EndpointProcess.username
- description: Username
+ description: Username.
type: String
- contextPath: Covalence.EndpointProcess.firstSeenTime
- description: First Seen Time
+ description: First Seen Time.
type: Date
- contextPath: Covalence.EndpointProcess.lastSeenTime
- description: Last Seen Time
+ description: Last Seen Time.
type: Date
- contextPath: Covalence.EndpointProcess.lastEndTime
- description: Last End Time
+ description: Last End Time.
type: Date
- contextPath: Covalence.EndpointProcess.seenCount
- description: Seen Count
+ description: Seen Count.
type: Number
- contextPath: Covalence.EndpointProcess.activeCount
- description: Active Count
+ description: Active Count.
type: Number
- arguments:
- - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation"'
+ - description: 'Only required in broker mode, used to target a specific organization: target_org="Acme Corporation".'
name: target_org
- description: The name of installed software, quotes are required if a space character is used. At least one parameter is required.
name: name
required: true
- - description: The version of installed software
+ - description: The version of installed software.
name: version
- - description: Advanced filter query
+ - description: Advanced filter query.
name: advanced_filter
- - description: if details=true, will return the complete response from Covalence API
+ - description: if details=true, will return the complete response from Covalence API.
name: details
- description: Search for endpoint installed software
+ description: Search for endpoint installed software.
name: cov-secpr-search-endpoint-installed-software
outputs:
- contextPath: Covalence.EndpointSoftware.arch
- description: Arch
+ description: Arch.
type: Number
- contextPath: Covalence.EndpointSoftware.type
- description: Type
+ description: Type.
type: Number
- contextPath: Covalence.EndpointSoftware.packageManager
- description: Package Manager
+ description: Package Manager.
type: Number
- contextPath: Covalence.EndpointSoftware.installTimestamp
- description: Install Timestamp
+ description: Install Timestamp.
type: Date
- contextPath: Covalence.EndpointSoftware.uninstallTimestamp
- description: Uninstall Timestamp
+ description: Uninstall Timestamp.
type: Date
- contextPath: Covalence.EndpointSoftware.name
- description: Name
+ description: Name.
type: String
- contextPath: Covalence.EndpointSoftware.version
- description: Version
+ description: Version.
type: String
- contextPath: Covalence.EndpointSoftware.vendor
- description: Vendor
+ description: Vendor.
type: String
- contextPath: Covalence.EndpointSoftware.installPath
- description: Install Path
+ description: Install Path.
type: String
- contextPath: Covalence.EndpointSoftware.appDataPath
- description: App Data Path
+ description: App Data Path.
type: String
- contextPath: Covalence.EndpointSoftware.sharedDataPath
- description: Shared Data Path
+ description: Shared Data Path.
type: String
- contextPath: Covalence.EndpointSoftware.installedForUser
- description: Installed For User
+ description: Installed For User.
type: String
- contextPath: Covalence.EndpointSoftware.installSource
- description: Install Source
+ description: Install Source.
type: String
- contextPath: Covalence.EndpointSoftware.id
- description: Id
+ description: Id.
type: Number
- contextPath: Covalence.EndpointSoftware.agentUuid
- description: Agent Uuid
+ description: Agent Uuid.
type: String
- contextPath: Covalence.EndpointSoftware.softwareNotifyAction
- description: Software Notify Action
+ description: Software Notify Action.
type: String
- arguments: []
- description: List monitored organizations, only available in broker mode
+ description: List monitored organizations; only available in broker mode.
name: cov-secpr-list-organizations
outputs:
- contextPath: Covalence.Organization.org_name
- description: Org_name
+ description: Org_name.
type: String
- dockerimage: demisto/xsoar-tools:1.0.0.25075
+ dockerimage: demisto/xsoar-tools:1.0.0.99061
isfetch: true
script: '-'
subtype: python3
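Several of the Covalence commands above accept start_time/end_time (and request_time_after/request_time_before) values in %Y-%m-%dT%H:%M:%S format with a UTC time zone. A minimal standalone sketch of producing such values, using an arbitrary one-hour lookback purely for illustration:

# Minimal sketch (not part of the pack): building start_time/end_time values in the
# %Y-%m-%dT%H:%M:%S UTC format expected by the Covalence connection and DNS commands above.
# The one-hour lookback is an arbitrary example value.
from datetime import datetime, timedelta, timezone

end_time = datetime.now(timezone.utc)
start_time = end_time - timedelta(hours=1)
print(start_time.strftime('%Y-%m-%dT%H:%M:%S'))  # e.g. 2024-05-01T11:00:00
print(end_time.strftime('%Y-%m-%dT%H:%M:%S'))    # e.g. 2024-05-01T12:00:00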
diff --git a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders_test.py b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders_test.py
index 1af6782e0dad..3e245c264603 100644
--- a/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders_test.py
+++ b/Packs/CovalenceForSecurityProviders/Integrations/CovalenceForSecurityProviders/CovalenceForSecurityProviders_test.py
@@ -15,11 +15,10 @@
import demistomock as demisto
import json
-import io
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
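The test helper above now uses the built-in open; since io.open is the same function as open in Python 3, dropping the io import is behavior-preserving. A minimal usage sketch, where the fixture path is hypothetical and only for illustration:

# Hypothetical usage of the helper above; 'test_data/connections_summary.json' is an
# illustrative fixture path, not a file that necessarily exists in the pack.
import json

def util_load_json(path):
    with open(path, encoding='utf-8') as f:
        return json.loads(f.read())

mock_response = util_load_json('test_data/connections_summary.json')
assert isinstance(mock_response, (dict, list))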
diff --git a/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_3.md b/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_3.md
new file mode 100644
index 000000000000..0d86cd3d036e
--- /dev/null
+++ b/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_3.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Covalence For Security Providers
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.96723*.
diff --git a/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_4.md b/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_4.md
new file mode 100644
index 000000000000..969ed3e6df37
--- /dev/null
+++ b/Packs/CovalenceForSecurityProviders/ReleaseNotes/1_1_4.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Covalence For Security Providers
+
+- Updated the Docker image to: *demisto/xsoar-tools:1.0.0.99061*.
diff --git a/Packs/CovalenceForSecurityProviders/pack_metadata.json b/Packs/CovalenceForSecurityProviders/pack_metadata.json
index 182acb02e77f..16690bebdcaa 100644
--- a/Packs/CovalenceForSecurityProviders/pack_metadata.json
+++ b/Packs/CovalenceForSecurityProviders/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Covalence For Security Providers",
"description": "Triggers by any alert from endpoint, cloud, and network security monitoring, with mitigation steps where applicable. Query Covalence for more detail.",
"support": "partner",
- "currentVersion": "1.1.2",
+ "currentVersion": "1.1.4",
"author": "Field Effect Security",
"url": "https://fieldeffect.com/products/covalence-cyber-security/",
"email": "support@fieldeffect.com",
diff --git a/Packs/CrowdStrikeFalcon/.pack-ignore b/Packs/CrowdStrikeFalcon/.pack-ignore
index 7eb67a07053e..403e40212429 100644
--- a/Packs/CrowdStrikeFalcon/.pack-ignore
+++ b/Packs/CrowdStrikeFalcon/.pack-ignore
@@ -1,5 +1,5 @@
[file:CrowdStrikeFalcon.yml]
-ignore=IN126,IN136,IN144,IN124
+ignore=IN126,IN136,IN144
[file:README.md]
ignore=RM102,RM104,RM106
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
index 5213fe88758e..8aeff904a94b 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
@@ -2207,6 +2207,8 @@ def get_remote_data_command(args: dict[str, Any]):
"""
remote_args = GetRemoteDataArgs(args)
remote_incident_id = remote_args.remote_incident_id
+ reopen_statuses_list = argToList(demisto.params().get('reopen_statuses', ''))
+ demisto.debug(f'In get_remote_data_command {reopen_statuses_list=}')
mirrored_data = {}
entries: list = []
@@ -2218,20 +2220,20 @@ def get_remote_data_command(args: dict[str, Any]):
mirrored_data, updated_object = get_remote_incident_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update incident {remote_incident_id} with fields: {updated_object}')
- set_xsoar_incident_entries(updated_object, entries, remote_incident_id) # sets in place
+ set_xsoar_incident_entries(updated_object, entries, remote_incident_id, reopen_statuses_list) # sets in place
elif incident_type == IncidentType.DETECTION:
mirrored_data, updated_object = get_remote_detection_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update detection {remote_incident_id} with fields: {updated_object}')
- set_xsoar_detection_entries(updated_object, entries, remote_incident_id) # sets in place
+ set_xsoar_detection_entries(updated_object, entries, remote_incident_id, reopen_statuses_list) # sets in place
elif incident_type == IncidentType.IDP_OR_MOBILE_DETECTION:
mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update {detection_type} detection {remote_incident_id} with fields: {updated_object}')
set_xsoar_idp_or_mobile_detection_entries(
- updated_object, entries, remote_incident_id, detection_type) # sets in place
+ updated_object, entries, remote_incident_id, detection_type, reopen_statuses_list) # sets in place
else:
# this is here as prints can disrupt mirroring
@@ -2313,36 +2315,55 @@ def get_remote_idp_or_mobile_detection_data(remote_incident_id):
"""
mirrored_data_list = get_detection_entities([remote_incident_id]).get('resources', []) # a list with one dict in it
mirrored_data = mirrored_data_list[0]
+ demisto.debug(f'in get_remote_idp_or_mobile_detection_data {mirrored_data=}')
detection_type = ''
+ mirroring_fields = ['status']
updated_object: dict[str, Any] = {}
if 'idp' in mirrored_data['product']:
updated_object = {'incident_type': IDP_DETECTION}
detection_type = 'IDP'
+ mirroring_fields.append('id')
if 'mobile' in mirrored_data['product']:
updated_object = {'incident_type': MOBILE_DETECTION}
detection_type = 'Mobile'
- set_updated_object(updated_object, mirrored_data, ['status'])
+ mirroring_fields.append('mobile_detection_id')
+ set_updated_object(updated_object, mirrored_data, mirroring_fields)
+ demisto.debug(f'in get_remote_idp_or_mobile_detection_data {mirroring_fields=} {updated_object=}')
return mirrored_data, updated_object, detection_type
-def set_xsoar_incident_entries(updated_object: dict[str, Any], entries: list, remote_incident_id: str):
+def set_xsoar_incident_entries(updated_object: dict[str, Any], entries: list, remote_incident_id: str,
+ reopen_statuses_list: list):
+ reopen_statuses_set = {str(status).strip() for status in reopen_statuses_list} \
+ if reopen_statuses_list else set(STATUS_TEXT_TO_NUM.keys()) - {'Closed'}
+ demisto.debug(f'In set_xsoar_incident_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'Closed':
close_in_xsoar(entries, remote_incident_id, 'Incident')
- elif updated_object.get('status') in (set(STATUS_TEXT_TO_NUM.keys()) - {'Closed'}):
+ elif updated_object.get('status', '') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_incident_id, 'Incident')
+ else:
+ demisto.debug(f"In set_xsoar_incident_entries not closing and not reopening since {updated_object.get('status')=} "
+ f"and {reopen_statuses_set=}.")
-def set_xsoar_detection_entries(updated_object: dict[str, Any], entries: list, remote_detection_id: str):
+def set_xsoar_detection_entries(updated_object: dict[str, Any], entries: list, remote_detection_id: str,
+ reopen_statuses_list: list):
+ reopen_statuses_set = {str(status).lower().strip().replace(' ', '_') for status in reopen_statuses_list} \
+ if reopen_statuses_list else (set(DETECTION_STATUS) - {'closed'})
+ demisto.debug(f'In set_xsoar_detection_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'closed':
close_in_xsoar(entries, remote_detection_id, 'Detection')
- elif updated_object.get('status') in (set(DETECTION_STATUS) - {'closed'}):
+ elif updated_object.get('status') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_detection_id, 'Detection')
+ else:
+ demisto.debug(f"In set_xsoar_detection_entries not closing and not reopening since {updated_object.get('status')=} "
+ f"and {reopen_statuses_set=}.")
-def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], entries: list,
- remote_idp_detection_id: str, incident_type_name: str):
+def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], entries: list, remote_idp_detection_id: str,
+ incident_type_name: str, reopen_statuses_list: list):
"""
Send the updated object to the relevant status handler
@@ -2352,15 +2373,23 @@ def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], en
:param entries: The list of entries to add the new entry into.
:type remote_idp_detection_id: ``str``
:param remote_idp_detection_id: the remote idp detection id
+ :type reopen_statuses_list: ``list``
+ :param reopen_statuses_list: the list of statuses that should reopen an incident in XSOAR.
:return: The response.
:rtype ``dict``
"""
+ reopen_statuses_set = {str(status).lower().strip().replace(' ', '_') for status in reopen_statuses_list} \
+ if reopen_statuses_list else (set(IDP_AND_MOBILE_DETECTION_STATUS) - {'closed'})
+ demisto.debug(f'In set_xsoar_idp_or_mobile_detection_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'closed':
close_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
- elif updated_object.get('status') in (set(IDP_AND_MOBILE_DETECTION_STATUS) - {'closed'}):
+ elif updated_object.get('status') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
+ else:
+ demisto.debug(f"In set_xsoar_idp_or_mobile_detection_entries not closing and not reopening since "
+ f"{updated_object.get('status')=} and {reopen_statuses_set=}.")
def close_in_xsoar(entries: list, remote_incident_id: str, incident_type_name: str):
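The reopen handling added above normalizes the configured Reopen Statuses before comparing them against the mirrored status field, and falls back to every non-closed status when the parameter is empty. A minimal standalone sketch of that normalization for detection-style statuses; the status constant below is an illustrative placeholder, not the integration's DETECTION_STATUS:

# Standalone sketch of the reopen-status normalization used above. The status values
# here are illustrative placeholders, not the integration's DETECTION_STATUS constant.
DETECTION_STATUS = {'new', 'in_progress', 'true_positive', 'false_positive', 'reopened', 'ignored', 'closed'}

def build_reopen_statuses_set(reopen_statuses_list):
    # Display values such as "In progress" become "in_progress"; an empty configuration
    # falls back to every status except "closed".
    if reopen_statuses_list:
        return {str(status).lower().strip().replace(' ', '_') for status in reopen_statuses_list}
    return DETECTION_STATUS - {'closed'}

print(build_reopen_statuses_set(['New', 'In progress']))  # {'new', 'in_progress'}
print(build_reopen_statuses_set([]))                      # every status except 'closed'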
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
index 07d12aca619c..0f3c53f13414 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
@@ -89,14 +89,14 @@ configuration:
section: Collect
required: false
advanced: true
- additionalinfo: "Use the Falcon Query Language. For more information, refer to the integration docs."
+ additionalinfo: "Use the Falcon Query Language. For more information, refer to https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql."
- display: IOA fetch query
name: ioa_fetch_query
type: 0
section: Collect
required: false
advanced: true
- additionalinfo: "In the format: cloud_provider=aws&aws_account_id=1234. The query must have the argument 'cloud_provider' configured. Multiple values for the same parameter is not supported. For more information, refer to the integration docs."
+ additionalinfo: "In the format: cloud_provider=aws&aws_account_id=1234. The query must have the argument 'cloud_provider' configured. Multiple values for the same parameter is not supported. For more information, refer to https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql."
- display: Fetch incidents
name: isFetch
type: 8
@@ -163,6 +163,21 @@ configuration:
- Indicator of Misconfiguration
- Indicator of Attack
- Mobile Detection
+- defaultvalue: 'New,In progress,True positive,False positive,Reopened,Ignored'
+ display: Reopen Statuses
+ name: reopen_statuses
+ type: 16
+ section: Collect
+ advanced: true
+ required: false
+ additionalinfo: CrowdStrike Falcon statuses that will reopen an incident in Cortex XSOAR if closed. You can choose any combination.
+ options:
+ - New
+ - In progress
+ - True positive
+ - False positive
+ - Reopened
+ - Ignored
- defaultvalue: '1'
display: 'Incidents Fetch Interval'
name: incidentFetchInterval
@@ -170,7 +185,7 @@ configuration:
section: Collect
advanced: true
required: false
-- additionalinfo: Use this parameter to determine how long backward to look in the search for incidents that were created before the last run time and did not match the query when they were created.
+- additionalinfo: Use this parameter to determine the look-back period for searching for incidents that were created before the last run time and did not match the query when they were created.
defaultvalue: 1
display: 'Advanced: Time in minutes to look back when fetching incidents and detections'
name: look_back
@@ -190,7 +205,7 @@ script:
- No
description: Whether or not to get additional data about the device.
auto: PREDEFINED
- - description: The query to filter the device.
+ - description: The query by which to filter the device.
name: filter
- description: The maximum records to return [1-5000].
name: limit
@@ -200,7 +215,7 @@ script:
defaultValue: 0
- description: 'A comma-separated list of device IDs to limit the results.'
name: ids
- - description: 'The status of the device. Possible values are: "Normal", "containment_pending", "contained", and "lift_containment_pending".'
+ - description: The status of the device.
name: status
auto: PREDEFINED
predefined:
@@ -208,12 +223,12 @@ script:
- containment_pending
- contained
- lift_containment_pending
- - description: 'The host name of the device.'
+ - description: The hostname of the device.
name: hostname
auto: PREDEFINED
predefined:
- ''
- - description: 'The platform name of the device. Possible values are: Windows, Mac, and Linux.'
+ - description: The platform name of the device.
name: platform_name
auto: PREDEFINED
predefined:
@@ -222,7 +237,7 @@ script:
- Linux
- description: 'The site name of the device.'
name: site_name
- - description: The property to sort by (e.g. status.desc or hostname.asc).
+ - description: The property to sort by (e.g., status.desc or hostname.asc).
name: sort
description: Searches for a device that matches the query.
name: cs-falcon-search-device
@@ -237,7 +252,7 @@ script:
description: The external IP address of the device.
type: String
- contextPath: CrowdStrike.Device.Hostname
- description: The host name of the device.
+ description: The hostname of the device.
type: String
- contextPath: CrowdStrike.Device.OS
description: The operating system of the device.
@@ -285,14 +300,14 @@ script:
description: The endpoint operation system version.
type: String
- arguments:
- - description: The ID of the behavior.
+ - description: The ID of the behavior. The ID of the behavior can be retrieved by running the cs-falcon-search-detection or cs-falcon-get-detections-for-incident command.
name: behavior_id
required: true
description: Searches for and fetches the behavior that matches the query.
name: cs-falcon-get-behavior
outputs:
- contextPath: CrowdStrike.Behavior.FileName
- description: The file name of the behavior.
+ description: The filename of the behavior.
type: String
- contextPath: CrowdStrike.Behavior.Scenario
description: The scenario name of the behavior.
@@ -307,13 +322,13 @@ script:
description: The type of the indicator of compromise.
type: String
- contextPath: CrowdStrike.Behavior.IOCValue
- description: The value of the IOC.
+ description: The value of the indicator of compromise.
type: String
- contextPath: CrowdStrike.Behavior.CommandLine
description: The command line executed in the behavior.
type: String
- contextPath: CrowdStrike.Behavior.UserName
- description: The user name related to the behavior.
+ description: The username related to the behavior.
type: String
- contextPath: CrowdStrike.Behavior.SensorID
description: The sensor ID related to the behavior.
@@ -328,7 +343,7 @@ script:
description: The ID of the behavior.
type: String
- arguments:
- - description: The IDs of the detections to search. If provided, will override other arguments.
+ - description: A comma-separated list of IDs of the detections to search. If provided, will override other arguments.
isArray: true
name: ids
- description: |-
@@ -346,7 +361,7 @@ script:
name: cs-falcon-search-detection
outputs:
- contextPath: CrowdStrike.Detection.Behavior.FileName
- description: The file name of the behavior.
+ description: The filename of the behavior.
type: String
- contextPath: CrowdStrike.Detection.Behavior.Scenario
description: The scenario name of the behavior.
@@ -367,7 +382,7 @@ script:
description: The command line executed in the behavior.
type: String
- contextPath: CrowdStrike.Detection.Behavior.UserName
- description: The user name related to the behavior.
+ description: The username related to the behavior.
type: String
- contextPath: CrowdStrike.Detection.Behavior.SensorID
description: The sensor ID related to the behavior.
@@ -401,7 +416,7 @@ script:
name: ids
required: true
- auto: PREDEFINED
- description: 'The status to transition a detection to.'
+ description: The status to transition a detection to.
name: status
predefined:
- new
@@ -421,19 +436,19 @@ script:
predefined:
- 'true'
- 'false'
- - description: Username to assign the detections to. (This is usually the user’s email address, but may vary based on your configuration). username and assigned_to_uuid are mutually exclusive.
+ - description: Username to assign the detections to. (This is usually the user's email address, but may vary based on your configuration). username and assigned_to_uuid are mutually exclusive.
name: username
description: Resolves and updates a detection using the provided arguments. At least one optional argument must be passed, otherwise no change will take place. Note that IDP detections are not supported.
name: cs-falcon-resolve-detection
- arguments:
- - description: The host agent ID (AID) of the host to contain. Get an agent ID from a detection.
+ - description: A comma-separated list of host agent IDs (AIDs) of the hosts to contain. Get an agent ID from a detection.
isArray: true
name: ids
required: true
description: Contains containment for a specified host. When contained, a host can only communicate with the CrowdStrike cloud and any IPs specified in your containment policy.
name: cs-falcon-contain-host
- arguments:
- - description: The host agent ID (AID) of the host you want to contain. Get an agent ID from a detection. Can also be a comma separated list of IDs.
+ - description: A comma-separated list of host agent IDs (AIDs) of the hosts to contain. Get an agent ID from a detection.
isArray: true
name: ids
required: true
@@ -443,18 +458,18 @@ script:
- description: Any commands run against an offline-queued session will be queued up and executed when the host comes online.
name: queue_offline
defaultValue: false
- - description: A comma-separated list of host agent IDs to run commands for. (Can be retrieved by running the 'cs-falcon-search-device' command.).
+ - description: A comma-separated list of host agent IDs to run commands for. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.
name: host_ids
required: true
- description: The type of command to run.
name: command_type
required: true
- - description: 'The full command to run.'
+ - description: The full command to run.
name: full_command
required: true
- auto: PREDEFINED
defaultValue: read
- description: 'The scope to run the command for. Possible values are: "read", "write", and "admin". (NOTE: In order to run the CrowdStrike RTR `put` command, it is necessary to pass `scope=admin`.).'
+ description: 'The scope to run the command for. (NOTE: In order to run the CrowdStrike RTR `put` command, it is necessary to pass `scope=admin`).'
name: scope
predefined:
- read
@@ -466,7 +481,7 @@ script:
type: unknown
- auto: PREDEFINED
defaultValue: batch
- description: 'The target to run the command for. Possible values are: "single" and "batch".'
+ description: The target to run the command for.
name: target
predefined:
- batch
@@ -521,22 +536,22 @@ script:
- description: The content of the PowerShell script.
name: content
required: true
- description: Uploads a script to Falcon.
+ description: Uploads a script to Falcon CrowdStrike.
name: cs-falcon-upload-script
- arguments:
- description: The file entry ID to upload.
name: entry_id
required: true
- description: Uploads a file to the CrowdStrike cloud. (Can be used for the RTR 'put' command.).
+ description: Uploads a file to the CrowdStrike cloud. (Can be used for the RTR 'put' command).
name: cs-falcon-upload-file
- arguments:
- - description: The ID of the file to delete. (The ID of the file can be retrieved by running the 'cs-falcon-list-files' command).
+ - description: The ID of the file to delete. The ID of the file can be retrieved by running the 'cs-falcon-list-files' command.
name: file_id
required: true
description: Deletes a file based on the provided ID. Can delete only one file at a time.
name: cs-falcon-delete-file
- arguments:
- - description: A comma-separated list of file IDs to get. (The list of file IDs can be retrieved by running the 'cs-falcon-list-files' command.).
+ - description: A comma-separated list of file IDs to get. The list of file IDs can be retrieved by running the 'cs-falcon-list-files' command.
name: file_id
required: true
description: Returns files based on the provided IDs. These files are used for the RTR 'put' command.
@@ -549,7 +564,7 @@ script:
description: The email address of the user who created the file.
type: String
- contextPath: CrowdStrike.File.CreatedTime
- description: The date and time the file was created.
+ description: The datetime the file was created.
type: Date
- contextPath: CrowdStrike.File.Description
description: The description of the file.
@@ -561,7 +576,7 @@ script:
description: The email address of the user who modified the file.
type: String
- contextPath: CrowdStrike.File.ModifiedTime
- description: The date and time the file was modified.
+ description: The datetime the file was modified.
type: Date
- contextPath: CrowdStrike.File.Name
description: The full name of the file.
@@ -585,7 +600,7 @@ script:
description: The size of the file in bytes.
type: Number
- arguments: []
- description: Returns a list of put-file IDs that are available for the user in the 'put' command.
+ description: Returns a list of put-file IDs that are available for the user in the 'put' command. Due to an API limitation, the maximum number of files returned is 100.
name: cs-falcon-list-files
outputs:
- contextPath: CrowdStrike.File.ID
@@ -595,7 +610,7 @@ script:
description: The email address of the user who created the file.
type: String
- contextPath: CrowdStrike.File.CreatedTime
- description: The date and time the file was created.
+ description: The datetime the file was created.
type: Date
- contextPath: CrowdStrike.File.Description
description: The description of the file.
@@ -607,7 +622,7 @@ script:
description: The email address of the user who modified the file.
type: String
- contextPath: CrowdStrike.File.ModifiedTime
- description: The date and time the file was modified.
+ description: The datetime the file was modified.
type: Date
- contextPath: CrowdStrike.File.Name
description: The full name of the file.
@@ -631,7 +646,7 @@ script:
description: The size of the file in bytes.
type: Number
- arguments:
- - description: A comma-separated list of script IDs to return. (The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.).
+ - description: A comma-separated list of script IDs to return. The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.
name: script_id
required: true
description: Returns custom scripts based on the provided ID. Used for the RTR 'runscript' command.
@@ -644,7 +659,7 @@ script:
description: The email address of the user who created the script.
type: String
- contextPath: CrowdStrike.Script.CreatedTime
- description: The date and time the script was created.
+ description: The datetime the script was created.
type: Date
- contextPath: CrowdStrike.Script.Description
description: The description of the script.
@@ -653,7 +668,7 @@ script:
description: The email address of the user who modified the script.
type: String
- contextPath: CrowdStrike.Script.ModifiedTime
- description: The date and time the script was modified.
+ description: The datetime the script was modified.
type: Date
- contextPath: CrowdStrike.Script.Name
description: The script name.
@@ -677,7 +692,7 @@ script:
description: Whether the user has write access to the script.
type: Boolean
- arguments:
- - description: The script ID to delete. (Script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.).
+ - description: The script ID to delete. The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.
name: script_id
required: true
description: Deletes a custom-script based on the provided ID. Can delete only one script at a time.
@@ -693,7 +708,7 @@ script:
description: The email address of the user who created the script.
type: String
- contextPath: CrowdStrike.Script.CreatedTime
- description: The date and time the script was created.
+ description: The datetime the script was created.
type: Date
- contextPath: CrowdStrike.Script.Description
description: The description of the script.
@@ -702,7 +717,7 @@ script:
description: The email address of the user who modified the script.
type: String
- contextPath: CrowdStrike.Script.ModifiedTime
- description: The date and time the script was modified.
+ description: The datetime the script was modified.
type: Date
- contextPath: CrowdStrike.Script.Name
description: The script name.
@@ -728,7 +743,7 @@ script:
- arguments:
- description: The name of the script to run.
name: script_name
- - description: A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.).
+ - description: A comma-separated list of host agent IDs to run commands for. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.
name: host_ids
required: true
- description: The PowerShell script code to run.
@@ -761,15 +776,16 @@ script:
description: The full command.
type: String
- arguments:
- - description: List of host agent IDs on which to run the RTR command.
+ - description: A comma-separated list of host agent IDs on which to run the RTR command.
isArray: true
name: host_ids
required: true
- description: Full path to the file that will be retrieved from each host in the batch.
name: file_path
required: true
- - description: List of a subset of hosts on which to run the command.
+ - description: A comma-separated list of a subset of hosts on which to run the command.
name: optional_hosts
+ isArray: true
- description: 'The number of seconds to wait for the request before it times out. In ISO time format. For example: 2019-10-17T13:41:48.487520845Z.'
name: timeout
- description: 'The amount of time to wait for the request before it times out. In duration syntax. For example, 10s. Valid units are: ns, us, ms, s, m, h. Maximum value is 10 minutes.'
@@ -802,7 +818,7 @@ script:
description: The file path.
type: string
- arguments:
- - description: The list of IDs of the command requested.
+ - description: A comma-separated list of IDs of the command requested.
isArray: true
name: request_ids
required: true
@@ -854,7 +870,7 @@ script:
name: sequence_id
- auto: PREDEFINED
defaultValue: read
- description: 'The scope to run the command for. Possible values are: "read", "write", or "admin".'
+ description: The scope to run the command for.
name: scope
predefined:
- read
@@ -965,14 +981,14 @@ script:
- description: A comma-separated list of IOC sources.
isArray: true
name: sources
- - description: Start of date range to search in (YYYY-MM-DD format).
+ - description: Start of date range to search in YYYY-MM-DD format.
name: from_expiration_date
- - description: End of date range to search in (YYYY-MM-DD format).
+ - description: End of date range to search in YYYY-MM-DD format.
name: to_expiration_date
- description: The maximum number of records to return. The minimum is 1 and the maximum is 500. Default is 100.
name: limit
- auto: PREDEFINED
- description: 'The order the results are returned in. Possible values are: "type.asc", "type.desc", "value.asc", "value.desc", "policy.asc", "policy.desc", "share_level.asc", "share_level.desc", "expiration_timestamp.asc", and "expiration_timestamp.desc".'
+ description: The order the results are returned in.
name: sort
predefined:
- type.asc
@@ -1029,7 +1045,7 @@ script:
type: string
- arguments:
- auto: PREDEFINED
- description: 'The IOC type to retrieve. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6".'
+ description: The IOC type to retrieve.
name: type
predefined:
- sha256
@@ -1065,13 +1081,13 @@ script:
description: The level at which the indicator will be shared.
type: string
- contextPath: CrowdStrike.IOC.Expiration
- description: The date and time when the indicator will expire.
+ description: The datetime when the indicator will expire.
type: string
- contextPath: CrowdStrike.IOC.Description
description: The description of the IOC.
type: string
- contextPath: CrowdStrike.IOC.CreatedTime
- description: The date and time the IOC was created.
+ description: The datetime the IOC was created.
type: string
- contextPath: CrowdStrike.IOC.CreatedBy
description: The identity of the user/process who created the IOC.
@@ -1084,7 +1100,7 @@ script:
type: string
- arguments:
- auto: PREDEFINED
- description: 'The type of the indicator. Possible values are: "sha256", "md5", "domain", "ipv4", and "ipv6".'
+ description: The type of the indicator.
name: ioc_type
predefined:
- sha256
@@ -1098,7 +1114,7 @@ script:
required: true
- auto: PREDEFINED
defaultValue: detect
- description: 'The policy to enact when the value is detected on a host. Possible values are: "detect" and "none". A value of "none" is equivalent to turning the indicator off.'
+ description: 'The policy to enact when the value is detected on a host. A value of "none" is equivalent to turning the indicator off.'
name: policy
predefined:
- detect
@@ -1149,14 +1165,14 @@ script:
description: The identity of the user/process who created the IOC.
type: string
- contextPath: CrowdStrike.IOC.ModifiedTime
- description: The date and time the indicator was last modified.
+ description: The datetime the indicator was last modified.
type: string
- contextPath: CrowdStrike.IOC.ModifiedBy
description: The identity of the user/process who last updated the IOC.
type: string
- arguments:
- auto: PREDEFINED
- description: 'The type of the indicator. Possible values are: "sha256", "md5", "sha1", "domain", "ipv4", and "ipv6".'
+ description: The type of the indicator.
name: ioc_type
predefined:
- sha256
@@ -1171,7 +1187,7 @@ script:
required: true
- auto: PREDEFINED
defaultValue: detect
- description: 'The policy to enact when the value is detected on a host. Possible values are: "detect" and "none". A value of "none" is equivalent to turning the indicator off.'
+ description: 'The policy to enact when the value is detected on a host. A value of "none" is equivalent to turning the indicator off.'
name: policy
predefined:
- detect
@@ -1222,14 +1238,14 @@ script:
description: The identity of the user/process who created the IOC.
type: string
- contextPath: CrowdStrike.IOC.ModifiedTime
- description: The date and time the indicator was last modified.
+ description: The datetime the indicator was last modified.
type: string
- contextPath: CrowdStrike.IOC.ModifiedBy
description: The identity of the user/process who last updated the IOC.
type: string
- arguments:
- auto: PREDEFINED
- description: 'The IOC type to delete. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6".'
+ description: The IOC type to delete.
name: type
predefined:
- sha256
@@ -1247,7 +1263,7 @@ script:
name: cs-falcon-delete-ioc
- arguments:
- auto: PREDEFINED
- description: 'A comma-separated list of indicator types. Valid types are: "sha256", "sha1", "md5", "domain", "ipv4", "ipv6".'
+ description: A comma-separated list of indicator types.
isArray: true
name: types
predefined:
@@ -1263,13 +1279,13 @@ script:
- description: A comma-separated list of IOC sources.
isArray: true
name: sources
- - description: The date the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ).
+ - description: The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ).
name: expiration
- defaultValue: '50'
description: The maximum number of records to return. The minimum is 1 and the maximum is 500.
name: limit
- auto: PREDEFINED
- description: 'The order the results are returned in. Possible values are: "type.asc", "type.desc", "value.asc", "value.desc", "policy.asc", "policy.desc", "share_level.asc", "share_level.desc", "expiration_timestamp.asc", and "expiration_timestamp.desc".'
+ description: The order the results are returned in.
name: sort
predefined:
- type.asc
@@ -1329,7 +1345,7 @@ script:
description: A pagination token used with the limit parameter to manage pagination of results.
- arguments:
- auto: PREDEFINED
- description: 'The IOC type to retrieve. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". Either ioc_id or ioc_type and value must be provided.'
+ description: The IOC type to retrieve. Either ioc_id or ioc_type and value must be provided.
name: type
predefined:
- sha256
@@ -1340,7 +1356,7 @@ script:
- ipv6
- description: The string representation of the indicator. Either ioc_id or ioc_type and value must be provided.
name: value
- - description: The ID of the IOC to get. Can be retrieved by running the cs-falcon-search-custom-iocs command. Either ioc_id or ioc_type and value must be provided.
+ - description: The ID of the IOC to get. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command. Either ioc_id or ioc_type and value must be provided.
name: ioc_id
description: Gets the full definition of one or more indicators that you are watching.
name: cs-falcon-get-custom-ioc
@@ -1383,7 +1399,7 @@ script:
type: string
- arguments:
- auto: PREDEFINED
- description: 'The type of the indicator. Possible values are: "sha256", "md5", "domain", "ipv4", and "ipv6".'
+ description: The type of the indicator.
name: ioc_type
predefined:
- sha256
@@ -1393,7 +1409,7 @@ script:
- ipv6
required: true
- description: |-
- A comma separated list of indicators.
+ A comma-separated list of indicators.
More than one value can be supplied to upload multiple IOCs of the same type but with different values. Note that the uploaded IOCs will have the same properties (as supplied in other arguments).
isArray: true
name: value
@@ -1409,7 +1425,7 @@ script:
- detect
required: true
- auto: PREDEFINED
- description: 'The platforms that the indicator applies to. You can enter multiple platform names, separated by commas. Possible values are: mac, windows and linux.'
+ description: A comma-separated list of the platforms that the indicator applies to.
isArray: true
name: platforms
predefined:
@@ -1418,7 +1434,7 @@ script:
- linux
required: true
- auto: PREDEFINED
- description: 'The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action. Possible values are: informational, low, medium, high, and critical.'
+ description: The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action.
name: severity
predefined:
- informational
@@ -1426,7 +1442,7 @@ script:
- medium
- high
- critical
- - description: The date the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ).
+ - description: The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ).
name: expiration
- description: The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters.
name: source
@@ -1438,10 +1454,10 @@ script:
predefined:
- 'true'
- 'false'
- - description: List of host group IDs that the indicator applies to. Can be retrieved by running the cs-falcon-list-host-groups command. Either applied_globally or host_groups must be provided.
+ - description: A comma-separated list of host group IDs that the indicator applies to. The list of host group IDs can be retrieved by running the 'cs-falcon-list-host-groups' command. Either applied_globally or host_groups must be provided.
isArray: true
name: host_groups
- - description: List of tags to apply to the indicator.
+ - description: A comma-separated list of tags to apply to the indicator.
isArray: true
name: tags
- description: Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs.
@@ -1495,7 +1511,7 @@ script:
description: Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs.
type: string
- arguments:
- - description: The ID of the IOC to update. Can be retrieved by running the cs-falcon-search-custom-iocs command.
+ - description: The ID of the IOC to update. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command.
name: ioc_id
required: true
- auto: PREDEFINED
@@ -1508,14 +1524,14 @@ script:
- prevent
- detect
- auto: PREDEFINED
- description: 'The platforms that the indicator applies to. You can enter multiple platform names, separated by commas. Possible values are: mac, windows and linux.'
+ description: A comma-separated list of the platforms that the indicator applies to.
name: platforms
predefined:
- mac
- windows
- linux
- auto: PREDEFINED
- description: 'The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action. Possible values are: informational, low, medium, high and critical.'
+ description: The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action.
name: severity
predefined:
- informational
@@ -1523,7 +1539,7 @@ script:
- medium
- high
- critical
- - description: The date the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ).
+ - description: The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ).
name: expiration
- description: The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters.
name: source
@@ -1575,14 +1591,14 @@ script:
type: string
- arguments:
- - description: The ID of the IOC to delete. Can be retrieved by running the cs-falcon-search-custom-iocs command.
+ - description: The ID of the IOC to delete. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command.
name: ioc_id
required: true
description: Deletes a monitored indicator.
name: cs-falcon-delete-custom-ioc
- arguments:
- auto: PREDEFINED
- description: 'The IOC type. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6".'
+ description: The IOC type.
name: type
predefined:
- sha256
@@ -1612,7 +1628,7 @@ script:
type: number
- arguments:
- auto: PREDEFINED
- description: 'The IOC type. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6".'
+ description: The IOC type.
name: type
predefined:
- sha256
@@ -1715,7 +1731,7 @@ script:
type: string
- arguments:
- auto: PREDEFINED
- description: 'The type of indicator. Possible values are: "domain", "ipv4", "ipv6", "md5", "sha1", or "sha256".'
+ description: The type of indicator.
name: type
predefined:
- domain
@@ -1737,7 +1753,7 @@ script:
- arguments:
- description: The query used to filter the results.
name: fetch_query
- - description: A comma separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678, If you use this argument, fetch_query argument will be ignored.
+ - description: A comma-separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678. If you use this argument, the fetch_query argument will be ignored.
isArray: true
name: ids
description: Lists detection summaries.
@@ -1786,7 +1802,7 @@ script:
description: The external IP address of the device.
type: String
- contextPath: CrowdStrike.Detections.device.hostname
- description: The host name of the device.
+ description: The hostname of the device.
type: String
- contextPath: CrowdStrike.Detections.device.first_seen
description: The datetime the host was first seen by CrowdStrike Falcon.
@@ -1840,7 +1856,7 @@ script:
description: The ID of the behavior.
type: String
- contextPath: CrowdStrike.Detections.behaviors.filename
- description: The file name of the triggering process.
+ description: The filename of the triggering process.
type: String
- contextPath: CrowdStrike.Detections.behaviors.alleged_filetype
description: The file extension of the behavior's filename.
@@ -1894,7 +1910,7 @@ script:
description: The SHA256 of the triggering process.
type: String
- contextPath: CrowdStrike.Detections.behaviors.md5
- description: The MD5 of the triggering process.
+ description: The MD5 hash of the triggering process.
type: String
- contextPath: CrowdStrike.Detections.behaviors.parent_details.parent_sha256
description: The SHA256 hash of the parent process.
@@ -1995,7 +2011,7 @@ script:
- arguments:
- description: The query used to filter the results.
name: fetch_query
- - description: A comma separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678, If you use this argument, fetch_query argument will be ignored.
+ - description: A comma-separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678. If you use this argument, the fetch_query argument will be ignored.
isArray: true
name: ids
description: Lists incident summaries.
@@ -2047,10 +2063,10 @@ script:
description: The name of the host.
type: String
- contextPath: CrowdStrike.Incidents.hosts.first_seen
- description: The date and time the host was first seen by CrowdStrike Falcon.
+ description: The datetime the host was first seen by CrowdStrike Falcon.
type: Date
- contextPath: CrowdStrike.Incidents.hosts.last_seen
- description: The date and time the host was last seen by CrowdStrike Falcon.
+ description: The datetime the host was last seen by CrowdStrike Falcon.
type: Date
- contextPath: CrowdStrike.Incidents.hosts.local_ip
description: The device local IP address.
@@ -2089,13 +2105,13 @@ script:
description: 'The datetime a user modified the incident in ISO time format. For example: 2019-10-17T13:41:48.487520845Z.'
type: Date
- contextPath: CrowdStrike.Incidents.created
- description: The time that the incident was created.
+ description: The datetime that the incident was created.
type: Date
- contextPath: CrowdStrike.Incidents.start
- description: The recorded time of the earliest incident.
+ description: The recorded datetime of the earliest incident.
type: Date
- contextPath: CrowdStrike.Incidents.end
- description: The recorded time of the latest incident.
+ description: The recorded datetime of the latest incident.
type: Date
- contextPath: CrowdStrike.Incidents.state
description: The state of the incident.
@@ -2123,7 +2139,7 @@ script:
name: ip
- description: The endpoint hostname.
name: hostname
- description: Returns information about an endpoint, does not support regex.
+ description: Returns information about an endpoint. Does not support regex.
name: endpoint
outputs:
- contextPath: Endpoint.Hostname
@@ -2158,7 +2174,7 @@ script:
name: name
required: true
- auto: PREDEFINED
- description: The group type of the group. Can be 'static' or 'dynamic'.
+ description: The group type of the group.
name: group_type
predefined:
- static
@@ -2287,7 +2303,7 @@ script:
defaultValue: '50'
description: The maximum number of results on a page.
name: limit
- - description: The property to sort by (e.g. status.desc or hostname.asc).
+ - description: The property to sort by (e.g., status.desc or hostname.asc).
name: sort
description: Gets the list of host group members.
name: cs-falcon-list-host-group-members
@@ -2302,7 +2318,7 @@ script:
description: The external IP address of the device.
type: String
- contextPath: CrowdStrike.Device.Hostname
- description: The host name of the device.
+ description: The hostname of the device.
type: String
- contextPath: CrowdStrike.Device.OS
description: The operating system of the device.
@@ -2323,7 +2339,7 @@ script:
- description: The ID of the host group.
name: host_group_id
required: true
- - description: A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.).
+ - description: A comma-separated list of host agent IDs to run commands. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.
isArray: true
name: host_ids
required: true
@@ -2358,7 +2374,7 @@ script:
- description: The ID of the host group.
name: host_group_id
required: true
- - description: A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.).
+ - description: A comma-separated list of host agent IDs to run commands. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.
isArray: true
name: host_ids
required: true
@@ -2395,7 +2411,7 @@ script:
name: ids
required: true
- auto: PREDEFINED
- description: The new status of the incident. Can be "New", "In Progress", "Reopened", "Closed".
+ description: The new status of the incident.
name: status
predefined:
- New
@@ -2415,7 +2431,7 @@ script:
description: Resolve and update incidents using the specified settings.
name: cs-falcon-resolve-incident
- arguments:
- - description: A JSON object with list of CS Falcon indicators to upload.
+ - description: A JSON object with a list of CrowdStrike Falcon indicators to upload.
isArray: true
name: multiple_indicators_json
required: true
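
For illustration only, a hypothetical way the multiple_indicators_json argument could be assembled from the fields documented for the single-IOC commands earlier in this file (type, value, action, platforms, severity); the exact schema the Falcon API expects may differ:

```python
import json

# Hypothetical payload; field names follow the IOC arguments documented above,
# but the schema actually accepted by the API may differ.
indicators = [
    {
        'type': 'ipv4',
        'value': '8.8.8.8',
        'action': 'detect',
        'platforms': ['windows', 'linux'],
        'severity': 'medium',
    },
]
multiple_indicators_json = json.dumps(indicators)  # value passed to the command argument
```
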
@@ -2469,7 +2485,7 @@ script:
description: The platforms of the IOC.
type: Unknown
- arguments:
- - description: The host ID you would like to kill the given process for.
+ - description: The host ID to kill the given process for.
name: host_id
required: true
- description: A comma-separated list of process IDs to kill.
@@ -2488,17 +2504,17 @@ script:
description: The process ID that was killed.
type: String
- contextPath: CrowdStrike.Command.kill.Error
- description: The error message raised if the command was failed.
+ description: The error message raised if the command failed.
type: String
- contextPath: CrowdStrike.Command.kill.HostID
description: The host ID.
type: String
- arguments:
- - description: A comma-separated list of the hosts IDs you would like to remove the file for.
+ - description: A comma-separated list of the host IDs to remove the file for.
isArray: true
name: host_ids
required: true
- - description: The path to a file or a directory you want to remove.
+ - description: The path to a file or a directory to remove.
name: file_path
required: true
- auto: PREDEFINED
@@ -2524,7 +2540,7 @@ script:
description: The error message raised if the command failed.
type: String
- arguments:
- - description: The host ID you want to get the processes list from.
+ - description: The host ID to get the processes list from.
name: host_id
required: true
- description: Whether the command will run against an offline-queued session and be queued for execution when the host comes online.
@@ -2536,10 +2552,10 @@ script:
name: cs-falcon-rtr-list-processes
outputs:
- contextPath: CrowdStrike.Command.ps.Filename
- description: The the name of the result file to be returned.
+ description: The name of the result file to be returned.
type: String
- arguments:
- - description: The host ID you want to get the network status and protocol statistics list from.
+ - description: The host ID to get the network status and protocol statistics list from.
name: host_id
required: true
- description: Whether the command will run against an offline-queued session and be queued for execution when the host comes online.
@@ -2551,14 +2567,14 @@ script:
name: cs-falcon-rtr-list-network-stats
outputs:
- contextPath: CrowdStrike.Command.netstat.Filename
- description: The the name of the result file to be returned.
+ description: The name of the result file to be returned.
type: String
- arguments:
- - description: A comma-separated list of the host IDs you want to get the registry keys from.
+ - description: A comma-separated list of the host IDs to get the registry keys from.
isArray: true
name: host_ids
required: true
- - description: A comma-separated list of the registry keys, sub keys or value to get.
+ - description: A comma-separated list of the registry keys, sub-keys, or value to get.
isArray: true
name: registry_keys
required: true
@@ -2570,7 +2586,7 @@ script:
description: Executes an RTR active-responder read registry keys command across the given hosts. This command is valid only for Windows hosts.
name: cs-falcon-rtr-read-registry
- arguments:
- - description: A comma-separated list of the hosts IDs you want to get the list of scheduled tasks from.
+ - description: A comma-separated list of the host IDs to get the list of scheduled tasks from.
isArray: true
name: host_ids
required: true
@@ -2582,14 +2598,14 @@ script:
description: Executes an RTR active-responder netstat command to get a list of scheduled tasks across the given host. This command is valid only for Windows hosts.
name: cs-falcon-rtr-list-scheduled-tasks
- arguments:
- - description: A comma-separated list of the hosts IDs you want to get the file from.
+ - description: A comma-separated list of the host IDs to get the file from.
isArray: true
name: host_ids
required: true
- description: The file path of the required file to extract.
name: file_path
required: true
- - description: The file name to use for the archive name and the file within the archive.
+ - description: The filename to use for the archive name and the file within the archive.
name: filename
- description: Interval between polling. Default is 60 seconds. Must be higher than 10.
name: interval_in_seconds
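
A minimal, non-authoritative sketch of how a polling command such as 'cs-falcon-rtr-retrieve-file' can honor the interval_in_seconds argument above, assuming CommonServerPython's ScheduledCommand; the helper and the timeout value are illustrative, not the integration's actual implementation:

```python
from CommonServerPython import CommandResults, ScheduledCommand

def schedule_next_poll(args):
    # Re-invoke the same command after the requested interval; the argument
    # description above states the interval must be higher than 10 seconds, default 60.
    interval = max(int(args.get('interval_in_seconds', 60)), 10)
    scheduled = ScheduledCommand(
        command='cs-falcon-rtr-retrieve-file',
        next_run_in_seconds=interval,
        args=args,
        timeout_in_seconds=600,  # illustrative overall timeout
    )
    return CommandResults(readable_output='Waiting for the file retrieval to complete...',
                          scheduled_command=scheduled)
```
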
@@ -2610,7 +2626,7 @@ script:
name: cs-falcon-rtr-retrieve-file
outputs:
- contextPath: CrowdStrike.File.FileName
- description: The file name.
+ description: The filename.
type: String
- contextPath: CrowdStrike.File.HostID
description: The host ID.
@@ -2679,7 +2695,7 @@ script:
description: Gets remote data from a remote incident or detection. This method does not update the current incident or detection, and should be used for debugging purposes only.
name: get-remote-data
- arguments:
- - description: Date string representing the local time. The incident or detection is only returned if it was modified after the last update time.
+ - description: Date string representing the local time, as a UTC timestamp in seconds. The incident or detection is only returned if it was modified after the last update time.
name: lastUpdate
description: Gets the list of incidents and detections that were modified since the last update time. This method is used for debugging purposes. The get-modified-remote-data command is used as part of the Mirroring feature that was introduced in Cortex XSOAR version 6.1.
name: get-modified-remote-data
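
A small illustrative sketch, assuming the dateparser library that ships with most XSOAR integrations, of converting the lastUpdate date string described above into a UTC epoch value in seconds; the helper name is hypothetical:

```python
import dateparser

def last_update_to_epoch(last_update):
    # Parse the incoming date string as an aware UTC datetime and return whole
    # seconds since the epoch, or None if the string cannot be parsed.
    parsed = dateparser.parse(last_update,
                              settings={'TIMEZONE': 'UTC', 'RETURN_AS_TIMEZONE_AWARE': True})
    return int(parsed.timestamp()) if parsed else None
```
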
@@ -2689,16 +2705,16 @@ script:
- arguments:
- description: Limit the vulnerabilities returned to specific properties. Each value must be enclosed in single quotes and placed immediately after the colon with no space. For example, 'filter=status:'open'+cve.id:['CVE-2013-3900','CVE-2021-1675']'.
name: filter
- - description: Unique agent identifier (AID) of a sensor.
+ - description: A comma-separated list of unique agent identifiers (AIDs) of a sensor.
name: aid
isArray: true
- - description: Unique identifier for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation.
+ - description: A comma-separated list of unique identifiers for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation.
name: cve_id
isArray: true
- - description: "Severity of the CVE. The possible values are: CRITICAL, HIGH, MEDIUM, LOW, UNKNOWN, or NONE."
+ - description: "A comma-separated list of severities of the CVE. The possible values are: CRITICAL, HIGH, MEDIUM, LOW, UNKNOWN, or NONE."
name: cve_severity
isArray: true
- - description: Name of a tag assigned to a host. Retrieve tags from Host Tags APIs.
+ - description: A comma-separated list of names of tags assigned to a host. Retrieve tags from Host Tags APIs.
name: tags
isArray: true
- description: "Status of a vulnerability. This filter supports multiple values and negation. The possible values are: open, closed, reopen, expired."
@@ -2706,18 +2722,18 @@ script:
isArray: true
- description: "Operating system platform. This filter supports negation. The possible values are: Windows, Mac, Linux."
name: platform_name
- - description: Unique system-assigned ID of a host group. Retrieve the host group ID from Host Group APIs.
+ - description: A comma-separated list of unique system-assigned IDs of a host group. Retrieve the host group ID from Host Group APIs.
name: host_group
isArray: true
- - description: Type of host a sensor is running on.
+ - description: A comma-separated list of types of hosts a sensor is running on.
name: host_type
isArray: true
- - description: Filter for vulnerabilities based on the number of days since a host last connected to CrowdStrike Falcon. Enter a numeric value from 3 to 45 to indicate the number of days you want to look back. Example- last_seen_within:10.
+ - description: Filter for vulnerabilities based on the number of days since a host last connected to CrowdStrike Falcon. Enter a numeric value from 3 to 45 to indicate the number of days to look back. For example, last_seen_within:10.
name: last_seen_within
- auto: PREDEFINED
description: Indicates if the vulnerability is suppressed by a suppression rule.
name: is_suppressed
- isArray: true
+ isArray: false
predefined:
- 'true'
- 'false'
@@ -2798,7 +2814,7 @@ script:
description: Device's local IP address.
type: String
- contextPath: CrowdStrike.Vulnerability.host_info.machine_domain
- description: Active Directory domain name.
+ description: Active Directory domain name.
type: String
- contextPath: CrowdStrike.Vulnerability.host_info.os_version
description: Operating system version.
@@ -2903,7 +2919,7 @@ script:
name: cve
outputs:
- contextPath: DBotScore.Indicator
- description: The indicator value.
+ description: The indicator that was tested.
type: String
- contextPath: DBotScore.Type
description: The indicator type.
@@ -2965,7 +2981,7 @@ script:
description: Device's local IP address.
type: String
- contextPath: CrowdStrike.VulnerabilityHost.host_info.machine_domain
- description: Active Directory domain name.
+ description: Active Directory domain name.
type: String
- contextPath: CrowdStrike.VulnerabilityHost.host_info.os_version
description: Operating system version.
@@ -3032,19 +3048,19 @@ script:
description: What the exclusion applies to (e.g., a specific ML model).
type: String
- contextPath: CrowdStrike.MLExclusion.groups.id
- description: Group's ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.created_by
description: Indicate who created the group.
@@ -3084,7 +3100,7 @@ script:
- description: A comma-separated list of group ID(s) impacted by the exclusion.
name: groups
isArray: true
- description: Updates an ML exclusion. At least one argument is required in addition to the ID argument.
+ description: Updates an ML exclusion. At least one argument is required in addition to the id argument.
name: cs-falcon-update-ml-exclusion
outputs:
- contextPath: CrowdStrike.MLExclusion.id
@@ -3097,25 +3113,25 @@ script:
description: A regular expression for matching the excluded value.
type: String
- contextPath: CrowdStrike.MLExclusion.value_hash
- description: An hash of the value field.
+ description: A hash of the value field.
type: String
- contextPath: CrowdStrike.MLExclusion.excluded_from
description: What the exclusion applies to (e.g., a specific ML model).
type: String
- contextPath: CrowdStrike.MLExclusion.groups.id
- description: Groups ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.created_by
description: Indicate who created the group.
@@ -3152,16 +3168,16 @@ script:
description: Delete the ML exclusions by ID.
name: cs-falcon-delete-ml-exclusion
- arguments:
- - description: "A custom filter by which the exclusions should be filtered.\n The syntax follows the pattern `:[operator]''` for example: value:'test'.\n Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value.\n For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language."
+ - description: "A custom filter by which the exclusions should be filtered.\n The syntax follows the pattern `:[operator]''`. For example: value:'test'.\n Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value.\n For more information, see: https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql."
name: filter
- description: The value by which the exclusions should be filtered.
name: value
- description: A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and value.
name: ids
isArray: true
- - description: The maximum number of records to return. [1-500]. Applies only if the IDs argument is not supplied.
+ - description: The maximum number of records to return. [1-500]. Applies only if the ids argument is not supplied.
name: limit
- - description: The offset to start retrieving records from. Applies only if the IDs argument is not supplied.
+ - description: The offset to start retrieving records from. Applies only if the ids argument is not supplied.
name: offset
- auto: PREDEFINED
description: How to sort the retrieved exclusions.
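
As a rough illustration of the Falcon Query Language pattern referenced in the filter argument above, a hypothetical helper that joins field:'value' pairs with '+' (logical AND), matching the documented example value:'test'; consult the linked FQL documentation for the authoritative syntax:

```python
def build_fql_filter(**fields):
    # Example: build_fql_filter(value='test', created_by='api-client')
    # -> "value:'test'+created_by:'api-client'"
    return '+'.join("{name}:'{value}'".format(name=name, value=value)
                    for name, value in fields.items()
                    if value is not None)
```
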
@@ -3198,19 +3214,19 @@ script:
description: What the exclusion applies to (e.g., a specific ML model).
type: String
- contextPath: CrowdStrike.MLExclusion.groups.id
- description: Groups ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.MLExclusion.groups.created_by
description: Indicate who created the group.
@@ -3251,7 +3267,7 @@ script:
- description: Command line regular expression.
name: cl_regex
required: true
- - description: Image file name regular expression.
+ - description: Image filename regular expression.
name: ifn_regex
required: true
- description: Comment describing why the exclusions were created.
@@ -3283,7 +3299,7 @@ script:
description: The name of the pattern associated with the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.ifn_regex
- description: A regular expression used for file name matching.
+ description: A regular expression used for filename matching.
type: String
- contextPath: CrowdStrike.IOAExclusion.cl_regex
description: A regular expression used for command line matching.
@@ -3292,19 +3308,19 @@ script:
description: A JSON string that describes the detection logic for the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.id
- description: Groups ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.created_by
description: Indicate who created the group.
@@ -3345,7 +3361,7 @@ script:
name: pattern_name
- description: Command line regular expression.
name: cl_regex
- - description: Image file name regular expression.
+ - description: Image filename regular expression.
name: ifn_regex
- description: Comment describing why the exclusions was created.
name: comment
@@ -3356,7 +3372,7 @@ script:
- description: A comma-separated list of group ID(s) impacted by the exclusion.
name: groups
isArray: true
- description: Updates an IOA exclusion. At least one argument is required in addition to the ID argument.
+ description: Updates an IOA exclusion. At least one argument is required in addition to the id argument.
name: cs-falcon-update-ioa-exclusion
outputs:
- contextPath: CrowdStrike.IOAExclusion.id
@@ -3375,7 +3391,7 @@ script:
description: The name of the pattern associated with the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.ifn_regex
- description: A regular expression used for file name matching.
+ description: A regular expression used for filename matching.
type: String
- contextPath: CrowdStrike.IOAExclusion.cl_regex
description: A regular expression used for command line matching.
@@ -3384,19 +3400,19 @@ script:
description: A JSON string that describes the detection logic for the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.id
- description: Groups ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.created_by
description: Indicate who created the group.
@@ -3433,16 +3449,16 @@ script:
description: Delete the IOA exclusions by ID.
name: cs-falcon-delete-ioa-exclusion
- arguments:
- - description: "A custom filter by which the exclusions should be filtered.\n The syntax follows the pattern `:[operator]''` for example: name:'test'.\n Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern.\n For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language."
+ - description: "A custom filter by which the exclusions should be filtered.\n The syntax follows the pattern `:[operator]''`. For example: name:'test'.\n Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern.\n For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language."
name: filter
- description: The name by which the exclusions should be filtered.
name: name
- description: A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and name.
name: ids
isArray: true
- - description: The limit of how many exclusions to retrieve. Default is 50. Applies only if the IDs argument is not supplied.
+ - description: The limit of how many exclusions to retrieve. Default is 50. Applies only if the ids argument is not supplied.
name: limit
- - description: The offset of how many exclusions to skip. Default is 0. Applies only if the IDs argument is not supplied.
+ - description: The offset of how many exclusions to skip. Default is 0. Applies only if the ids argument is not supplied.
name: offset
description: Get a list of IOA exclusions by specifying their IDs or a filter.
name: cs-falcon-search-ioa-exclusion
@@ -3463,7 +3479,7 @@ script:
description: The name of the pattern associated with the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.ifn_regex
- description: A regular expression used for file name matching.
+ description: A regular expression used for filename matching.
type: String
- contextPath: CrowdStrike.IOAExclusion.cl_regex
description: A regular expression used for command line matching.
@@ -3472,19 +3488,19 @@ script:
description: A JSON string that describes the detection logic for the IOA exclusion.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.id
- description: Groups ID that the exclusion rule is associated with.
+ description: Group ID that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.group_type
- description: Groups type that the exclusion rule is associated with.
+ description: Group type that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.name
- description: Groups name that the exclusion rule is associated with.
+ description: Group name that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.description
- description: Groups description that the exclusion rule is associated with.
+ description: Group description that the exclusion rule is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.assignment_rule
- description: Groups assignment rule that the exclusion is associated with.
+ description: Group assignment rule that the exclusion is associated with.
type: String
- contextPath: CrowdStrike.IOAExclusion.groups.created_by
description: Indicate who created the group.
@@ -3517,7 +3533,7 @@ script:
- description: A comma-separated list of quarantined file IDs to retrieve.
name: ids
isArray: true
- - description: A custom filter by which the retrieve quarantined file should be filtered.
+ - description: A custom filter by which the retrieved quarantined file should be filtered.
name: filter
- description: A comma-separated list of SHA256 hash of the files to retrieve.
name: sha256
@@ -3547,7 +3563,7 @@ script:
description: The agent identifier of the agent that quarantined the file.
type: String
- contextPath: CrowdStrike.QuarantinedFile.cid
- description: The unique identifier for the customer that who the agent.
+ description: The unique customer identifier of the agent that quarantined the file.
type: String
- contextPath: CrowdStrike.QuarantinedFile.sha256
description: The SHA256 hash value of the quarantined file.
@@ -3596,10 +3612,10 @@ script:
required: true
- description: Update files based on a custom filter.
name: filter
- - description: A comma-separated list of quarantined files SHA256 to update.
+ - description: A comma-separated list of SHA256 hashes of quarantined files to update.
name: sha256
isArray: true
- - description: A comma-separated list of quarantined file names to update.
+ - description: A comma-separated list of quarantined filenames to update.
name: filename
isArray: true
- description: Update files based on the state.
@@ -3607,10 +3623,10 @@ script:
- description: A comma-separated list of quarantined file hostnames to update.
name: hostname
isArray: true
- - description: A comma-separated list of quarantined files username to update.
+ - description: A comma-separated list of quarantined file usernames to update.
name: username
isArray: true
- description: Apply action to quarantined file by file IDs or filter.
+ description: Apply action to quarantined files by file IDs or filter.
name: cs-falcon-apply-quarantine-file-action
- arguments:
- description: A comma-separated list of incident IDs.
@@ -3620,7 +3636,7 @@ script:
- description: A comment added to the CrowdStrike incident.
name: comment
required: true
- description: Deprecated. Use the 'cs-falcon-resolve-incident' command using the 'add_comment' parameter instead.
+ description: Deprecated. Use the 'cs-falcon-resolve-incident' command using the 'add_comment' argument instead.
name: cs-falcon-update-incident-comment
deprecated: true
- arguments:
@@ -3646,9 +3662,9 @@ script:
- description: Comma-separated list of scan severities to filter by.
isArray: true
name: severity
- - description: UTC-format time of scan start to filter by.
+ - description: The scan start time, in UTC format, to filter by.
name: scan_started_on
- - description: UTC-format time of the scan completion to filter by.
+ - description: The scan completion time, in UTC format, to filter by.
name: scan_completed_on
- description: Starting index of overall result set from which to return IDs.
name: offset
@@ -3664,7 +3680,7 @@ script:
hidden: true
- name: hide_polling_output
hidden: true
- description: ''
+ description: Whether to hide the polling message and only print the final status at the end (automatically filled by polling; can be used for testing purposes).
defaultValue: true
description: Retrieve ODS scan details.
name: cs-falcon-ods-query-scan
@@ -3740,7 +3756,7 @@ script:
description: A list of the host IDs that were scanned.
type: String
- contextPath: CrowdStrike.ODSScan.endpoint_notification
- description: A boolean value indicating whether endpoint notifications are enabled.
+ description: Indicates whether endpoint notifications are enabled.
type: Boolean
- contextPath: CrowdStrike.ODSScan.pause_duration
description: The number of hours to pause between scanning each file.
@@ -3793,11 +3809,11 @@ script:
- description: Comma-separated list of scan statuses to filter by.
isArray: true
name: status
- - description: UTC-format time of scan creation to filter by.
+ - description: The scan creation time, in UTC format, to filter by.
name: created_on
- - description: UTC-format time of scan creator to filter by.
+ - description: The scan creator to filter by.
name: created_by
- - description: UTC-format time of scan start to filter by.
+ - description: The scan start time, in UTC format, to filter by.
name: start_timestamp
- description: Deleted scans only.
name: deleted
@@ -3845,7 +3861,7 @@ script:
description: The pause duration of the scan in hours.
type: Number
- contextPath: CrowdStrike.ODSScheduledScan.max_duration
- description: The max duration of the scan in hours.
+ description: The maximum duration of the scan in hours.
type: Number
- contextPath: CrowdStrike.ODSScheduledScan.max_file_size
description: The maximum file size that the scan can handle in MB.
@@ -3872,7 +3888,7 @@ script:
description: The timestamp when the scan was last updated.
type: Date
- contextPath: CrowdStrike.ODSScheduledScan.deleted
- description: Whether the scan has been deleted.
+ description: Whether the scan was deleted.
type: Boolean
- contextPath: CrowdStrike.ODSScheduledScan.quarantine
description: Whether the scan was set to quarantine.
@@ -3901,11 +3917,11 @@ script:
- description: Comma-separated list of scan statuses to filter by.
isArray: true
name: status
- - description: UTC-format time of scan start to filter by.
+ - description: The scan start time, in UTC format, to filter by.
name: started_on
- - description: UTC-format time of scan completion to filter by.
+ - description: The scan completion time, in UTC format, to filter by.
name: completed_on
- - description: Starting index of overall result set from which to return IDs.
+ - description: Starting index of the overall result set from which to return IDs.
name: offset
- description: Maximum number of resources to return.
name: limit
@@ -3949,10 +3965,10 @@ script:
description: A severity score assigned to the scan, ranging from 0 to 100.
type: Number
- contextPath: CrowdStrike.ODSScanHost.started_on
- description: The date and time when the scan was started.
+ description: The date and time when the scan started.
type: Date
- contextPath: CrowdStrike.ODSScanHost.completed_on
- description: The date and time when the scan was completed.
+ description: The date and time when the scan completed.
type: Date
- contextPath: CrowdStrike.ODSScanHost.last_updated
description: The date and time when the scan event was last updated.
@@ -3972,13 +3988,13 @@ script:
- description: Comma-separated list of file paths to filter by.
isArray: true
name: file_paths
- - description: Comma-separated list of file names to filter by.
+ - description: Comma-separated list of filenames to filter by.
isArray: true
name: file_names
- description: Comma-separated list of hashes to filter by.
isArray: true
name: hash
- - description: Starting index of overall result set from which to return IDs.
+ - description: Starting index of the overall result set from which to return IDs.
name: offset
- description: Maximum number of resources to return.
name: limit
@@ -4007,7 +4023,7 @@ script:
description: The name of the malicious file.
type: String
- contextPath: CrowdStrike.ODSMaliciousFile.hash
- description: A SHA-256 hash of the malicious file, which can be used to identify it.
+ description: A SHA256 hash of the malicious file, which can be used to identify it.
type: String
- contextPath: CrowdStrike.ODSMaliciousFile.pattern_id
description: The identifier of the pattern used to detect the malicious file.
@@ -4016,30 +4032,30 @@ script:
description: A severity score assigned to the detection event, ranging from 0 to 100.
type: Number
- contextPath: CrowdStrike.ODSMaliciousFile.quarantined
- description: A Boolean value indicating whether the file has been quarantined.
+ description: Indicates whether the file was quarantined.
type: Boolean
- contextPath: CrowdStrike.ODSMaliciousFile.last_updated
description: The date and time when the detection event was last updated.
type: Date
- arguments:
- - description: List of hosts to be scanned. "hosts" OR "host_groups" must be set.
+ - description: A comma-separated list of hosts to be scanned. "hosts" OR "host_groups" must be set.
isArray: true
name: hosts
- - description: List of host groups to be scanned. "hosts" OR "host_groups" must be set.
+ - description: A comma-separated list of host groups to be scanned. "hosts" OR "host_groups" must be set.
isArray: true
name: host_groups
- - description: List of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set.
+ - description: A comma-separated list of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set.
isArray: true
name: file_paths
- - description: List of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set.
+ - description: A comma-separated list of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set.
isArray: true
name: scan_inclusions
- - description: List of excluded files or locations for this scan.
+ - description: A comma-separated list of excluded files or locations for this scan.
isArray: true
name: scan_exclusions
- description: Scan origin.
name: initiated_from
- - description: Set the scan CPU priority.
+ - description: The scan CPU priority.
name: cpu_priority
auto: PREDEFINED
defaultValue: Low
@@ -4073,7 +4089,7 @@ script:
- description: The timeout in seconds until polling ends.
name: timeout_in_seconds
defaultValue: '600'
- description: Create an ODS scan and wait for results.
+ description: Create an ODS scan and wait for the results.
name: cs-falcon-ods-create-scan
polling: true
outputs:
@@ -4147,7 +4163,7 @@ script:
description: A list of the host IDs that were scanned.
type: String
- contextPath: CrowdStrike.ODSScan.endpoint_notification
- description: A boolean value indicating whether endpoint notifications are enabled.
+ description: Indicates whether endpoint notifications are enabled.
type: Boolean
- contextPath: CrowdStrike.ODSScan.pause_duration
description: The number of hours to pause between scanning each file.
@@ -4189,22 +4205,22 @@ script:
description: The timestamp when the scan job was last updated.
type: Date
- arguments:
- - description: List of host groups to be scanned.
+ - description: A comma-separated list of host groups to be scanned.
isArray: true
name: host_groups
required: true
- - description: List of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set.
+ - description: A comma-separated list of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set.
isArray: true
name: file_paths
- - description: List of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set.
+ - description: A comma-separated list of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set.
isArray: true
name: scan_inclusions
- - description: List of excluded files or locations for this scan.
+ - description: A comma-separated list of excluded files or locations for this scan.
isArray: true
name: scan_exclusions
- description: Scan origin.
name: initiated_from
- - description: Set the scan CPU priority.
+ - description: The scan CPU priority.
name: cpu_priority
auto: PREDEFINED
defaultValue: Low
@@ -4232,10 +4248,10 @@ script:
- description: Maximum time (in hours) the scan is allowed to execute.
name: max_duration
defaultValue: '2'
- - description: When to start the first scan. Supports english expressions such as "tommorow" or "in an hour".
+ - description: When to start the first scan. Supports English expressions such as "tomorrow" or "in an hour".
name: schedule_start_timestamp
required: true
- - description: Set the schedule interval.
+ - description: The schedule interval.
name: schedule_interval
required: true
auto: PREDEFINED
@@ -4283,10 +4299,10 @@ script:
description: Whether notifications of the scan were sent to endpoints.
type: Boolean
- contextPath: CrowdStrike.ODSScheduledScan.pause_duration
- description: The pause duration of scan in hours.
+ description: The pause duration of the scan in hours.
type: Number
- contextPath: CrowdStrike.ODSScheduledScan.max_duration
- description: The max duration of scan in hours.
+ description: The maximum duration of the scan in hours.
type: Number
- contextPath: CrowdStrike.ODSScheduledScan.max_file_size
description: The maximum file size that the scan can handle in MB.
@@ -4313,7 +4329,7 @@ script:
description: The timestamp when the scan was last updated.
type: Date
- contextPath: CrowdStrike.ODSScheduledScan.deleted
- description: Whether the scan has been deleted.
+ description: Whether the scan was deleted.
type: Boolean
- contextPath: CrowdStrike.ODSScheduledScan.quarantine
description: Whether the scan was set to quarantine.
@@ -4363,13 +4379,13 @@ script:
- 'ASCENDING'
auto: PREDEFINED
defaultValue: ASCENDING
- - description: Comma separated list of entity IDs to look for.
+ - description: A comma-separated list of entity IDs to look for.
name: entity_id
isArray: true
- - description: Primary display name to filter by.
+ - description: A comma-separated list of primary display names to filter by.
name: primary_display_name
isArray: true
- - description: Secondary display name to filter by.
+ - description: A comma-separated list of secondary display names to filter by.
name: secondary_display_name
isArray: true
- description: The maximum risk score severity to filter by.
@@ -4392,7 +4408,7 @@ script:
- 'true'
- 'false'
auto: PREDEFINED
- - description: Filter by email.
+ - description: Email to filter by.
name: email
- description: The hash for the next page.
name: next_token
@@ -4502,7 +4518,7 @@ script:
description: The NIST benchmark IDs.
type: Array
- contextPath: CrowdStrike.CSPMPolicy.pci_benchmark_ids
- description: The pci benchmark IDs.
+ description: The PCI benchmark IDs.
type: Array
- contextPath: CrowdStrike.CSPMPolicy.policy_type
description: The policy type.
@@ -4535,7 +4551,7 @@ script:
description: The cloud asset type.
type: String
- contextPath: CrowdStrike.CSPMPolicy.is_remediable
- description: Whether the policy is remediable or not..
+ description: Whether the policy is remediable or not.
type: Boolean
- contextPath: CrowdStrike.CSPMPolicy.is_enabled
description: Whether the policy is enabled or not.
@@ -4544,7 +4560,7 @@ script:
description: The account scope.
type: String
- arguments:
- - description: The policy ID to look for its settings.
+ - description: The policy ID.
name: policy_id
- description: The cloud provider.
name: cloud_platform
@@ -4611,7 +4627,7 @@ script:
description: The account ID correlated to the policy.
type: String
- contextPath: CrowdStrike.CSPMPolicySetting.policy_settings.regions
- description: The regions in which the policy is configured at.
+ description: The regions in which the policy is configured.
type: Array
- contextPath: CrowdStrike.CSPMPolicySetting.policy_settings.severity
description: The severity of the policy.
@@ -4665,7 +4681,7 @@ script:
- description: Policy ID to be updated.
name: policy_id
required: true
- - description: Cloud Account ID to impact.
+ - description: Cloud account ID to impact.
name: account_id
- description: Flag indicating if this policy is enabled.
name: enabled
@@ -4674,7 +4690,7 @@ script:
- 'false'
- 'true'
auto: PREDEFINED
- - description: List of regions where this policy is enforced.
+ - description: A comma-separated list of regions where this policy is enforced.
name: regions
isArray: true
- description: Policy severity value.
@@ -4691,10 +4707,10 @@ script:
- 'false'
- 'true'
auto: PREDEFINED
- description: Updates a policy setting - can be used to override policy severity or to disable a policy entirely.
+ description: Updates a policy setting. Can be used to override policy severity or to disable a policy entirely.
name: cs-falcon-cspm-update-policy_settings
- arguments:
- - description: IDs of the alerts to update.
+ - description: A comma-separated list of IDs of the alerts to update.
name: ids
isArray: true
required: true
@@ -4708,7 +4724,7 @@ script:
name: add_tag
- description: Remove a tag from the specified detections.
name: remove_tag
- - description: Update status of the alert to the specified value.
+ - description: Update the status of the alert to the specified value.
name: update_status
predefined:
- 'new'
@@ -4731,11 +4747,11 @@ script:
description: Perform actions on identity detection alerts.
name: cs-falcon-resolve-identity-detection
- arguments:
- - description: IDs of the alerts to update.
+ - description: A comma-separated list of IDs of the alerts to update.
name: ids
isArray: true
required: true
- - description: 'Assign the specified detections to a user based on their username.'
+ - description: Assign the specified detections to a user based on their username.
name: assign_to_name
- description: Assign the specified detections to a user based on their UUID.
name: assign_to_uuid
@@ -4745,7 +4761,7 @@ script:
name: add_tag
- description: Remove a tag from the specified detections.
name: remove_tag
- - description: Update status of the alert to the specified value.
+ - description: Update the status of the alert to the specified value.
name: update_status
predefined:
- 'new'
@@ -4768,7 +4784,7 @@ script:
description: Perform actions on mobile detection alerts.
name: cs-falcon-resolve-mobile-detection
- arguments:
- - description: IDs (UUID) of specific users to list.
+ - description: A comma-separated list of IDs (UUIDs) of specific users to list.
name: id
isArray: true
- description: 'The filter expression that should be used to limit the results. FQL syntax. Available values: assigned_cids, cid, first_name, last_name, name, uid. Example: "first_name:''John''".'
@@ -4804,7 +4820,7 @@ script:
description: The timestamp of the user's creation.
type: String
- arguments:
- - description: ID(s) of behaviors to list. Behavior IDs can be retrieved by running the 'cs-falcon-get-detections-for-incident' command.
+ - description: A comma-separated list of behavior IDs to list. Behavior IDs can be retrieved by running the 'cs-falcon-get-detections-for-incident' command.
name: behavior_ids
isArray: true
required: true
@@ -4917,7 +4933,7 @@ script:
description: The SHA256 hash.
type: String
- contextPath: CrowdStrike.IncidentBehavior.user_name
- description: The user name.
+ description: The username.
type: String
- contextPath: CrowdStrike.IncidentBehavior.tactic
description: The tactic used.
@@ -4938,10 +4954,10 @@ script:
description: The objective.
type: String
- contextPath: CrowdStrike.IncidentBehavior.compound_tto
- description: The compound TTO.
+ description: The compound Time to Operate (TTO).
type: String
- arguments:
- - description: The rule ID to get IOA rules for.
+ - description: A comma-separated list of rule IDs to get IOA rules for.
name: rule_ids
isArray: true
required: true
@@ -4949,75 +4965,75 @@ script:
name: cs-falcon-get-ioarules
outputs:
- contextPath: CrowdStrike.IOARules.instance_id
- description: The IOA Rule's instance ID.
+ description: The IOA rule's instance ID.
type: String
- contextPath: CrowdStrike.IOARules.customer_id
description: The customer ID.
type: String
- contextPath: CrowdStrike.IOARules.action_label
- description: The IOA Rule's action label.
+ description: The IOA rule's action label.
type: String
- contextPath: CrowdStrike.IOARules.comment
- description: The IOA Rule's comment.
+ description: The IOA rule's comment.
type: String
- contextPath: CrowdStrike.IOARules.committed_on
- description: The timestamp of the IOA Rule's commitment.
+ description: The timestamp of the IOA rule's commitment.
type: String
- contextPath: CrowdStrike.IOARules.created_by
- description: The IOA Rule's creator.
+ description: The IOA rule's creator.
type: String
- contextPath: CrowdStrike.IOARules.created_on
- description: The timestamp of the IOA Rule's creation.
+ description: The timestamp of the IOA rule's creation.
type: String
- contextPath: CrowdStrike.IOARules.deleted
- description: Whether the IOA Rule is in deleted status.
+ description: Whether the IOA rule is in a deleted status.
type: Boolean
- contextPath: CrowdStrike.IOARules.description
- description: The IOA Rule's description.
+ description: The IOA rule's description.
type: String
- contextPath: CrowdStrike.IOARules.disposition_id
- description: The disposition ID used by the IOA Rule.
+ description: The disposition ID used by the IOA rule.
type: String
- contextPath: CrowdStrike.IOARules.enabled
- description: Whether the IOA Rule is enabled.
+ description: Whether the IOA rule is enabled.
type: Boolean
- contextPath: CrowdStrike.IOARules.field_values
- description: The IOA Rule's field values.
+ description: The IOA rule's field values.
type: String
- contextPath: CrowdStrike.IOARules.instance_version
- description: The IOA Rule's instance version.
+ description: The IOA rule's instance version.
type: String
- contextPath: CrowdStrike.IOARules.magic_cookie
- description: The IOA Rule's magic cookie.
+ description: The IOA rule's magic cookie.
type: String
- contextPath: CrowdStrike.IOARules.modified_by
- description: The last user who modified the IOA Rule.
+ description: The last user who modified the IOA rule.
type: String
- contextPath: CrowdStrike.IOARules.modified_on
- description: The timestamp of the IOA Rule's last modification.
+ description: The timestamp of the IOA rule's last modification.
type: String
- contextPath: CrowdStrike.IOARules.name
- description: The IOA Rule name.
+ description: The IOA rule name.
type: String
- contextPath: CrowdStrike.IOARules.pattern_id
- description: The IOA Rule's pattern ID.
+ description: The IOA rule's pattern ID.
type: String
- contextPath: CrowdStrike.IOARules.pattern_severity
- description: The IOA Rule's pattern severity.
+ description: The IOA rule's pattern severity.
type: String
- contextPath: CrowdStrike.IOARules.rulegroup_id
- description: The IOA Rule's Rule group ID.
+ description: The IOA rule's rule group ID.
type: String
- contextPath: CrowdStrike.IOARules.ruletype_id
- description: The IOA Rule's Rule type ID.
+ description: The IOA rule's rule type ID.
type: String
- contextPath: CrowdStrike.IOARules.ruletype_name
- description: The IOA Rule's Rule type name.
+ description: The IOA rule's rule type name.
type: String
- contextPath: CrowdStrike.IOARules.version_ids
- description: The IOA Rule's version ID.
+ description: The IOA rule's version ID.
type: String
- dockerimage: demisto/py3-tools:1.0.0.96102
+ dockerimage: demisto/py3-tools:1.0.0.97242
isfetch: true
ismappable: true
isremotesyncin: true
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md
index 08d4e18e2949..2199abcbf601 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md
@@ -1,4 +1,4 @@
-To get an The API client ID and secret please contact the crowdstrike support: support@crowdstrike.com
+To get the API client ID and secret, contact CrowdStrike support: support@crowdstrike.com
### Required API client scope
In order to use the CrowdStrike Falcon integration, your API client must be provisioned with the following scope and permissions:
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
index 97e9fb03c6bb..23060ab99452 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
@@ -4174,6 +4174,50 @@ def test_get_remote_detection_data(mocker):
'behaviors.display_name': 'SampleTemplateDetection'}
+def test_get_remote_idp_or_mobile_detection_data_idp(mocker):
+ """
+ Given
+ - an idp detection ID on the remote system
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - returns the relevant detection entity from the remote system with the relevant incoming mirroring fields
+ """
+ from CrowdStrikeFalcon import get_remote_idp_or_mobile_detection_data
+ detection_entity = input_data.response_idp_detection.copy()
+ mocker.patch('CrowdStrikeFalcon.get_detection_entities', return_value={'resources': [detection_entity.copy()]})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(input_data.remote_idp_detection_id)
+ detection_entity['severity'] = 2
+ assert mirrored_data == detection_entity
+ assert detection_type == 'IDP'
+ assert updated_object == {'incident_type': 'IDP detection',
+ 'status': 'closed',
+ 'id': 'ind:20879a8064904ecfbb62c118a6a19411:C0BB6ACD-8FDC-4CBA-9CF9-EBF3E28B3E56'}
+
+
+def test_get_remote_idp_or_mobile_detection_data_mobile_detection(mocker):
+ """
+ Given
+ - an idp detection ID on the remote system
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - returns the relevant detection entity from the remote system with the relevant incoming mirroring fields
+ """
+ from CrowdStrikeFalcon import get_remote_idp_or_mobile_detection_data
+ detection_entity = input_data.response_mobile_detection.copy()
+ mocker.patch('CrowdStrikeFalcon.get_detection_entities', return_value={'resources': [detection_entity.copy()]})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(input_data.remote_mobile_detection_id)
+ detection_entity['severity'] = 90
+ assert mirrored_data == detection_entity
+ assert detection_type == 'Mobile'
+ assert updated_object == {'incident_type': 'MOBILE detection',
+ 'status': 'new',
+ 'mobile_detection_id': '1111111111111111111'}
+
+
@pytest.mark.parametrize('updated_object, entry_content, close_incident', input_data.set_xsoar_incident_entries_args)
def test_set_xsoar_incident_entries(mocker, updated_object, entry_content, close_incident):
"""
@@ -4187,14 +4231,40 @@ def test_set_xsoar_incident_entries(mocker, updated_object, entry_content, close
"""
from CrowdStrikeFalcon import set_xsoar_incident_entries
mocker.patch.object(demisto, 'params', return_value={'close_incident': close_incident})
+ mocker.patch.object(demisto, 'debug', return_value=None)
entries = []
- set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id)
+ reopen_statuses = ['New', 'Reopened', 'In Progress']
+ set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
if entry_content:
assert entry_content in entries[0].get('Contents')
else:
assert entries == []
+@pytest.mark.parametrize('updated_object', input_data.check_reopen_set_xsoar_incident_entries_args)
+def test_set_xsoar_incident_entries_reopen(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on an incident
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_incident_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
+ if updated_object.get('status') == 'Reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ else:
+ assert entries == []
+
+
@pytest.mark.parametrize('updated_object, entry_content, close_incident', input_data.set_xsoar_detection_entries_args)
def test_set_xsoar_detection_entries(mocker, updated_object, entry_content, close_incident):
"""
@@ -4209,13 +4279,66 @@ def test_set_xsoar_detection_entries(mocker, updated_object, entry_content, clos
from CrowdStrikeFalcon import set_xsoar_detection_entries
mocker.patch.object(demisto, 'params', return_value={'close_incident': close_incident})
entries = []
- set_xsoar_detection_entries(updated_object, entries, input_data.remote_incident_id)
+ reopen_statuses = ['New', 'In progress', 'True positive', 'False positive', 'Reopened', 'Ignored']
+ set_xsoar_detection_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
if entry_content:
assert entry_content in entries[0].get('Contents')
else:
assert entries == []
+@pytest.mark.parametrize('updated_object', input_data.check_reopen_set_xsoar_detections_entries_args)
+def test_set_xsoar_detection_entries_reopen_check(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_detection_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_detection_entries(updated_object, entries, input_data.remote_detection_id, reopen_statuses)
+ if updated_object.get('status') == 'reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ else:
+ assert entries == []
+
+
+@pytest.mark.parametrize('updated_object', input_data.set_xsoar_idp_or_mobile_detection_entries)
+def test_set_xsoar_idp_or_mobile_detection_entries(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_idp_or_mobile_detection_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_idp_or_mobile_detection_entries(updated_object, entries, input_data.remote_idp_detection_id, 'IDP', reopen_statuses)
+ if updated_object.get('status') == 'reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ elif updated_object.get('status') == 'closed':
+ assert 'dbotIncidentClose' in entries[0].get('Contents')
+ assert 'closeReason' in entries[0].get('Contents')
+ assert entries[0].get('Contents', {}).get('closeReason') == 'IDP was closed on CrowdStrike Falcon'
+ else:
+ assert entries == []
+
+
@pytest.mark.parametrize('updated_object, mirrored_data, mirroring_fields, output', input_data.set_updated_object_args)
def test_set_updated_object(updated_object, mirrored_data, mirroring_fields, output):
"""
@@ -4543,10 +4666,10 @@ def excpetion_raiser(*args, **kwargs):
"impact_score": 1.4}}
]
},
- '### List Vulnerabilities\n' \
- '|ID|Severity|Status|Base Score|Published Date|Impact Score|Exploitability Score|\n' \
- '|---|---|---|---|---|---|---|\n' \
- '| cveid1 | LOW | open | 3.3 | 2021-09-15T12:15:00Z | | |\n' \
+ '### List Vulnerabilities\n'
+ '|ID|Severity|Status|Base Score|Published Date|Impact Score|Exploitability Score|\n'
+ '|---|---|---|---|---|---|---|\n'
+ '| cveid1 | LOW | open | 3.3 | 2021-09-15T12:15:00Z | | |\n'
'| idcve4 | | open | | 2022-10-11T19:15:00Z | 1.4 | 1.8 |\n' # args list
)
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
index f1262f412b83..3539bf5a6273 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
@@ -1,4 +1,4 @@
-The CrowdStrike Falcon OAuth 2 API integration (formerly Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.
+The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API) enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.
## Configure CrowdStrike Falcon on Cortex XSOAR
@@ -8,17 +8,18 @@ The CrowdStrike Falcon OAuth 2 API integration (formerly Falcon Firehose API), e
| **Parameter** | **Description** | **Required** |
| --- | --- | --- |
- | Server URL (e.g., ) | | True |
- | Client ID | | True |
- | Secret | | True |
+ | Server URL (e.g., https://api.crowdstrike.com) | | True |
+ | Client ID | | False |
+ | Secret | | False |
| Source Reliability | Reliability of the source providing the intelligence data. Currently used for “CVE” reputation command. | False |
| First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days) | | False |
| Max incidents per fetch | | False |
| Endpoint Detections fetch query | | False |
| Endpoint Incidents fetch query | | False |
| IDP Detections fetch query | | False |
- | IOM fetch query | Use the Falcon Query Language. For more information, refer to the integration docs. | False |
- | IOA fetch query | In the format: cloud_provider=aws&aws_account_id=1234. The query must have the argument 'cloud_provider' configured. For more information, refer to the integration docs. | False |
+ | Mobile Detections fetch query | | False |
+ | IOM fetch query | Use the Falcon Query Language. For more information, refer to https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql. | False |
+ | IOA fetch query | In the format: cloud_provider=aws&aws_account_id=1234. The query must have the argument 'cloud_provider' configured. Multiple values for the same parameter are not supported. For more information, refer to https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql. | False |
| Fetch incidents | | False |
| Incident type | | False |
| Mirroring Direction | Choose the direction to mirror the detection: Incoming \(from CrowdStrike Falcon to Cortex XSOAR\), Outgoing \(from Cortex XSOAR to CrowdStrike Falcon\), or Incoming and Outgoing \(to/from CrowdStrike Falcon and Cortex XSOAR\). | False |
@@ -27,12 +28,13 @@ The CrowdStrike Falcon OAuth 2 API integration (formerly Falcon Firehose API), e
| Close Mirrored XSOAR Incident | When selected, closes the CrowdStrike Falcon incident or detection, which is mirrored in the Cortex XSOAR incident. | False |
| Close Mirrored CrowdStrike Falcon Incident or Detection | When selected, closes the Cortex XSOAR incident, which is mirrored in the CrowdStrike Falcon incident or detection, according to the types that were chosen to be fetched and mirrored. | False |
| Fetch types | Choose what to fetch - incidents, detections, IDP detections. You can choose any combination. | False |
+ | Reopen Statuses | CrowdStrike Falcon statuses that will reopen a closed incident in Cortex XSOAR. You can choose any combination. | False |
| Incidents Fetch Interval | | False |
- | Advanced: Time in minutes to look back when fetching incidents and detections | Use this parameter to determine how long backward to look in the search for incidents that were created before the last run time and did not match the query when they were created. | False |
-
+ | Advanced: Time in minutes to look back when fetching incidents and detections | Use this parameter to determine the look-back period for searching for incidents that were created before the last run time and did not match the query when they were created. | False |
4. Click **Test** to validate the URLs, token, and connection.
+
### Required API client scope
In order to use the CrowdStrike Falcon integration, your API client must be provisioned with the following scope and permissions:
@@ -55,28 +57,26 @@ In order to use the CrowdStrike Falcon integration, your API client must be prov
- Identity Protection Timeline - Read
- Identity Protection Assessment - Read
-### Incident Mirroring
-
-You can enable incident mirroring between Cortex XSOAR incidents and CrowdStrike Falcon incidents or detections (available from Cortex XSOAR version 6.0.0).
+## Incident Mirroring
-To setup the mirroring follow these instructions:
+You can enable incident mirroring between Cortex XSOAR incidents and the corresponding CrowdStrike Falcon events (available from Cortex XSOAR version 6.0.0).
+To set up the mirroring:
+1. Enable *Fetching incidents* in your instance configuration.
+2. In the *Fetch types* integration parameter, select what types to mirror.
+3. Optional: You can go to one of the *fetch query* parameters and select the query to fetch the events from CrowdStrike Falcon.
+4. In the *Mirroring Direction* integration parameter, select in which direction the incidents should be mirrored:
-1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
-2. Search for **CrowdStrike Falcon** and select your integration instance.
-3. Enable **Fetches incidents**.
-4. In the *Fetch types* integration parameter, select what to mirror - incidents or detections or both.
-5. Optional: You can go to the *Incidents fetch query* or *Detections fetch query* parameter and select the query to fetch the incidents or detections from CrowdStrike Falcon.
-6. In the *Mirroring Direction* integration parameter, select in which direction the incidents should be mirrored:
- - Incoming - Any changes in CrowdStrike Falcon incidents (`state`, `status`, `tactics`, `techniques`, `objectives`, `tags`, `hosts.hostname`)
- or detections (`status`, `severity`, `behaviors.tactic`, `behaviors.scenario`, `behaviors.objective`, `behaviors.technique`, `device.hostname`)
- will be reflected in XSOAR incidents.
- - Outgoing - Any changes in XSOAR incidents will be reflected in CrowdStrike Falcon incidents (`tags`, `status`) or detections (`status`).
- - Incoming And Outgoing - Changes in XSOAR incidents and CrowdStrike Falcon incidents or detections will be reflected in both directions.
- - None - Turns off incident mirroring.
-7. Optional: Check the *Close Mirrored XSOAR Incident* integration parameter to close the Cortex XSOAR incident when the corresponding incident or detection is closed in CrowdStrike Falcon.
-8. Optional: Check the *Close Mirrored CrowdStrike Falcon Incident or Detection* integration parameter to close the CrowdStrike Falcon incident or detection when the corresponding Cortex XSOAR incident is closed.
-
-Newly fetched incidents or detections will be mirrored in the chosen direction. However, this selection does not affect existing incidents.
+ | **Option** | **Description** |
+ | --- | --- |
+ | None | Turns off incident mirroring. |
+ | Incoming | Any changes in CrowdStrike Falcon events (in the incoming mirrored fields) will be reflected in Cortex XSOAR incidents. |
+ | Outgoing | Any changes in Cortex XSOAR incidents will be reflected in CrowdStrike Falcon events (in the outgoing mirrored fields). |
+ | Incoming And Outgoing | Changes in Cortex XSOAR incidents and CrowdStrike Falcon events will be reflected in both directions. |
+
+5. Optional: Check the *Close Mirrored XSOAR Incident* integration parameter to close the Cortex XSOAR incident when the corresponding event is closed in CrowdStrike Falcon.
+6. Optional: Check the *Close Mirrored CrowdStrike Falcon Incident or Detection* integration parameter to close the CrowdStrike Falcon incident or detection when the corresponding Cortex XSOAR incident is closed.
+
+Newly fetched Cortex XSOAR incidents will be mirrored in the chosen direction. However, this selection does not affect existing incidents.
**Important Notes**
@@ -89,9 +89,9 @@ Newly fetched incidents or detections will be mirrored in the chosen direction.
If the integration was already set with lookback > 0, and the lookback is not being increased at any point of time, then those incident duplications would not occur.
-### Fetch Incidents
+## Fetch Incidents
-#### IOM Incidents
+### IOM Incidents
The [FQL](https://falconpy.io/Usage/Falcon-Query-Language.html) filter expression is used to configure the IOM fetch query.
Available filter:
@@ -114,9 +114,9 @@ Available filter:
- severity
- severity_string
-Exmample: `cloud_provider: 'aws'+account_id: 'my_id'`
+Example: `cloud_provider: 'aws'+account_id: 'my_id'`
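
As a rough illustration only (not part of this diff or of the integration code), an IOM FQL filter in the format shown in the example above could be composed from field/value pairs as follows; the helper name is hypothetical and the field names are taken from the "Available filter" list:

```python
# Illustrative sketch: build an IOM FQL filter string in the format shown above.
# '+' joins the individual conditions (logical AND).
def build_iom_fql_filter(fields):
    # fields: dict mapping an FQL field name to its value, e.g. {'cloud_provider': 'aws'}
    return '+'.join("{0}: '{1}'".format(key, value) for key, value in fields.items())


print(build_iom_fql_filter({'cloud_provider': 'aws', 'account_id': 'my_id'}))
# cloud_provider: 'aws'+account_id: 'my_id'
```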
-#### IOA Incidents
+### IOA Incidents
The IOA fetch query uses the following format:
`param1=val1&param2=val2`
@@ -133,13 +133,17 @@ Available parameters:
- service
- state
-Exmample: `cloud_provider=aws&region=eu-west-2`
+Example: `cloud_provider=aws&region=eu-west-2`
More information about the parameters can be found [here](https://www.falconpy.io/Service-Collections/CSPM-Registration.html#keyword-arguments-13).
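
As a minimal sketch (again, not part of the integration itself), a query string in the `param1=val1&param2=val2` format above could be parsed into keyword arguments before the API call; since multiple values for the same parameter are not supported, collapsing the pairs into a dict is safe:

```python
# Illustrative sketch: parse an IOA fetch query of the form param1=val1&param2=val2.
from urllib.parse import parse_qsl

ioa_fetch_query = 'cloud_provider=aws&region=eu-west-2'
params = dict(parse_qsl(ioa_fetch_query))  # one value per parameter, per the docs above
assert 'cloud_provider' in params          # the query must include cloud_provider
print(params)  # {'cloud_provider': 'aws', 'region': 'eu-west-2'}
```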
-### 1. Search for a device
+## Commands
----
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+### cs-falcon-search-device
+
+***
Searches for a device that matches the query.
#### Base Command
@@ -151,15 +155,15 @@ Searches for a device that matches the query.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| extended_data | Whether or not to get additional data about the device. Possible values are: Yes, No. | Optional |
-| filter | The query to filter the device. | Optional |
+| filter | The query by which to filter the device. | Optional |
+| limit | The maximum number of records to return [1-5000]. Default is 50. | Optional |
+| offset | The offset to start retrieving records from. | Optional |
| ids | A comma-separated list of device IDs to limit the results. | Optional |
-| status | The status of the device. Possible values are: "Normal", "containment_pending", "contained", and "lift_containment_pending". Possible values are: normal, containment_pending, contained, lift_containment_pending. | Optional |
-| hostname | The host name of the device. Possible values are: . | Optional |
-| platform_name | The platform name of the device. Possible values are: Windows, Mac, and Linux. Possible values are: Windows, Mac, Linux. | Optional |
+| status | The status of the device. Possible values are: normal, containment_pending, contained, lift_containment_pending. | Optional |
+| hostname | The hostname of the device. | Optional |
+| platform_name | The platform name of the device. Possible values are: Windows, Mac, Linux. | Optional |
| site_name | The site name of the device. | Optional |
-| limit | The maximum number of records to return. Default is 50. | Optional |
-| offset | The offset to begin the list from. For example, start from the 10th record and return the list. Default is 0. | Optional |
-| sort | The property to sort by (e.g. status.desc or hostname.asc). | Optional |
+| sort | The property to sort by (e.g., status.desc or hostname.asc). | Optional |
#### Context Output
@@ -168,7 +172,7 @@ Searches for a device that matches the query.
| CrowdStrike.Device.ID | String | The ID of the device. |
| CrowdStrike.Device.LocalIP | String | The local IP address of the device. |
| CrowdStrike.Device.ExternalIP | String | The external IP address of the device. |
-| CrowdStrike.Device.Hostname | String | The host name of the device. |
+| CrowdStrike.Device.Hostname | String | The hostname of the device. |
| CrowdStrike.Device.OS | String | The operating system of the device. |
| CrowdStrike.Device.MacAddress | String | The MAC address of the device. |
| CrowdStrike.Device.FirstSeen | String | The first time the device was seen. |
@@ -250,9 +254,9 @@ Searches for a device that matches the query.
>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 154.132.82-test-co.in-addr.arpa | Mojave (10.14) | 8c-85-90-3d-ed-3e | 192.168.1.76 | 94.188.164.68 | 2017-12-28T22:38:11Z | 2019-03-28T02:36:41Z | contained |
>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 154.132.82-test-co.in-addr.arpa | Mojave (10.14) | f0-18-98-74-8c-31 | 172.22.14.237 | 94.188.164.68 | 2017-12-10T11:01:20Z | 2019-03-17T10:03:17Z | contained |
-### 2. Get a behavior
+### cs-falcon-get-behavior
----
+***
Searches for and fetches the behavior that matches the query.
#### Base Command
@@ -263,20 +267,20 @@ Searches for and fetches the behavior that matches the query.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| behavior_id | The ID of the behavior. | Required |
+| behavior_id | The ID of the behavior. The ID of the behavior can be retrieved by running the cs-falcon-search-detection or cs-falcon-get-detections-for-incident command. | Required |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.Behavior.FileName | String | The file name of the behavior. |
+| CrowdStrike.Behavior.FileName | String | The filename of the behavior. |
| CrowdStrike.Behavior.Scenario | String | The scenario name of the behavior. |
| CrowdStrike.Behavior.MD5 | String | The MD5 hash of the IOC in the behavior. |
| CrowdStrike.Behavior.SHA256 | String | The SHA256 hash of the IOC in the behavior. |
| CrowdStrike.Behavior.IOCType | String | The type of the indicator of compromise. |
-| CrowdStrike.Behavior.IOCValue | String | The value of the IOC. |
+| CrowdStrike.Behavior.IOCValue | String | The value of the indicator of compromise. |
| CrowdStrike.Behavior.CommandLine | String | The command line executed in the behavior. |
-| CrowdStrike.Behavior.UserName | String | The user name related to the behavior. |
+| CrowdStrike.Behavior.UserName | String | The username related to the behavior. |
| CrowdStrike.Behavior.SensorID | String | The sensor ID related to the behavior. |
| CrowdStrike.Behavior.ParentProcessID | String | The ID of the parent process. |
| CrowdStrike.Behavior.ProcessID | String | The process ID of the behavior. |
@@ -330,13 +334,11 @@ Searches for and fetches the behavior that matches the query.
>| 3206 | spokeshave.jn | /Library/spokeshave.jn/spokeshave.jn.app/Contents/MacOS/spokeshave.jn | known\_malware | sha256 | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1| 197949010450449117|
>| 3206 |xSf |./xSf |known\_malware |sha256 |a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1| | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |197949016741905142|
-
-### 3. Search for detections
+### cs-falcon-search-detection
----
-Search for details of specific detections, either using a filter query,
-or by providing the IDs of the detections.
+***
+Search for details of specific detections, either by using a filter query or by providing the IDs of the detections.
#### Base Command
@@ -346,22 +348,22 @@ or by providing the IDs of the detections.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ids | The IDs of the detections to search. If provided, will override other arguments. | Optional |
-| filter | Filter detections using a query in Falcon Query Language (FQL).
For example, filter="device.hostname:'CS-SE-TG-W7-01'"
For a full list of valid filter options, see: . | Optional |
+| ids | A comma-separated list of IDs of the detections to search. If provided, will override other arguments. | Optional |
+| filter | Filter detections using a query in Falcon Query Language (FQL).
For example, filter="device.hostname:'CS-SE-TG-W7-01'"
For a full list of valid filter options, see: https://falcon.crowdstrike.com/support/documentation/2/query-api-reference#detectionsearch. | Optional |
| extended_data | Whether to get additional data such as device and behaviors processed. Possible values are: Yes, No. | Optional |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.Detection.Behavior.FileName | String | The file name of the behavior. |
+| CrowdStrike.Detection.Behavior.FileName | String | The filename of the behavior. |
| CrowdStrike.Detection.Behavior.Scenario | String | The scenario name of the behavior. |
| CrowdStrike.Detection.Behavior.MD5 | String | The MD5 hash of the IOC of the behavior. |
| CrowdStrike.Detection.Behavior.SHA256 | String | The SHA256 hash of the IOC of the behavior. |
| CrowdStrike.Detection.Behavior.IOCType | String | The type of the IOC. |
| CrowdStrike.Detection.Behavior.IOCValue | String | The value of the IOC. |
| CrowdStrike.Detection.Behavior.CommandLine | String | The command line executed in the behavior. |
-| CrowdStrike.Detection.Behavior.UserName | String | The user name related to the behavior. |
+| CrowdStrike.Detection.Behavior.UserName | String | The username related to the behavior. |
| CrowdStrike.Detection.Behavior.SensorID | String | The sensor ID related to the behavior. |
| CrowdStrike.Detection.Behavior.ParentProcessID | String | The ID of the parent process. |
| CrowdStrike.Detection.Behavior.ProcessID | String | The process ID of the behavior. |
@@ -373,7 +375,6 @@ or by providing the IDs of the detections.
| CrowdStrike.Detection.ProcessStartTime | Date | The start time of the process that generated the detection. |
-
#### Command Example
`!cs-falcon-search-detection ids=ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:1898376850347,ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:1092318056279064902`
@@ -476,13 +477,11 @@ or by providing the IDs of the detections.
>|ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:1898376850347 | false\_positive | DESKTOP-S49VMIL | 2019-03-21T20:32:55.654489974Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 70|
>|ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:1092318056279064902| new | u-MacBook-Pro-2.local | 2019-02-04T07:05:57.083205971Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 30|
-
-### 4. Resolve a detection
+### cs-falcon-resolve-detection
-* * * * *
-
-Resolves and updates a detection using the provided arguments. At least one optional argument must be passed, otherwise no change will take place. Note: IDP detections are not supported.
+***
+Resolves and updates a detection using the provided arguments. At least one optional argument must be passed; otherwise, no change will take place. Note that IDP detections are not supported.
#### Base Command
@@ -497,19 +496,15 @@ Resolves and updates a detection using the provided arguments. At least one opti
| assigned_to_uuid | A user ID, for example: 1234567891234567891. username and assigned_to_uuid are mutually exclusive. | Optional |
| comment | Optional comment to add to the detection. Comments are displayed with the detection in CrowdStrike Falcon and provide context or notes for other Falcon users. | Optional |
| show_in_ui | If true, displays the detection in the UI. Possible values are: true, false. | Optional |
-| username | Username to assign the detections to. (This is usually the user’s email address, but may vary based on your configuration). username and assigned_to_uuid are mutually exclusive. | Optional |
+| username | Username to assign the detections to. (This is usually the user's email address, but may vary based on your configuration). username and assigned_to_uuid are mutually exclusive. | Optional |
#### Context Output
There is no context output for this command.
+### cs-falcon-contain-host
-### 5. Contain a host
-
-* * * * *
-
-Contains containment for a specified host. When contained, a
-host can only communicate with the CrowdStrike cloud and any IPs
-specified in your containment policy.
+***
+Applies containment to a specified host. When contained, a host can only communicate with the CrowdStrike cloud and any IPs specified in your containment policy.
#### Base Command
@@ -519,16 +514,14 @@ specified in your containment policy.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ids | The host agent ID (AID) of the host to contain. Get an agent ID from a detection. | Required |
+| ids | A comma-separated list of host agent IDs (AIDs) of the hosts to contain. Get an agent ID from a detection. | Required |
#### Context Output
There is no context output for this command.
+### cs-falcon-lift-host-containment
-### 6. Lift the containment for a host
-
----
-
+***
Lifts containment on the host, which returns its network communications to normal.
#### Base Command
@@ -539,16 +532,14 @@ Lifts containment on the host, which returns its network communications to norma
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ids | The host agent ID (AID) of the host you want to contain. Get an agent ID from a detection. Can also be a comma separated list of IDs. | Required |
+| ids | A comma-separated list of host agent IDs (AIDs) of the hosts for which to lift containment. Get an agent ID from a detection. | Required |
#### Context Output
There is no context output for this command.
+### cs-falcon-run-command
-
-### 7. cs-falcon-run-command
-
----
+***
Sends commands to hosts.
#### Base Command
@@ -560,12 +551,12 @@ Sends commands to hosts.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| queue_offline | Any commands run against an offline-queued session will be queued up and executed when the host comes online. | Optional |
-| host_ids | A comma-separated list of host agent IDs to run commands for. (Can be retrieved by running the 'cs-falcon-search-device' command.). | Required |
+| host_ids | A comma-separated list of host agent IDs to run commands for. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command. | Required |
| command_type | The type of command to run. | Required |
| full_command | The full command to run. | Required |
-| scope | The scope to run the command for. Possible values are: "read", "write", and "admin". (NOTE: In order to run the CrowdStrike RTR `put` command, it is necessary to pass `scope=admin`.). Possible values are: read, write, admin. Default is read. | Optional |
+| scope | The scope to run the command for. (NOTE: In order to run the CrowdStrike RTR `put` command, it is necessary to pass `scope=admin`). Possible values are: read, write, admin. Default is read. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. Default is 180. | Optional |
-| target | The target to run the command for. Possible values are: "single" and "batch". Possible values are: batch, single. Default is batch. | Optional |
+| target | The target to run the command for. Possible values are: batch, single. Default is batch. | Optional |
| batch_id | A batch ID to execute the command on. | Optional |
#### Context Output
@@ -619,10 +610,10 @@ Sends commands to hosts.
>|---|---|---|---|---|---|
>| ls | ls C:\ | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | Directory listing for C:\ -
Name Type Size (bytes) Size (MB) Last Modified (UTC-5) Created (UTC-5)
---- ---- ------------ --------- --------------------- ---------------
$Recycle.Bin <Directory> -- -- 11/27/2018 10:54:44 AM 9/15/2017 3:33:40 AM
ITAYDI <Directory> -- -- 11/19/2018 1:31:42 PM 11/19/2018 1:31:42 PM | batch_id |
-### 8. cs-falcon-upload-script
+### cs-falcon-upload-script
----
-Uploads a script to Falcon.
+***
+Uploads a script to CrowdStrike Falcon.
#### Base Command
@@ -636,7 +627,6 @@ Uploads a script to Falcon.
| permission_type | The permission type for the custom script. Possible values are: "private", which is used only by the user who uploaded it, "group", which is used by all RTR Admins, and "public", which is used by all active-responders and RTR admins. Possible values are: private, group, public. Default is private. | Optional |
| content | The content of the PowerShell script. | Required |
-
#### Command Example
`!cs-falcon-upload-script name=greatscript content="Write-Output 'Hello, World!'"`
@@ -645,10 +635,14 @@ Uploads a script to Falcon.
The script was uploaded successfully.
-### 9. cs-falcon-upload-file
+#### Context Output
+
+There is no context output for this command.
+
+### cs-falcon-upload-file
----
-Uploads a file to the CrowdStrike cloud. (Can be used for the RTR 'put' command.)
+***
+Uploads a file to the CrowdStrike cloud. (Can be used for the RTR 'put' command).
#### Base Command
@@ -668,9 +662,12 @@ Uploads a file to the CrowdStrike cloud. (Can be used for the RTR 'put' command.
The file was uploaded successfully.
-### 10. cs-falcon-delete-file
+#### Context Output
+
+There is no context output for this command.
+### cs-falcon-delete-file
----
+***
Deletes a file based on the provided ID. Can delete only one file at a time.
#### Base Command
@@ -681,8 +678,7 @@ Deletes a file based on the provided ID. Can delete only one file at a time.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| file_id | The ID of the file to delete. (The ID of the file can be retrieved by running the 'cs-falcon-list-files' command). | Required |
-
+| file_id | The ID of the file to delete. The ID of the file can be retrieved by running the 'cs-falcon-list-files' command. | Required |
#### Command Example
@@ -692,9 +688,13 @@ Deletes a file based on the provided ID. Can delete only one file at a time.
File le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 was deleted successfully.
-### 11. cs-falcon-get-file
+#### Context Output
+
+There is no context output for this command.
+
+### cs-falcon-get-file
----
+***
Returns files based on the provided IDs. These files are used for the RTR 'put' command.
#### Base Command
@@ -705,7 +705,7 @@ Returns files based on the provided IDs. These files are used for the RTR 'put'
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| file_id | A comma-separated list of file IDs to get. (The list of file IDs can be retrieved by running the 'cs-falcon-list-files' command.). | Required |
+| file_id | A comma-separated list of file IDs to get. The list of file IDs can be retrieved by running the 'cs-falcon-list-files' command. | Required |
#### Context Output
@@ -713,11 +713,11 @@ Returns files based on the provided IDs. These files are used for the RTR 'put'
| --- | --- | --- |
| CrowdStrike.File.ID | String | The ID of the file. |
| CrowdStrike.File.CreatedBy | String | The email address of the user who created the file. |
-| CrowdStrike.File.CreatedTime | Date | The date and time the file was created. |
+| CrowdStrike.File.CreatedTime | Date | The datetime the file was created. |
| CrowdStrike.File.Description | String | The description of the file. |
| CrowdStrike.File.Type | String | The type of the file. For example, script. |
| CrowdStrike.File.ModifiedBy | String | The email address of the user who modified the file. |
-| CrowdStrike.File.ModifiedTime | Date | The date and time the file was modified. |
+| CrowdStrike.File.ModifiedTime | Date | The datetime the file was modified. |
| CrowdStrike.File.Name | String | The full name of the file. |
| CrowdStrike.File.Permission | String | The permission type of the file. Possible values are: "private", which is used only by the user who uploaded it, "group", which is used by all RTR Admins, and "public", which is used by all active-responders and RTR admins. |
| CrowdStrike.File.SHA256 | String | The SHA-256 hash of the file. |
@@ -759,10 +759,10 @@ Returns files based on the provided IDs. These files are used for the RTR 'put'
>|---|---|---|---|---|---|---|---|---|---|
>| | 2019-10-17T13:41:48.487520845Z | Demisto | le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | 2019-10-17T13:41:48.487521161Z | Demisto | private | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | script |
-### 12. cs-falcon-list-files
+### cs-falcon-list-files
----
-Returns a list of put-file ID's that are available for the user in the `put` command.
+***
+Returns a list of put-file IDs that are available for the user in the 'put' command. Due to an API limitation, the maximum number of files returned is 100.
#### Base Command
@@ -774,11 +774,11 @@ Returns a list of put-file ID's that are available for the user in the `put` com
| --- | --- | --- |
| CrowdStrike.File.ID | String | The ID of the file. |
| CrowdStrike.File.CreatedBy | String | The email address of the user who created the file. |
-| CrowdStrike.File.CreatedTime | Date | The date and time the file was created. |
+| CrowdStrike.File.CreatedTime | Date | The datetime the file was created. |
| CrowdStrike.File.Description | String | The description of the file. |
| CrowdStrike.File.Type | String | The type of the file. For example, script. |
| CrowdStrike.File.ModifiedBy | String | The email address of the user who modified the file. |
-| CrowdStrike.File.ModifiedTime | Date | The date and time the file was modified. |
+| CrowdStrike.File.ModifiedTime | Date | The datetime the file was modified. |
| CrowdStrike.File.Name | String | The full name of the file. |
| CrowdStrike.File.Permission | String | The permission type of the file. Possible values are: "private", which is used only by the user who uploaded it, "group", which is used by all RTR Admins, and "public", which is used by all active-responders and RTR admins. |
| CrowdStrike.File.SHA256 | String | The SHA-256 hash of the file. |
@@ -820,9 +820,9 @@ Returns a list of put-file ID's that are available for the user in the `put` com
>|---|---|---|---|---|---|---|---|---|---|
>| | 2019-10-17T13:41:48.487520845Z | Demisto | le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | 2019-10-17T13:41:48.487521161Z | Demisto | private | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | script |
-### 13. cs-falcon-get-script
+### cs-falcon-get-script
----
+***
Returns custom scripts based on the provided ID. Used for the RTR 'runscript' command.
#### Base Command
@@ -833,7 +833,7 @@ Returns custom scripts based on the provided ID. Used for the RTR 'runscript' co
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| script_id | A comma-separated list of script IDs to return. (The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.). | Required |
+| script_id | A comma-separated list of script IDs to return. The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command. | Required |
#### Context Output
@@ -841,10 +841,10 @@ Returns custom scripts based on the provided ID. Used for the RTR 'runscript' co
| --- | --- | --- |
| CrowdStrike.Script.ID | String | The ID of the script. |
| CrowdStrike.Script.CreatedBy | String | The email address of the user who created the script. |
-| CrowdStrike.Script.CreatedTime | Date | The date and time the script was created. |
+| CrowdStrike.Script.CreatedTime | Date | The datetime the script was created. |
| CrowdStrike.Script.Description | String | The description of the script. |
| CrowdStrike.Script.ModifiedBy | String | The email address of the user who modified the script. |
-| CrowdStrike.Script.ModifiedTime | Date | The date and time the script was modified. |
+| CrowdStrike.Script.ModifiedTime | Date | The datetime the script was modified. |
| CrowdStrike.Script.Name | String | The script name. |
| CrowdStrike.Script.Permission | String | Permission type of the script. Possible values are: "private", which is used only by the user who uploaded it, "group", which is used by all RTR Admins, and "public", which is used by all active-responders and RTR admins. |
| CrowdStrike.Script.SHA256 | String | The SHA-256 hash of the script file. |
@@ -889,9 +889,9 @@ Returns custom scripts based on the provided ID. Used for the RTR 'runscript' co
>| | 2019-10-17T13:41:48.487520845Z | Demisto | le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | 2019-10-17T13:41:48.487521161Z | Demisto | private | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
-### 14. cs-falcon-delete-script
+### cs-falcon-delete-script
----
+***
Deletes a custom-script based on the provided ID. Can delete only one script at a time.
#### Base Command
@@ -902,7 +902,7 @@ Deletes a custom-script based on the provided ID. Can delete only one script at
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| script_id | The script ID to delete. (Script IDs can be retrieved by running the 'cs-falcon-list-scripts' command.). | Required |
+| script_id | The script ID to delete. The script IDs can be retrieved by running the 'cs-falcon-list-scripts' command. | Required |
#### Command Example
@@ -912,9 +912,12 @@ Deletes a custom-script based on the provided ID. Can delete only one script at
Script le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 was deleted successfully.
-### 15. cs-falcon-list-scripts
+#### Context Output
+
+There is no context output for this command.
+### cs-falcon-list-scripts
----
+***
Returns a list of custom script IDs that are available for the user in the 'runscript' command.
#### Base Command
@@ -927,10 +930,10 @@ Returns a list of custom script IDs that are available for the user in the 'runs
| --- | --- | --- |
| CrowdStrike.Script.ID | String | The ID of the script. |
| CrowdStrike.Script.CreatedBy | String | The email address of the user who created the script. |
-| CrowdStrike.Script.CreatedTime | Date | The date and time the script was created. |
+| CrowdStrike.Script.CreatedTime | Date | The datetime the script was created. |
| CrowdStrike.Script.Description | String | The description of the script. |
| CrowdStrike.Script.ModifiedBy | String | The email address of the user who modified the script. |
-| CrowdStrike.Script.ModifiedTime | Date | The date and time the script was modified. |
+| CrowdStrike.Script.ModifiedTime | Date | The datetime the script was modified. |
| CrowdStrike.Script.Name | String | The script name. |
| CrowdStrike.Script.Permission | String | Permission type of the script. Possible values are: "private", which is used only by the user who uploaded it, "group", which is used by all RTR Admins, and "public", which is used by all active-responders and RTR admins. |
| CrowdStrike.Script.SHA256 | String | The SHA-256 hash of the script file. |
@@ -975,9 +978,9 @@ Returns a list of custom script IDs that are available for the user in the 'runs
>| | 2019-10-17T13:41:48.487520845Z | Demisto | le10098bf0e311e989190662caec3daa_a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | 2019-10-17T13:41:48.487521161Z | Demisto | private | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
-### 16. cs-falcon-run-script
+### cs-falcon-run-script
----
+***
Runs a script on the agent host.
#### Base Command
@@ -989,7 +992,7 @@ Runs a script on the agent host.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| script_name | The name of the script to run. | Optional |
-| host_ids | A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.). | Required |
+| host_ids | A comma-separated list of host agent IDs to run commands. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command. | Required |
| raw | The PowerShell script code to run. | Optional |
| timeout | Timeout for how long to wait for the request in seconds. Maximum is 600 (10 minutes). Default is 30. | Optional |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
@@ -1034,11 +1037,11 @@ Runs a script on the agent host.
>| runscript | runscript -Raw=Write-Output 'Hello, World! | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | | Hello, World! | Type Size (bytes) Size (MB) Last Modified (UTC-5) Created (UTC-5)
---- ---- ------------ --------- --------------------- ---------------
$Recycle.Bin <Directory> -- -- 11/27/2018 10:54:44 AM 9/15/2017 3:33:40 AM
ITAYDI <Directory> -- -- 11/19/2018 1:31:42 PM 11/19/2018 1:31:42 PM |
-### 17. cs-falcon-run-get-command
+### cs-falcon-run-get-command
***
Batch executes 'get' command across hosts to retrieve files.
-The running status you requested the `get` command can be checked with `cs-falcon-status-get-command`.
+The running status of the requested 'get' command can be checked with the 'cs-falcon-status-get-command' command.
#### Base Command
@@ -1048,9 +1051,9 @@ The running status you requested the `get` command can be checked with `cs-falco
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_ids | List of host agent IDs on which to run the RTR command. | Required |
+| host_ids | A comma-separated list of host agent IDs on which to run the RTR command. | Required |
| file_path | Full path to the file that will be retrieved from each host in the batch. | Required |
-| optional_hosts | List of a subset of hosts on which to run the command. | Optional |
+| optional_hosts | A comma-separated list of a subset of hosts on which to run the command. | Optional |
| timeout | The number of seconds to wait for the request before it times out. In ISO time format. For example: 2019-10-17T13:41:48.487520845Z. | Optional |
| timeout_duration | The amount of time to wait for the request before it times out. In duration syntax. For example, 10s. Valid units are: ns, us, ms, s, m, h. Maximum value is 10 minutes. | Optional |
@@ -1101,7 +1104,7 @@ The running status you requested the `get` command can be checked with `cs-falco
-### 18. cs-falcon-status-get-command
+### cs-falcon-status-get-command
***
Retrieves the status of the specified batch 'get' command.
@@ -1114,7 +1117,7 @@ Retrieves the status of the specified batch 'get' command.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| request_ids | The list of IDs of the command requested. | Required |
+| request_ids | A comma-separated list of IDs of the command requested. | Required |
| timeout | The number of seconds to wait for the request before it times out. In ISO time format. For example: 2019-10-17T13:41:48.487520845Z. | Optional |
| timeout_duration | The amount of time to wait for the request before it times out. In duration syntax. For example, 10s. Valid units are: ns, us, ms, s, m, h. Maximum value is 10 minutes. | Optional |
@@ -1174,7 +1177,7 @@ Retrieves the status of the specified batch 'get' command.
>| 2020-05-01T16:09:00Z | | 185596 | \\Device\\HarddiskVolume2\\Windows\\notepad.exe | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 0 | b5c8f140-280b-43fd-8501-9900f837510b | 2020-05-01T16:09:00Z |
-### 19. cs-falcon-status-command
+### cs-falcon-status-command
***
Gets the status of a command executed on a host.
@@ -1189,7 +1192,7 @@ Gets the status of a command executed on a host.
| --- | --- | --- |
| request_id | The ID of the command requested. | Required |
| sequence_id | The sequence ID in chunk requests. | Optional |
-| scope | The scope to run the command for. Possible values are: "read", "write", or "admin". Possible values are: read, write, admin. Default is read. | Optional |
+| scope | The scope to run the command for. Possible values are: read, write, admin. Default is read. | Optional |
#### Context Output
@@ -1235,7 +1238,7 @@ Gets the status of a command executed on a host.
>| ls | true | Directory listing for C:\\ ...... | ae323961-5aa8-442e-8461-8d05c4541d7d |
-### 20. cs-falcon-get-extracted-file
+### cs-falcon-get-extracted-file
***
Gets the RTR extracted file contents for the specified session and SHA256 hash.
@@ -1252,25 +1255,17 @@ Gets the RTR extracted file contents for the specified session and SHA256 hash.
| sha256 | The SHA256 hash of the file. | Required |
| filename | The filename to use for the archive name and the file within the archive. | Optional |
-#### Context Output
-
-There is no context output for this command.
-
#### Command Example
`!cs-falcon-get-extracted-file host_id="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" sha256="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"`
-#### Context Example
+#### Context Output
There is no context output for this command.
-#### Human Readable Output
-
-There is no human readable for this command.
-
-### 21. cs-falcon-list-host-files
+### cs-falcon-list-host-files
***
Gets a list of files for the specified RTR session on a host.
@@ -1351,8 +1346,7 @@ Gets a list of files for the specified RTR session on a host.
>|---|---|---|---|---|---|---|---|---|
>| 2020-05-01T17:57:42Z | | 186811 | \\Device\\HarddiskVolume2\\Windows\\notepad.exe | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 0 | | | 2020-05-01T17:57:42Z |
-
-### 22. cs-falcon-refresh-session
+### cs-falcon-refresh-session
***
Refresh a session timeout on a single host.
@@ -1367,63 +1361,39 @@ Refresh a session timeout on a single host.
| --- | --- | --- |
| host_id | The ID of the host to extend the session for. | Required |
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.Command.HostID | string | The ID of the host for which the command was running. |
-| CrowdStrike.Command.TaskID | string | The ID of the command request which has been accepted. |
-| CrowdStrike.Command.SessionID | string | The ID of the session of the host. |
-| CrowdStrike.File.ID | string | The ID of the file. |
-| CrowdStrike.File.CreatedAt | date | The creation date of the file. |
-| CrowdStrike.File.DeletedAt | date | The deletion date of the file. |
-| CrowdStrike.File.UpdatedAt | date | The last updated date of the file. |
-| CrowdStrike.File.Name | string | The full file name. |
-| CrowdStrike.File.SHA256 | string | The SHA\-256 hash of the file. |
-| CrowdStrike.File.Size | number | The size of the file in bytes. |
-| File.Name | string | The full file name. |
-| File.Size | number | The size of the file in bytes. |
-| File.SHA256 | string | The SHA\-256 hash of the file. |
-
#### Command Example
`!cs-falcon-refresh-session host_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1`
-#### Context Example
-
-There is no context output for this command.
-
#### Human Readable Output
CrowdStrike Session Refreshed: fdd6408f-6688-441b-8659-41bcad25441c
+#### Context Output
-### 23. cs-falcon-search-iocs
+There is no context output for this command.
+### cs-falcon-search-custom-iocs
***
-Deprecated. Use the cs-falcon-search-custom-iocs command instead.
-
+Returns a list of your uploaded IOCs that match the search criteria.
#### Base Command
-`cs-falcon-search-iocs`
+`cs-falcon-search-custom-iocs`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| types | A comma-separated list of indicator types. Valid types are: "sha256", "sha1", "md5", "domain", "ipv4", "ipv6". | Optional |
+| types | A comma-separated list of indicator types. Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Optional |
| values | A comma-separated list of indicator values. | Optional |
-| policies | A comma-separated list of indicator policies. | Optional |
-| share_levels | The level at which the indicator will be shared. Only "red" share level (not shared) is supported, which indicates that the IOC is not shared with other Falcon Host customers. | Optional |
| sources | A comma-separated list of IOC sources. | Optional |
-| from_expiration_date | Start of date range in which to search (YYYY-MM-DD format). | Optional |
-| to_expiration_date | End of date range in which to search (YYYY-MM-DD format). | Optional |
-| limit | The maximum number of records to return. The minimum is 1 and the maximum is 500. Default is 100. | Optional |
-| sort | The order in which the results are returned. Possible values are: "type.asc", "type.desc", "value.asc", "value.desc", "policy.asc", "policy.desc", "share_level.asc", "share_level.desc", "expiration_timestamp.asc", and "expiration_timestamp.desc". | Optional |
-| offset | The offset to begin the list from. For example, start from the 10th record and return the list. Default is 0. | Optional |
-
+| expiration | The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ). | Optional |
+| limit | The maximum number of records to return. The minimum is 1 and the maximum is 500. Default is 50. | Optional |
+| sort | The order in which the results are returned. Possible values are: type.asc, type.desc, value.asc, value.desc, policy.asc, policy.desc, share_level.asc, share_level.desc, expiration_timestamp.asc, expiration_timestamp.desc. | Optional |
+| offset | The offset to begin the list from. For example, start from the 10th record and return the list. | Optional |
+| next_page_token | A pagination token used with the limit argument to manage pagination of results. It corresponds to the 'after' parameter in the API. Use it instead of offset (see the pagination sketch after the Context Output table). | Optional |
#### Context Output
@@ -1432,20 +1402,20 @@ Deprecated. Use the cs-falcon-search-custom-iocs command instead.
| CrowdStrike.IOC.Type | string | The type of the IOC. |
| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Policy | string | The policy of the indicator. |
+| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.ShareLevel | string | The level at which the indicator will be shared. |
+| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
| CrowdStrike.IOC.Expiration | string | The datetime the indicator will expire. |
| CrowdStrike.IOC.Description | string | The description of the IOC. |
| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
-
+| CrowdStrike.NextPageToken | unknown | A pagination token used with the limit parameter to manage pagination of results. |
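+
+When more indicators match than the limit, the response includes CrowdStrike.NextPageToken; passing it back as next_page_token fetches the next page. Below is a minimal, hypothetical sketch of that loop from an XSOAR automation; the helper name and the way the context keys are resolved are assumptions, not part of this integration.
+
+```python
+import demistomock as demisto  # the XSOAR runtime provides demisto in automations; the import mirrors the repo's test convention
+
+
+def iter_custom_iocs(page_size=50):
+    """Yield uploaded custom IOCs page by page using next_page_token pagination."""
+    args = {"limit": page_size}
+    while True:
+        entry = demisto.executeCommand("cs-falcon-search-custom-iocs", args)[0]
+        context = entry.get("EntryContext") or {}
+        # Context keys may carry a DT suffix, so match them by prefix (assumption).
+        iocs = next((v for k, v in context.items() if k.startswith("CrowdStrike.IOC")), [])
+        if isinstance(iocs, dict):
+            iocs = [iocs]
+        for ioc in iocs:
+            yield ioc
+        token = next((v for k, v in context.items() if k.startswith("CrowdStrike.NextPageToken")), None)
+        if not token:
+            break
+        args = {"limit": page_size, "next_page_token": token}
+```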
#### Command Example
-```!cs-falcon-search-iocs types="domain"```
+```!cs-falcon-search-custom-iocs limit=2```
#### Context Example
@@ -1454,14 +1424,38 @@ Deprecated. Use the cs-falcon-search-custom-iocs command instead.
"CrowdStrike": {
"IOC": [
{
- "CreatedTime": "2020-09-30T10:59:37Z",
- "Expiration": "2020-10-30T00:00:00Z",
- "ID": "domain:value",
- "ModifiedTime": "2020-09-30T10:59:37Z",
- "Policy": "none",
- "ShareLevel": "red",
- "Type": "domain",
- "Value": "value"
+ "Action": "no_action",
+ "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "CreatedTime": "2022-02-16T17:17:25.992164453Z",
+ "Description": "test",
+ "Expiration": "2022-02-17T13:47:57Z",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedTime": "2022-02-16T17:17:25.992164453Z",
+ "Platforms": [
+ "mac"
+ ],
+ "Severity": "informational",
+ "Source": "Cortex XSOAR",
+ "Type": "ipv4",
+ "Value": "1.1.8.9"
+ },
+ {
+ "Action": "no_action",
+ "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "CreatedTime": "2022-02-16T17:16:44.514398876Z",
+ "Description": "test",
+ "Expiration": "2022-02-17T13:47:57Z",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedTime": "2022-02-16T17:16:44.514398876Z",
+ "Platforms": [
+ "mac"
+ ],
+ "Severity": "informational",
+ "Source": "Cortex XSOAR",
+ "Type": "ipv4",
+ "Value": "4.1.8.9"
}
]
}
@@ -1472,26 +1466,27 @@ Deprecated. Use the cs-falcon-search-custom-iocs command instead.
>### Indicators of Compromise
->|CreatedTime|Expiration|ID|ModifiedTime|Policy|ShareLevel|Type|Value|
->|---|---|---|---|---|---|---|---|
->| 2020-09-30T10:59:37Z | 2020-10-30T00:00:00Z | domain:value | 2020-09-30T10:59:37Z | none | red | domain | value |
+>|ID|Action|Severity|Type|Value|Expiration|CreatedBy|CreatedTime|Description|ModifiedBy|ModifiedTime|Platforms|Policy|ShareLevel|Source|Tags|
+>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 1.1.8.9 | 2022-02-17T13:47:57Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | test | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | mac | | | Cortex XSOAR | |
+>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 4.1.8.9 | 2022-02-17T13:47:57Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:16:44.514398876Z | test | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:16:44.514398876Z | mac | | | Cortex XSOAR | |
-### 24. cs-falcon-get-ioc
+### cs-falcon-get-custom-ioc
***
Gets the full definition of one or more indicators that you are watching.
#### Base Command
-`cs-falcon-get-ioc`
+`cs-falcon-get-custom-ioc`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| type | The IOC type to retrieve. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". | Required |
-| value | The string representation of the indicator. | Required |
-
+| type | The IOC type to retrieve. Either ioc_id or both type and value must be provided. Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Optional |
+| value | The string representation of the indicator. Either ioc_id or both type and value must be provided. | Optional |
+| ioc_id | The ID of the IOC to get. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command. Either ioc_id or both type and value must be provided. | Optional |
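+
+Either ioc_id alone, or type together with value, identifies the indicator. A minimal, hypothetical wrapper illustrating that choice (the helper name is an assumption):
+
+```python
+import demistomock as demisto  # the XSOAR runtime provides demisto in automations
+
+
+def get_custom_ioc(ioc_id=None, ioc_type=None, value=None):
+    """Fetch a single custom IOC either by its ID or by its type and value."""
+    if ioc_id:
+        args = {"ioc_id": ioc_id}
+    elif ioc_type and value:
+        args = {"type": ioc_type, "value": value}
+    else:
+        raise ValueError("Provide ioc_id, or both type and value.")
+    return demisto.executeCommand("cs-falcon-get-custom-ioc", args)
+```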
#### Context Output
@@ -1500,9 +1495,9 @@ Gets the full definition of one or more indicators that you are watching.
| CrowdStrike.IOC.Type | string | The type of the IOC. |
| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Policy | string | The policy of the indicator. |
+| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.ShareLevel | string | The level at which the indicator will be shared. |
+| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
| CrowdStrike.IOC.Expiration | string | The datetime when the indicator will expire. |
| CrowdStrike.IOC.Description | string | The description of the IOC. |
| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
@@ -1513,7 +1508,7 @@ Gets the full definition of one or more indicators that you are watching.
#### Command Example
-```!cs-falcon-get-ioc type="domain" value="test.domain.com"```
+```!cs-falcon-get-custom-ioc type=ipv4 value=7.5.9.8```
#### Context Example
@@ -1521,16 +1516,24 @@ Gets the full definition of one or more indicators that you are watching.
{
"CrowdStrike": {
"IOC": {
- "CreatedTime": "2020-10-02T13:55:26Z",
- "Description": "Test ioc",
- "Expiration": "2020-11-01T00:00:00Z",
- "ID": "domain:test.domain.com",
- "ModifiedTime": "2020-10-02T13:55:26Z",
- "Policy": "none",
- "ShareLevel": "red",
- "Source": "Demisto playbook",
- "Type": "domain",
- "Value": "test.domain.com"
+ "Action": "no_action",
+ "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "CreatedTime": "2022-02-16T14:25:22.968603813Z",
+ "Expiration": "2022-02-17T17:55:09Z",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "ModifiedTime": "2022-02-16T14:25:22.968603813Z",
+ "Platforms": [
+ "linux"
+ ],
+ "Severity": "informational",
+ "Source": "cortex xsoar",
+ "Tags": [
+ "test",
+ "test1"
+ ],
+ "Type": "ipv4",
+ "Value": "7.5.9.8"
}
}
}
@@ -1540,32 +1543,36 @@ Gets the full definition of one or more indicators that you are watching.
>### Indicator of Compromise
->|CreatedTime|Description|Expiration|ID|ModifiedTime|Policy|ShareLevel|Source|Type|Value|
->|---|---|---|---|---|---|---|---|---|---|
->| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | domain:test.domain.com | 2020-10-02T13:55:26Z | none | red | Demisto playbook | domain | test.domain.com |
+>|ID|Action|Severity|Type|Value|Expiration|CreatedBy|CreatedTime|Description|ModifiedBy|ModifiedTime|Platforms|Policy|ShareLevel|Source|Tags|
+>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 7.5.9.8 | 2022-02-17T17:55:09Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T14:25:22.968603813Z | | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T14:25:22.968603813Z | linux | | | cortex xsoar | test,
test1 |
-### 25. cs-falcon-upload-ioc
+### cs-falcon-upload-custom-ioc
***
Uploads an indicator for CrowdStrike to monitor.
#### Base Command
-`cs-falcon-upload-ioc`
+`cs-falcon-upload-custom-ioc`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ioc_type | The type of the indicator. Possible values are: "sha256", "md5", "domain", "ipv4", and "ipv6". | Required |
-| value | The string representation of the indicator. | Required |
-| policy | The policy to enact when the value is detected on a host. Possible values are: "detect" and "none". A value of "none" is equivalent to turning the indicator off. Default is "detect". | Optional |
-| share_level | The level at which the indicator will be shared. Only "red" share level (not shared) is supported, which indicates that the IOC is not shared with other Falcon Host customers. | Optional |
-| expiration_days | The number of days for which the indicator should be valid. This only applies to domain, ipv4, and ipv6 types. Default is 30. | Optional |
+| ioc_type | The type of the indicator. Possible values are: sha256, md5, domain, ipv4, ipv6. | Required |
+| value | A comma-separated list of indicators. More than one value can be supplied to upload multiple IOCs of the same type but with different values. Note that the uploaded IOCs will share the same properties (as supplied in the other arguments); see the sketch after this table. | Required |
+| action | Action to take when a host observes the custom IOC. Possible values are: no_action - Save the indicator for future use, but take no action. No severity required. allow - Applies to hashes only. Allow the indicator and do not detect it. Severity does not apply and should not be provided. prevent_no_ui - Applies to hashes only. Block and detect the indicator, but hide it from Activity > Detections. Has a default severity value. prevent - Applies to hashes only. Block the indicator and show it as a detection at the selected severity. detect - Enable detections for the indicator at the selected severity. Possible values are: no_action, allow, prevent_no_ui, prevent, detect. | Required |
+| platforms | A comma-separated list of the platforms that the indicator applies to. Possible values are: mac, windows, linux. | Required |
+| severity | The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action. Possible values are: informational, low, medium, high, critical. | Optional |
+| expiration | The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ). | Optional |
| source | The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters. | Optional |
| description | A meaningful description of the indicator. Limited to 200 characters. | Optional |
-
+| applied_globally | Whether the indicator is applied globally. Either applied_globally or host_groups must be provided. Possible values are: true, false. | Optional |
+| host_groups | A comma-separated list of host group IDs that the indicator applies to. The list of host group IDs can be retrieved by running the 'cs-falcon-list-host-groups' command. Either applied_globally or host_groups must be provided. | Optional |
+| tags | A comma-separated list of tags to apply to the indicator. | Optional |
+| file_name | Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs. | Optional |
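+
+Because value accepts a comma-separated list, several indicators of the same type can be uploaded in a single call, all sharing the remaining argument values. A minimal, hypothetical sketch from an XSOAR automation (the helper name and the chosen argument values are illustrative assumptions):
+
+```python
+import demistomock as demisto  # the XSOAR runtime provides demisto in automations
+
+
+def upload_ipv4_iocs(values, severity="high", host_groups=None):
+    """Upload several IPv4 custom IOCs that share the same action, severity, and platforms."""
+    args = {
+        "ioc_type": "ipv4",
+        "value": ",".join(values),  # e.g. "1.2.3.4,5.6.7.8"
+        "action": "detect",
+        "platforms": "mac,windows,linux",
+        "severity": severity,
+    }
+    # Either applied_globally or host_groups must be provided.
+    if host_groups:
+        args["host_groups"] = ",".join(host_groups)
+    else:
+        args["applied_globally"] = "true"
+    return demisto.executeCommand("cs-falcon-upload-custom-ioc", args)
+```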
#### Context Output
@@ -1574,20 +1581,22 @@ Uploads an indicator for CrowdStrike to monitor.
| CrowdStrike.IOC.Type | string | The type of the IOC. |
| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Policy | string | The policy of the indicator. |
+| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.ShareLevel | string | The level at which the indicator will be shared. |
+| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
| CrowdStrike.IOC.Expiration | string | The datetime when the indicator will expire. |
| CrowdStrike.IOC.Description | string | The description of the IOC. |
| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
-
+| CrowdStrike.IOC.Tags | Unknown | The tags of the IOC. |
+| CrowdStrike.IOC.Platforms | Unknown | The platforms of the IOC. |
+| CrowdStrike.IOC.Filename | string | Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs. |
#### Command Example
-```!cs-falcon-upload-ioc ioc_type="domain" value="test.domain.com" policy="none" share_level="red" source="Demisto playbook" description="Test ioc"```
+```!cs-falcon-upload-custom-ioc ioc_type="domain" value="test.domain.com" action="prevent" severity="high" source="Demisto playbook" description="Test ioc" platforms="mac"```
#### Context Example
@@ -1598,13 +1607,14 @@ Uploads an indicator for CrowdStrike to monitor.
"CreatedTime": "2020-10-02T13:55:26Z",
"Description": "Test ioc",
"Expiration": "2020-11-01T00:00:00Z",
- "ID": "domain:test.domain.com",
+ "ID": "4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r",
"ModifiedTime": "2020-10-02T13:55:26Z",
- "Policy": "none",
- "ShareLevel": "red",
+ "Action": "prevent",
+ "Severity": "high",
"Source": "Demisto playbook",
"Type": "domain",
- "Value": "test.domain.com"
+ "Value": "test.domain.com",
+ "Platforms": ["mac"]
}
}
}
@@ -1614,32 +1624,31 @@ Uploads an indicator for CrowdStrike to monitor.
>### Custom IOC was created successfully
->|CreatedTime|Description|Expiration|ID|ModifiedTime|Policy|ShareLevel|Source|Type|Value|
+>|CreatedTime|Description|Expiration|ID|ModifiedTime|Action|Severity|Source|Type|Value|
>|---|---|---|---|---|---|---|---|---|---|
->| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | domain:test.domain.com | 2020-10-02T13:55:26Z | none | red | Demisto playbook | domain | test.domain.com |
-
+>| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r | 2020-10-02T13:55:26Z | prevent | high | Demisto playbook | domain | test.domain.com |
-### 26. cs-falcon-update-ioc
+### cs-falcon-update-custom-ioc
***
-Deprecated. Use the cs-falcon-update-custom-ioc command instead.
-
+Updates an indicator for CrowdStrike to monitor.
#### Base Command
-`cs-falcon-update-ioc`
+`cs-falcon-update-custom-ioc`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ioc_type | The type of the indicator. Possible values are: "sha256", "md5", "sha1", "domain", "ipv4", and "ipv6". | Required |
-| value | The string representation of the indicator. | Required |
-| policy | The policy to enact when the value is detected on a host. Possible values are: "detect" and "none". A value of "none" is equivalent to turning the indicator off. Default is "detect". | Optional |
-| share_level | The level at which the indicator will be shared. Only "red" share level (not shared) is supported, which indicates that the IOC is not shared with other Falcon Host customers. | Optional |
-| expiration_days | The number of days for which the indicator should be valid. This only applies to domain, ipv4, and ipv6 types. Default is 30. | Optional |
+| ioc_id | The ID of the IOC to update. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command. | Required |
+| action | Action to take when a host observes the custom IOC. Possible values are: no_action - Save the indicator for future use, but take no action. No severity required. allow - Applies to hashes only. Allow the indicator and do not detect it. Severity does not apply and should not be provided. prevent_no_ui - Applies to hashes only. Block and detect the indicator, but hide it from Activity > Detections. Has a default severity value. prevent - Applies to hashes only. Block the indicator and show it as a detection at the selected severity. detect - Enable detections for the indicator at the selected severity. Possible values are: no_action, allow, prevent_no_ui, prevent, detect. | Optional |
+| platforms | A comma-separated list of the platforms that the indicator applies to. Possible values are: mac, windows, linux. | Optional |
+| severity | The severity level to apply to this indicator. Required for the prevent and detect actions. Optional for no_action. Possible values are: informational, low, medium, high, critical. | Optional |
+| expiration | The datetime the indicator will become inactive (ISO 8601 format, i.e., YYYY-MM-DDThh:mm:ssZ). | Optional |
| source | The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters. | Optional |
| description | A meaningful description of the indicator. Limited to 200 characters. | Optional |
+| file_name | Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs. | Optional |
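+
+The ioc_id comes from cs-falcon-search-custom-iocs (or from the output of the upload command). A minimal, hypothetical sketch that looks an indicator up by value and raises its severity (the helper name and the context-key handling are assumptions):
+
+```python
+import demistomock as demisto  # the XSOAR runtime provides demisto in automations
+
+
+def escalate_ioc_severity(value, new_severity="high"):
+    """Find a custom IOC by its value and update its severity."""
+    entry = demisto.executeCommand("cs-falcon-search-custom-iocs", {"values": value})[0]
+    context = entry.get("EntryContext") or {}
+    iocs = next((v for k, v in context.items() if k.startswith("CrowdStrike.IOC")), [])
+    if isinstance(iocs, dict):
+        iocs = [iocs]
+    if not iocs:
+        return None
+    return demisto.executeCommand("cs-falcon-update-custom-ioc",
+                                  {"ioc_id": iocs[0].get("ID"), "severity": new_severity})
+```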
#### Context Output
@@ -1648,21 +1657,21 @@ Deprecated. Use the cs-falcon-update-custom-ioc command instead.
| --- | --- | --- |
| CrowdStrike.IOC.Type | string | The type of the IOC. |
| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
-| CrowdStrike.IOC.ID | string | The full ID of the indicator \(type:value\). |
-| CrowdStrike.IOC.Policy | string | The policy of the indicator. |
+| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
+| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.ShareLevel | string | The level at which the indicator will be shared. |
+| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
| CrowdStrike.IOC.Expiration | string | The datetime when the indicator will expire. |
| CrowdStrike.IOC.Description | string | The description of the IOC. |
-| CrowdStrike.IOC.CreatedTime | string | The datetime the IOC was created. |
+| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
-| CrowdStrike.IOC.ModifiedTime | string | The date and time the indicator was last modified. |
+| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
-
+| CrowdStrike.IOC.Filename | string | Name of the file for file indicators. Applies to hashes only. A common filename, or a filename in your environment. Filenames can be helpful for identifying hashes or filtering IOCs. |
#### Command Example
-```!cs-falcon-update-ioc ioc_type="domain" value="test.domain.com" policy="detect" description="Benign domain IOC"```
+```!cs-falcon-update-custom-ioc ioc_id="4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r" severity="high"```
#### Context Example
@@ -1671,12 +1680,12 @@ Deprecated. Use the cs-falcon-update-custom-ioc command instead.
"CrowdStrike": {
"IOC": {
"CreatedTime": "2020-10-02T13:55:26Z",
- "Description": "Benign domain IOC",
+ "Description": "Test ioc",
"Expiration": "2020-11-01T00:00:00Z",
- "ID": "domain:test.domain.com",
- "ModifiedTime": "2020-10-02T13:55:33Z",
- "Policy": "detect",
- "ShareLevel": "red",
+ "ID": "4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r",
+ "ModifiedTime": "2020-10-02T13:55:26Z",
+ "Action": "prevent",
+ "Severity": "high",
"Source": "Demisto playbook",
"Type": "domain",
"Value": "test.domain.com"
@@ -1687,29 +1696,26 @@ Deprecated. Use the cs-falcon-update-custom-ioc command instead.
#### Human Readable Output
->### Custom IOC was created successfully
+>### Custom IOC was updated successfully
->|CreatedTime|Description|Expiration|ID|ModifiedTime|Policy|ShareLevel|Source|Type|Value|
+>|CreatedTime|Description|Expiration|ID|ModifiedTime|Action|Severity|Source|Type|Value|
>|---|---|---|---|---|---|---|---|---|---|
->| 2020-10-02T13:55:26Z | Benign domain IOC | 2020-11-01T00:00:00Z | domain:test.domain.com | 2020-10-02T13:55:33Z | detect | red | Demisto playbook | domain | test.domain.com |
-
+>| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r | 2020-10-02T13:55:26Z | prevent | high | Demisto playbook | domain | test.domain.com |
-### 27. cs-falcon-delete-ioc
+### cs-falcon-delete-custom-ioc
***
-Deprecated. Use the cs-falcon-delete-custom-ioc command instead.
-
+Deletes a monitored indicator.
#### Base Command
-`cs-falcon-delete-ioc`
+`cs-falcon-delete-custom-ioc`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| type | The IOC type to delete. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". | Required |
-| value | The string representation of the indicator to delete. | Required |
+| ioc_id | The ID of the IOC to delete. The ID of the IOC can be retrieved by running the 'cs-falcon-search-custom-iocs' command. | Required |
#### Context Output
@@ -1718,14 +1724,14 @@ There is no context output for this command.
#### Command Example
-```!cs-falcon-delete-ioc type="domain" value="test.domain.com"```
+```!cs-falcon-delete-custom-ioc ioc_id="4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r"```
#### Human Readable Output
->Custom IOC domain:test.domain.com was successfully deleted.
+>Custom IOC 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r was successfully deleted.
-### 28. cs-falcon-device-count-ioc
+### cs-falcon-device-count-ioc
***
The number of hosts that observed the provided IOC.
@@ -1738,7 +1744,7 @@ The number of hosts that observed the provided IOC.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| type | The IOC type. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Required |
+| type | The IOC type. Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Required |
| value | The string representation of the indicator. | Required |
#### Context Output
@@ -1774,7 +1780,7 @@ The number of hosts that observed the provided IOC.
>Indicator of Compromise **domain:value** device count: **1**
-### 29. cs-falcon-processes-ran-on
+### cs-falcon-processes-ran-on
***
Get processes associated with a given IOC.
@@ -1787,7 +1793,7 @@ Get processes associated with a given IOC.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| type | The IOC type. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Required |
+| type | The IOC type. Possible values are: sha256, sha1, md5, domain, ipv4, ipv6. | Required |
| value | The string representation of the indicator. | Required |
| device_id | The device ID to check against. | Required |
@@ -1834,8 +1840,7 @@ Get processes associated with a given IOC.
>|---|
>| pid:pid:650164094720 |
-
-### 30. cs-falcon-process-details
+### cs-falcon-process-details
***
Retrieves the details of a process, according to the process ID that is running or that previously ran.
@@ -1898,7 +1903,7 @@ Retrieves the details of a process, according to the process ID that is running
>| "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" | deviceId | \Device\HarddiskVolume1\Program Files (x86)\Google\Chrome\Application\chrome.exe | device_id:pid | pid | 2020-10-01T09:05:51Z | 132460167512852140 | 2020-10-02T06:43:45Z | 132460946259334768 |
-### 31. cs-falcon-device-ran-on
+### cs-falcon-device-ran-on
***
Returns a list of device IDs an indicator ran on.
@@ -1945,7 +1950,7 @@ Returns a list of device IDs an indicator ran on.
>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
-### 32. cs-falcon-list-detection-summaries
+### cs-falcon-list-detection-summaries
***
Lists detection summaries.
@@ -1959,7 +1964,7 @@ Lists detection summaries.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| fetch_query | The query used to filter the results. | Optional |
-| ids | A comma separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678, If you use this argument, fetch_query argument will be ignored. | Optional |
+| ids | A comma-separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678. If you use this argument, the fetch_query argument will be ignored. | Optional |
#### Context Output
@@ -1979,7 +1984,7 @@ Lists detection summaries.
| CrowdStrike.Detections.device.config_id_build | String | The version of the sensor that the device is running. For example: 11406. |
| CrowdStrike.Detections.device.config_id_platform | String | The platform ID of the sensor that the device is running. |
| CrowdStrike.Detections.device.external_ip | String | The external IP address of the device. |
-| CrowdStrike.Detections.device.hostname | String | The host name of the device. |
+| CrowdStrike.Detections.device.hostname | String | The hostname of the device. |
| CrowdStrike.Detections.device.first_seen | Date | The datetime the host was first seen by CrowdStrike Falcon. |
| CrowdStrike.Detections.device.last_seen | Date | The datetime the host was last seen by CrowdStrike Falcon. |
| CrowdStrike.Detections.device.local_ip | String | The local IP address of the device. |
@@ -1997,7 +2002,7 @@ Lists detection summaries.
| CrowdStrike.Detections.behaviors.device_id | String | The ID of the device associated with the behavior. |
| CrowdStrike.Detections.behaviors.timestamp | Date | The datetime the behavior detection occurred in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
| CrowdStrike.Detections.behaviors.behavior_id | String | The ID of the behavior. |
-| CrowdStrike.Detections.behaviors.filename | String | The file name of the triggering process. |
+| CrowdStrike.Detections.behaviors.filename | String | The filename of the triggering process. |
| CrowdStrike.Detections.behaviors.alleged_filetype | String | The file extension of the behavior's filename. |
| CrowdStrike.Detections.behaviors.cmdline | String | The command line of the triggering process. |
| CrowdStrike.Detections.behaviors.scenario | String | The name of the scenario the behavior belongs to. |
@@ -2015,7 +2020,7 @@ Lists detection summaries.
| CrowdStrike.Detections.behaviors.control_graph_id | String | The behavior hit key for the Threat Graph API. |
| CrowdStrike.Detections.behaviors.triggering_process_graph_id | String | The ID of the process that triggered the behavior detection. |
| CrowdStrike.Detections.behaviors.sha256 | String | The SHA256 of the triggering process. |
-| CrowdStrike.Detections.behaviors.md5 | String | The MD5 of the triggering process. |
+| CrowdStrike.Detections.behaviors.md5 | String | The MD5 hash of the triggering process. |
| CrowdStrike.Detections.behaviors.parent_details.parent_sha256 | String | The SHA256 hash of the parent process. |
| CrowdStrike.Detections.behaviors.parent_details.parent_md5 | String | The MD5 hash of the parent process. |
| CrowdStrike.Detections.behaviors.parent_details.parent_cmdline | String | The command line of the parent process. |
@@ -2181,7 +2186,7 @@ Lists detection summaries.
>| ldt:ldt:ldt | 2020-07-06T08:10:55.538668036Z | new | Low |
-### 33. cs-falcon-list-incident-summaries
+### cs-falcon-list-incident-summaries
***
Lists incident summaries.
@@ -2195,7 +2200,7 @@ Lists incident summaries.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| fetch_query | The query used to filter the results. | Optional |
-| ids | A comma separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678, If you use this argument, fetch_query argument will be ignored. | Optional |
+| ids | A comma-separated list of detection IDs. For example, ldt:1234:1234,ldt:5678:5678. If you use this argument, the fetch_query argument will be ignored. | Optional |
#### Context Output
@@ -2216,8 +2221,8 @@ Lists incident summaries.
| CrowdStrike.Incidents.hosts.config_id_platform | String | The platform ID of the sensor that the device is running. |
| CrowdStrike.Incidents.hosts.external_ip | String | The external IP address of the host. |
| CrowdStrike.Incidents.hosts.hostname | String | The name of the host. |
-| CrowdStrike.Incidents.hosts.first_seen | Date | The date and time the host was first seen by CrowdStrike Falcon. |
-| CrowdStrike.Incidents.hosts.last_seen | Date | The date and time the host was last seen by CrowdStrike Falcon. |
+| CrowdStrike.Incidents.hosts.first_seen | Date | The datetime the host was first seen by CrowdStrike Falcon. |
+| CrowdStrike.Incidents.hosts.last_seen | Date | The datetime the host was last seen by CrowdStrike Falcon. |
| CrowdStrike.Incidents.hosts.local_ip | String | The device local IP address. |
| CrowdStrike.Incidents.hosts.mac_address | String | The device MAC address. |
| CrowdStrike.Incidents.hosts.major_version | String | The major version of the operating system. |
@@ -2230,9 +2235,9 @@ Lists incident summaries.
| CrowdStrike.Incidents.hosts.system_manufacturer | String | The system manufacturer of the device. |
| CrowdStrike.Incidents.hosts.system_product_name | String | The product name of the system. |
| CrowdStrike.Incidents.hosts.modified_timestamp | Date | The datetime a user modified the incident in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-| CrowdStrike.Incidents.created | Date | The time that the incident was created. |
-| CrowdStrike.Incidents.start | Date | The recorded time of the earliest incident. |
-| CrowdStrike.Incidents.end | Date | The recorded time of the latest incident. |
+| CrowdStrike.Incidents.created | Date | The datetime that the incident was created. |
+| CrowdStrike.Incidents.start | Date | The recorded datetime of the earliest incident. |
+| CrowdStrike.Incidents.end | Date | The recorded datetime of the latest incident. |
| CrowdStrike.Incidents.state | String | The state of the incident. |
| CrowdStrike.Incidents.status | Number | The status of the incident. |
| CrowdStrike.Incidents.name | String | The name of the incident. |
@@ -2240,16 +2245,14 @@ Lists incident summaries.
| CrowdStrike.Incidents.tags | String | The tags of the incident. |
| CrowdStrike.Incidents.fine_score | Number | The incident score. |
-
#### Command Example
```!cs-falcon-list-incident-summaries```
-
-### 34. Endpoint
+### endpoint
***
-Returns information about an endpoint, does not support regex.
+Returns information about an endpoint. Does not support regex.
#### Base Command
@@ -2308,7 +2311,7 @@ Returns information about an endpoint, does not support regex.
>|---|---|---|---|---|---|---|---|
>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 1.1.1.1 | Windows | Windows Server 2019| Hostname | Online | 1-1-1-1 | CrowdStrike Falcon|\n"
-### 35. cs-falcon-create-host-group
+### cs-falcon-create-host-group
***
Create a host group.
@@ -2322,11 +2325,10 @@ Create a host group.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| name | The name of the host. | Required |
-| group_type | The group type of the group. Can be 'static' or 'dynamic'. Possible values are: static, dynamic. | Required |
+| group_type | The group type of the group. Possible values are: static, dynamic. | Required |
| description | The description of the host. | Optional |
| assignment_rule | The assignment rule. | Optional |
-
#### Context Output
| **Path** | **Type** | **Description** |
@@ -2340,7 +2342,6 @@ Create a host group.
| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-
#### Command Example
```!cs-falcon-create-host-group name="test_name_1" description="test_description" group_type=static```
@@ -2372,23 +2373,22 @@ Create a host group.
>|---|---|---|---|---|---|---|---|
>| api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:02.060242909Z | test_description | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:02.060242909Z | test_name_1 |
-### 36. cs-falcon-update-host-group
+### cs-falcon-list-host-groups
***
-Updates a host group.
+Lists the available host groups.
#### Base Command
-`cs-falcon-update-host-group`
+`cs-falcon-list-host-groups`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_group_id | The ID of the host group. | Required |
-| name | The name of the host group. | Optional |
-| description | The description of the host group. | Optional |
-| assignment_rule | The assignment rule. | Optional |
+| filter | The query by which to filter the devices that belong to the host group. | Optional |
+| offset | Page offset. | Optional |
+| limit | Maximum number of results on a page. Default is 50. | Optional |
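+
+The listing is offset/limit paginated. A minimal, hypothetical sketch that walks all host groups page by page (the helper name and the context-key handling are assumptions):
+
+```python
+import demistomock as demisto  # the XSOAR runtime provides demisto in automations
+
+
+def iter_host_groups(page_size=50):
+    """Yield host groups page by page using offset/limit pagination."""
+    offset = 0
+    while True:
+        entry = demisto.executeCommand("cs-falcon-list-host-groups",
+                                       {"offset": offset, "limit": page_size})[0]
+        context = entry.get("EntryContext") or {}
+        groups = next((v for k, v in context.items() if k.startswith("CrowdStrike.HostGroup")), [])
+        if isinstance(groups, dict):
+            groups = [groups]
+        if not groups:
+            break
+        for group in groups:
+            yield group
+        offset += page_size
+```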
#### Context Output
@@ -2406,343 +2406,7 @@ Updates a host group.
#### Command Example
-```!cs-falcon-update-host-group host_group_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 name="test_name_update_1" description="test_description_update"```
-
-#### Context Example
-
-```json
-{
- "CrowdStrike": {
- "HostGroup": {
- "assignment_rule": "device_id:[''],hostname:['']",
- "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "created_timestamp": "2021-08-22T07:48:35.111070562Z",
- "description": "test_description_update",
- "group_type": "static",
- "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
- "name": "test_name_update_1"
- }
- }
-}
-```
-
-#### Human Readable Output
-
->### Results
-
->|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
->|---|---|---|---|---|---|---|---|---|
->| device_id:[''],hostname:[''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-
-### 37. cs-falcon-list-host-group-members
-
-***
-Gets the list of host group members.
-
-#### Base Command
-
-`cs-falcon-list-host-group-members`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| host_group_id | The ID of the host group. | Optional |
-| filter | The query to filter the devices that belong to the host group. | Optional |
-| offset | Page offset. | Optional |
-| limit | The maximum number of results on a page. Default is 50. | Optional |
-| sort | The property to sort by (e.g. status.desc or hostname.asc). | Optional |
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.Device.ID | String | The ID of the device. |
-| CrowdStrike.Device.LocalIP | String | The local IP address of the device. |
-| CrowdStrike.Device.ExternalIP | String | The external IP address of the device. |
-| CrowdStrike.Device.Hostname | String | The host name of the device. |
-| CrowdStrike.Device.OS | String | The operating system of the device. |
-| CrowdStrike.Device.MacAddress | String | The MAC address of the device. |
-| CrowdStrike.Device.FirstSeen | String | The first time the device was seen. |
-| CrowdStrike.Device.LastSeen | String | The last time the device was seen. |
-| CrowdStrike.Device.Status | String | The device status. |
-
-
-#### Command Example
-
-```!cs-falcon-list-host-group-members```
-
-#### Context Example
-
-```json
-{
- "CrowdStrike": {
- "Device": [
- {
- "ExternalIP": "35.224.136.145",
- "FirstSeen": "2021-08-12T16:13:26Z",
- "Hostname": "FALCON-CROWDSTR",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "LastSeen": "2021-08-23T04:59:48Z",
- "LocalIP": "10.128.0.21",
- "MacAddress": "42-01-0a-80-00-15",
- "OS": "Windows Server 2019",
- "Status": "normal"
- },
- {
- "ExternalIP": "35.224.136.145",
- "FirstSeen": "2020-02-10T12:40:18Z",
- "Hostname": "FALCON-CROWDSTR",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "LastSeen": "2021-08-25T07:42:47Z",
- "LocalIP": "10.128.0.7",
- "MacAddress": "42-01-0a-80-00-07",
- "OS": "Windows Server 2019",
- "Status": "contained"
- },
- {
- "ExternalIP": "35.224.136.145",
- "FirstSeen": "2021-08-23T05:04:41Z",
- "Hostname": "INSTANCE-1",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "LastSeen": "2021-08-25T07:49:06Z",
- "LocalIP": "10.128.0.20",
- "MacAddress": "42-01-0a-80-00-14",
- "OS": "Windows Server 2019",
- "Status": "normal"
- },
- {
- "ExternalIP": "35.224.136.145",
- "FirstSeen": "2021-08-11T13:57:29Z",
- "Hostname": "INSTANCE-1",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "LastSeen": "2021-08-23T04:45:37Z",
- "LocalIP": "10.128.0.20",
- "MacAddress": "42-01-0a-80-00-14",
- "OS": "Windows Server 2019",
- "Status": "normal"
- },
- {
- "ExternalIP": "35.224.136.145",
- "FirstSeen": "2021-08-08T11:33:21Z",
- "Hostname": "falcon-crowdstrike-sensor-centos7",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "LastSeen": "2021-08-25T07:50:47Z",
- "LocalIP": "10.128.0.19",
- "MacAddress": "42-01-0a-80-00-13",
- "OS": "CentOS 7.9",
- "Status": "normal"
- }
- ]
- }
-}
-```
-
-#### Human Readable Output
-
->### Devices
-
->|ID|External IP|Local IP|Hostname|OS|Mac Address|First Seen|Last Seen|Status|
->|---|---|---|---|---|---|---|---|---|
->| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 35.224.136.145 | 10.128.0.19 | falcon-crowdstrike-sensor-centos7 | CentOS 7.9 | 42-01-0a-80-00-13 | 2021-08-08T11:33:21Z | 2021-08-25T07:50:47Z | normal |
-
-### 38. cs-falcon-add-host-group-members
-
-***
-Add host group members.
-
-#### Base Command
-
-`cs-falcon-add-host-group-members`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| host_group_id | The ID of the host group. | Required |
-| host_ids | A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.). | Required |
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.HostGroup.id | String | The ID of the host group. |
-| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
-| CrowdStrike.HostGroup.name | String | The name of the host group. |
-| CrowdStrike.HostGroup.description | String | The description of the host group. |
-| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
-| CrowdStrike.HostGroup.created_timestamp | Date | The datetime the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
-| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-
-
-#### Command Example
-
-```!cs-falcon-add-host-group-members host_group_id="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" host_ids="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"```
-
-#### Context Example
-
-```json
-{
- "CrowdStrike": {
- "HostGroup": {
- "assignment_rule": "device_id:[''],hostname:['falcon-crowdstrike-sensor-centos7','']",
- "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "created_timestamp": "2021-08-22T07:48:35.111070562Z",
- "description": "test_description_update",
- "group_type": "static",
- "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
- "name": "test_name_update_1"
- }
- }
-}
-```
-
-#### Human Readable Output
-
->### Results
-
->|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
->|---|---|---|---|---|---|---|---|---|
->| device_id:[''],hostname:['falcon-crowdstrike-sensor-centos7',''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-
-### 39. cs-falcon-remove-host-group-members
-
-***
-Remove host group members.
-
-#### Base Command
-
-`cs-falcon-remove-host-group-members`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| host_group_id | The ID of the host group. | Required |
-| host_ids | A comma-separated list of host agent IDs to run commands. (The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command.). | Required |
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.HostGroup.id | String | The ID of the host group. |
-| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
-| CrowdStrike.HostGroup.name | String | The name of the host group. |
-| CrowdStrike.HostGroup.description | String | The description of the host group. |
-| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
-| CrowdStrike.HostGroup.created_timestamp | Date | The datetime the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
-| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-
-
-#### Command Example
-
-```!cs-falcon-remove-host-group-members host_group_id="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" host_ids="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"```
-
-#### Context Example
-
-```json
-{
- "CrowdStrike": {
- "HostGroup": {
- "assignment_rule": "device_id:[''],hostname:['']",
- "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "created_timestamp": "2021-08-22T07:48:35.111070562Z",
- "description": "test_description_update",
- "group_type": "static",
- "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
- "name": "test_name_update_1"
- }
- }
-}
-```
-
-#### Human Readable Output
-
->### Results
-
->|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
->|---|---|---|---|---|---|---|---|---|
->| device_id:[''],hostname:[''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-
-### 40. cs-falcon-resolve-incident
-
-***
-Resolve and update incidents using the specified settings.
-
-#### Base Command
-
-`cs-falcon-resolve-incident`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| ids | A comma-separated list of incident IDs. | Required |
-| status | The new status of the incident. Can be "New", "In Progress", "Reopened", "Closed". Possible values are: New, In Progress, Reopened, Closed. | Optional |
-| assigned_to_uuid | UUID of a user to assign the incident to. Mutually exclusive with the 'username' argument. | Optional |
-| username | Username of a user to assign the incident to. Mutually exclusive with the 'assigned_to_uuid' argument. Using this parameter instead of 'assigned_to_uuid' will result in an additional API call in order to fetch the UUID of the user. | Optional |
-| add_tag | Add a new tag to the incidents. | Optional |
-| remove_tag | Remove a tag from the incidents. | Optional |
-| add_comment | Add a comment to the incident. | Optional |
-
-
-#### Context Output
-
-There is no context output for this command.
-
-#### Command Example
-
-```!cs-falcon-resolve-incident ids="inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1,inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" status="Closed"```
-
-#### Human Readable Output
-
->inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 changed successfully to Closed
->inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 changed successfully to Closed
-
-### 41. cs-falcon-list-host-groups
-
-***
-List the available host groups.
-
-
-#### Base Command
-
-`cs-falcon-list-host-groups`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| filter | The query by which to filter the devices that belong to the host group. | Optional |
-| offset | Page offset. | Optional |
-| limit | Maximum number of results on a page. Default is 50. | Optional |
-
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.HostGroup.id | String | The ID of the host group. |
-| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
-| CrowdStrike.HostGroup.name | String | The name of the host group. |
-| CrowdStrike.HostGroup.description | String | The description of the host group. |
-| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
-| CrowdStrike.HostGroup.created_timestamp | Date | The datetime when the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
-| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime when the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-
-
-#### Command Example
-
-```!cs-falcon-list-host-groups```
+```!cs-falcon-list-host-groups```
#### Context Example
@@ -3291,11 +2955,10 @@ List the available host groups.
>|---|---|---|---|---|---|---|---|---|
>| device_id:[''],hostname:[''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-26T10:02:50.175530821Z | description2 | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-26T10:02:52.026307768Z | test_16299721694081629972169408 |
-### 42. cs-falcon-delete-host-groups
+### cs-falcon-delete-host-groups
***
-Delete the requested host groups.
-
+Deletes the requested host groups.
#### Base Command
@@ -3307,7 +2970,6 @@ Delete the requested host groups.
| --- | --- | --- |
| host_group_id | A comma-separated list of the IDs of the host groups to be deleted. | Required |
-
#### Context Output
There is no context output for this command.
@@ -3322,249 +2984,231 @@ There is no context output for this command.
>host group id a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 deleted successfully
-### 43. cs-falcon-search-custom-iocs
+### cs-falcon-update-host-group
***
-Returns a list of your uploaded IOCs that match the search criteria.
-
+Updates a host group.
#### Base Command
-`cs-falcon-search-custom-iocs`
+`cs-falcon-update-host-group`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| types | A comma-separated list of indicator types. Valid types are: "sha256", "sha1", "md5", "domain", "ipv4", "ipv6". | Optional |
-| values | A comma-separated list of indicator values. | Optional |
-| sources | A comma-separated list of IOC sources. | Optional |
-| expiration | The date on which the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ). | Optional |
-| limit | The maximum number of records to return. The minimum is 1 and the maximum is 500. Default is 50. | Optional |
-| sort | The order in which the results are returned. Possible values are: "type.asc", "type.desc", "value.asc", "value.desc", "policy.asc", "policy.desc", "share_level.asc", "share_level.desc", "expiration_timestamp.asc", and "expiration_timestamp.desc". | Optional |
-| offset | The offset to begin the list from. For example, start from the 10th record and return the list. Default is 0. | Optional |
-
+| host_group_id | The ID of the host group. | Required |
+| name | The name of the host group. | Optional |
+| description | The description of the host group. | Optional |
+| assignment_rule | The assignment rule. | Optional |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.IOC.Type | string | The type of the IOC. |
-| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
-| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
-| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
-| CrowdStrike.IOC.Expiration | date | The datetime when the indicator will expire. |
-| CrowdStrike.IOC.Description | string | The description of the IOC. |
-| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
-| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
-| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
-| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
+| CrowdStrike.HostGroup.id | String | The ID of the host group. |
+| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
+| CrowdStrike.HostGroup.name | String | The name of the host group. |
+| CrowdStrike.HostGroup.description | String | The description of the host group. |
+| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
+| CrowdStrike.HostGroup.created_timestamp | Date | The datetime the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
+| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
+| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
-#### Command example
+#### Command Example
-```!cs-falcon-search-custom-iocs limit=2```
+```!cs-falcon-update-host-group host_group_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 name="test_name_update_1" description="test_description_update"```
#### Context Example
```json
{
"CrowdStrike": {
- "IOC": [
- {
- "Action": "no_action",
- "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "CreatedTime": "2022-02-16T17:17:25.992164453Z",
- "Description": "test",
- "Expiration": "2022-02-17T13:47:57Z",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedTime": "2022-02-16T17:17:25.992164453Z",
- "Platforms": [
- "mac"
- ],
- "Severity": "informational",
- "Source": "Cortex XSOAR",
- "Type": "ipv4",
- "Value": "1.1.8.9"
- },
- {
- "Action": "no_action",
- "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "CreatedTime": "2022-02-16T17:16:44.514398876Z",
- "Description": "test",
- "Expiration": "2022-02-17T13:47:57Z",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedTime": "2022-02-16T17:16:44.514398876Z",
- "Platforms": [
- "mac"
- ],
- "Severity": "informational",
- "Source": "Cortex XSOAR",
- "Type": "ipv4",
- "Value": "4.1.8.9"
- }
- ]
+ "HostGroup": {
+ "assignment_rule": "device_id:[''],hostname:['']",
+ "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "created_timestamp": "2021-08-22T07:48:35.111070562Z",
+ "description": "test_description_update",
+ "group_type": "static",
+ "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
+ "name": "test_name_update_1"
+ }
}
}
```
#### Human Readable Output
->### Indicators of Compromise
-
->|ID|Action|Severity|Type|Value|Expiration|CreatedBy|CreatedTime|Description|ModifiedBy|ModifiedTime|Platforms|Policy|ShareLevel|Source|Tags|
->|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
->| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 1.1.8.9 | 2022-02-17T13:47:57Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | test | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | mac | | | Cortex XSOAR | |
->| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 4.1.8.9 | 2022-02-17T13:47:57Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:16:44.514398876Z | test | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:16:44.514398876Z | mac | | | Cortex XSOAR | |
+>### Results
+
+>|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
+>|---|---|---|---|---|---|---|---|---|
+>| device_id:[''],hostname:[''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-### 44. cs-falcon-get-custom-ioc
+### cs-falcon-list-host-group-members
***
-Gets the full definition of one or more indicators that you are watching.
-
+Gets the list of host group members.
#### Base Command
-`cs-falcon-get-custom-ioc`
+`cs-falcon-list-host-group-members`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| type | The IOC type to retrieve. Possible values are: "sha256", "sha1", "md5", "domain", "ipv4", and "ipv6". Either ioc_id or ioc_type and value must be provided. | Optional |
-| value | The string representation of the indicator. Either ioc_id or ioc_type and value must be provided. | Optional |
-| ioc_id | The ID of the IOC to get. Can be retrieved by running the cs-falcon-search-custom-iocs command. Either ioc_id or ioc_type and value must be provided. | Optional |
-
+| host_group_id | The ID of the host group. | Optional |
+| filter | The query to filter the devices that belong to the host group. | Optional |
+| offset | Page offset. | Optional |
+| limit | The maximum number of results on a page. Default is 50. | Optional |
+| sort | The property to sort by (e.g., status.desc or hostname.asc). | Optional |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.IOC.Type | string | The type of the IOC. |
-| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
-| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
-| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
-| CrowdStrike.IOC.Expiration | date | The datetime when the indicator will expire. |
-| CrowdStrike.IOC.Description | string | The description of the IOC. |
-| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
-| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
-| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
-| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
+| CrowdStrike.Device.ID | String | The ID of the device. |
+| CrowdStrike.Device.LocalIP | String | The local IP address of the device. |
+| CrowdStrike.Device.ExternalIP | String | The external IP address of the device. |
+| CrowdStrike.Device.Hostname | String | The hostname of the device. |
+| CrowdStrike.Device.OS | String | The operating system of the device. |
+| CrowdStrike.Device.MacAddress | String | The MAC address of the device. |
+| CrowdStrike.Device.FirstSeen | String | The first time the device was seen. |
+| CrowdStrike.Device.LastSeen | String | The last time the device was seen. |
+| CrowdStrike.Device.Status | String | The device status. |
-#### Command example
+#### Command Example
-```!cs-falcon-get-custom-ioc type=ipv4 value=7.5.9.8```
+```!cs-falcon-list-host-group-members```
#### Context Example
```json
{
"CrowdStrike": {
- "IOC": {
- "Action": "no_action",
- "CreatedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "CreatedTime": "2022-02-16T14:25:22.968603813Z",
- "Expiration": "2022-02-17T17:55:09Z",
- "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedBy": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
- "ModifiedTime": "2022-02-16T14:25:22.968603813Z",
- "Platforms": [
- "linux"
- ],
- "Severity": "informational",
- "Source": "cortex xsoar",
- "Tags": [
- "test",
- "test1"
- ],
- "Type": "ipv4",
- "Value": "7.5.9.8"
- }
+ "Device": [
+ {
+ "ExternalIP": "35.224.136.145",
+ "FirstSeen": "2021-08-12T16:13:26Z",
+ "Hostname": "FALCON-CROWDSTR",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "LastSeen": "2021-08-23T04:59:48Z",
+ "LocalIP": "10.128.0.21",
+ "MacAddress": "42-01-0a-80-00-15",
+ "OS": "Windows Server 2019",
+ "Status": "normal"
+ },
+ {
+ "ExternalIP": "35.224.136.145",
+ "FirstSeen": "2020-02-10T12:40:18Z",
+ "Hostname": "FALCON-CROWDSTR",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "LastSeen": "2021-08-25T07:42:47Z",
+ "LocalIP": "10.128.0.7",
+ "MacAddress": "42-01-0a-80-00-07",
+ "OS": "Windows Server 2019",
+ "Status": "contained"
+ },
+ {
+ "ExternalIP": "35.224.136.145",
+ "FirstSeen": "2021-08-23T05:04:41Z",
+ "Hostname": "INSTANCE-1",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "LastSeen": "2021-08-25T07:49:06Z",
+ "LocalIP": "10.128.0.20",
+ "MacAddress": "42-01-0a-80-00-14",
+ "OS": "Windows Server 2019",
+ "Status": "normal"
+ },
+ {
+ "ExternalIP": "35.224.136.145",
+ "FirstSeen": "2021-08-11T13:57:29Z",
+ "Hostname": "INSTANCE-1",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "LastSeen": "2021-08-23T04:45:37Z",
+ "LocalIP": "10.128.0.20",
+ "MacAddress": "42-01-0a-80-00-14",
+ "OS": "Windows Server 2019",
+ "Status": "normal"
+ },
+ {
+ "ExternalIP": "35.224.136.145",
+ "FirstSeen": "2021-08-08T11:33:21Z",
+ "Hostname": "falcon-crowdstrike-sensor-centos7",
+ "ID": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "LastSeen": "2021-08-25T07:50:47Z",
+ "LocalIP": "10.128.0.19",
+ "MacAddress": "42-01-0a-80-00-13",
+ "OS": "CentOS 7.9",
+ "Status": "normal"
+ }
+ ]
}
}
```
#### Human Readable Output
->### Indicator of Compromise
+>### Devices
->|ID|Action|Severity|Type|Value|Expiration|CreatedBy|CreatedTime|Description|ModifiedBy|ModifiedTime|Platforms|Policy|ShareLevel|Source|Tags|
->|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
->| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | no_action | informational | ipv4 | 7.5.9.8 | 2022-02-17T17:55:09Z | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T14:25:22.968603813Z | | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T14:25:22.968603813Z | linux | | | cortex xsoar | test,
test1 |
+>|ID|External IP|Local IP|Hostname|OS|Mac Address|First Seen|Last Seen|Status|
+>|---|---|---|---|---|---|---|---|---|
+>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 35.224.136.145 | 10.128.0.19 | falcon-crowdstrike-sensor-centos7 | CentOS 7.9 | 42-01-0a-80-00-13 | 2021-08-08T11:33:21Z | 2021-08-25T07:50:47Z | normal |
-### 45. cs-falcon-upload-custom-ioc
+### cs-falcon-add-host-group-members
***
-Uploads an indicator for CrowdStrike to monitor.
-
+Adds host group members.
#### Base Command
-`cs-falcon-upload-custom-ioc`
+`cs-falcon-add-host-group-members`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ioc_type | The type of the indicator. Possible values are: "sha256", "md5", "domain", "ipv4", and "ipv6". | Required |
-| value | A comma separated list of indicators. More than one value can be supplied in order to upload multiple IOCs of the same type but with different values. Note that the uploaded IOCs will have the same properties (as supplied in other arguments). | Required |
-| action | Action to take when a host observes the custom IOC. Possible values are: no_action - Save the indicator for future use, but take no action. No severity required. allow - Applies to hashes only. Allow the indicator and do not detect it. Severity does not apply and should not be provided. prevent_no_ui - Applies to hashes only. Block and detect the indicator, but hide it from Activity > Detections. Has a default severity value. prevent - Applies to hashes only. Block the indicator and show it as a detection at the selected severity. detect - Enable detections for the indicator at the selected severity. | Required |
-| platforms | The platforms that the indicator applies to. You can enter multiple platform names, separated by commas. Possible values are: mac, windows and linux. | Required |
-| severity | The severity level to apply to this indicator. Possible values are: informational, low, medium, high and critical. | Required for the prevent and detect actions. Optional for no_action. |
-| expiration | The date on which the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ). | Optional |
-| source | The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters. | Optional |
-| description | A meaningful description of the indicator. Limited to 200 characters. | Optional |
-| applied_globally | Whether the indicator is applied globally. Either applied_globally or host_groups must be provided. Possible values are: true, false. | Optional |
-| host_groups | List of host group IDs that the indicator applies to. Can be retrieved by running the cs-falcon-list-host-groups command. Either applied_globally or host_groups must be provided. | Optional |
-| tags | List of tags to apply to the indicator. | Optional |
+| host_group_id | The ID of the host group. | Required |
+| host_ids | A comma-separated list of host agent IDs to add to the host group. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command. | Required |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.IOC.Type | string | The type of the IOC. |
-| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
-| CrowdStrike.IOC.ID | string | The full ID of the indicator. |
-| CrowdStrike.IOC.Severity | string | The severity level to apply to this indicator. |
-| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.Action | string | Action to take when a host observes the custom IOC. |
-| CrowdStrike.IOC.Expiration | date | The datetime when the indicator will expire. |
-| CrowdStrike.IOC.Description | string | The description of the IOC. |
-| CrowdStrike.IOC.CreatedTime | date | The datetime the IOC was created. |
-| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
-| CrowdStrike.IOC.ModifiedTime | date | The datetime the indicator was last modified. |
-| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
-| CrowdStrike.IOC.Tags | Unknown | The tags of the IOC. |
-| CrowdStrike.IOC.Platforms | Unknown | The platforms of the IOC. |
+| CrowdStrike.HostGroup.id | String | The ID of the host group. |
+| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
+| CrowdStrike.HostGroup.name | String | The name of the host group. |
+| CrowdStrike.HostGroup.description | String | The description of the host group. |
+| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
+| CrowdStrike.HostGroup.created_timestamp | Date | The datetime the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
+| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
+| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
+
#### Command Example
-```!cs-falcon-upload-custom-ioc ioc_type="domain" value="test.domain.com" action="prevent" severity="high" source="Demisto playbook" description="Test ioc" platforms="mac"```
+```!cs-falcon-add-host-group-members host_group_id="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" host_ids="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"```
#### Context Example
```json
{
"CrowdStrike": {
- "IOC": {
- "CreatedTime": "2020-10-02T13:55:26Z",
- "Description": "Test ioc",
- "Expiration": "2020-11-01T00:00:00Z",
- "ID": "4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r",
- "ModifiedTime": "2020-10-02T13:55:26Z",
- "Action": "prevent",
- "Severity": "high",
- "Source": "Demisto playbook",
- "Type": "domain",
- "Value": "test.domain.com",
- "Platforms": ["mac"]
+ "HostGroup": {
+ "assignment_rule": "device_id:[''],hostname:['falcon-crowdstrike-sensor-centos7','']",
+ "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "created_timestamp": "2021-08-22T07:48:35.111070562Z",
+ "description": "test_description_update",
+ "group_type": "static",
+ "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
+ "name": "test_name_update_1"
}
}
}
@@ -3572,74 +3216,61 @@ Uploads an indicator for CrowdStrike to monitor.
#### Human Readable Output
->### Custom IOC was created successfully
+>### Results
->|CreatedTime|Description|Expiration|ID|ModifiedTime|Action|Severity|Source|Type|Value|
->|---|---|---|---|---|---|---|---|---|---|
->| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r | 2020-10-02T13:55:26Z | prevent | high | Demisto playbook | domain | test.domain.com |
+>|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
+>|---|---|---|---|---|---|---|---|---|
+>| device_id:[''],hostname:['falcon-crowdstrike-sensor-centos7',''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-### 46. cs-falcon-update-custom-ioc
+### cs-falcon-remove-host-group-members
***
-Updates an indicator for CrowdStrike to monitor.
-
+Removes host group members.
#### Base Command
-`cs-falcon-update-custom-ioc`
+`cs-falcon-remove-host-group-members`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ioc_id | The ID of the IOC to delete. Can be retrieved by running the cs-falcon-search-custom-iocs command. | Required |
-| action | Action to take when a host observes the custom IOC. Possible values are: no_action - Save the indicator for future use, but take no action. No severity required. allow - Applies to hashes only. Allow the indicator and do not detect it. Severity does not apply and should not be provided. prevent_no_ui - Applies to hashes only. Block and detect the indicator, but hide it from Activity > Detections. Has a default severity value. prevent - Applies to hashes only. Block the indicator and show it as a detection at the selected severity. detect - Enable detections for the indicator at the selected severity. | Optional |
-| platforms | The platforms that the indicator applies to. You can enter multiple platform names, separated by commas. Possible values are: mac, windows and linux. | Optional |
-| severity | The severity level to apply to this indicator. Possible values are: informational, low, medium, high and critical. | Required for the prevent and detect actions. Optional for no_action. |
-| expiration | The date on which the indicator will become inactive (ISO 8601 format, i.e. YYYY-MM-DDThh:mm:ssZ). | Optional |
-| source | The source where this indicator originated. This can be used for tracking where this indicator was defined. Limited to 200 characters. | Optional |
-| description | A meaningful description of the indicator. Limited to 200 characters. | Optional |
-| applied_globally | Whether the indicator is applied globally. Possible values are: true and false. Either applied_globally or host_groups must be provided. | Optional |
-| host_groups | List of host group IDs that the indicator applies to. Can be retrieved by running the cs-falcon-list-host-groups command. Either applied_globally or host_groups must be provided. | Optional |
+| host_group_id | The ID of the host group. | Required |
+| host_ids | A comma-separated list of host agent IDs to remove from the host group. The list of host agent IDs can be retrieved by running the 'cs-falcon-search-device' command. | Required |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.IOC.Type | string | The type of the IOC. |
-| CrowdStrike.IOC.Value | string | The string representation of the indicator. |
-| CrowdStrike.IOC.ID | string | The full ID of the indicator \(type:value\). |
-| CrowdStrike.IOC.Policy | string | The policy of the indicator. |
-| CrowdStrike.IOC.Source | string | The source of the IOC. |
-| CrowdStrike.IOC.ShareLevel | string | The level at which the indicator will be shared. |
-| CrowdStrike.IOC.Expiration | string | The datetime when the indicator will expire. |
-| CrowdStrike.IOC.Description | string | The description of the IOC. |
-| CrowdStrike.IOC.CreatedTime | string | The datetime the IOC was created. |
-| CrowdStrike.IOC.CreatedBy | string | The identity of the user/process who created the IOC. |
-| CrowdStrike.IOC.ModifiedTime | string | The date and time the indicator was last modified. |
-| CrowdStrike.IOC.ModifiedBy | string | The identity of the user/process who last updated the IOC. |
+| CrowdStrike.HostGroup.id | String | The ID of the host group. |
+| CrowdStrike.HostGroup.group_type | String | The group type of the host group. |
+| CrowdStrike.HostGroup.name | String | The name of the host group. |
+| CrowdStrike.HostGroup.description | String | The description of the host group. |
+| CrowdStrike.HostGroup.created_by | String | The client that created the host group. |
+| CrowdStrike.HostGroup.created_timestamp | Date | The datetime the host group was created in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
+| CrowdStrike.HostGroup.modified_by | String | The client that modified the host group. |
+| CrowdStrike.HostGroup.modified_timestamp | Date | The datetime the host group was last modified in ISO time format. For example: 2019-10-17T13:41:48.487520845Z. |
#### Command Example
-```!cs-falcon-update-custom-ioc ioc_id="4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r" severity="high"```
+```!cs-falcon-remove-host-group-members host_group_id="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" host_ids="a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"```
#### Context Example
```json
{
"CrowdStrike": {
- "IOC": {
- "CreatedTime": "2020-10-02T13:55:26Z",
- "Description": "Test ioc",
- "Expiration": "2020-11-01T00:00:00Z",
- "ID": "4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r",
- "ModifiedTime": "2020-10-02T13:55:26Z",
- "Action": "prevent",
- "Severity": "high",
- "Source": "Demisto playbook",
- "Type": "domain",
- "Value": "test.domain.com"
+ "HostGroup": {
+ "assignment_rule": "device_id:[''],hostname:['']",
+ "created_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "created_timestamp": "2021-08-22T07:48:35.111070562Z",
+ "description": "test_description_update",
+ "group_type": "static",
+ "id": "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_by": "api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1",
+ "modified_timestamp": "2021-08-25T08:02:05.295663156Z",
+ "name": "test_name_update_1"
}
}
}
@@ -3647,43 +3278,47 @@ Updates an indicator for CrowdStrike to monitor.
#### Human Readable Output
->### Custom IOC was updated successfully
+>### Results
->|CreatedTime|Description|Expiration|ID|ModifiedTime|Action|Severity|Source|Type|Value|
->|---|---|---|---|---|---|---|---|---|---|
->| 2020-10-02T13:55:26Z | Test ioc | 2020-11-01T00:00:00Z | 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r | 2020-10-02T13:55:26Z | prevent | high | Demisto playbook | domain | test.domain.com |
+>|assignment_rule|created_by|created_timestamp|description|group_type|id|modified_by|modified_timestamp|name|
+>|---|---|---|---|---|---|---|---|---|
+>| device_id:[''],hostname:[''] | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-22T07:48:35.111070562Z | test_description_update | static | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | api-client-id:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2021-08-25T08:02:05.295663156Z | test_name_update_1 |
-### 47. cs-falcon-delete-custom-ioc
+### cs-falcon-resolve-incident
***
-Deletes a monitored indicator.
-
+Resolves and updates incidents using the specified settings.
#### Base Command
-`cs-falcon-delete-custom-ioc`
+`cs-falcon-resolve-incident`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ioc_id | The ID of the IOC to delete. Can be retrieved by running the cs-falcon-search-custom-iocs command. | Required |
-
+| ids | A comma-separated list of incident IDs. | Required |
+| status | The new status of the incident. Possible values are: New, In Progress, Reopened, Closed. | Optional |
+| assigned_to_uuid | UUID of a user to assign the incident to. Mutually exclusive with the 'username' argument. | Optional |
+| username | The username of a user to assign the incident to. Mutually exclusive with the 'assigned_to_uuid' argument. Using this argument instead of 'assigned_to_uuid' results in an additional API call to fetch the user's UUID. | Optional |
+| add_tag | Add a new tag to the incidents. | Optional |
+| remove_tag | Remove a tag from the incidents. | Optional |
+| add_comment | Add a comment to the incident. | Optional |
#### Context Output
There is no context output for this command.
-#### Command Example
-
-```!cs-falcon-delete-custom-ioc ioc_id="4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r"```
+#### Command Example
+```!cs-falcon-resolve-incident ids="inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1,inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" status="Closed"```
#### Human Readable Output
->Custom IOC 4f8c43311k1801ca4359fc07t319610482c2003mcde8934d5412b1781e841e9r was successfully deleted.
+>inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 changed successfully to Closed
+>inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 changed successfully to Closed
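+
+As a further hypothetical sketch (not recorded output; the IDs, tag, and comment below are placeholders), the tagging and comment arguments documented above could be combined in a single invocation:
+
+```!cs-falcon-resolve-incident ids="inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" status="In Progress" add_tag="reviewed" add_comment="Handled via Cortex XSOAR"```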
-### 48. cs-falcon-batch-upload-custom-ioc
+### cs-falcon-batch-upload-custom-ioc
***
Uploads a batch of indicators.
@@ -3696,7 +3331,7 @@ Uploads a batch of indicators.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| multiple_indicators_json | A JSON object with list of CS Falcon indicators to upload. | Required |
+| multiple_indicators_json | A JSON object with a list of CrowdStrike Falcon indicators to upload. | Required |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. Default is 180. | Optional |
#### Context Output
@@ -3718,7 +3353,7 @@ Uploads a batch of indicators.
| CrowdStrike.IOC.Tags | Unknown | The tags of the IOC. |
| CrowdStrike.IOC.Platforms | Unknown | The platforms of the IOC. |
-#### Command example
+#### Command Example
```!cs-falcon-batch-upload-custom-ioc multiple_indicators_json=`[{"description": "test", "expiration": "2022-02-17T13:47:57Z", "type": "ipv4", "severity": "Informational", "value": "1.1.8.9", "action": "no_action", "platforms": ["mac"], "source": "Cortex XSOAR", "applied_globally": true}]` ```
@@ -3756,7 +3391,7 @@ Uploads a batch of indicators.
>|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| no_action | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | test | 2022-02-17T13:47:57Z | "a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | 2022-02-16T17:17:25.992164453Z | mac | informational | Cortex XSOAR | ipv4 | 1.1.8.9 |
-### 49. cs-falcon-rtr-kill-process
+### cs-falcon-rtr-kill-process
***
Execute an active responder kill command on a single host.
@@ -3769,7 +3404,7 @@ Execute an active responder kill command on a single host.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_id | The host ID you would like to kill the given process for. | Required |
+| host_id | The host ID to kill the given process for. | Required |
| process_ids | A comma-separated list of process IDs to kill. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -3779,10 +3414,10 @@ Execute an active responder kill command on a single host.
| **Path** | **Type** | **Description** |
| --- | --- | --- |
| CrowdStrike.Command.kill.ProcessID | String | The process ID that was killed. |
-| CrowdStrike.Command.kill.Error | String | The error message raised if the command was failed. |
+| CrowdStrike.Command.kill.Error | String | The error message raised if the command failed. |
| CrowdStrike.Command.kill.HostID | String | The host ID. |
-#### Command example
+#### Command Example
```!cs-falcon-rtr-kill-process host_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 process_ids=5260,123```
@@ -3820,7 +3455,7 @@ Execute an active responder kill command on a single host.
>Note: you don't see the following IDs in the results as the request was failed for them.
> ID 123 failed as it was not found.
-### 50. cs-falcon-rtr-remove-file
+### cs-falcon-rtr-remove-file
***
Batch executes an RTR active-responder remove file across the hosts mapped to the given batch ID.
@@ -3833,8 +3468,8 @@ Batch executes an RTR active-responder remove file across the hosts mapped to th
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_ids | A comma-separated list of the hosts IDs you would like to remove the file for. | Required |
-| file_path | The path to a file or a directory you want to remove. | Required |
+| host_ids | A comma-separated list of the host IDs to remove the file for. | Required |
+| file_path | The path to a file or a directory to remove. | Required |
| os | The operating system of the hosts given. Since the remove command is different in each operating system, you can choose only one operating system. Possible values are: Windows, Linux, Mac. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -3846,7 +3481,7 @@ Batch executes an RTR active-responder remove file across the hosts mapped to th
| CrowdStrike.Command.rm.HostID | String | The host ID. |
| CrowdStrike.Command.rm.Error | String | The error message raised if the command failed. |
-#### Command example
+#### Command Example
```!cs-falcon-rtr-remove-file file_path="c:\\testfolder" host_ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 os=Windows```
@@ -3873,7 +3508,7 @@ Batch executes an RTR active-responder remove file across the hosts mapped to th
>|---|---|
>| a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 | Success |
-### 51. cs-falcon-rtr-list-processes
+### cs-falcon-rtr-list-processes
***
Executes an RTR active-responder ps command to get a list of active processes across the given host.
@@ -3886,7 +3521,7 @@ Executes an RTR active-responder ps command to get a list of active processes ac
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_id | The host ID you want to get the processes list from. | Required |
+| host_id | The host ID to get the processes list from. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -3894,9 +3529,9 @@ Executes an RTR active-responder ps command to get a list of active processes ac
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.Command.ps.Filename | String | The the name of the result file to be returned. |
+| CrowdStrike.Command.ps.Filename | String | The name of the result file to be returned. |
-#### Command example
+#### Command Example
```!cs-falcon-rtr-list-processes host_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1```
@@ -3934,7 +3569,7 @@ Executes an RTR active-responder ps command to get a list of active processes ac
>|---|
>|TOO MUCH INFO TO DISPLAY|
-### 52. cs-falcon-rtr-list-network-stats
+### cs-falcon-rtr-list-network-stats
***
Executes an RTR active-responder netstat command to get a list of network status and protocol statistics across the given host.
@@ -3947,7 +3582,7 @@ Executes an RTR active-responder netstat command to get a list of network status
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_id | The host ID you want to get the network status and protocol statistics list from. | Required |
+| host_id | The host ID to get the network status and protocol statistics list from. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -3955,9 +3590,9 @@ Executes an RTR active-responder netstat command to get a list of network status
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.Command.netstat.Filename | String | The the name of the result file to be returned. |
+| CrowdStrike.Command.netstat.Filename | String | The name of the result file to be returned. |
-#### Command example
+#### Command Example
```!cs-falcon-rtr-list-network-stats host_id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1```
@@ -3995,7 +3630,7 @@ Executes an RTR active-responder netstat command to get a list of network status
>|---|
>|TOO MUCH INFO TO DISPLAY|
-### 53. cs-falcon-rtr-read-registry
+### cs-falcon-rtr-read-registry
***
Executes an RTR active-responder read registry keys command across the given hosts. This command is valid only for Windows hosts.
@@ -4008,8 +3643,8 @@ Executes an RTR active-responder read registry keys command across the given hos
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_ids | A comma-separated list of the host IDs you want to get the registry keys from. | Required |
-| registry_keys | A comma-separated list of the registry keys, sub keys or value to get. | Required |
+| host_ids | A comma-separated list of the host IDs to get the registry keys from. | Required |
+| registry_keys | A comma-separated list of the registry keys, sub-keys, or values to get. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -4017,7 +3652,7 @@ Executes an RTR active-responder read registry keys command across the given hos
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-rtr-read-registry host_ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 registry_keys=`
HKEY_LOCAL_MACHINE,HKEY_USERS````
@@ -4064,7 +3699,7 @@ HKEY_LOCAL_MACHINE,HKEY_USERS````
>| reg-a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1HKEY_USERS | TOO MUCH INFO TO DISPLAY |
>| reg-a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1HKEY_LOCAL_MACHINE | TOO MUCH INFO TO DISPLAY |
-### 54. cs-falcon-rtr-list-scheduled-tasks
+### cs-falcon-rtr-list-scheduled-tasks
***
Executes an RTR active-responder netstat command to get a list of scheduled tasks across the given host. This command is valid only for Windows hosts.
@@ -4077,7 +3712,7 @@ Executes an RTR active-responder netstat command to get a list of scheduled task
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_ids | A comma-separated list of the hosts IDs you want to get the list of scheduled tasks from. | Required |
+| host_ids | A comma-separated list of the hosts IDs to get the list of scheduled tasks from. | Required |
| queue_offline | Whether the command will run against an offline-queued session and be queued for execution when the host comes online. | Optional |
| timeout | The amount of time (in seconds) that a request will wait for a client to establish a connection to a remote machine before a timeout occurs. | Optional |
@@ -4085,7 +3720,7 @@ Executes an RTR active-responder netstat command to get a list of scheduled task
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-rtr-list-scheduled-tasks host_ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1```
@@ -4123,7 +3758,7 @@ There is no context output for this command.
---------------------------|---|
>| TOO MUCH INFO TO DISPLAY |
-### 55. cs-falcon-rtr-retrieve-file
+### cs-falcon-rtr-retrieve-file
***
Gets the RTR extracted file contents for the specified file path.
@@ -4136,9 +3771,9 @@ Gets the RTR extracted file contents for the specified file path.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_ids | A comma-separated list of the hosts IDs you want to get the file from. | Required |
+| host_ids | A comma-separated list of the host IDs to get the file from. | Required |
| file_path | The file path of the required file to extract. | Required |
-| filename | The file name to use for the archive name and the file within the archive. | Optional |
+| filename | The filename to use for the archive name and the file within the archive. | Optional |
| interval_in_seconds | Interval between polling. Default is 60 seconds. Must be higher than 10. | Optional |
| hosts_and_requests_ids | This is an internal argument used for the polling process, not to be used by the user. | Optional |
| SHA256 | This is an internal argument used for the polling process, not to be used by the user. | Optional |
@@ -4150,387 +3785,147 @@ Gets the RTR extracted file contents for the specified file path.
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.File.FileName | String | The file name. |
+| CrowdStrike.File.FileName | String | The filename. |
| CrowdStrike.File.HostID | String | The host ID. |
| File.Size | Number | The size of the file. |
| File.SHA1 | String | The SHA1 hash of the file. |
| File.SHA256 | String | The SHA256 hash of the file. |
-| File.SHA512 | String | The SHA512 hash of the file. |
-| File.Name | String | The name of the file. |
-| File.SSDeep | String | The SSDeep hash of the file. |
-| File.EntryID | String | The entry ID of the file. |
-| File.Info | String | Information about the file. |
-| File.Type | String | The file type. |
-| File.MD5 | String | The MD5 hash of the file. |
-| File.Extension | String | The extension of the file. |
-
-#### Command example
-
-```!cs-falcon-rtr-retrieve-file file_path=`C:\Windows\System32\Windows.Media.FaceAnalysis.dll` host_ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1,a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1```
-
-#### Human Readable Output
-
-> Waiting for the polling execution
-
-### 56. cs-falcon-get-detections-for-incident
-
-***
-Gets the detections for a specific incident.
-
-#### Base Command
-
-`cs-falcon-get-detections-for-incident`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| incident_id | The incident ID to get detections for. A list of all available incident IDs can be retrieved by running the 'cs-falcon-list-incident-summaries' command. | Required |
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.IncidentDetection.incident_id | String | The incident ID. |
-| CrowdStrike.IncidentDetection.behavior_id | String | The behavior ID connected to the incident. |
-| CrowdStrike.IncidentDetection.detection_ids | String | A list of detection IDs connected to the incident. |
-
-#### Command example
-
-```!cs-falcon-get-detections-for-incident incident_id=`inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1````
-
-#### Context Example
-
-```json
-{
- "CrowdStrike": {
- "IncidentDetection": {
- "behavior_id": "ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162589633341-10303-6705920",
- "detection_ids": [
- "ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38655034604"
- ],
- "incident_id": "inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"
- }
- }
-}
-```
-
-#### Human Readable Output
-
->### Detection For Incident
-
->|behavior_id|detection_ids|incident_id|
->|---|---|---|
->| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162590282130-10303-6707968 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38656254663 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
->| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162596456872-10303-6710016 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38657629548 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
->| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162597577534-10305-6712576 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38658614774 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
->| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162589633341-10303-6705920 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38655034604 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
-
-
-### 17. cs-falcon-update-incident-comment
-
----
-Updates CrowdStrike Incident with the comment.
-
-#### Base Command
-
-`cs-falcon-update-incident-comment`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| ids | A comma-separated list of incident IDs. | Required |
-| comment | A comment added to the CrowdStrike incident. | Required |
-
-#### Context Output
-
-#### Command Example
-
-`cs-falcon-update-incident-comment ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 comment="Some comment"`
-
-
-# Spotlight
-
-### Using Spotlight APIs
-
-Spotlight identifies and gives info about specific vulnerabilities on your hosts using the Falcon sensor.
-
-### Required API client scope
-
-To access the Spotlight API, your API client must be assigned the spotlight-vulnerabilities:read scope.
-
-### Validating API data
-
-The Falcon sensor continuously monitors hosts for any changes and reports them as they occur.
-Depending on the timing of requests, Spotlight APIs can return values that are different from those shown by the Falcon console or an external source.
-There are other factors that can cause differences between API responses and other data sources.
-
-### API query syntax
-
-If an API query doesn’t exactly match the query used on the Spotlight Vulnerabilities page, the values might differ.
-
-### Expired vulnerabilities in Spotlight APIs
-
-If a host is deleted or inactive for 45 days, the status of vulnerabilities on that host changes to expired. Expired vulnerabilities are removed from Spotlight after 3 days.
-Expired vulnerabilities are only visible in API responses and are not included in reports or the Falcon console.
-An external data source might not use the same data retention policy, which can lead to discrepancies with Spotlight APIs. For more info, see Data retention in Spotlight [https://falcon.crowdstrike.com/login/?next=%2Fdocumentation%2F43%2Ffalcon-spotlight-overview#data-retention-in-spotlight].
-
-### The following commands uses the Spotlight API:
-
-### cs-falcon-spotlight-search-vulnerability
-
-***
-Retrieve vulnerability details according to the selected filter. Each request requires at least one filter parameter. Supported with the CrowdStrike Spotlight license.
-
-#### Base Command
-
-`cs-falcon-spotlight-search-vulnerability`
-
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| filter | Limit the vulnerabilities returned to specific properties. Each value must be enclosed in single quotes and placed immediately after the colon with no space. For example, 'filter=status:'open'+cve.id:['CVE-2013-3900','CVE-2021-1675']'. | Optional |
-| aid | Unique agent identifier (AID) of a sensor. | Optional |
-| cve_id | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation. | Optional |
-| cve_severity | Severity of the CVE. The possible values are: CRITICAL, HIGH, MEDIUM, LOW, UNKNOWN, or NONE. | Optional |
-| tags | Name of a tag assigned to a host. Retrieve tags from Host Tags APIs. | Optional |
-| status | Status of a vulnerability. This filter supports multiple values and negation. The possible values are: open, closed, reopen, expired. | Optional |
-| platform_name | Operating system platform. This filter supports negation. The possible values are: Windows, Mac, Linux. | Optional |
-| host_group | Unique system-assigned ID of a host group. Retrieve the host group ID from Host Group APIs. | Optional |
-| host_type | Type of host a sensor is running on. | Optional |
-| last_seen_within | Filter for vulnerabilities based on the number of days since a host last connected to CrowdStrike Falcon. Enter a numeric value from 3 to 45 to indicate the number of days you want to look back. Example- last_seen_within:10. | Optional |
-| is_suppressed | Indicates if the vulnerability is suppressed by a suppression rule. Possible values are: true, false. | Optional |
-| display_remediation_info | Display remediation information type of data to be returned for each vulnerability entity. Possible values are: True, False. Default is True. | Optional |
-| display_evaluation_logic_info | Whether to return logic information type of data for each vulnerability entity. Possible values are: True, False. Default is True. | Optional |
-| display_host_info | Whether to return host information type of data for each vulnerability entity. Possible values are: True, False. Default is False. | Optional |
-| limit | Maximum number of items to return (1-5000). Default is 50. | Optional |
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| CrowdStrike.Vulnerability.id | String | Unique system-assigned ID of the vulnerability. |
-| CrowdStrike.Vulnerability.cid | String | Unique system-generated customer identifier \(CID\) of the account. |
-| CrowdStrike.Vulnerability.aid | String | Unique agent identifier \(AID\) of the sensor where the vulnerability was found. |
-| CrowdStrike.Vulnerability.created_timestamp | Date | UTC date and time of when the vulnerability was created in Spotlight. |
-| CrowdStrike.Vulnerability.updated_timestamp | Date | UTC date and time of the last update made on the vulnerability. |
-| CrowdStrike.Vulnerability.status | String | Vulnerability's current status. Possible values are: open, closed, reopen, or expired. |
-| CrowdStrike.Vulnerability.apps.product_name_version | String | Name and version of the product associated with the vulnerability. |
-| CrowdStrike.Vulnerability.apps.sub_status | String | Status of each product associated with the vulnerability. Possible values are: open, closed, or reopen. |
-| CrowdStrike.Vulnerability.apps.remediation.ids | String | Remediation ID of each product associated with the vulnerability. |
-| CrowdStrike.Vulnerability.host_info.hostname | String | Name of the machine. |
-| CrowdStrike.Vulnerability.host_info.instance_id | String | Cloud instance ID of the host. |
-| CrowdStrike.Vulnerability.host_info.service_provider_account_id | String | Cloud service provider account ID for the host. |
-| CrowdStrike.Vulnerability.host_info.service_provider | String | Cloud service provider for the host. |
-| CrowdStrike.Vulnerability.host_info.os_build | String | Operating system build. |
-| CrowdStrike.Vulnerability.host_info.product_type_desc | String | Type of host a sensor is running on. |
-| CrowdStrike.Vulnerability.host_info.local_ip | String | Device's local IP address. |
-| CrowdStrike.Vulnerability.host_info.machine_domain | String | Active Directory domain name. |
-| CrowdStrike.Vulnerability.host_info.os_version | String | Operating system version. |
-| CrowdStrike.Vulnerability.host_info.ou | String | Active directory organizational unit name. |
-| CrowdStrike.Vulnerability.host_info.site_name | String | Active directory site name. |
-| CrowdStrike.Vulnerability.host_info.system_manufacturer | String | Name of the system manufacturer. |
-| CrowdStrike.Vulnerability.host_info.groups.id | String | Array of host group IDs that the host is assigned to. |
-| CrowdStrike.Vulnerability.host_info.groups.name | String | Array of host group names that the host is assigned to. |
-| CrowdStrike.Vulnerability.host_info.tags | String | Name of a tag assigned to a host. |
-| CrowdStrike.Vulnerability.host_info.platform | String | Operating system platform. This filter supports negation. |
-| CrowdStrike.Vulnerability.remediation.entities.id | String | Unique ID of the remediation. |
-| CrowdStrike.Vulnerability.remediation.entities.reference | String | Relevant reference for the remediation that can be used to get additional details for the remediation. |
-| CrowdStrike.Vulnerability.remediation.entities.title | String | Short description of the remediation. |
-| CrowdStrike.Vulnerability.remediation.entities.action | String | Expanded description of the remediation. |
-| CrowdStrike.Vulnerability.remediation.entities.link | String | Link to the remediation page for the vendor. In certain cases, this field is null. |
-| CrowdStrike.Vulnerability.cve.id | String | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database \(NVD\). |
-| CrowdStrike.Vulnerability.cve.base_score | Number | Base score of the CVE \(float value between 1 and 10\). |
-| CrowdStrike.Vulnerability.cve.severity | String | CVSS severity rating of the vulnerability. |
-| CrowdStrike.Vulnerability.cve.exploit_status | Number | Numeric value of the most severe known exploit. |
-| CrowdStrike.Vulnerability.cve.exprt_rating | String | ExPRT rating assigned by CrowdStrike's predictive AI rating system. |
-| CrowdStrike.Vulnerability.cve.description | String | Brief description of the CVE. |
-| CrowdStrike.Vulnerability.cve.published_date | Date | UTC timestamp with the date and time of when the vendor published the CVE. |
-| CrowdStrike.Vulnerability.cve.vendor_advisory | String | Link to the vendor page where the CVE was disclosed. |
-| CrowdStrike.Vulnerability.cve.exploitability_score | Number | Exploitability score of the CVE \(float values from 1-4\). |
-| CrowdStrike.Vulnerability.cve.impact_score | Number | Impact score of the CVE \(float values from 1-6\). |
-| CrowdStrike.Vulnerability.cve.vector | String | Textual representation of the metric values used to score the vulnerability. |
-| CrowdStrike.Vulnerability.cve.remediation_level | String | CVSS remediation level of the vulnerability \(U = Unavailable, or O = Official fix\). |
-| CrowdStrike.Vulnerability.cve.cisa_info.is_cisa_kev | Boolean | Whether to filter for vulnerabilities that are in the CISA Known Exploited Vulnerabilities \(KEV\) catalog. |
-| CrowdStrike.Vulnerability.cve.cisa_info.due_date | Date | Date before which CISA mandates subject organizations to patch the vulnerability. |
-| CrowdStrike.Vulnerability.cve.spotlight_published_date | Date | UTC timestamp with the date and time Spotlight enabled coverage for the vulnerability. |
-| CrowdStrike.Vulnerability.cve.actors | String | Adversaries associated with the vulnerability. |
-| CrowdStrike.Vulnerability.cve.name | String | The vulnerability name. |
-
-#### Command example
-
-``` cs-falcon-spotlight-search-vulnerability filter=status:['open','closed'] cve_id=CVE-2021-2222 cve_severity='LOW,HIGH' display_host_info=false display_evaluation_logic_info=false display_remediation_info=false limit=1 ```
-
-#### Context Example
-
-```json
-{
- "resources": [
- {
- "id": "id_num",
- "cid": "cid_num",
- "aid": "aid_num",
- "created_timestamp": "2021-07-13T01:12:57Z",
- "updated_timestamp": "2022-10-27T18:32:21Z",
- "status": "open",
- "apps": [
- {
- "product_name_version": "product",
- "sub_status": "open",
- "remediation": {
- "ids": [
- "1234"
- ]
- },
- "evaluation_logic": {
- "id": "1234"
- }
- }
- ],
- "suppression_info": {
- "is_suppressed": false
- },
- "cve": {
- "id": "CVE-2021-2222",
- "base_score": 5.5,
- "severity": "MEDIUM",
- "exploit_status": 0,
- "exprt_rating": "LOW",
- "remediation_level": "O",
- "cisa_info": {
- "is_cisa_kev": false
- },
- "spotlight_published_date": "2021-05-10T17:08:00Z",
- "description": "description\n",
- "published_date": "2021-02-25T23:15:00Z",
- "vendor_advisory": [
- "web address"
- ],
- "exploitability_score": 1.8,
- "impact_score": 3.6,
- "vector": "vendor"
- }
- }
- ]
-}
-```
+| File.SHA512 | String | The SHA512 hash of the file. |
+| File.Name | String | The name of the file. |
+| File.SSDeep | String | The SSDeep hash of the file. |
+| File.EntryID | String | The entry ID of the file. |
+| File.Info | String | Information about the file. |
+| File.Type | String | The file type. |
+| File.MD5 | String | The MD5 hash of the file. |
+| File.Extension | String | The extension of the file. |
-| CVE ID | CVE Severity | CVE Base Score | CVE Published Date | CVE Impact Score | CVE Exploitability Score | CVE Vector |
-| --- | --- | --- | --- | --- | --- | --- |
-| CVE-2021-2222 | LOW | 5.5 | 2021-05-10T17:08:00Z | 3.6 | 0 | vendor |
+#### Command Example
-### cs-falcon-spotlight-list-host-by-vulnerability
+```!cs-falcon-rtr-retrieve-file file_path=`C:\Windows\System32\Windows.Media.FaceAnalysis.dll` host_ids=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1,a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1```
+
+#### Human Readable Output
+
+> Waiting for the polling execution
+
+### cs-falcon-get-detections-for-incident
***
-Retrieve vulnerability details for a specific ID and host. Supported with the CrowdStrike Spotlight license.
+Gets the detections for a specific incident.
#### Base Command
-`cs-falcon-spotlight-list-host-by-vulnerability`
+`cs-falcon-get-detections-for-incident`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| limit | Maximum number of items to return (1-5000). Default is 50. | Optional |
-| cve_ids | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation. | Required |
+| incident_id | The incident ID to get detections for. A list of all available incident IDs can be retrieved by running the 'cs-falcon-list-incident-summaries' command. | Required |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.VulnerabilityHost.id | String | Unique system-assigned ID of the vulnerability. |
-| CrowdStrike.VulnerabilityHost.cid | String | Unique system-generated customer identifier \(CID\) of the account. |
-| CrowdStrike.VulnerabilityHost.aid | String | Unique agent identifier \(AID\) of the sensor where the vulnerability was found. |
-| CrowdStrike.VulnerabilityHost.created_timestamp | Date | UTC date and time of when the vulnerability was created in Spotlight. |
-| CrowdStrike.VulnerabilityHost.updated_timestamp | Date | UTC date and time of the last update made on the vulnerability. |
-| CrowdStrike.VulnerabilityHost.status | String | Vulnerability's current status. Possible values are: open, closed, reopen, or expired. |
-| CrowdStrike.VulnerabilityHost.apps.product_name_version | String | Name and version of the product associated with the vulnerability. |
-| CrowdStrike.VulnerabilityHost.apps.sub_status | String | Status of each product associated with the vulnerability. Possible values are: open, closed, or reopen. |
-| CrowdStrike.VulnerabilityHost.apps.remediation.ids | String | Remediation ID of each product associated with the vulnerability. |
-| CrowdStrike.VulnerabilityHost.apps.evaluation_logic.id | String | Unique system-assigned ID of the vulnerability evaluation logic. |
-| CrowdStrike.VulnerabilityHost.suppression_info.is_suppressed | Boolean | Indicates if the vulnerability is suppressed by a suppression rule. |
-| CrowdStrike.VulnerabilityHost.host_info.hostname | String | Name of the machine. |
-| CrowdStrike.VulnerabilityHost.host_info.local_ip | String | Device's local IP address. |
-| CrowdStrike.VulnerabilityHost.host_info.machine_domain | String | Active Directory domain name. |
-| CrowdStrike.VulnerabilityHost.host_info.os_version | String | Operating system version. |
-| CrowdStrike.VulnerabilityHost.host_info.ou | String | Active directory organizational unit name. |
-| CrowdStrike.VulnerabilityHost.host_info.site_name | String | Active directory site name. |
-| CrowdStrike.VulnerabilityHost.host_info.system_manufacturer | String | Name of the system manufacturer. |
-| CrowdStrike.VulnerabilityHost.host_info.platform | String | Operating system platform. This filter supports negation. |
-| CrowdStrike.VulnerabilityHost.host_info.instance_id | String | Cloud instance ID of the host. |
-| CrowdStrike.VulnerabilityHost.host_info.service_provider_account_id | String | Cloud service provider account ID for the host. |
-| CrowdStrike.VulnerabilityHost.host_info.service_provider | String | Cloud service provider for the host. |
-| CrowdStrike.VulnerabilityHost.host_info.os_build | String | Operating system build. |
-| CrowdStrike.VulnerabilityHost.host_info.product_type_desc | String | Type of host a sensor is running on. |
-| CrowdStrike.VulnerabilityHost.cve.id | String | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database \(NVD\). |
+| CrowdStrike.IncidentDetection.incident_id | String | The incident ID. |
+| CrowdStrike.IncidentDetection.behavior_id | String | The behavior ID connected to the incident. |
+| CrowdStrike.IncidentDetection.detection_ids | String | A list of detection IDs connected to the incident. |
-#### Command example
+#### Command Example
-``` cs-falcon-spotlight-list-host-by-vulnerability cve_ids=CVE-2021-2222 ```
+```!cs-falcon-get-detections-for-incident incident_id=`inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1````
#### Context Example
```json
{
- {
- "id": "id",
- "cid": "cid",
- "aid": "aid",
- "created_timestamp": "2021-09-16T15:12:42Z",
- "updated_timestamp": "2022-10-19T00:54:43Z",
- "status": "open",
- "apps": [
- {
- "product_name_version": "prod",
- "sub_status": "open",
- "remediation": {
- "ids": [
- "id"
- ]
- },
- "evaluation_logic": {
- "id": "id"
- }
- }
+ "CrowdStrike": {
+ "IncidentDetection": {
+ "behavior_id": "ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162589633341-10303-6705920",
+ "detection_ids": [
+ "ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38655034604"
],
- "suppression_info": {
- "is_suppressed": false
- },
- "host_info": {
- "hostname": "host",
- "local_ip": "10.128.0.7",
- "machine_domain": "",
- "os_version": "version",
- "ou": "",
- "site_name": "",
- "system_manufacturer": "manufactor",
- "tags": [],
- "platform": "Windows",
- "instance_id": "instance id",
- "service_provider_account_id": "id",
- "service_provider": "id",
- "os_build": "os build",
- "product_type_desc": "Server"
- },
- "cve": {
- "id": "CVE-20212-2222"
- }
+ "incident_id": "inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"
}
-
+ }
}
```
#### Human Readable Output
-| CVE ID | Host Info hostname | Host Info os Version | Host Info Product Type Desc | Host Info Local IP | Host Info ou | Host Info Machine Domain | Host Info Site Name | CVE Exploitability Score | CVE Vector |
-| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
-| CVE-20212-2222 | host | 1 | Server | ip | | | site | 5.5 | |
+>### Detection For Incident
+
+>|behavior_id|detection_ids|incident_id|
+>|---|---|---|
+>| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162590282130-10303-6707968 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38656254663 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
+>| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162596456872-10303-6710016 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38657629548 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
+>| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162597577534-10305-6712576 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38658614774 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
+>| ind:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:162589633341-10303-6705920 | ldt:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:38655034604 | inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 |
+
+### get-mapping-fields
+
+***
+Returns the list of fields to map in outgoing mirroring. This command is only used for debugging purposes.
+
+#### Base Command
+
+`get-mapping-fields`
+
+#### Context Output
+
+There is no context output for this command.
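+
+#### Command Example
+
+An illustrative debug invocation; per the description above the command is used for debugging only and is typically run without arguments:
+
+```!get-mapping-fields```
+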
+### get-remote-data
+
+***
+Gets remote data from a remote incident or detection. This method does not update the current incident or detection, and should be used for debugging purposes only.
+
+#### Base Command
+
+`get-remote-data`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | The remote incident or detection ID. | Required |
+| lastUpdate | The UTC timestamp in seconds of the last update. The incident or detection is only updated if it was modified after the last update time. Default is 0. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
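+
+#### Command Example
+
+An illustrative debug invocation using a placeholder incident ID in the same format as the examples above; `id` and `lastUpdate` are the arguments documented in the Input table:
+
+```!get-remote-data id="inc:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1" lastUpdate=0```
+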
+### get-modified-remote-data
+
+***
+Gets the list of incidents and detections that were modified since the last update time. This method is used for debugging purposes. The get-modified-remote-data command is used as part of the Mirroring feature that was introduced in Cortex XSOAR version 6.1.
+
+#### Base Command
+
+`get-modified-remote-data`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| lastUpdate | Date string (or UTC timestamp in seconds) representing the last update time. The incident or detection is only returned if it was modified after the last update time. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
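+
+#### Command Example
+
+An illustrative debug invocation with a placeholder value for `lastUpdate` (any value in the format described in the Input table can be used):
+
+```!get-modified-remote-data lastUpdate="2023-01-01T00:00:00Z"```
+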
+### update-remote-system
+
+***
+Updates the remote incident or detection with local incident or detection changes. This method is only used for debugging purposes and will not update the current incident or detection.
+
+#### Base Command
+
+`update-remote-system`
+
+#### Context Output
+
+There is no context output for this command.
### cve
+***
Retrieve vulnerability details according to the selected filter. Each request requires at least one filter parameter. Supported with the CrowdStrike Spotlight license.
#### Base Command
@@ -4541,10 +3936,19 @@ Retrieve vulnerability details according to the selected filter. Each request re
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| cve_id | Deprecated. Use cve instead. | Optional |
-| cve | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation | Optional |
+| cve_id | Deprecated. Use cve instead. | Optional |
+| cve | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| DBotScore.Indicator | String | The indicator that was tested. |
+| DBotScore.Type | String | The indicator type. |
+| DBotScore.Vendor | String | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
-#### Command example
+#### Command Example
``` cve cve_id=CVE-2021-2222 ```
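+
+Since `cve_id` is deprecated, an equivalent illustrative invocation using the `cve` argument documented above (the CVE ID is a placeholder) would be:
+
+```!cve cve=CVE-2021-2222```
+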
@@ -4554,6 +3958,7 @@ Retrieve vulnerability details according to the selected filter. Each request re
| --- | --- | --- | --- |
| CVE-2021-2222 | HIGH | 2021-09-16T15:12:42Z | 1 |
+
### cs-falcon-create-ml-exclusion
***
@@ -4581,11 +3986,11 @@ Create an ML exclusion.
| CrowdStrike.MLExclusion.regexp_value | String | A regular expression for matching the excluded value. |
| CrowdStrike.MLExclusion.value_hash | String | An hash of the value field. |
| CrowdStrike.MLExclusion.excluded_from | String | What the exclusion applies to \(e.g., a specific ML model\). |
-| CrowdStrike.MLExclusion.groups.id | String | Group's ID that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.MLExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.MLExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.MLExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.MLExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -4596,7 +4001,7 @@ Create an ML exclusion.
| CrowdStrike.MLExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.MLExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-create-ml-exclusion value=/demo-test excluded_from=blocking groups=999999```
@@ -4648,7 +4053,7 @@ Create an ML exclusion.
### cs-falcon-update-ml-exclusion
***
-Updates an ML exclusion. At least one argument is required in addition to the ID argument.
+Updates an ML exclusion. At least one argument is required in addition to the id argument.
#### Base Command
@@ -4670,13 +4075,13 @@ Updates an ML exclusion. At least one argument is required in addition to the ID
| CrowdStrike.MLExclusion.id | String | The ML exclusion ID. |
| CrowdStrike.MLExclusion.value | String | The ML exclusion value. |
| CrowdStrike.MLExclusion.regexp_value | String | A regular expression for matching the excluded value. |
-| CrowdStrike.MLExclusion.value_hash | String | An hash of the value field. |
+| CrowdStrike.MLExclusion.value_hash | String | A hash of the value field. |
| CrowdStrike.MLExclusion.excluded_from | String | What the exclusion applies to \(e.g., a specific ML model\). |
-| CrowdStrike.MLExclusion.groups.id | String | Groups ID that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.MLExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.MLExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.MLExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.MLExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -4687,7 +4092,7 @@ Updates an ML exclusion. At least one argument is required in addition to the ID
| CrowdStrike.MLExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.MLExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-update-ml-exclusion id=a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 comment=demo-comment```
@@ -4756,7 +4161,7 @@ Delete the ML exclusions by ID.
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-delete-ml-exclusion ids=123456```
@@ -4771,24 +4176,17 @@ Get a list of ML exclusions by specifying their IDs, value, or a specific filter
#### Base Command
-### cs-falcon-search-ml-exclusion
-
-***
-Get a list of ML exclusions by specifying their IDs, value, or a specific filter.
-
-#### Base Command
-
`cs-falcon-search-ml-exclusion`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: value:'test'. Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value. For more information, see: . | Optional |
+| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'`. For example: value:'test'. Available filters: applied_globally, created_by, created_on, last_modified, modified_by, value. For more information, see: https://falcon.crowdstrike.com/documentation/page/d3c84a1b/falcon-query-language-fql. | Optional |
| value | The value by which the exclusions should be filtered. | Optional |
| ids | A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and value. | Optional |
-| limit | The maximum number of records to return. [1-500]. Applies only if the IDs argument is not supplied. | Optional |
-| offset | The offset to start retrieving records from. Applies only if the IDs argument is not supplied. | Optional |
+| limit | The maximum number of records to return. [1-500]. Applies only if the ids argument is not supplied. | Optional |
+| offset | The offset to start retrieving records from. Applies only if the ids argument is not supplied. | Optional |
| sort | How to sort the retrieved exclusions. Possible values are: applied_globally.asc, applied_globally.desc, created_by.asc, created_by.desc, created_on.asc, created_on.desc, last_modified.asc, last_modified.desc, modified_by.asc, modified_by.desc, value.asc, value.desc. | Optional |
#### Context Output
@@ -4800,11 +4198,11 @@ Get a list of ML exclusions by specifying their IDs, value, or a specific filter
| CrowdStrike.MLExclusion.regexp_value | String | A regular expression for matching the excluded value. |
| CrowdStrike.MLExclusion.value_hash | String | A hash of the value field. |
| CrowdStrike.MLExclusion.excluded_from | String | What the exclusion applies to \(e.g., a specific ML model\). |
-| CrowdStrike.MLExclusion.groups.id | String | Groups ID that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.MLExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.MLExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.MLExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.MLExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.MLExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.MLExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -4815,7 +4213,7 @@ Get a list of ML exclusions by specifying their IDs, value, or a specific filter
| CrowdStrike.MLExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.MLExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-search-ml-exclusion limit=1```
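+
+An additional hypothetical invocation using the FQL `filter` argument described in the Input table above; the filtered value is a placeholder taken from the creation example earlier in this document:
+
+```!cs-falcon-search-ml-exclusion filter="value:'/demo-test'"```
+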
@@ -4881,7 +4279,7 @@ Create an IOA exclusion.
| pattern_name | Name of the exclusion pattern. | Optional |
| pattern_id | ID of the exclusion pattern. | Required |
| cl_regex | Command line regular expression. | Required |
-| ifn_regex | Image file name regular expression. | Required |
+| ifn_regex | Image filename regular expression. | Required |
| comment | Comment describing why the exclusions were created. | Optional |
| description | Exclusion description. | Optional |
| detection_json | JSON formatted detection template. | Optional |
@@ -4896,14 +4294,14 @@ Create an IOA exclusion.
| CrowdStrike.IOAExclusion.description | String | A description of the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_id | String | The identifier of the pattern associated with the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_name | String | The name of the pattern associated with the IOA exclusion. |
-| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for file name matching. |
+| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for filename matching. |
| CrowdStrike.IOAExclusion.cl_regex | String | A regular expression used for command line matching. |
| CrowdStrike.IOAExclusion.detection_json | String | A JSON string that describes the detection logic for the IOA exclusion. |
-| CrowdStrike.IOAExclusion.groups.id | String | Groups ID that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.IOAExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.IOAExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.IOAExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.IOAExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -4914,7 +4312,7 @@ Create an IOA exclusion.
| CrowdStrike.IOAExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.IOAExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-create-ioa-exclusion exclusion_name=demo-test pattern_id=101010 cl_regex=.* ifn_regex="c:\\\\windows\\\\system32\\\\test.exe" groups=999999```
@@ -4967,7 +4365,7 @@ Create an IOA exclusion.
### cs-falcon-update-ioa-exclusion
***
-Updates an IOA exclusion. At least one argument is required in addition to the ID argument.
+Updates an IOA exclusion. At least one argument is required in addition to the id argument.
#### Base Command
@@ -4982,7 +4380,7 @@ Updates an IOA exclusion. At least one argument is required in addition to the I
| pattern_id | ID of the exclusion pattern to update. | Optional |
| pattern_name | Name of the exclusion pattern. | Optional |
| cl_regex | Command line regular expression. | Optional |
-| ifn_regex | Image file name regular expression. | Optional |
+| ifn_regex | Image filename regular expression. | Optional |
| comment | Comment describing why the exclusions was created. | Optional |
| description | Exclusion description. | Optional |
| detection_json | JSON formatted detection template. | Optional |
@@ -4997,14 +4395,14 @@ Updates an IOA exclusion. At least one argument is required in addition to the I
| CrowdStrike.IOAExclusion.description | String | A description of the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_id | String | The identifier of the pattern associated with the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_name | String | The name of the pattern associated with the IOA exclusion. |
-| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for file name matching. |
+| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for filename matching. |
| CrowdStrike.IOAExclusion.cl_regex | String | A regular expression used for command line matching. |
| CrowdStrike.IOAExclusion.detection_json | String | A JSON string that describes the detection logic for the IOA exclusion. |
-| CrowdStrike.IOAExclusion.groups.id | String | Groups ID that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.IOAExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.IOAExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.IOAExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.IOAExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -5015,7 +4413,7 @@ Updates an IOA exclusion. At least one argument is required in addition to the I
| CrowdStrike.IOAExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.IOAExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-update-ioa-exclusion id=123456 description=demo-description```
@@ -5084,7 +4482,7 @@ Delete the IOA exclusions by ID.
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-delete-ioa-exclusion ids=123456```
@@ -5106,11 +4504,11 @@ Get a list of IOA exclusions by specifying their IDs or a filter.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'` for example: name:'test'. Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern. For more information, see: . | Optional |
+| filter | A custom filter by which the exclusions should be filtered. The syntax follows the pattern `<property>:[operator]'<value>'`. For example: name:'test'. Available filters: applied_globally, created_by, created_on, name, last_modified, modified_by, value, pattern. For more information, see: https://www.falconpy.io/Service-Collections/Falcon-Query-Language. | Optional |
| name | The name by which the exclusions should be filtered. | Optional |
| ids | A comma-separated list of exclusion IDs to retrieve. The IDs overwrite the filter and name. | Optional |
-| limit | The limit of how many exclusions to retrieve. Default is 50. Applies only if the IDs argument is not supplied. | Optional |
-| offset | The offset of how many exclusions to skip. Default is 0. Applies only if the IDs argument is not supplied. | Optional |
+| limit | The limit of how many exclusions to retrieve. Default is 50. Applies only if the ids argument is not supplied. | Optional |
+| offset | The offset of how many exclusions to skip. Default is 0. Applies only if the ids argument is not supplied. | Optional |
#### Context Output
@@ -5121,14 +4519,14 @@ Get a list of IOA exclusions by specifying their IDs or a filter.
| CrowdStrike.IOAExclusion.description | String | A description of the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_id | String | The identifier of the pattern associated with the IOA exclusion. |
| CrowdStrike.IOAExclusion.pattern_name | String | The name of the pattern associated with the IOA exclusion. |
-| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for file name matching. |
+| CrowdStrike.IOAExclusion.ifn_regex | String | A regular expression used for filename matching. |
| CrowdStrike.IOAExclusion.cl_regex | String | A regular expression used for command line matching. |
| CrowdStrike.IOAExclusion.detection_json | String | A JSON string that describes the detection logic for the IOA exclusion. |
-| CrowdStrike.IOAExclusion.groups.id | String | Groups ID that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.group_type | String | Groups type that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.name | String | Groups name that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.description | String | Groups description that the exclusion rule is associated with. |
-| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Groups assignment rule that the exclusion is associated with. |
+| CrowdStrike.IOAExclusion.groups.id | String | Group ID that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.group_type | String | Group type that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.name | String | Group name that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.description | String | Group description that the exclusion rule is associated with. |
+| CrowdStrike.IOAExclusion.groups.assignment_rule | String | Group assignment rule that the exclusion is associated with. |
| CrowdStrike.IOAExclusion.groups.created_by | String | Indicate who created the group. |
| CrowdStrike.IOAExclusion.groups.created_timestamp | Date | The date when the group was created. |
| CrowdStrike.IOAExclusion.groups.modified_by | String | Indicate who last modified the group. |
@@ -5139,7 +4537,7 @@ Get a list of IOA exclusions by specifying their IDs or a filter.
| CrowdStrike.IOAExclusion.created_on | Date | The date when the exclusion rule was created. |
| CrowdStrike.IOAExclusion.created_by | String | Indicate who created the rule. |
-#### Command example
+#### Command Example
```!cs-falcon-search-ioa-exclusion limit=1```
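+
+An additional hypothetical invocation using the FQL `filter` argument described in the Input table above; the filtered name is a placeholder matching the creation example earlier in this document:
+
+```!cs-falcon-search-ioa-exclusion filter="name:'demo-test'"```
+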
@@ -5191,7 +4589,7 @@ Get quarantine file metadata by specified IDs or filter.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| ids | A comma-separated list of quarantined file IDs to retrieve. | Optional |
-| filter | A custom filter by which the retrieve quarantined file should be filtered. | Optional |
+| filter | A custom filter by which the retrieved quarantined files should be filtered. | Optional |
| sha256 | A comma-separated list of SHA256 hash of the files to retrieve. | Optional |
| filename | A comma-separated list of the name of the files to retrieve. | Optional |
| state | Filter the retrieved files by state. | Optional |
@@ -5206,7 +4604,7 @@ Get quarantine file metadata by specified IDs or filter.
| --- | --- | --- |
| CrowdStrike.QuarantinedFile.id | String | A unique identifier for the quarantined file. |
| CrowdStrike.QuarantinedFile.aid | String | The agent identifier of the agent that quarantined the file. |
-| CrowdStrike.QuarantinedFile.cid | String | The unique identifier for the customer that who the agent. |
+| CrowdStrike.QuarantinedFile.cid | String | The unique customer identifier of the agent that quarantined the file. |
| CrowdStrike.QuarantinedFile.sha256 | String | The SHA256 hash value of the quarantined file. |
| CrowdStrike.QuarantinedFile.paths.path | String | The full path of the quarantined file. |
| CrowdStrike.QuarantinedFile.paths.filename | String | The name of the quarantined file. |
@@ -5218,7 +4616,7 @@ Get quarantine file metadata by specified IDs or filter.
| CrowdStrike.QuarantinedFile.date_updated | Date | The date the quarantined file was last updated. |
| CrowdStrike.QuarantinedFile.date_created | Date | The date the quarantined file was created. |
-#### Command example
+#### Command Example
```!cs-falcon-list-quarantined-file limit=1```
@@ -5264,7 +4662,7 @@ Get quarantine file metadata by specified IDs or filter.
### cs-falcon-apply-quarantine-file-action
***
-Apply action to quarantined file by file IDs or filter.
+Apply action to quarantined files by file IDs or filter.
#### Base Command
@@ -5278,17 +4676,17 @@ Apply action to quarantined file by file IDs or filter.
| action | Action to perform against the quarantined file. Possible values are: delete, release, unrelease. | Required |
| comment | Comment to appear along with the action taken. | Required |
| filter | Update files based on a custom filter. | Optional |
-| sha256 | A comma-separated list of quarantined files SHA256 to update. | Optional |
-| filename | A comma-separated list of quarantined file names to update. | Optional |
+| sha256 | A comma-separated list of SHA256 hashes of the quarantined files to update. | Optional |
+| filename | A comma-separated list of quarantined filenames to update. | Optional |
| state | Update files based on the state. | Optional |
| hostname | A comma-separated list of quarantined file hostnames to update. | Optional |
-| username | A comma-separated list of quarantined files username to update. | Optional |
+| username | A comma-separated list of usernames of the quarantined files to update. | Optional |
#### Context Output
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-apply-quarantine-file-action filename=nc.exe action=delete comment=demo-comment```
@@ -5315,12 +4713,13 @@ Retrieve ODS scan details.
| initiated_from | Comma-separated list of scan initiation sources to filter by. | Optional |
| status | Comma-separated list of scan statuses to filter by. | Optional |
| severity | Comma-separated list of scan severities to filter by. | Optional |
-| scan_started_on | UTC-format time of scan start to filter by. | Optional |
-| scan_completed_on | UTC-format time of the scan completion to filter by. | Optional |
+| scan_started_on | The scan start time, in UTC format, to filter by. | Optional |
+| scan_completed_on | The scan completion time, in UTC format, to filter by. | Optional |
| offset | Starting index of overall result set from which to return IDs. | Optional |
| limit | Maximum number of resources to return. | Optional |
| interval_in_seconds | The interval in seconds between each poll. Default is 30. | Optional |
| timeout_in_seconds | The timeout in seconds until polling ends. Default is 600. | Optional |
+| hide_polling_output | Whether to hide the polling message and only print the final status at the end (automatically filled by polling. Can be used for testing purposes). Default is True. | Optional |
#### Context Output
@@ -5349,7 +4748,7 @@ Retrieve ODS scan details.
| CrowdStrike.ODSScan.metadata.last_updated | Date | The date and time that the metadata was last updated. |
| CrowdStrike.ODSScan.status | String | The status of the scan \(e.g., "pending", "running", "completed", or "failed"\). |
| CrowdStrike.ODSScan.hosts | String | A list of the host IDs that were scanned. |
-| CrowdStrike.ODSScan.endpoint_notification | Boolean | A boolean value indicating whether endpoint notifications are enabled. |
+| CrowdStrike.ODSScan.endpoint_notification | Boolean | Indicates whether endpoint notifications are enabled. |
| CrowdStrike.ODSScan.pause_duration | Number | The number of hours to pause between scanning each file. |
| CrowdStrike.ODSScan.max_duration | Number | The maximum amount of time to allow for the scan job in hours. |
| CrowdStrike.ODSScan.max_file_size | Number | The maximum file size \(in MB\) to scan. |
@@ -5364,7 +4763,7 @@ Retrieve ODS scan details.
| CrowdStrike.ODSScan.created_by | String | The ID of the user who created the scan job. |
| CrowdStrike.ODSScan.last_updated | Date | The timestamp when the scan job was last updated. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-query-scan initiated_from=some_admin_name severity=high scan_started_on=2023-02-27T09:51:33.91608286Z```
@@ -5534,9 +4933,9 @@ Retrieve ODS scheduled scan details.
| ids | Comma-separated list of scan IDs to retrieve details about. If set, will override all other arguments. | Optional |
| initiated_from | Comma-separated list of scan initiation sources to filter by. | Optional |
| status | Comma-separated list of scan statuses to filter by. | Optional |
-| created_on | UTC-format time of scan creation to filter by. | Optional |
-| created_by | UTC-format time of scan creator to filter by. | Optional |
-| start_timestamp | UTC-format time of scan start to filter by. | Optional |
+| created_on | The scan creation time, in UTC format, to filter by. | Optional |
+| created_by | The creator of the scan to filter by. | Optional |
+| start_timestamp | The scan start time, in UTC format, to filter by. | Optional |
| deleted | Deleted scans only. | Optional |
| offset | Starting index of overall result set from which to return IDs. | Optional |
| limit | Maximum number of resources to return. | Optional |
@@ -5557,7 +4956,7 @@ Retrieve ODS scheduled scan details.
| CrowdStrike.ODSScheduledScan.host_groups | String | The host groups targeted by the scan. |
| CrowdStrike.ODSScheduledScan.endpoint_notification | Boolean | Whether notifications of the scan were sent to endpoints. |
| CrowdStrike.ODSScheduledScan.pause_duration | Number | The pause duration of the scan in hours. |
-| CrowdStrike.ODSScheduledScan.max_duration | Number | The max duration of the scan in hours. |
+| CrowdStrike.ODSScheduledScan.max_duration | Number | The maximum duration of the scan in hours. |
| CrowdStrike.ODSScheduledScan.max_file_size | Number | The maximum file size that the scan can handle in MB. |
| CrowdStrike.ODSScheduledScan.sensor_ml_level_detection | Number | The machine learning detection level for the sensor. |
| CrowdStrike.ODSScheduledScan.cloud_ml_level_detection | Number | The machine learning detection level for the cloud. |
@@ -5566,14 +4965,14 @@ Retrieve ODS scheduled scan details.
| CrowdStrike.ODSScheduledScan.created_on | Date | The timestamp when the scan was created. |
| CrowdStrike.ODSScheduledScan.created_by | String | The user who created the scan. |
| CrowdStrike.ODSScheduledScan.last_updated | Date | The timestamp when the scan was last updated. |
-| CrowdStrike.ODSScheduledScan.deleted | Boolean | Whether the scan has been deleted. |
+| CrowdStrike.ODSScheduledScan.deleted | Boolean | Whether the scan was deleted. |
| CrowdStrike.ODSScheduledScan.quarantine | Boolean | Whether the scan was set to quarantine. |
| CrowdStrike.ODSScheduledScan.metadata.host_id | String | Scan host IDs. |
| CrowdStrike.ODSScheduledScan.metadata.last_updated | Date | The date and time when the detection event was last updated. |
| CrowdStrike.ODSScheduledScan.sensor_ml_level_prevention | Number | The machine learning prevention level for the sensor. |
| CrowdStrike.ODSScheduledScan.cloud_ml_level_prevention | Number | The machine learning prevention level for the cloud. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-query-scheduled-scan ids=123456789```
@@ -5688,9 +5087,9 @@ Retrieve ODS scan host details.
| host_ids | Comma-separated list of host IDs to filter by. | Optional |
| scan_ids | Comma-separated list of scan IDs to filter by. | Optional |
| status | Comma-separated list of scan statuses to filter by. | Optional |
-| started_on | UTC-format time of scan start to filter by. | Optional |
-| completed_on | UTC-format time of scan completion to filter by. | Optional |
-| offset | Starting index of overall result set from which to return IDs. | Optional |
+| started_on | The scan start time, in UTC format, to filter by. | Optional |
+| completed_on | The scan completion time, in UTC format, to filter by. | Optional |
+| offset | Starting index of the overall result set from which to return IDs. | Optional |
| limit | Maximum number of resources to return. | Optional |
#### Context Output
@@ -5709,11 +5108,11 @@ Retrieve ODS scan host details.
| CrowdStrike.ODSScanHost.filecount.skipped | Number | The number of files that were skipped during the scan. |
| CrowdStrike.ODSScanHost.status | String | The status of the scan. \(e.g., "completed", "pending", "cancelled", "running", or "failed"\). |
| CrowdStrike.ODSScanHost.severity | Number | A severity score assigned to the scan, ranging from 0 to 100. |
-| CrowdStrike.ODSScanHost.started_on | Date | The date and time when the scan was started. |
-| CrowdStrike.ODSScanHost.completed_on | Date | The date and time when the scan was completed. |
+| CrowdStrike.ODSScanHost.started_on | Date | The date and time when the scan started. |
+| CrowdStrike.ODSScanHost.completed_on | Date | The date and time when the scan completed. |
| CrowdStrike.ODSScanHost.last_updated | Date | The date and time when the scan event was last updated. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-query-scan-host filter="scan_id:[\"123456789\",\"987654321\"]"```
@@ -5784,9 +5183,9 @@ Retrieve ODS malicious file details.
| host_ids | Comma-separated list of host IDs to filter by. | Optional |
| scan_ids | Comma-separated list of scan IDs to filter by. | Optional |
| file_paths | Comma-separated list of file paths to filter by. | Optional |
-| file_names | Comma-separated list of file names to filter by. | Optional |
+| file_names | Comma-separated list of filenames to filter by. | Optional |
| hash | Comma-separated list of hashes to filter by. | Optional |
-| offset | Starting index of overall result set from which to return IDs. | Optional |
+| offset | Starting index of the overall result set from which to return IDs. | Optional |
| limit | Maximum number of resources to return. | Optional |
#### Context Output
@@ -5800,13 +5199,13 @@ Retrieve ODS malicious file details.
| CrowdStrike.ODSMaliciousFile.host_scan_id | String | A unique identifier for the scan that detected the file on the host. |
| CrowdStrike.ODSMaliciousFile.filepath | String | The full path to the malicious file on the host system. |
| CrowdStrike.ODSMaliciousFile.filename | String | The name of the malicious file. |
-| CrowdStrike.ODSMaliciousFile.hash | String | A SHA-256 hash of the malicious file, which can be used to identify it. |
+| CrowdStrike.ODSMaliciousFile.hash | String | A SHA256 hash of the malicious file, which can be used to identify it. |
| CrowdStrike.ODSMaliciousFile.pattern_id | Number | The identifier of the pattern used to detect the malicious file. |
| CrowdStrike.ODSMaliciousFile.severity | Number | A severity score assigned to the detection event, ranging from 0 to 100. |
-| CrowdStrike.ODSMaliciousFile.quarantined | Boolean | A Boolean value indicating whether the file has been quarantined. |
+| CrowdStrike.ODSMaliciousFile.quarantined | Boolean | Indicates whether the file was quarantined. |
| CrowdStrike.ODSMaliciousFile.last_updated | Date | The date and time when the detection event was last updated. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-query-malicious-files```
@@ -5817,7 +5216,7 @@ Retrieve ODS malicious file details.
### cs-falcon-ods-create-scan
***
-Create an ODS scan and wait for results.
+Create an ODS scan and wait for the results.
#### Base Command
@@ -5827,13 +5226,13 @@ Create an ODS scan and wait for results.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| hosts | List of hosts to be scanned. "hosts" OR "host_groups" must be set. | Optional |
-| host_groups | List of host groups to be scanned. "hosts" OR "host_groups" must be set. | Optional |
-| file_paths | List of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set. | Optional |
-| scan_inclusions | List of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set. | Optional |
-| scan_exclusions | List of excluded files or locations for this scan. | Optional |
+| hosts | A comma-separated list of hosts to be scanned. "hosts" OR "host_groups" must be set. | Optional |
+| host_groups | A comma-separated list of host groups to be scanned. "hosts" OR "host_groups" must be set. | Optional |
+| file_paths | A comma-separated list of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set. | Optional |
+| scan_inclusions | A comma-separated list of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set. | Optional |
+| scan_exclusions | A comma-separated list of excluded files or locations for this scan. | Optional |
| initiated_from | Scan origin. | Optional |
-| cpu_priority | Set the scan CPU priority. Possible values are: Highest, High, Medium, Low, Lowest. Default is Low. | Optional |
+| cpu_priority | The scan CPU priority. Possible values are: Highest, High, Medium, Low, Lowest. Default is Low. | Optional |
| description | Scan description. | Optional |
| quarantine | Flag indicating if identified threats should be quarantined. | Optional |
| pause_duration | Amount of time (in hours) for scan pauses. Default is 2. | Optional |
@@ -5872,7 +5271,7 @@ Create an ODS scan and wait for results.
| CrowdStrike.ODSScan.metadata.last_updated | Date | The date and time that the metadata was last updated. |
| CrowdStrike.ODSScan.status | String | The status of the scan \(e.g., "pending", "running", "completed", or "failed"\). |
| CrowdStrike.ODSScan.hosts | String | A list of the host IDs that were scanned. |
-| CrowdStrike.ODSScan.endpoint_notification | Boolean | A boolean value indicating whether endpoint notifications are enabled. |
+| CrowdStrike.ODSScan.endpoint_notification | Boolean | Indicates whether endpoint notifications are enabled. |
| CrowdStrike.ODSScan.pause_duration | Number | The number of hours to pause between scanning each file. |
| CrowdStrike.ODSScan.max_duration | Number | The maximum amount of time to allow for the scan job in hours. |
| CrowdStrike.ODSScan.max_file_size | Number | The maximum file size \(in MB\) to scan. |
@@ -5887,7 +5286,7 @@ Create an ODS scan and wait for results.
| CrowdStrike.ODSScan.created_by | String | The ID of the user who created the scan job. |
| CrowdStrike.ODSScan.last_updated | Date | The timestamp when the scan job was last updated. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-create-scan host_groups=7471ba0636b34cbb8c65fae7979a6a9b scan_inclusions=* cpu_priority=Highest max_duration=1 pause_duration=1```
@@ -5974,12 +5373,12 @@ Create an ODS scheduled scan.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| host_groups | List of host groups to be scanned. | Required |
-| file_paths | List of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set. | Optional |
-| scan_inclusions | List of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set. | Optional |
-| scan_exclusions | List of excluded files or locations for this scan. | Optional |
+| host_groups | A comma-separated list of host groups to be scanned. | Required |
+| file_paths | A comma-separated list of file paths to be scanned. "file_paths" OR "scan_inclusions" must be set. | Optional |
+| scan_inclusions | A comma-separated list of included files or locations for this scan. "file_paths" OR "scan_inclusions" must be set. | Optional |
+| scan_exclusions | A comma-separated list of excluded files or locations for this scan. | Optional |
| initiated_from | Scan origin. | Optional |
-| cpu_priority | Set the scan CPU priority. Possible values are: Highest, High, Medium, Low, Lowest. Default is Low. | Optional |
+| cpu_priority | The scan CPU priority. Possible values are: Highest, High, Medium, Low, Lowest. Default is Low. | Optional |
| description | Scan description. | Optional |
| quarantine | Flag indicating if identified threats should be quarantined. | Optional |
| pause_duration | Amount of time (in hours) for scan pauses. Default is 2. | Optional |
@@ -5988,8 +5387,8 @@ Create an ODS scheduled scan.
| cloud_ml_level_detection | Cloud ML detection level for the scan. | Optional |
| cloud_ml_level_prevention | Cloud ML prevention level for the scan. | Optional |
| max_duration | Maximum time (in hours) the scan is allowed to execute. Default is 2. | Optional |
-| schedule_start_timestamp | When to start the first scan. Supports english expressions such as "tommorow" or "in an hour". | Required |
-| schedule_interval | Set the schedule interval. Possible values are: Never, Daily, Weekly, Every other week, Every four weeks, Monthly. | Required |
+| schedule_start_timestamp | When to start the first scan. Supports English expressions such as "tomorrow" or "in an hour". | Required |
+| schedule_interval | The schedule interval. Possible values are: Never, Daily, Weekly, Every other week, Every four weeks, Monthly. | Required |
#### Context Output
@@ -6006,8 +5405,8 @@ Create an ODS scheduled scan.
| CrowdStrike.ODSScheduledScan.status | String | The status of the scan, whether it's "scheduled", "running", "completed", etc. |
| CrowdStrike.ODSScheduledScan.host_groups | String | The host groups targeted by the scan. |
| CrowdStrike.ODSScheduledScan.endpoint_notification | Boolean | Whether notifications of the scan were sent to endpoints. |
-| CrowdStrike.ODSScheduledScan.pause_duration | Number | The pause duration of scan in hours. |
-| CrowdStrike.ODSScheduledScan.max_duration | Number | The max duration of scan in hours. |
+| CrowdStrike.ODSScheduledScan.pause_duration | Number | The pause duration of the scan in hours. |
+| CrowdStrike.ODSScheduledScan.max_duration | Number | The maximum duration of the scan in hours. |
| CrowdStrike.ODSScheduledScan.max_file_size | Number | The maximum file size that the scan can handle in MB. |
| CrowdStrike.ODSScheduledScan.sensor_ml_level_detection | Number | The machine learning detection level for the sensor. |
| CrowdStrike.ODSScheduledScan.cloud_ml_level_detection | Number | The machine learning detection level for the cloud. |
@@ -6016,14 +5415,14 @@ Create an ODS scheduled scan.
| CrowdStrike.ODSScheduledScan.created_on | Date | The timestamp when the scan was created. |
| CrowdStrike.ODSScheduledScan.created_by | String | The user who created the scan. |
| CrowdStrike.ODSScheduledScan.last_updated | Date | The timestamp when the scan was last updated. |
-| CrowdStrike.ODSScheduledScan.deleted | Boolean | Whether the scan has been deleted. |
+| CrowdStrike.ODSScheduledScan.deleted | Boolean | Whether the scan was deleted. |
| CrowdStrike.ODSScheduledScan.quarantine | Boolean | Whether the scan was set to quarantine. |
| CrowdStrike.ODSScheduledScan.metadata.host_id | String | Scan host IDs. |
| CrowdStrike.ODSScheduledScan.metadata.last_updated | Date | The date and time when the detection event was last updated. |
| CrowdStrike.ODSScheduledScan.sensor_ml_level_prevention | Number | The machine learning prevention level for the sensor. |
| CrowdStrike.ODSScheduledScan.cloud_ml_level_prevention | Number | The machine learning prevention level for the cloud. |
-#### Command example
+#### Command Example
```!cs-falcon-ods-create-scheduled-scan host_groups=7471ba0636b34cbb8c65fae7979a6a9b schedule_interval=daily schedule_start_timestamp=tomorrow cpu_priority=Highest scan_inclusions=*```
@@ -6141,7 +5540,7 @@ Delete ODS scheduled scans.
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-ods-delete-scheduled-scan ids=9acf0c069d3d4a5b82badb170966e77c```
@@ -6169,13 +5568,13 @@ List identity entities.
| type | API type. Possible values are: USER, ENDPOINT. | Required |
| sort_key | The key to sort by. Possible values are: RISK_SCORE, PRIMARY_DISPLAY_NAME, SECONDARY_DISPLAY_NAME, MOST_RECENT_ACTIVITY, ENTITY_ID. | Optional |
| sort_order | The sort order. Possible values are: DESCENDING, ASCENDING. Default is ASCENDING. | Optional |
-| entity_id | Comma separated list of entity IDs to look for. | Optional |
-| primary_display_name | Primary display name to filter by. | Optional |
-| secondary_display_name | Secondary display name to filter by. | Optional |
+| entity_id | A comma-separated list of entity IDs to look for. | Optional |
+| primary_display_name | A comma-separated list of primary display names to filter by. | Optional |
+| secondary_display_name | A comma-separated list of secondary display names to filter by. | Optional |
| max_risk_score_severity | The maximum risk score severity to filter by. Possible values are: NORMAL, MEDIUM, HIGH. | Optional |
| min_risk_score_severity | The minimum risk score severity to filter by. Possible values are: NORMAL, MEDIUM, HIGH. | Optional |
| enabled | Whether to get only enabled or disabled identity entities. Possible values are: true, false. | Optional |
-| email | Filter by email. | Optional |
+| email | Email to filter by. | Optional |
| next_token | The hash for the next page. | Optional |
| page_size | The maximum number of items to fetch per page. The maximum value allowed is 1000. Default is 50. | Optional |
| page | The page number. Default is 1. | Optional |
@@ -6196,6 +5595,11 @@ List identity entities.
| CrowdStrike.IDPEntity.SecondaryDisplayName | String | The identity entity secondary display name. |
| CrowdStrike.IDPEntity.EmailAddresses | String | The identity entity email address. |
+### cs-falcon-cspm-list-policy-details
+
+***
+Given a comma-separated list of policy IDs, returns detailed policy information.
+
#### Base Command
`cs-falcon-cspm-list-policy-details`
@@ -6229,7 +5633,7 @@ List identity entities.
| CrowdStrike.CSPMPolicy.default_severity | String | The default severity. |
| CrowdStrike.CSPMPolicy.cis_benchmark_ids | Array | The CIS benchmark IDs. |
| CrowdStrike.CSPMPolicy.nist_benchmark_ids | Array | The NIST benchmark IDs. |
-| CrowdStrike.CSPMPolicy.pci_benchmark_ids | Array | The pci benchmark IDs. |
+| CrowdStrike.CSPMPolicy.pci_benchmark_ids | Array | The PCI benchmark IDs. |
| CrowdStrike.CSPMPolicy.policy_type | String | The policy type. |
| CrowdStrike.CSPMPolicy.tactic_url | String | The tactic URL. |
| CrowdStrike.CSPMPolicy.technique_url | String | The technique URL. |
@@ -6240,11 +5644,11 @@ List identity entities.
| CrowdStrike.CSPMPolicy.attack_types | Array | The attack types. |
| CrowdStrike.CSPMPolicy.asset_type_id | Integer | The asset type ID. |
| CrowdStrike.CSPMPolicy.cloud_asset_type | String | The cloud asset type. |
-| CrowdStrike.CSPMPolicy.is_remediable | Boolean | Whether the policy is remediable or not.. |
+| CrowdStrike.CSPMPolicy.is_remediable | Boolean | Whether the policy is remediable or not. |
| CrowdStrike.CSPMPolicy.is_enabled | Boolean | Whether the policy is enabled or not. |
| CrowdStrike.CSPMPolicy.account_scope | String | The account scope. |
-#### Command example
+#### Command Example
```!cs-falcon-cspm-list-policy-details policy_ids=1,2```
@@ -6382,7 +5786,7 @@ Returns information about current policy settings.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| policy_id | The policy ID to look for its settings. | Optional |
+| policy_id | The policy ID for which to retrieve settings. | Optional |
| cloud_platform | The cloud provider. Possible values are: aws, gcp, azure. Default is aws. | Optional |
| service | Service type to filter by. | Optional |
| limit | The maximum number of entities to list. Default is 50. | Optional |
@@ -6407,7 +5811,7 @@ Returns information about current policy settings.
| CrowdStrike.CSPMPolicySetting.policy_timestamp | Date | The policy timestamp. |
| CrowdStrike.CSPMPolicySetting.policy_settings | Array | An array that holds policy settings. |
| CrowdStrike.CSPMPolicySetting.policy_settings.account_id | String | The account ID correlated to the policy. |
-| CrowdStrike.CSPMPolicySetting.policy_settings.regions | Array | The regions in which the policy is configured at. |
+| CrowdStrike.CSPMPolicySetting.policy_settings.regions | Array | The regions in which the policy is configured. |
| CrowdStrike.CSPMPolicySetting.policy_settings.severity | String | The severity of the policy. |
| CrowdStrike.CSPMPolicySetting.policy_settings.enabled | Boolean | Whether the policy settings are enabled or not. |
| CrowdStrike.CSPMPolicySetting.policy_settings.tag_excluded | Boolean | Whether the tag is excluded or not. |
@@ -6425,7 +5829,7 @@ Returns information about current policy settings.
| CrowdStrike.CSPMPolicySetting.nist_benchmark.recommendation_number | String | The NIST benchmark recommendation number. |
| CrowdStrike.CSPMPolicySetting.attack_types | Array | The attack types. |
-#### Command example
+#### Command Example
```!cs-falcon-cspm-list-service-policy-settings limit=2```
@@ -6579,7 +5983,7 @@ Returns information about current policy settings.
### cs-falcon-cspm-update-policy_settings
***
-Updates a policy setting - can be used to override policy severity or to disable a policy entirely.
+Updates a policy setting. Can be used to override policy severity or to disable a policy entirely.
#### Base Command
@@ -6590,9 +5994,9 @@ Updates a policy setting - can be used to override policy severity or to disable
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| policy_id | Policy ID to be updated. | Required |
-| account_id | Cloud Account ID to impact. | Optional |
+| account_id | Cloud account ID to impact. | Optional |
| enabled | Flag indicating if this policy is enabled. Possible values are: false, true. Default is true. | Optional |
-| regions | List of regions where this policy is enforced. | Optional |
+| regions | A comma-separated list of regions where this policy is enforced. | Optional |
| severity | Policy severity value. Possible values are: critical, high, medium, informational. | Optional |
| tag_excluded | Tag exclusion flag. Possible values are: false, true. | Optional |
@@ -6600,7 +6004,7 @@ Updates a policy setting - can be used to override policy severity or to disable
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-cspm-update-policy_settings policy_id=1 enabled=true regions="eu-central-1,eu-central-2" severity=high tag_excluded=false```
@@ -6621,13 +6025,13 @@ Perform actions on identity detection alerts.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ids | IDs of the alerts to update. | Required |
+| ids | A comma-separated list of IDs of the alerts to update. | Required |
| assign_to_name | Assign the specified detections to a user based on their username. | Optional |
| assign_to_uuid | Assign the specified detections to a user based on their UUID. | Optional |
| append_comment | Appends a new comment to any existing comments for the specified detections. | Optional |
| add_tag | Add a tag to the specified detections. | Optional |
| remove_tag | Remove a tag from the specified detections. | Optional |
-| update_status | Update status of the alert to the specified value. Possible values are: new, in_progress, closed, reopened. | Optional |
+| update_status | Update the status of the alert to the specified value. Possible values are: new, in_progress, closed, reopened. | Optional |
| unassign | Whether to unassign any assigned users to the specified detections. Possible values are: false, true. | Optional |
| show_in_ui | If true, displays the detection in the UI. Possible values are: false, true. | Optional |
@@ -6635,7 +6039,7 @@ Perform actions on identity detection alerts.
There is no context output for this command.
-#### Command example
+#### Command Example
```!cs-falcon-resolve-identity-detection ids="id_1,id_2" add_tag="Demo tag" append_comment="Demo comment" assign_to_name="morganf" show_in_ui=true update_status=in_progress```
@@ -6643,6 +6047,40 @@ There is no context output for this command.
>IDP Detection(s) id_1, id_2 were successfully updated
+### cs-falcon-resolve-mobile-detection
+
+***
+Perform actions on mobile detection alerts.
+
+#### Base Command
+
+`cs-falcon-resolve-mobile-detection`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ids | A comma-separated list of IDs of the alerts to update. | Required |
+| assign_to_name | Assign the specified detections to a user based on their username. | Optional |
+| assign_to_uuid | Assign the specified detections to a user based on their UUID. | Optional |
+| append_comment | Appends a new comment to any existing comments for the specified detections. | Optional |
+| add_tag | Add a tag to the specified detections. | Optional |
+| remove_tag | Remove a tag from the specified detections. | Optional |
+| update_status | Update the status of the alert to the specified value. Possible values are: new, in_progress, closed, reopened. | Optional |
+| unassign | Whether to unassign any users assigned to the specified detections. Possible values are: false, true. | Optional |
+| show_in_ui | If true, displays the detection in the UI. Possible values are: false, true. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+```!cs-falcon-resolve-mobile-detection ids="id_1,id_2" add_tag="Demo tag" append_comment="Demo comment" assign_to_name="morganf" show_in_ui=true update_status=in_progress```
+
+#### Human Readable Output
+
+>Mobile Detection(s) id_1, id_2 were successfully updated
### cs-falcon-list-users
***
@@ -6656,10 +6094,10 @@ List users.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| id | ID(s) of specific users to list. | Optional |
-| filter | The filter expression that should be used to limit the results. FQL syntax. Available values: assigned_cids, cid, first_name, last_name, name, uid. | Optional |
+| id | A comma-separated list of IDs (UUIDs) of specific users to list. | Optional |
+| filter | The filter expression that should be used to limit the results. FQL syntax. Available values: assigned_cids, cid, first_name, last_name, name, uid. Example: "first_name:'John'". | Optional |
| offset | The integer offset to start retrieving records from. | Optional |
-| limit | The maximum number of records to return. Default is 100. | Optional |
+| limit | The maximum number of records to return. Default is 50. | Optional |
#### Context Output
@@ -6676,7 +6114,7 @@ List users.
### cs-falcon-get-incident-behavior
***
-Get incident behavior information
+Get incident behavior information.
#### Base Command
@@ -6686,7 +6124,7 @@ Get incident behavior information
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| behavior_ids | ID(s) of behaviors to list. Behavior IDs can be retrieved by running the 'cs-falcon-get-detections-for-incident' command. | Required |
+| behavior_ids | A comma-separated list of behavior IDs to list. Behavior IDs can be retrieved by running the 'cs-falcon-get-detections-for-incident' command. | Required |
#### Context Output
@@ -6727,94 +6165,339 @@ Get incident behavior information
| CrowdStrike.IncidentBehavior.pattern_disposition_details.suspend_process | Boolean | Whether the process was suspended. |
| CrowdStrike.IncidentBehavior.pattern_disposition_details.suspend_parent | Boolean | Whether the parent was suspended. |
| CrowdStrike.IncidentBehavior.sha256 | String | The SHA256 hash. |
-| CrowdStrike.IncidentBehavior.user_name | String | The user name. |
+| CrowdStrike.IncidentBehavior.user_name | String | The username. |
| CrowdStrike.IncidentBehavior.tactic | String | The tactic used. |
| CrowdStrike.IncidentBehavior.tactic_id | String | The tactic ID. |
| CrowdStrike.IncidentBehavior.technique | String | The technique used. |
| CrowdStrike.IncidentBehavior.technique_id | String | The technique ID. |
| CrowdStrike.IncidentBehavior.display_name | String | The display name. |
| CrowdStrike.IncidentBehavior.objective | String | The objective. |
-| CrowdStrike.IncidentBehavior.compound_tto | String | The compound TTO. |
+| CrowdStrike.IncidentBehavior.compound_tto | String | The compound Time to Operate \(TTO\). |
+
+
+### cs-falcon-get-ioarules
+
+***
+Get IOA Rules.
+
+#### Base Command
+
+`cs-falcon-get-ioarules`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| rule_ids | A comma-separated list of rule IDs to get IOA rules for. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CrowdStrike.IOARules.instance_id | String | The IOA rule's instance ID. |
+| CrowdStrike.IOARules.customer_id | String | The customer ID. |
+| CrowdStrike.IOARules.action_label | String | The IOA rule's action label. |
+| CrowdStrike.IOARules.comment | String | The IOA rule's comment. |
+| CrowdStrike.IOARules.committed_on | String | The timestamp of the IOA rule's commitment. |
+| CrowdStrike.IOARules.created_by | String | The IOA rule's creator. |
+| CrowdStrike.IOARules.created_on | String | The timestamp of the IOA rule's creation. |
+| CrowdStrike.IOARules.deleted | Boolean | Whether the IOA rule is in a deleted status. |
+| CrowdStrike.IOARules.description | String | The IOA rule's description. |
+| CrowdStrike.IOARules.disposition_id | String | The disposition ID used by the IOA rule. |
+| CrowdStrike.IOARules.enabled | Boolean | Whether the IOA rule is enabled. |
+| CrowdStrike.IOARules.field_values | String | The IOA rule's field values. |
+| CrowdStrike.IOARules.instance_version | String | The IOA rule's instance version. |
+| CrowdStrike.IOARules.magic_cookie | String | The IOA rule's magic cookie. |
+| CrowdStrike.IOARules.modified_by | String | The last user who modified the IOA rule. |
+| CrowdStrike.IOARules.modified_on | String | The timestamp of the IOA rule's last modification. |
+| CrowdStrike.IOARules.name | String | The IOA rule name. |
+| CrowdStrike.IOARules.pattern_id | String | The IOA rule's pattern ID. |
+| CrowdStrike.IOARules.pattern_severity | String | The IOA rule's pattern severity. |
+| CrowdStrike.IOARules.rulegroup_id | String | The IOA rule's rule group ID. |
+| CrowdStrike.IOARules.ruletype_id | String | The IOA rule's rule type ID. |
+| CrowdStrike.IOARules.ruletype_name | String | The IOA rule's rule type name. |
+| CrowdStrike.IOARules.version_ids | String | The IOA rule's version ID. |
+# Spotlight
-### cs-falcon-get-ioarules
+### Using Spotlight APIs
+
+Spotlight identifies and provides information about specific vulnerabilities on your hosts using the Falcon sensor.
+
+### Required API client scope
+
+To access the Spotlight API, your API client must be assigned the `spotlight-vulnerabilities:read` scope.
+
+### Validating API data
+
+The Falcon sensor continuously monitors hosts for any changes and reports them as they occur.
+Depending on the timing of requests, Spotlight APIs can return values that are different from those shown by the Falcon console or an external source.
+Other factors, described in the sections below, can also cause differences between API responses and other data sources.
+
+### API query syntax
+
+If an API query doesn’t exactly match the query used on the Spotlight Vulnerabilities page, the values might differ.
+
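+For reference, the same FQL expression you use in the Falcon console can be passed through the `filter` argument of the search command described below. The following sketch simply reuses the example values given in that argument's description; adjust field names and values to your own query:
+
+```
+!cs-falcon-spotlight-search-vulnerability filter=status:'open'+cve.id:['CVE-2013-3900','CVE-2021-1675']
+```
+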
+### Expired vulnerabilities in Spotlight APIs
+
+If a host is deleted or inactive for 45 days, the status of vulnerabilities on that host changes to expired. Expired vulnerabilities are removed from Spotlight after 3 days.
+Expired vulnerabilities are only visible in API responses and are not included in reports or the Falcon console.
+An external data source might not use the same data retention policy, which can lead to discrepancies with Spotlight APIs. For more information, see [Data retention in Spotlight](https://falcon.crowdstrike.com/login/?next=%2Fdocumentation%2F43%2Ffalcon-spotlight-overview#data-retention-in-spotlight).
+
+### The following commands use the Spotlight API:
+
+### cs-falcon-spotlight-search-vulnerability
***
-Get IOA Rules for Custom IOA rule triggered detections
+Retrieve vulnerability details according to the selected filter. Each request requires at least one filter parameter. Supported with the CrowdStrike Spotlight license.
#### Base Command
-`cs-falcon-get-ioarules`
+`cs-falcon-spotlight-search-vulnerability`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| rule_ids | ID(s) of rules to list. Rule IDs can be retrieved by combining cid and rule_instance_id from 'cs-falcon-list-detection-summaries' output using this format cid:rule_instance_id. Example: 1123casdcccxxaafq13fdasf:2003 | Required |
+| filter | Limit the vulnerabilities returned to specific properties. Each value must be enclosed in single quotes and placed immediately after the colon with no space. For example, 'filter=status:'open'+cve.id:['CVE-2013-3900','CVE-2021-1675']'. | Optional |
+| aid | A comma-separated list of unique agent identifiers (AIDs) of a sensor. | Optional |
+| cve_id | A comma-separated list of unique identifiers for a vulnerability as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation. | Optional |
+| cve_severity | A comma-separated list of severities of the CVE. The possible values are: CRITICAL, HIGH, MEDIUM, LOW, UNKNOWN, or NONE. | Optional |
+| tags | A comma-separated list of tag names assigned to a host. Retrieve tags from the Host Tags APIs. | Optional |
+| status | Status of a vulnerability. This filter supports multiple values and negation. The possible values are: open, closed, reopen, expired. | Optional |
+| platform_name | Operating system platform. This filter supports negation. The possible values are: Windows, Mac, Linux. | Optional |
+| host_group | A comma-separated list of unique system-assigned IDs of a host group. Retrieve the host group ID from Host Group APIs. | Optional |
+| host_type | A comma-separated list of types of hosts a sensor is running on. | Optional |
+| last_seen_within | Filter for vulnerabilities based on the number of days since a host last connected to CrowdStrike Falcon. Enter a numeric value from 3 to 45 to indicate the number of days to look back. For example, last_seen_within:10. | Optional |
+| is_suppressed | Indicates if the vulnerability is suppressed by a suppression rule. Possible values are: true, false. | Optional |
+| display_remediation_info | Whether to return remediation information for each vulnerability entity. Possible values are: True, False. Default is True. | Optional |
+| display_evaluation_logic_info | Whether to return evaluation logic information for each vulnerability entity. Possible values are: True, False. Default is True. | Optional |
+| display_host_info | Whether to return host information for each vulnerability entity. Possible values are: True, False. Default is False. | Optional |
+| limit | Maximum number of items to return (1-5000). Default is 50. | Optional |
#### Context Output
| **Path** | **Type** | **Description** |
| --- | --- | --- |
-| CrowdStrike.IOARules.instance_id | String | The IOA Rule's Instance ID. |
-| CrowdStrike.IOARules.customer_id | String | The customer ID. |
-| CrowdStrike.IOARules.action_label | String | The IOA Rule's Action Label. |
-| CrowdStrike.IOARules.comment | String | The IOA Rule's Comment.. |
-| CrowdStrike.IOARules.committed_on | String | The timestamp of the IOA Rule's commitment. |
-| CrowdStrike.IOARules.created_by | String | The IOA Rule's creator. |
-| CrowdStrike.IOARules.created_on | String | The timestamp of the IOA Rule's creation. |
-| CrowdStrike.IOARules.deleted | Boolean | Whether the IOA Rule is in deleted status. |
-| CrowdStrike.IOARules.description | String | The IOA Rule's Description. |
-| CrowdStrike.IOARules.disposition_id | String | The Disposition ID used by the IOA Rule. |
-| CrowdStrike.IOARules.enabled | Boolean | Whether the IOA Rule is enabled. |
-| CrowdStrike.IOARules.field_values | String | The IOA Rule's field values. |
-| CrowdStrike.IOARules.instance_version | String | The IOA Rule's Instance Version. |
-| CrowdStrike.IOARules.magic_cookie | String | The IOA Rule's Magic Cookie. |
-| CrowdStrike.IOARules.modified_by | String | The IOA Rule's last modified user.
-| CrowdStrike.IOARules.modified_on| String | The timestamp of the IOA Rule's last modification. |
-| CrowdStrike.IOARules.name | String | The IOA Rule Name. |
-| CrowdStrike.IOARules.pattern_id | String | The IOA Rule's Pattern ID. |
-| CrowdStrike.IOARules.pattern_severity | String | The IOA Rule's Pattern Severity. |
-| CrowdStrike.IOARules.rulegroup_id | String | The IOA Rule's Rule Group ID. |
-| CrowdStrike.IOARules.ruletype_id | String | The IOA Rule's Rule Type ID. |
-| CrowdStrike.IOARules.ruletype_name | String | The IOA Rule's Rule Type Name. |
-| CrowdStrike.IOARules.version_ids | String | The IOA Rule's Version ID. |
+| CrowdStrike.Vulnerability.id | String | Unique system-assigned ID of the vulnerability. |
+| CrowdStrike.Vulnerability.cid | String | Unique system-generated customer identifier \(CID\) of the account. |
+| CrowdStrike.Vulnerability.aid | String | Unique agent identifier \(AID\) of the sensor where the vulnerability was found. |
+| CrowdStrike.Vulnerability.created_timestamp | Date | UTC date and time of when the vulnerability was created in Spotlight. |
+| CrowdStrike.Vulnerability.updated_timestamp | Date | UTC date and time of the last update made on the vulnerability. |
+| CrowdStrike.Vulnerability.status | String | Vulnerability's current status. Possible values are: open, closed, reopen, or expired. |
+| CrowdStrike.Vulnerability.apps.product_name_version | String | Name and version of the product associated with the vulnerability. |
+| CrowdStrike.Vulnerability.apps.sub_status | String | Status of each product associated with the vulnerability. Possible values are: open, closed, or reopen. |
+| CrowdStrike.Vulnerability.apps.remediation.ids | String | Remediation ID of each product associated with the vulnerability. |
+| CrowdStrike.Vulnerability.host_info.hostname | String | Name of the machine. |
+| CrowdStrike.Vulnerability.host_info.instance_id | String | Cloud instance ID of the host. |
+| CrowdStrike.Vulnerability.host_info.service_provider_account_id | String | Cloud service provider account ID for the host. |
+| CrowdStrike.Vulnerability.host_info.service_provider | String | Cloud service provider for the host. |
+| CrowdStrike.Vulnerability.host_info.os_build | String | Operating system build. |
+| CrowdStrike.Vulnerability.host_info.product_type_desc | String | Type of host a sensor is running on. |
+| CrowdStrike.Vulnerability.host_info.local_ip | String | Device's local IP address. |
+| CrowdStrike.Vulnerability.host_info.machine_domain | String | Active directory domain name. |
+| CrowdStrike.Vulnerability.host_info.os_version | String | Operating system version. |
+| CrowdStrike.Vulnerability.host_info.ou | String | Active directory organizational unit name. |
+| CrowdStrike.Vulnerability.host_info.site_name | String | Active directory site name. |
+| CrowdStrike.Vulnerability.host_info.system_manufacturer | String | Name of the system manufacturer. |
+| CrowdStrike.Vulnerability.host_info.groups.id | String | Array of host group IDs that the host is assigned to. |
+| CrowdStrike.Vulnerability.host_info.groups.name | String | Array of host group names that the host is assigned to. |
+| CrowdStrike.Vulnerability.host_info.tags | String | Name of a tag assigned to a host. |
+| CrowdStrike.Vulnerability.host_info.platform | String | Operating system platform. This filter supports negation. |
+| CrowdStrike.Vulnerability.remediation.entities.id | String | Unique ID of the remediation. |
+| CrowdStrike.Vulnerability.remediation.entities.reference | String | Relevant reference for the remediation that can be used to get additional details for the remediation. |
+| CrowdStrike.Vulnerability.remediation.entities.title | String | Short description of the remediation. |
+| CrowdStrike.Vulnerability.remediation.entities.action | String | Expanded description of the remediation. |
+| CrowdStrike.Vulnerability.remediation.entities.link | String | Link to the remediation page for the vendor. In certain cases, this field is null. |
+| CrowdStrike.Vulnerability.cve.id | String | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database \(NVD\). |
+| CrowdStrike.Vulnerability.cve.base_score | Number | Base score of the CVE \(float value between 1 and 10\). |
+| CrowdStrike.Vulnerability.cve.severity | String | CVSS severity rating of the vulnerability. |
+| CrowdStrike.Vulnerability.cve.exploit_status | Number | Numeric value of the most severe known exploit. |
+| CrowdStrike.Vulnerability.cve.exprt_rating | String | ExPRT rating assigned by CrowdStrike's predictive AI rating system. |
+| CrowdStrike.Vulnerability.cve.description | String | Brief description of the CVE. |
+| CrowdStrike.Vulnerability.cve.published_date | Date | UTC timestamp with the date and time of when the vendor published the CVE. |
+| CrowdStrike.Vulnerability.cve.vendor_advisory | String | Link to the vendor page where the CVE was disclosed. |
+| CrowdStrike.Vulnerability.cve.exploitability_score | Number | Exploitability score of the CVE \(float values from 1-4\). |
+| CrowdStrike.Vulnerability.cve.impact_score | Number | Impact score of the CVE \(float values from 1-6\). |
+| CrowdStrike.Vulnerability.cve.vector | String | Textual representation of the metric values used to score the vulnerability. |
+| CrowdStrike.Vulnerability.cve.remediation_level | String | CVSS remediation level of the vulnerability \(U = Unavailable, or O = Official fix\). |
+| CrowdStrike.Vulnerability.cve.cisa_info.is_cisa_kev | Boolean | Whether the vulnerability is in the CISA Known Exploited Vulnerabilities \(KEV\) catalog. |
+| CrowdStrike.Vulnerability.cve.cisa_info.due_date | Date | Date before which CISA mandates subject organizations to patch the vulnerability. |
+| CrowdStrike.Vulnerability.cve.spotlight_published_date | Date | UTC timestamp with the date and time Spotlight enabled coverage for the vulnerability. |
+| CrowdStrike.Vulnerability.cve.actors | String | Adversaries associated with the vulnerability. |
+| CrowdStrike.Vulnerability.cve.name | String | The vulnerability name. |
-### cs-falcon-resolve-mobile-detection
+#### Command Example
+
+```!cs-falcon-spotlight-search-vulnerability filter=status:['open','closed'] cve_id=CVE-2021-2222 cve_severity='LOW,HIGH' display_host_info=false display_evaluation_logic_info=false display_remediation_info=false limit=1```
+
+#### Context Example
+
+```json
+{
+ "resources": [
+ {
+ "id": "id_num",
+ "cid": "cid_num",
+ "aid": "aid_num",
+ "created_timestamp": "2021-07-13T01:12:57Z",
+ "updated_timestamp": "2022-10-27T18:32:21Z",
+ "status": "open",
+ "apps": [
+ {
+ "product_name_version": "product",
+ "sub_status": "open",
+ "remediation": {
+ "ids": [
+ "1234"
+ ]
+ },
+ "evaluation_logic": {
+ "id": "1234"
+ }
+ }
+ ],
+ "suppression_info": {
+ "is_suppressed": false
+ },
+ "cve": {
+ "id": "CVE-2021-2222",
+ "base_score": 5.5,
+ "severity": "MEDIUM",
+ "exploit_status": 0,
+ "exprt_rating": "LOW",
+ "remediation_level": "O",
+ "cisa_info": {
+ "is_cisa_kev": false
+ },
+ "spotlight_published_date": "2021-05-10T17:08:00Z",
+ "description": "description\n",
+ "published_date": "2021-02-25T23:15:00Z",
+ "vendor_advisory": [
+ "web address"
+ ],
+ "exploitability_score": 1.8,
+ "impact_score": 3.6,
+ "vector": "vendor"
+ }
+ }
+ ]
+}
+```
+#### Human Readable Output
+
+| CVE ID | CVE Severity | CVE Base Score | CVE Published Date | CVE Impact Score | CVE Exploitability Score | CVE Vector |
+| --- | --- | --- | --- | --- | --- | --- |
+| CVE-2021-2222 | LOW | 5.5 | 2021-05-10T17:08:00Z | 3.6 | 0 | vendor |
+
+### cs-falcon-spotlight-list-host-by-vulnerability
***
-Perform actions on mobile detection alerts.
+Retrieve vulnerability details for a specific ID and host. Supported with the CrowdStrike Spotlight license.
#### Base Command
-`cs-falcon-resolve-mobile-detection`
+`cs-falcon-spotlight-list-host-by-vulnerability`
#### Input
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| ids | IDs of the alerts to update. | Required |
-| assign_to_name | Assign the specified detections to a user based on their username. | Optional |
-| assign_to_uuid | Assign the specified detections to a user based on their UUID. | Optional |
-| append_comment | Appends a new comment to any existing comments for the specified detections. | Optional |
-| add_tag | Add a tag to the specified detections. | Optional |
-| remove_tag | Remove a tag from the specified detections. | Optional |
-| update_status | Update status of the alert to the specified value. Possible values are: new, in_progress, closed, reopened. | Optional |
-| unassign | Whether to unassign any assigned users to the specified detections. Possible values are: false, true. | Optional |
-| show_in_ui | If true, displays the detection in the UI. Possible values are: false, true. | Optional |
+| limit | Maximum number of items to return (1-5000). Default is 50. | Optional |
+| cve_ids | A comma-separated list of unique identifiers for vulnerabilities as cataloged in the National Vulnerability Database (NVD). This filter supports multiple values and negation. | Required |
#### Context Output
-There is no context output for this command.
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| CrowdStrike.VulnerabilityHost.id | String | Unique system-assigned ID of the vulnerability. |
+| CrowdStrike.VulnerabilityHost.cid | String | Unique system-generated customer identifier \(CID\) of the account. |
+| CrowdStrike.VulnerabilityHost.aid | String | Unique agent identifier \(AID\) of the sensor where the vulnerability was found. |
+| CrowdStrike.VulnerabilityHost.created_timestamp | Date | UTC date and time of when the vulnerability was created in Spotlight. |
+| CrowdStrike.VulnerabilityHost.updated_timestamp | Date | UTC date and time of the last update made on the vulnerability. |
+| CrowdStrike.VulnerabilityHost.status | String | Vulnerability's current status. Possible values are: open, closed, reopen, or expired. |
+| CrowdStrike.VulnerabilityHost.apps.product_name_version | String | Name and version of the product associated with the vulnerability. |
+| CrowdStrike.VulnerabilityHost.apps.sub_status | String | Status of each product associated with the vulnerability. Possible values are: open, closed, or reopen. |
+| CrowdStrike.VulnerabilityHost.apps.remediation.ids | String | Remediation ID of each product associated with the vulnerability. |
+| CrowdStrike.VulnerabilityHost.apps.evaluation_logic.id | String | Unique system-assigned ID of the vulnerability evaluation logic. |
+| CrowdStrike.VulnerabilityHost.suppression_info.is_suppressed | Boolean | Indicates if the vulnerability is suppressed by a suppression rule. |
+| CrowdStrike.VulnerabilityHost.host_info.hostname | String | Name of the machine. |
+| CrowdStrike.VulnerabilityHost.host_info.local_ip | String | Device's local IP address. |
+| CrowdStrike.VulnerabilityHost.host_info.machine_domain | String | Active directory domain name. |
+| CrowdStrike.VulnerabilityHost.host_info.os_version | String | Operating system version. |
+| CrowdStrike.VulnerabilityHost.host_info.ou | String | Active directory organizational unit name. |
+| CrowdStrike.VulnerabilityHost.host_info.site_name | String | Active directory site name. |
+| CrowdStrike.VulnerabilityHost.host_info.system_manufacturer | String | Name of the system manufacturer. |
+| CrowdStrike.VulnerabilityHost.host_info.platform | String | Operating system platform. This filter supports negation. |
+| CrowdStrike.VulnerabilityHost.host_info.instance_id | String | Cloud instance ID of the host. |
+| CrowdStrike.VulnerabilityHost.host_info.service_provider_account_id | String | Cloud service provider account ID for the host. |
+| CrowdStrike.VulnerabilityHost.host_info.service_provider | String | Cloud service provider for the host. |
+| CrowdStrike.VulnerabilityHost.host_info.os_build | String | Operating system build. |
+| CrowdStrike.VulnerabilityHost.host_info.product_type_desc | String | Type of host a sensor is running on. |
+| CrowdStrike.VulnerabilityHost.cve.id | String | Unique identifier for a vulnerability as cataloged in the National Vulnerability Database \(NVD\). |
-#### Command example
+#### Command Example
-```!cs-falcon-resolve-mobile-detection ids="id_1,id_2" add_tag="Demo tag" append_comment="Demo comment" assign_to_name="morganf" show_in_ui=true update_status=in_progress```
+```!cs-falcon-spotlight-list-host-by-vulnerability cve_ids=CVE-2021-2222```
+
+#### Context Example
+
+```json
+{
+ "id": "id",
+ "cid": "cid",
+ "aid": "aid",
+ "created_timestamp": "2021-09-16T15:12:42Z",
+ "updated_timestamp": "2022-10-19T00:54:43Z",
+ "status": "open",
+ "apps": [
+ {
+ "product_name_version": "prod",
+ "sub_status": "open",
+ "remediation": {
+ "ids": [
+ "id"
+ ]
+ },
+ "evaluation_logic": {
+ "id": "id"
+ }
+ }
+ ],
+ "suppression_info": {
+ "is_suppressed": false
+ },
+ "host_info": {
+ "hostname": "host",
+ "local_ip": "10.128.0.7",
+ "machine_domain": "",
+ "os_version": "version",
+ "ou": "",
+ "site_name": "",
+ "system_manufacturer": "manufactor",
+ "tags": [],
+ "platform": "Windows",
+ "instance_id": "instance id",
+ "service_provider_account_id": "id",
+ "service_provider": "id",
+ "os_build": "os build",
+ "product_type_desc": "Server"
+ },
+ "cve": {
+        "id": "CVE-2021-2222"
+ }
+}
+```
#### Human Readable Output
->Mobile Detection(s) id_1, id_2 were successfully updated
+| CVE ID | Host Info hostname | Host Info os Version | Host Info Product Type Desc | Host Info Local IP | Host Info ou | Host Info Machine Domain | Host Info Site Name | CVE Exploitability Score | CVE Vector |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| CVE-2021-2222 | host | 1 | Server | ip | | | site | 5.5 | |
-### Troubleshooting
+## Troubleshooting
When encountering connectivity or authorization errors within Cortex XSOAR 8,
-it necessary to include the IP corresponding to the relevant region into the CrowdStrike Falcon allow list.
\ No newline at end of file
+it is necessary to include the IP address corresponding to the relevant region in the CrowdStrike Falcon allow list.
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
index 978e52a9bddd..c41f91ca190d 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
@@ -193,6 +193,92 @@
"updated_timestamp": "2023-06-27T09:29:52.448779709Z"
}
+response_mobile_detection = {'agent_id': '111...111',
+ 'agent_load_flags': '0',
+ 'agent_version': '2024.04.4060003',
+ 'aggregate_id': '',
+ 'android_sdk_version': '31',
+ 'bootloader_unlocked': '1',
+ 'bootloader_version': 'slider-1.0-7683913',
+ 'cid': '2222...222',
+ 'composite_id': '1111111111111111111111:ind:22222222222222222222222222222222:33333|4444444444444444444',
+ 'computer_name': 'computer_name',
+ 'confidence': 100,
+ 'config_id_base': 'config_id_base',
+ 'config_id_build': 'config_id_build',
+ 'config_id_platform': 'config_id_platform',
+ 'config_version': '0',
+ 'context_timestamp': '2024-05-30T12:26:34.384Z',
+ 'crawled_timestamp': '2024-05-30T13:26:35.874005623Z',
+ 'created_timestamp': '2024-05-30T12:27:35.879609848Z',
+ 'data_domains': ['Endpoint'],
+ 'description': 'Mobile detection description',
+ 'developer_options_enabled': '1',
+ 'display_name': 'DisplayName',
+ 'enrollment_email': 'test@test.com',
+ 'falcon_app_trusted': True,
+ 'falcon_host_link': 'https://falcon.crowdstrike.com/mobile/detections/1111111111111111111111:ind:22222222222222222222222222222222:33333|4444444444444444444?_cid=1111111111111111111111',
+ 'firmware_build_fingerprint': 'firmware_build_fingerprint',
+ 'firmware_build_time': '2021-09-02T12:01:16.000Z',
+ 'firmware_build_type': 'user',
+ 'fma_version_code': 'fma_version_code',
+ 'id': 'ind:22222222222222222222222222222222:33333|4444444444444444444',
+ 'keystore_check_failed': False,
+ 'keystore_inconclusive': False,
+ 'keystore_insecure': False,
+ 'lock_screen_enabled': '0',
+ 'mobile_brand': 'mobile_brand',
+ 'mobile_design': 'mobile_design',
+ 'mobile_detection_id': '1111111111111111111',
+ 'mobile_hardware': 'mobile_hardware',
+ 'mobile_manufacturer': 'mobile_manufacturer',
+ 'mobile_model': 'mobile_model',
+ 'mobile_product': 'mobile_product',
+ 'mobile_serial': 'unknown',
+ 'name': 'name',
+ 'objective': 'Falcon Detection Method',
+ 'os_integrity_intact': '0',
+ 'os_major_version': '12',
+ 'os_minor_version': '0',
+ 'os_version': 'Android 12',
+ 'pattern_id': 'pattern_id',
+ 'platform': 'Android',
+ 'platform_version': 'platform_version',
+ 'playintegrity_compatibility_failed': False,
+ 'playintegrity_insecure_device': True,
+ 'playintegrity_meets_basic_integrity': False,
+ 'playintegrity_meets_device_integrity': False,
+ 'playintegrity_meets_partial_integrity': False,
+ 'playintegrity_meets_strong_integrity': False,
+ 'playintegrity_only_basic_integrity': False,
+ 'playintegrity_timestamp_expired': False,
+ 'poly_id': 'poly_id',
+ 'product': 'mobile',
+ 'radio_version': 'radio_version',
+ 'safetynet_verify_apps_enabled': '1',
+ 'scenario': 'attacker_methodology',
+ 'seconds_to_resolved': 590841,
+ 'seconds_to_triaged': 591762,
+ 'security_patch_level': '2021-10-05',
+ 'selinux_enforcement_policy': '1',
+ 'severity': 90,
+ 'severity_name': 'Critical',
+ 'show_in_ui': True,
+ 'source_products': ['Falcon for Mobile'],
+ 'source_vendors': ['CrowdStrike'],
+ 'status': 'new',
+ 'storage_encrypted': '1',
+ 'supported_arch': '7',
+ 'tactic': 'Insecure security posture',
+ 'tactic_id': 'CSTA0009',
+ 'technique': 'Bad device settings',
+ 'technique_id': 'CST0024',
+ 'timestamp': '2024-05-30T12:26:34.384Z',
+ 'type': 'mobile-android-attestation',
+ 'updated_timestamp': '2024-06-06T08:57:44.904557373Z',
+ 'user_name': 'test@test.com',
+ 'verified_boot_state': 2}
+
context_idp_detection = {
'name': 'IDP Detection ID: 20879a8064904ecfbb62c118a6a19411:ind:20879a8064904ecfbb62c118a6a19411:C0BB6ACD-8FDC-4CBA-9CF9-EBF3E28B3E56',
'occurred': '2023-04-20T11:12:03.089Z', 'last_updated': '2023-06-27T09:29:52.448779709Z',
@@ -201,6 +287,7 @@
remote_incident_id = 'inc:afb5d1512a00480f53e9ad91dc3e4b55:1cf23a95678a421db810e11b5db693bd'
remote_detection_id = 'ldt:15dbb9d8f06b89fe9f61eb46e829d986:528715079668'
remote_idp_detection_id = '20879a8064904e:ind:20879a8064904ecfbb62c118a6a19411:26DF54C9-8803-4F97-AD22-A725EE820EA9'
+remote_mobile_detection_id = '1111111111111111111'
# remote_id, close_incident, incident_status, detection_status, mirrored_object, entries
get_remote_incident = (remote_incident_id,
@@ -311,6 +398,31 @@
incident_no_status,
]
+# updated_object
+incident_new_status = ({'status': 'New'})
+incident_in_progress_status = ({'status': 'In Progress'})
+incident_reopened_status = ({'status': 'Reopened'})
+check_reopen_set_xsoar_incident_entries_args = [incident_new_status, incident_in_progress_status, incident_reopened_status]
+
+# updated_object
+detection_new_status = ({'status': 'new'})
+detection_in_progress_status = ({'status': 'in_progress'})
+detection_reopened_status = ({'status': 'reopened'})
+detection_true_positive_status = ({'status': 'true_positive'})
+detection_false_positive_status = ({'status': 'false_positive'})
+detection_ignored_status = ({'status': 'ignored'})
+check_reopen_set_xsoar_detections_entries_args = [detection_new_status, detection_in_progress_status, detection_reopened_status,
+ detection_true_positive_status, detection_false_positive_status,
+ detection_ignored_status]
+
+# updated_object
+idp_mobile_detection_new_status = ({'status': 'new'})
+idp_mobile_detection_in_progress_status = ({'status': 'in_progress'})
+idp_mobile_detection_reopened_status = ({'status': 'reopened'})
+idp_mobile_detection_closed_status = ({'status': 'closed'})
+set_xsoar_idp_or_mobile_detection_entries = [idp_mobile_detection_new_status, idp_mobile_detection_in_progress_status,
+ idp_mobile_detection_reopened_status, idp_mobile_detection_closed_status]
+
# updated_object, entry_content, close_incident
detection_closes = ({'status': 'closed'},
'dbotIncidentClose',
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md
new file mode 100644
index 000000000000..879575ce1237
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md
@@ -0,0 +1,8 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+- Added a new parameter **Reopen Statuses** to the integration configuration, representing the CrowdStrike Falcon statuses that will reopen a closed incident in XSOAR.
+- Added the fields **id** and **mobile_detection_id** to the mirroring process of **CrowdStrike Falcon IDP Detection** and **CrowdStrike Falcon Mobile Detection** respectively.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.97242*.
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_11.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_11.md
new file mode 100644
index 000000000000..aa3b91c42392
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_11.md
@@ -0,0 +1,5 @@
+#### Integrations
+
+##### CrowdStrike Falcon
+- Documentation improvements.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.98401*.
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_12.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_12.md
new file mode 100644
index 000000000000..3a8621e6ed8b
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_12.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+Documentation and metadata improvements.
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_13.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_13.md
new file mode 100644
index 000000000000..3a8621e6ed8b
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_13.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+Documentation and metadata improvements.
diff --git a/Packs/CrowdStrikeFalcon/pack_metadata.json b/Packs/CrowdStrikeFalcon/pack_metadata.json
index dcafc8efe580..b2c4a6083455 100644
--- a/Packs/CrowdStrikeFalcon/pack_metadata.json
+++ b/Packs/CrowdStrikeFalcon/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon",
"description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.",
"support": "xsoar",
- "currentVersion": "1.13.9",
+ "currentVersion": "1.13.13",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/CrowdStrikeFalconSandboxV2.yml b/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/CrowdStrikeFalconSandboxV2.yml
index 170dca42ce69..b828cff442cd 100644
--- a/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/CrowdStrikeFalconSandboxV2.yml
+++ b/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/CrowdStrikeFalconSandboxV2.yml
@@ -45,7 +45,7 @@ script:
script: "-"
subtype: python3
type: python
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.14.99865
commands:
- name: cs-falcon-sandbox-scan
polling: true
@@ -2249,7 +2249,7 @@ script:
description: The reason the vendor decided the file was malicious.
type: string
- contextPath: DBotScore.Indicator
- description: The tested indicator.
+ description: The indicator that was tested.
type: string
- contextPath: DBotScore.Type
description: The indicator type.
diff --git a/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/README.md b/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/README.md
index 779ca9155d82..767f141d3d4e 100644
--- a/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/README.md
+++ b/Packs/CrowdStrikeFalconSandbox/Integrations/CrowdStrikeFalconSandboxV2/README.md
@@ -12,9 +12,11 @@ Make sure you have the API key for CrowdStrike Falcon Sandbox v2.
Each API key has an associated authorization level, which determines the available endpoints. By default, all free, non-vetted accounts can issue restricted keys. You can upgrade to full default keys, enabling file submissions and downloads.
### Get an API Key
+
You must have an account with Hybrid-Analysis. If you do not have an account, you can [sign up here.](https://www.hybrid-analysis.com/signup)
Obtain an API key by:
+
1. In your [profile page](https://www.hybrid-analysis.com/my-account?tab=%23api-key-tab), from the top right menu navigate to the **API key** tab.
2. Click the **Create API key** button.
@@ -61,10 +63,12 @@ Obtain an API key by:
The maximum file upload size is 100 MB.
## Commands
+
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
After you successfully execute a command, a DBot message appears in the War Room with the command details.
### cs-falcon-sandbox-scan
+
***
Gets summary information for a given MD5, SHA1, or SHA256 and all the reports generated for any environment ID.
@@ -72,6 +76,7 @@ Gets summary information for a given MD5, SHA1, or SHA256 and all the reports ge
#### Base Command
`cs-falcon-sandbox-scan`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -132,8 +137,11 @@ Gets summary information for a given MD5, SHA1, or SHA256 and all the reports ge
| DBotScore.Score | number | The actual score. |
#### Command example
+
```!cs-falcon-sandbox-scan file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51,9745bd652c50ac081e28981b96f41230c1ed2f84724c1e5b0f0d407a90aefe22```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -932,6 +940,7 @@ Gets summary information for a given MD5, SHA1, or SHA256 and all the reports ge
#### Human Readable Output
>### Scan Results:
+
>|submit name|threat level|verdict|total network connections|total processes|environment description|interesting|environment id|url analysis|analysis start time|total signatures|type|type short|sha256|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| file | 0 | whitelisted | 0 | 0 | Static Analysis | false | | false | 2020-09-15T16:47:06+00:00 | 0 | PDF document, version 1.3 | pdf | 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 |
@@ -943,6 +952,7 @@ Gets summary information for a given MD5, SHA1, or SHA256 and all the reports ge
### cs-falcon-sandbox-get-environments
+
***
Gets a list of all available environments.
@@ -950,6 +960,7 @@ Gets a list of all available environments.
#### Base Command
`cs-falcon-sandbox-get-environments`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -969,8 +980,11 @@ Gets a list of all available environments.
| CrowdStrike.Environment.groupicon | string | The environment icon. |
#### Command example
+
```!cs-falcon-sandbox-get-environments```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -1043,6 +1057,7 @@ Gets a list of all available environments.
#### Human Readable Output
>### Execution Environments:
+
>|_ID|Description|Architecture|Total VMS|Busy VMS|Analysis mode|Group icon|
>|---|---|---|---|---|---|---|
>| 100 | Windows 7 32 bit | WINDOWS | 9223372036854775807 | 9223372036854775807 | KERNELMODE | windows |
@@ -1053,6 +1068,7 @@ Gets a list of all available environments.
### cs-falcon-sandbox-submit-sample
+
***
Submits a file from the investigation to the analysis server.
@@ -1060,6 +1076,7 @@ Submits a file from the investigation to the analysis server.
#### Base Command
`cs-falcon-sandbox-submit-sample`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -1141,6 +1158,7 @@ Submits a file from the investigation to the analysis server.
| DBotScore.Score | number | The actual score. |
### cs-falcon-sandbox-search
+
***
Searches the database using the Falcon Sandbox search syntax.
@@ -1148,6 +1166,7 @@ Searches the database using the Falcon Sandbox search syntax.
#### Base Command
`cs-falcon-sandbox-search`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -1203,8 +1222,11 @@ Searches the database using the Falcon Sandbox search syntax.
| CrowdStrike.Search.result.type_short | String | The short description of the file type. |
#### Command example
+
```!cs-falcon-sandbox-search filename=sample.pdf```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -1950,6 +1972,7 @@ Searches the database using the Falcon Sandbox search syntax.
#### Human Readable Output
>### Search Results:
+
>|Submit Name|Verdict|Vx Family|Threat Score|Sha 256|Size|Environment Id|Type Short|Analysis Start Time|
>|---|---|---|---|---|---|---|---|---|
>| sample.pdf | | | | 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 | 3028 | 300 | pdf | 2022-01-10T08:33:11+00:00 |
@@ -1961,6 +1984,7 @@ Searches the database using the Falcon Sandbox search syntax.
### cs-falcon-sandbox-result
+
***
Retrieves result data on a file. Note: This command returns a file.
@@ -1968,6 +1992,7 @@ Retrieves result data on a file. Note: This command returns a file.
#### Base Command
`cs-falcon-sandbox-result`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2036,6 +2061,7 @@ Retrieves result data on a file. Note: This command returns a file.
| InfoFile.Extension | string | The file extension. |
### cs-falcon-sandbox-submit-url
+
***
Submits a URL for analysis.
@@ -2043,6 +2069,7 @@ Submits a URL for analysis.
#### Base Command
`cs-falcon-sandbox-submit-url`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2125,8 +2152,11 @@ Submits a URL for analysis.
| DBotScore.Score | number | The actual score. |
#### Command example
+
```!cs-falcon-sandbox-submit-url url=example.com environmentID=300```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -2491,12 +2521,14 @@ Submits a URL for analysis.
#### Human Readable Output
>### Scan Results:
+
>|submit name|threat level|verdict|total network connections|total processes|environment description|interesting|url analysis|analysis start time|total signatures|sha256|
>|---|---|---|---|---|---|---|---|---|---|---|
>| http://example.com/ | 1 | suspicious | 0 | 0 | Static Analysis | false | true | 2020-02-03T08:39:15+00:00 | 0 | 0b1d27c7ef8651eac6933608d4cb0a4b9fd74c45b883d5a4da1eeaa540f6cc5c |
### cs-falcon-sandbox-get-screenshots
+
***
Retrieves screenshots from a report
@@ -2504,6 +2536,7 @@ Retrieves screenshots from a report
#### Base Command
`cs-falcon-sandbox-get-screenshots`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2525,8 +2558,11 @@ Retrieves screenshots from a report
| InfoFile.Extension | string | The file extension. |
#### Command example
+
```!cs-falcon-sandbox-get-screenshots file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 environmentID=100```
+
#### Context Example
+
```json
{
"InfoFile": [
@@ -2603,6 +2639,7 @@ Retrieves screenshots from a report
### cs-falcon-sandbox-analysis-overview
+
***
Gets the hash overview.
@@ -2610,6 +2647,7 @@ Gets the hash overview.
#### Base Command
`cs-falcon-sandbox-analysis-overview`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2653,8 +2691,11 @@ Gets the hash overview.
| File.type | string | The file type. |
#### Command example
+
```!cs-falcon-sandbox-analysis-overview file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -2906,12 +2947,14 @@ Gets the hash overview.
#### Human Readable Output
>### Analysis Overview:
+
>|Last File Name|Other File Name|Sha 256|Verdict|Url Analysis|Size|Type|Type Short|
>|---|---|---|---|---|---|---|---|
>| file | 5_Journals_3_Manuscripts_10_Version_1_Revision_0_CoverLetter.pdf,
dyUQ2JAbImyU0WNH7TI1K3UYqUwDMsQBh1RwXWHG.pdf,
k18zpzsrq3om4q1pu18mftdo2caaivqq.pdf,
kuc86odvmimp0vd0tseubdekn9dg41jrff6lso01_parsed.eml,
sample.pdf,
samplePdf.pdf,
test.pdf | 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 | whitelisted | false | 3506 | PDF document, version 1.3 | pdf |
### cs-falcon-sandbox-analysis-overview-summary
+
***
Returns the hash overview.
@@ -2919,6 +2962,7 @@ Returns the hash overview.
#### Base Command
`cs-falcon-sandbox-analysis-overview-summary`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2938,8 +2982,11 @@ Returns the hash overview.
| CrowdStrike.AnalysisOverviewSummary.multiscan_result | Number | The multi-scan result. |
#### Command example
+
```!cs-falcon-sandbox-analysis-overview-summary file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -2958,12 +3005,14 @@ Returns the hash overview.
#### Human Readable Output
>### Analysis Overview Summary:
+
>|Analysis Start Time|Last Multi Scan|Multiscan Result|Sha256|Verdict|
>|---|---|---|---|---|
>| 2022-01-10T08:33:11+00:00 | 2022-02-07T12:52:10+00:00 | 0 | 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 | whitelisted |
### cs-falcon-sandbox-analysis-overview-refresh
+
***
Refreshes the overview and downloads fresh data from external services.
@@ -2971,6 +3020,7 @@ Refreshes the overview and downloads fresh data from external services.
#### Base Command
`cs-falcon-sandbox-analysis-overview-refresh`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -2981,13 +3031,17 @@ Refreshes the overview and downloads fresh data from external services.
#### Context Output
There is no context output for this command.
+
#### Command example
+
```!cs-falcon-sandbox-analysis-overview-refresh file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51```
+
#### Human Readable Output
>The request to refresh the analysis overview was sent successfully.
### file
+
***
Returns file information and reputation.
@@ -2995,6 +3049,7 @@ Returns file information and reputation.
#### Base Command
`file`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -3047,14 +3102,17 @@ Returns file information and reputation.
| File.MalwareFamily | string | The file family classification. |
| File.Malicious.Vendor | string | The vendor that decided the file was malicious. |
| File.Malicious.Description | string | The reason the vendor decided the file was malicious. |
-| DBotScore.Indicator | string | The tested indicator. |
+| DBotScore.Indicator | string | The indicator that was tested. |
| DBotScore.Type | string | The indicator type. |
| DBotScore.Vendor | string | The vendor used to calculate the score. |
| DBotScore.Score | number | The actual score. |
#### Command example
+
```!file file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51```
+
#### Context Example
+
```json
{
"CofenseIntelligence": {
@@ -3739,12 +3797,14 @@ Returns file information and reputation.
#### Human Readable Output
>### Scan Results:
+
>|submit name|threat level|verdict|total network connections|total processes|environment description|interesting|environment id|url analysis|analysis start time|total signatures|type|type short|sha256|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51_1549672910345_sample.pdf | 0 | whitelisted | 0 | 1 | Windows 7 32 bit | false | 100 | false | 2019-02-09T01:41:57+00:00 | 9 | PDF document, version 1.3 | pdf | 8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 |
### cs-falcon-sandbox-sample-download
+
***
Downloads the sample file.
@@ -3752,6 +3812,7 @@ Downloads the sample file.
#### Base Command
`cs-falcon-sandbox-sample-download`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -3774,8 +3835,11 @@ Downloads the sample file.
| File.Extension | String | The file extension. |
#### Command example
+
```!cs-falcon-sandbox-sample-download file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51```
+
#### Context Example
+
```json
{
"File": {
@@ -3796,6 +3860,7 @@ Downloads the sample file.
### cs-falcon-sandbox-report-state
+
***
Gets the report state for the given ID.
@@ -3803,6 +3868,7 @@ Gets the report state for the given ID.
#### Base Command
`cs-falcon-sandbox-report-state`
+
#### Input
| **Argument Name** | **Description** | **Required** |
@@ -3822,8 +3888,11 @@ Gets the report state for the given ID.
| CrowdStrike.State.error | String | The error description. |
#### Command example
+
```!cs-falcon-sandbox-report-state file=8decc8571946d4cd70a024949e033a2a2a54377fe9f1c1b944c20f9ee11a9e51 environmentID=300```
+
#### Context Example
+
```json
{
"CrowdStrike": {
@@ -3841,6 +3910,7 @@ Gets the report state for the given ID.
#### Human Readable Output
>### State
+
>|Error|Error Origin|Error Type|Related Reports|State|
>|---|---|---|---|---|
>| The requested environment ID "300" and file type "pdf" have no available execution environment | CLIENT | FILE_TYPE_BAD_ERROR | | ERROR |
diff --git a/Packs/CrowdStrikeFalconSandbox/ReleaseNotes/2_0_21.md b/Packs/CrowdStrikeFalconSandbox/ReleaseNotes/2_0_21.md
new file mode 100644
index 000000000000..0f11937f8f6d
--- /dev/null
+++ b/Packs/CrowdStrikeFalconSandbox/ReleaseNotes/2_0_21.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon Sandbox v2 (Hybrid-Analysis)
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+- Documentation and metadata improvements.
diff --git a/Packs/CrowdStrikeFalconSandbox/doc_files/CrowdStrike_Falcon_Sandbox_Detonate_file.png b/Packs/CrowdStrikeFalconSandbox/doc_files/CrowdStrike_Falcon_Sandbox_Detonate_file.png
new file mode 100644
index 000000000000..6e411b82050b
Binary files /dev/null and b/Packs/CrowdStrikeFalconSandbox/doc_files/CrowdStrike_Falcon_Sandbox_Detonate_file.png differ
diff --git a/Packs/CrowdStrikeFalconSandbox/doc_files/Detonate_URL_CrowdStrike.png b/Packs/CrowdStrikeFalconSandbox/doc_files/Detonate_URL_CrowdStrike.png
new file mode 100644
index 000000000000..ecf5517c39dd
Binary files /dev/null and b/Packs/CrowdStrikeFalconSandbox/doc_files/Detonate_URL_CrowdStrike.png differ
diff --git a/Packs/CrowdStrikeFalconSandbox/pack_metadata.json b/Packs/CrowdStrikeFalconSandbox/pack_metadata.json
index 446e24f98dcc..0eb63a443ff5 100644
--- a/Packs/CrowdStrikeFalconSandbox/pack_metadata.json
+++ b/Packs/CrowdStrikeFalconSandbox/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon Sandbox",
"description": "Fully automated malware analysis (formerly Payload Security VxStream).",
"support": "xsoar",
- "currentVersion": "2.0.20",
+ "currentVersion": "2.0.21",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CrowdStrikeHost/doc_files/CrowdStrike_Endpoint_Enrichment.png b/Packs/CrowdStrikeHost/doc_files/CrowdStrike_Endpoint_Enrichment.png
new file mode 100644
index 000000000000..e4e90a599548
Binary files /dev/null and b/Packs/CrowdStrikeHost/doc_files/CrowdStrike_Endpoint_Enrichment.png differ
diff --git a/Packs/CrowdStrikeHost/doc_files/Search_Endpoints_By_Hash_CrowdStrike.png b/Packs/CrowdStrikeHost/doc_files/Search_Endpoints_By_Hash_CrowdStrike.png
new file mode 100644
index 000000000000..e3cdf5083036
Binary files /dev/null and b/Packs/CrowdStrikeHost/doc_files/Search_Endpoints_By_Hash_CrowdStrike.png differ
diff --git a/Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.yml b/Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.yml
index fd17e25b4b66..50669cb3184f 100644
--- a/Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.yml
+++ b/Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.yml
@@ -86,7 +86,7 @@ configuration:
advanced: true
required: false
script:
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.14.99865
type: python
subtype: python3
script: '-'
@@ -343,7 +343,7 @@ script:
description: The indicator that was tested.
type: String
- contextPath: DBotScore.Type
- description: The type of the indicator.
+ description: The indicator type.
type: String
- contextPath: DBotScore.Vendor
description: The vendor used to calculate the score.
diff --git a/Packs/CrowdStrikeIntel/ReleaseNotes/2_0_37.md b/Packs/CrowdStrikeIntel/ReleaseNotes/2_0_37.md
new file mode 100644
index 000000000000..d3d68fd38f03
--- /dev/null
+++ b/Packs/CrowdStrikeIntel/ReleaseNotes/2_0_37.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon Intel v2
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
+- Documentation and metadata improvements.
diff --git a/Packs/CrowdStrikeIntel/pack_metadata.json b/Packs/CrowdStrikeIntel/pack_metadata.json
index cd6ef9bbdea1..2ab57a20eb70 100644
--- a/Packs/CrowdStrikeIntel/pack_metadata.json
+++ b/Packs/CrowdStrikeIntel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon Intel",
"description": "Threat intelligence service by CrowdStrike focused on delivering a technical feed to help organizations better defend themselves against adversary activity.",
"support": "xsoar",
- "currentVersion": "2.0.36",
+ "currentVersion": "2.0.37",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CuckooSandbox/doc_files/Detonate_File_Cuckoo.png b/Packs/CuckooSandbox/doc_files/Detonate_File_Cuckoo.png
new file mode 100644
index 000000000000..c722406bc39c
Binary files /dev/null and b/Packs/CuckooSandbox/doc_files/Detonate_File_Cuckoo.png differ
diff --git a/Packs/CuckooSandbox/doc_files/Detonate_URL_Cuckoo.png b/Packs/CuckooSandbox/doc_files/Detonate_URL_Cuckoo.png
new file mode 100644
index 000000000000..7807400f61e2
Binary files /dev/null and b/Packs/CuckooSandbox/doc_files/Detonate_URL_Cuckoo.png differ
diff --git a/Packs/CyCognito/ReleaseNotes/1_0_23.md b/Packs/CyCognito/ReleaseNotes/1_0_23.md
new file mode 100644
index 000000000000..6d2d369e4096
--- /dev/null
+++ b/Packs/CyCognito/ReleaseNotes/1_0_23.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ThreeDigitAlphaCountryCodeToCountryName
+
+- Updated the Docker image to: *demisto/pycountry:1.0.0.96960*.
diff --git a/Packs/CyCognito/Scripts/ThreeDigitAlphaCountryCodeToCountryName/ThreeDigitAlphaCountryCodeToCountryName.yml b/Packs/CyCognito/Scripts/ThreeDigitAlphaCountryCodeToCountryName/ThreeDigitAlphaCountryCodeToCountryName.yml
index 2bfe2d3a072a..3cfdf149176d 100644
--- a/Packs/CyCognito/Scripts/ThreeDigitAlphaCountryCodeToCountryName/ThreeDigitAlphaCountryCodeToCountryName.yml
+++ b/Packs/CyCognito/Scripts/ThreeDigitAlphaCountryCodeToCountryName/ThreeDigitAlphaCountryCodeToCountryName.yml
@@ -10,11 +10,11 @@ comment: Script for converting country names based on 3 letter Alpha codes.
enabled: true
args:
- name: value
- description: Three digit country code
+ description: Three digit country code.
isArray: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/pycountry:1.0.0.36195
+dockerimage: demisto/pycountry:1.0.0.96960
runas: DBotWeakRole
fromversion: 6.2.0
tests:
diff --git a/Packs/CyCognito/pack_metadata.json b/Packs/CyCognito/pack_metadata.json
index fde6ca8ddc1c..3d7fefa84ba1 100644
--- a/Packs/CyCognito/pack_metadata.json
+++ b/Packs/CyCognito/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyCognito",
"description": "Fetches the issues associated with a particular asset from the CyCognito platform.",
"support": "partner",
- "currentVersion": "1.0.22",
+ "currentVersion": "1.0.23",
"author": "CyCognito",
"url": "",
"email": "support@cycognito.com",
diff --git a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
index eb36d6e400b9..75c7ee151011 100644
--- a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
+++ b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
@@ -297,7 +297,8 @@ def get_events_command(client: Client, event_type: str, last_run: dict, limit: i
return events_list, CommandResults(readable_output=human_readable, raw_response=events_list)
-def fetch_events(client: Client, last_run: dict, max_fetch: int = MAX_FETCH) -> tuple[list, dict]:
+def fetch_events(client: Client, last_run: dict, max_fetch: int = MAX_FETCH,
+ enable_admin_audits: bool = False) -> tuple[list, dict]:
""" Fetches 3 types of events from CyberArkEPM
- admin_audits
- policy_audits
@@ -306,16 +307,18 @@ def fetch_events(client: Client, last_run: dict, max_fetch: int = MAX_FETCH) ->
client (Client): CyberArkEPM client to use.
last_run (dict): The last run information.
max_fetch (int): The max events to return per fetch default is 250.
+        enable_admin_audits (bool): Whether to fetch admin audit events. Default is False.
Return:
(list, dict) A list of events to push to XSIAM, A dict with information for next fetch.
"""
events: list = []
demisto.info(f'Start fetching last run: {last_run}')
- for set_id, admin_audits in get_admin_audits(client, last_run, max_fetch).items():
- if admin_audits:
- last_run[set_id]['admin_audits']['from_date'] = prepare_datetime(admin_audits[-1].get('EventTime'), increase=True)
- events.extend(admin_audits)
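+    # Admin audit events are optional; collect them only when the 'Enable admin audits events' integration parameter is set.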
+ if enable_admin_audits:
+ for set_id, admin_audits in get_admin_audits(client, last_run, max_fetch).items():
+ if admin_audits:
+ last_run[set_id]['admin_audits']['from_date'] = prepare_datetime(admin_audits[-1].get('EventTime'), increase=True)
+ events.extend(admin_audits)
for set_id, policy_audits_last_run in get_events(client.get_policy_audits, 'policy_audits', last_run, max_fetch).items():
if policy_audits := policy_audits_last_run.get('events', []):
@@ -362,6 +365,7 @@ def main(): # pragma: no cover
username = params.get('credentials').get('identifier')
password = params.get('credentials').get('password')
set_names = argToList(params.get('set_name'))
+ enable_admin_audits = argToBoolean(params.get('enable_admin_audits', False))
policy_audits_event_type = argToList(params.get('policy_audits_event_type'))
raw_events_event_type = argToList(params.get('raw_events_event_type'))
verify_certificate = not params.get('insecure', False)
@@ -420,7 +424,7 @@ def main(): # pragma: no cover
return_results(command_result)
elif command in 'fetch-events':
- events, next_run = fetch_events(client, last_run, max_fetch) # type: ignore
+ events, next_run = fetch_events(client, last_run, max_fetch, enable_admin_audits) # type: ignore
send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT)
demisto.setLastRun(next_run)
diff --git a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.yml b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.yml
index a9e8f07f53a8..40af060142b8 100644
--- a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.yml
+++ b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.yml
@@ -61,6 +61,12 @@ configuration:
required: false
type: 0
section: Collect
+- defaultvalue: 'false'
+ display: Enable admin audits events
+ name: enable_admin_audits
+ required: false
+ type: 8
+ section: Collect
- display: Policy Audit Events
additionalinfo: A comma-separated list of policy audits event type to fetch
name: policy_audits_event_type
diff --git a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector_test.py b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector_test.py
index ecef772ff77e..158d4e60d88c 100644
--- a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector_test.py
+++ b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector_test.py
@@ -212,7 +212,7 @@ def test_fetch_events(requests_mock):
"""
from CyberArkEPMEventCollector import create_last_run, fetch_events
last_run = create_last_run(['id1', 'id2'], '2023-01-01T00:00:00Z')
- events, next_run = fetch_events(mocked_client(requests_mock), last_run, 10)
+ events, next_run = fetch_events(mocked_client(requests_mock), last_run, 10, True)
assert len(events) == 18
assert next_run['id1'] == next_run['id2'] == {
diff --git a/Packs/CyberArkEPM/ReleaseNotes/1_0_2.md b/Packs/CyberArkEPM/ReleaseNotes/1_0_2.md
new file mode 100644
index 000000000000..c3b16ba19ea1
--- /dev/null
+++ b/Packs/CyberArkEPM/ReleaseNotes/1_0_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CyberArk EPM Event Collector
+
+Added a new instance parameter, *Enable admin audits events*, to enable or disable fetching admin audits.
diff --git a/Packs/CyberArkEPM/pack_metadata.json b/Packs/CyberArkEPM/pack_metadata.json
index 8f3b28dd26c6..253a9e2a7786 100644
--- a/Packs/CyberArkEPM/pack_metadata.json
+++ b/Packs/CyberArkEPM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyberArk EPM",
"description": "Endpoint Privilege Manager helps remove local admin rights while improving user experience and optimizing IT operations.",
"support": "xsoar",
- "currentVersion": "1.0.1",
+ "currentVersion": "1.0.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CyberArkIdentity/Integrations/CyberArkIdentityEventCollector/CyberArkIdentityEventCollector.yml b/Packs/CyberArkIdentity/Integrations/CyberArkIdentityEventCollector/CyberArkIdentityEventCollector.yml
index 5c017c7f966c..4a9c8acf1b6d 100644
--- a/Packs/CyberArkIdentity/Integrations/CyberArkIdentityEventCollector/CyberArkIdentityEventCollector.yml
+++ b/Packs/CyberArkIdentity/Integrations/CyberArkIdentityEventCollector/CyberArkIdentityEventCollector.yml
@@ -76,7 +76,7 @@ script:
isfetchevents: true
type: python
subtype: python3
- dockerimage: demisto/fastapi:1.0.0.70530
+ dockerimage: demisto/py3-tools:1.0.0.94051
marketplaces:
- marketplacev2
fromversion: 6.8.0
diff --git a/Packs/CyberArkIdentity/ReleaseNotes/1_1_0.md b/Packs/CyberArkIdentity/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..d57f2688641a
--- /dev/null
+++ b/Packs/CyberArkIdentity/ReleaseNotes/1_1_0.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### CyberArk Identity Event Collector
+
+- Changed the Docker image to `py3-tools`.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.94051*.
diff --git a/Packs/CyberArkIdentity/pack_metadata.json b/Packs/CyberArkIdentity/pack_metadata.json
index b83c75542b8d..10e33626820f 100644
--- a/Packs/CyberArkIdentity/pack_metadata.json
+++ b/Packs/CyberArkIdentity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyberArk Identity",
"description": "This integration collects events from the Idaptive Next-Gen Access (INGA) using REST APIs.",
"support": "xsoar",
- "currentVersion": "1.0.31",
+ "currentVersion": "1.1.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml b/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml
index 426d886033ca..ed07bf7f3397 100644
--- a/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml
+++ b/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml
@@ -53,7 +53,7 @@ script:
- contextPath: CyberChef.Magic
description: Output of the Magic operation
type: string
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: ''
subtype: python3
diff --git a/Packs/CyberChef/ReleaseNotes/1_0_5.md b/Packs/CyberChef/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..0c7c9d0866b4
--- /dev/null
+++ b/Packs/CyberChef/ReleaseNotes/1_0_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CyberChef
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/CyberChef/doc_files/138084891-3509076f-3491-4eab-b280-1707d2227d08.png b/Packs/CyberChef/doc_files/138084891-3509076f-3491-4eab-b280-1707d2227d08.png
new file mode 100644
index 000000000000..1fba1f94704f
Binary files /dev/null and b/Packs/CyberChef/doc_files/138084891-3509076f-3491-4eab-b280-1707d2227d08.png differ
diff --git a/Packs/CyberChef/doc_files/138084951-8e8225a5-50d5-42df-904f-9c9d0981767a.png b/Packs/CyberChef/doc_files/138084951-8e8225a5-50d5-42df-904f-9c9d0981767a.png
new file mode 100644
index 000000000000..963e30560e64
Binary files /dev/null and b/Packs/CyberChef/doc_files/138084951-8e8225a5-50d5-42df-904f-9c9d0981767a.png differ
diff --git a/Packs/CyberChef/pack_metadata.json b/Packs/CyberChef/pack_metadata.json
index 114d88e3f15c..351591dc44ed 100644
--- a/Packs/CyberChef/pack_metadata.json
+++ b/Packs/CyberChef/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyberChef",
"description": "Integration with your CyberChef server or https://prod.apifor.io service for CyberChef.",
"support": "community",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Harri Ruuttila",
"url": "",
"email": "",
diff --git a/Packs/CyberX-CentralManager/Integrations/CyberXCentralManager/CyberXCentralManager.yml b/Packs/CyberX-CentralManager/Integrations/CyberXCentralManager/CyberXCentralManager.yml
index e4f37d626921..309872680250 100644
--- a/Packs/CyberX-CentralManager/Integrations/CyberXCentralManager/CyberXCentralManager.yml
+++ b/Packs/CyberX-CentralManager/Integrations/CyberXCentralManager/CyberXCentralManager.yml
@@ -12,7 +12,7 @@ configuration:
name: Api-Token
required: true
type: 4
-- defaultvalue: "false"
+- defaultvalue: 'false'
display: Use system proxy settings
name: proxy
type: 8
@@ -39,7 +39,7 @@ script:
required: true
description: Updating the alert
name: cyberx-update-alert
- dockerimage: demisto/python3-deb:3.9.1.15758
+ dockerimage: demisto/python3-deb:3.11.9.102626
script: ''
subtype: python3
type: python
diff --git a/Packs/CyberX-CentralManager/ReleaseNotes/1_0_5.md b/Packs/CyberX-CentralManager/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..49ee9816175d
--- /dev/null
+++ b/Packs/CyberX-CentralManager/ReleaseNotes/1_0_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CyberX - Central Manager
+
+- Updated the Docker image to: *demisto/python3-deb:3.11.9.102626*.
diff --git a/Packs/CyberX-CentralManager/pack_metadata.json b/Packs/CyberX-CentralManager/pack_metadata.json
index e51f22c24aff..bffb2e126f87 100644
--- a/Packs/CyberX-CentralManager/pack_metadata.json
+++ b/Packs/CyberX-CentralManager/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyberX - Central Manager",
"description": "This is a small integration which is able to update alerts inside of Cyber X.",
"support": "community",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Konrad Zacharias",
"url": "",
"email": "",
@@ -17,4 +17,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/Cybereason/doc_files/Search_Endpoints_By_Hash_-_Cybereason.png b/Packs/Cybereason/doc_files/Search_Endpoints_By_Hash_-_Cybereason.png
new file mode 100644
index 000000000000..57363c282ca1
Binary files /dev/null and b/Packs/Cybereason/doc_files/Search_Endpoints_By_Hash_-_Cybereason.png differ
diff --git a/Packs/CybleEventsV2/doc_files/Author_image.png b/Packs/CybleEventsV2/doc_files/Author_image.png
new file mode 100644
index 000000000000..07d9f70c5a8f
Binary files /dev/null and b/Packs/CybleEventsV2/doc_files/Author_image.png differ
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
index b5d4f2bf1def..9be1544361ce 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
@@ -13,6 +13,7 @@
from dateutil import parser
from typing import *
+
# Disable insecure warnings
urllib3.disable_warnings()
@@ -86,46 +87,45 @@ def build_indicators(self, args: Dict[str, Any], data: list):
indicator_obj = {
"service": "Cyble Feed"
}
- for eachtype in FeedIndicatorType.list_all_supported_indicators():
- if eachtype.lower() in args.get('collection').lower(): # type: ignore
- indicator_obj['type'] = eachtype
- break
multi_data = True
try:
- data = self.get_recursively(eachres['indicators'][0]['observable'], 'value')
- if not data:
- data = self.get_recursively(eachres['indicators'][0]['observable'], 'address_value')
+ data_r = self.get_recursively(eachres['indicators'][0]['observable'], 'value')
+ if not data_r:
+ data_r = self.get_recursively(eachres['indicators'][0]['observable'], 'address_value')
except Exception:
try:
- data = self.get_recursively(eachres['observables']['observables'][0], 'value')
+ data_r = self.get_recursively(eachres['observables']['observables'][0], 'value')
except Exception:
demisto.debug(f'Found indicator without observable field: {eachres}')
continue
+ if not data_r:
+ continue
+
if multi_data:
ind_val = {}
- for eachindicator in data:
+ for eachindicator in data_r:
typeval = auto_detect_indicator_type(eachindicator)
indicator_obj['type'] = typeval
if typeval:
ind_val[typeval] = eachindicator
- if len(data) == 1:
- indicator_obj['value'] = str(data[0])
+ if len(data_r) == 1:
+ indicator_obj['value'] = str(data_r[0])
elif indicator_obj['type'] in list(ind_val.keys()):
indicator_obj['value'] = str(ind_val[indicator_obj['type']])
elif len(ind_val) != 0:
indicator_obj['type'] = list(ind_val.keys())[0]
indicator_obj['value'] = ind_val[list(ind_val.keys())[0]]
- #
+
if eachres.get('indicators'):
- for eachindicator in eachres.get('indicators'):
- indicator_obj['title'] = eachindicator.get('title')
- indicator_obj['time'] = eachindicator.get('timestamp')
+ ind_content = eachres.get('indicators')
else:
- for eachindicator in eachres.get('ttps').get('ttps'):
- indicator_obj['title'] = eachindicator.get('title')
- indicator_obj['time'] = eachindicator.get('timestamp')
+ ind_content = eachres.get('ttps').get('ttps')
+
+ for eachindicator in ind_content:
+ indicator_obj['title'] = eachindicator.get('title')
+ indicator_obj['time'] = eachindicator.get('timestamp')
indicator_obj['rawJSON'] = eachres
indicators.append(indicator_obj)
@@ -155,6 +155,10 @@ def get_taxii(self, args: Dict[str, Any], is_first_fetch: bool = False):
count = 0
try:
+
+ if 'begin' not in args or 'end' not in args:
+ raise ValueError("Last fetch time retrieval failed.")
+
for data in self.fetch(args.get('begin'), args.get('end'), args.get('collection')):
skip = False
response = self.parse_to_json(data)
@@ -164,7 +168,7 @@ def get_taxii(self, args: Dict[str, Any], is_first_fetch: bool = False):
elif response.get('ttps') or False:
content = response.get('ttps').get('ttps')
else:
- raise ValueError("Last fetch time retrieval failed.")
+ continue
for eachone in content:
if eachone.get('confidence'):
@@ -286,7 +290,6 @@ def fetch_indicators(client: Client):
'''
args = {}
last_run = demisto.getLastRun()
- is_first_fetch = None
if isinstance(last_run, dict):
last_fetch_time = last_run.get('lastRun_{}'.format(client.collection_name), None)
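
As a reading aid (again, not part of the patch), the Cyble change above amounts to: extract observable values defensively, skip entries that yield nothing, and let `auto_detect_indicator_type` pick the type per value rather than deriving it from the collection name. A minimal sketch of that flow, where `extract_values` and `detect_type` are hypothetical stand-ins for the integration's `get_recursively` and `auto_detect_indicator_type` calls:

```python
# Sketch only: defensive indicator extraction. extract_values() and detect_type()
# are hypothetical stand-ins, not names from the integration.
def build_indicator_values(raw_entry: dict, extract_values, detect_type) -> list[dict]:
    values = extract_values(raw_entry) or []
    if not values:  # nothing usable -> skip instead of failing later
        return []
    indicators = []
    for value in values:
        ind_type = detect_type(value)  # e.g. auto_detect_indicator_type
        if ind_type:
            indicators.append({'type': ind_type, 'value': str(value), 'rawJSON': raw_entry})
    return indicators
```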
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
index 0c28ed4f7e60..a59e1b6f49bb 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
@@ -136,7 +136,7 @@ script:
- contextPath: CybleIntel.collection.names
description: Available collection names for the feed service.
description: Get the data feed collection names for the taxii feed.
- dockerimage: demisto/taxii-server:1.0.0.87636
+ dockerimage: demisto/taxii-server:1.0.0.96806
subtype: python3
feed: true
fromversion: 6.2.0
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
index 7d2a973bd080..5a4a2a8085cb 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
@@ -5,6 +5,7 @@
input_value = json.load(open("test_data/input.json", "r"))
params = input_value['params']
args = input_value['args']
+args2 = input_value['args2']
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S+00:00"
@@ -74,7 +75,7 @@ def test_get_taxii_invalid(mocker, capfd):
mocker.patch.object(client, 'fetch', return_value=[mock_response_1])
with capfd.disabled():
try:
- val, time = Client.get_taxii(client, args)
+ val, time = Client.get_taxii(client, args2)
except Exception as e:
error_val = e.args[0]
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
index 54c35943c8ab..00bc231fd569 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
@@ -12,11 +12,16 @@
"initial_interval": 2,
"limit": 5
},
- "args": {
+ "args": {
"limit": 5,
"begin": "2022-06-11 00:00:00",
"end": "2022-06-13 00:00:00",
"collection": "phishing_url1",
"override_limit": "True"
+ },
+ "args2": {
+ "limit": 5,
+ "collection": "phishing_url1",
+ "override_limit": "True"
}
}
\ No newline at end of file
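
The new `args2` fixture above (no `begin`/`end` keys) exercises the guard added to `get_taxii`, which now raises immediately when the fetch window is missing instead of failing deeper in the loop. A compact, hypothetical pytest illustration of that test shape, not copied from the patch:

```python
import pytest

def require_window(args: dict) -> None:
    # Mirrors the guard added in get_taxii(): both window bounds must be present.
    if 'begin' not in args or 'end' not in args:
        raise ValueError("Last fetch time retrieval failed.")

def test_missing_window_raises():
    with pytest.raises(ValueError, match="Last fetch time retrieval failed."):
        require_window({"limit": 5, "collection": "phishing_url1"})
```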
diff --git a/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md b/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md
new file mode 100644
index 000000000000..cb85f776836e
--- /dev/null
+++ b/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md
@@ -0,0 +1,4 @@
+#### Integrations
+##### Cyble Threat Intel
+- Fixed an issue where the **fetch-indicators** command failed when encountering improperly structured indicators.
+- Updated the Docker image to: *demisto/taxii-server:1.0.0.96806*.
\ No newline at end of file
diff --git a/Packs/CybleThreatIntel/pack_metadata.json b/Packs/CybleThreatIntel/pack_metadata.json
index d9f87469412c..31501c6564e9 100644
--- a/Packs/CybleThreatIntel/pack_metadata.json
+++ b/Packs/CybleThreatIntel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cyble Threat Intel",
"description": "Cyble Threat Intelligence for Vision Users. Must have access to Vision Taxii feed to access the threat intelligence.",
"support": "partner",
- "currentVersion": "2.0.22",
+ "currentVersion": "2.0.23",
"author": "Cyble Infosec",
"url": "https://cyble.com",
"email": "",
diff --git a/Packs/Cylance_Protect/doc_files/Endpoint_Enrichment_Cylance_Protect_v2.png b/Packs/Cylance_Protect/doc_files/Endpoint_Enrichment_Cylance_Protect_v2.png
new file mode 100644
index 000000000000..1ab6d69019a4
Binary files /dev/null and b/Packs/Cylance_Protect/doc_files/Endpoint_Enrichment_Cylance_Protect_v2.png differ
diff --git a/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect.png b/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect.png
new file mode 100644
index 000000000000..06b950660561
Binary files /dev/null and b/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect.png differ
diff --git a/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect_v2.png b/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect_v2.png
new file mode 100644
index 000000000000..828d3ac6692a
Binary files /dev/null and b/Packs/Cylance_Protect/doc_files/Get_File_Sample_By_Hash_Cylance_Protect_v2.png differ
diff --git a/Packs/Cymulate/.pack-ignore b/Packs/Cymulate/.pack-ignore
index 63500f45a60b..38ffece6b8e4 100644
--- a/Packs/Cymulate/.pack-ignore
+++ b/Packs/Cymulate/.pack-ignore
@@ -8,7 +8,7 @@ ignore=BA101
ignore=BA101
[file:Cymulate_v2.yml]
-ignore=BA108,BA109,IN124
+ignore=BA108,BA109
[file:classifier-mapper-incoming-Cymulate.json]
ignore=BA101
diff --git a/Packs/Cymulate/Integrations/Cymulate_v2/Cymulate_v2.yml b/Packs/Cymulate/Integrations/Cymulate_v2/Cymulate_v2.yml
index d00a09ddb521..72f636c00776 100644
--- a/Packs/Cymulate/Integrations/Cymulate_v2/Cymulate_v2.yml
+++ b/Packs/Cymulate/Integrations/Cymulate_v2/Cymulate_v2.yml
@@ -356,13 +356,13 @@ script:
description: Assessment categories.
type: String
- arguments:
- - description: Browsing address
+ - description: Browsing address.
name: browsing_address
- description: Browsing Agent profile name to run the assessment with.
name: browsing_address_profile_name
- description: Agent email address.
name: mail_address
- - description: EDR address
+ - description: EDR address.
name: edr_address
- description: EDR Agent profile name to run the assessment with.
name: edr_address_profile_name
@@ -567,7 +567,7 @@ script:
name: cymulate-simulations-list
outputs:
- contextPath: Cymulate.Simulations.Attack_Type
- description: Attack payload
+ description: Attack payload.
type: String
- contextPath: Cymulate.Simulations.Classification
description: Attack classification.
@@ -585,22 +585,22 @@ script:
description: Attack name.
type: String
- contextPath: Cymulate.Simulations.Status
- description: Attack status
+ description: Attack status.
type: String
- contextPath: Cymulate.Simulations.PrevStatus
- description: Attack Previous status
+ description: Attack Previous status.
type: String
- contextPath: Cymulate.Simulations.Risk
description: Attack risk level.
type: String
- contextPath: Cymulate.Simulations.Source
- description: Attack Source
+ description: Attack Source.
type: String
- contextPath: Cymulate.Simulations.User
    description: User who committed the attack or was attacked.
type: String
- contextPath: Cymulate.Simulations.Attack_Vector
- description: Attack vector
+ description: Attack vector.
type: String
- contextPath: Cymulate.Simulations.Source_Email_Address
description: Source email address.
@@ -621,7 +621,7 @@ script:
description: Mitigation details.
type: String
- contextPath: Cymulate.Simulations.Description
- description: Attack description
+ description: Attack description.
type: String
- contextPath: Cymulate.Simulations.Id
description: Attack ID.
@@ -653,7 +653,7 @@ script:
description: Attack ID.
type: String
- contextPath: Cymulate.Simulations.Timestamp
- description: Attack timestamp
+ description: Attack timestamp.
type: String
- contextPath: Cymulate.Simulations.Agent
description: Agent connected to the attack.
@@ -661,7 +661,7 @@ script:
- contextPath: Cymulate.Simulations.Template
description: Attack template.
type: String
- dockerimage: demisto/python3:3.10.13.72123
+ dockerimage: demisto/python3:3.10.14.99865
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Cymulate/ReleaseNotes/2_0_26.md b/Packs/Cymulate/ReleaseNotes/2_0_26.md
new file mode 100644
index 000000000000..3cbd39647e0a
--- /dev/null
+++ b/Packs/Cymulate/ReleaseNotes/2_0_26.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cymulate v2
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/Cymulate/pack_metadata.json b/Packs/Cymulate/pack_metadata.json
index 7b61c88ae7a1..9bb5eda2ffca 100644
--- a/Packs/Cymulate/pack_metadata.json
+++ b/Packs/Cymulate/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cymulate",
"description": "You can now verify your security posture on-demand using the Cymulate integration, which allows you to launch simulations of cyberattacks, breach, and attacks against yourself",
"support": "partner",
- "currentVersion": "2.0.25",
+ "currentVersion": "2.0.26",
"author": "Cymulate",
"url": "",
"email": "support@cymulate.com",
diff --git a/Packs/D2/doc_files/D2_Endpoint_data_collection.png b/Packs/D2/doc_files/D2_Endpoint_data_collection.png
new file mode 100644
index 000000000000..1f384a514bd0
Binary files /dev/null and b/Packs/D2/doc_files/D2_Endpoint_data_collection.png differ
diff --git a/Packs/D2/doc_files/Get_File_Sample_From_Path_D2.png b/Packs/D2/doc_files/Get_File_Sample_From_Path_D2.png
new file mode 100644
index 000000000000..65d0abe8264e
Binary files /dev/null and b/Packs/D2/doc_files/Get_File_Sample_From_Path_D2.png differ
diff --git a/Packs/DB2/Integrations/DB2/DB2.yml b/Packs/DB2/Integrations/DB2/DB2.yml
index da6b015cad1d..22fce5ada98e 100644
--- a/Packs/DB2/Integrations/DB2/DB2.yml
+++ b/Packs/DB2/Integrations/DB2/DB2.yml
@@ -40,10 +40,10 @@ script:
- description: The DB2 query to run
name: query
required: true
- - defaultValue: "50"
+ - defaultValue: '50'
description: The maximum number of results
name: limit
- - defaultValue: "0"
+ - defaultValue: '0'
description: The offset at which to start the result
name: offset
- description: A comma separated list of names which will be replaced in query having ':'.
@@ -52,7 +52,7 @@ script:
name: bind_variables_values
description: Running a DB2 query command
name: db2-query
- dockerimage: demisto/ibm-db2:1.0.0.27972
+ dockerimage: demisto/ibm-db2:1.0.0.100241
script: ''
subtype: python3
type: python
diff --git a/Packs/DB2/ReleaseNotes/1_0_1.md b/Packs/DB2/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..f74c72a97903
--- /dev/null
+++ b/Packs/DB2/ReleaseNotes/1_0_1.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### DB2
+
+- Updated the Docker image to: *demisto/ibm-db2:1.0.0.100241*.
diff --git a/Packs/DB2/pack_metadata.json b/Packs/DB2/pack_metadata.json
index c64fb1091692..6f7265d1bf5b 100644
--- a/Packs/DB2/pack_metadata.json
+++ b/Packs/DB2/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DB2",
"description": "This pack's purpose is to provide helpful commands to build a connectivity and run queries on IBM DB2 database.",
"support": "community",
- "currentVersion": "1.0.0",
+ "currentVersion": "1.0.1",
"author": "Shubham Kaushik",
"url": "",
"email": "its.shubham.kaushik@gmail.com",
diff --git a/Packs/DBotTruthBombs/doc_files/dbot4.png b/Packs/DBotTruthBombs/doc_files/dbot4.png
new file mode 100644
index 000000000000..95af3c8d19f9
Binary files /dev/null and b/Packs/DBotTruthBombs/doc_files/dbot4.png differ
diff --git a/Packs/DNSOverHttps/Integrations/DNSOverHttps/DNSOverHttps.yml b/Packs/DNSOverHttps/Integrations/DNSOverHttps/DNSOverHttps.yml
index 4303cee81de8..0ae9d03d6e7a 100644
--- a/Packs/DNSOverHttps/Integrations/DNSOverHttps/DNSOverHttps.yml
+++ b/Packs/DNSOverHttps/Integrations/DNSOverHttps/DNSOverHttps.yml
@@ -41,19 +41,19 @@ script:
- DNSKEY
- NS
- auto: PREDEFINED
- defaultValue: "True"
+ defaultValue: 'True'
description: If you only want to return the answers
name: only_answers
predefined:
- - "True"
- - "False"
+ - 'True'
+ - 'False'
description: Resolve a name to IP over HTTPS
name: doh-resolve
outputs:
- contextPath: DNSoOverHTTPS.Results
description: DNS query results
type: list
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
script: ''
subtype: python3
type: python
diff --git a/Packs/DNSOverHttps/ReleaseNotes/1_0_5.md b/Packs/DNSOverHttps/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..1a07431acfb7
--- /dev/null
+++ b/Packs/DNSOverHttps/ReleaseNotes/1_0_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### DNSOverHttps
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/DNSOverHttps/pack_metadata.json b/Packs/DNSOverHttps/pack_metadata.json
index d74b878cc1ce..eec3dc6c2202 100644
--- a/Packs/DNSOverHttps/pack_metadata.json
+++ b/Packs/DNSOverHttps/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DNSOverHttps",
"description": "Use this pack to make DNS queries over HTTPS to Cloudflare or Google DoH service.",
"support": "community",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Harri Ruuttila",
"url": "",
"email": "",
diff --git a/Packs/Darktrace/doc_files/Handle_Darktrace_Model_Breach_Wed_Dec_02_2020.png b/Packs/Darktrace/doc_files/Handle_Darktrace_Model_Breach_Wed_Dec_02_2020.png
new file mode 100644
index 000000000000..d81748adab32
Binary files /dev/null and b/Packs/Darktrace/doc_files/Handle_Darktrace_Model_Breach_Wed_Dec_02_2020.png differ
diff --git a/Packs/DarktraceASM/.pack-ignore b/Packs/DarktraceASM/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/DarktraceASM/.secrets-ignore b/Packs/DarktraceASM/.secrets-ignore
new file mode 100644
index 000000000000..52d7da2bdabd
--- /dev/null
+++ b/Packs/DarktraceASM/.secrets-ignore
@@ -0,0 +1,4 @@
+https://cheatsheetseries.owasp.org
+https://soar.monstersofhack.com
+104.16.61.2
+104.16.60.2
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Author_image.png b/Packs/DarktraceASM/Author_image.png
new file mode 100644
index 000000000000..b6bf14634873
Binary files /dev/null and b/Packs/DarktraceASM/Author_image.png differ
diff --git a/Packs/DarktraceASM/Classifiers/classifier-DarktraceASM_Risk_Incoming_Mapper.json b/Packs/DarktraceASM/Classifiers/classifier-DarktraceASM_Risk_Incoming_Mapper.json
new file mode 100644
index 000000000000..8f7c6c62cc06
--- /dev/null
+++ b/Packs/DarktraceASM/Classifiers/classifier-DarktraceASM_Risk_Incoming_Mapper.json
@@ -0,0 +1,182 @@
+{
+ "defaultIncidentType": "",
+ "definitionId": "",
+ "description": "",
+ "feed": false,
+ "mapping": {
+ "Darktrace ASM Risk": {
+ "dontMapEventToLabels": true,
+ "internalMapping": {
+ "DarktraceASM Asset Brand": {
+ "complex": {
+ "accessor": "brand",
+ "filters": [],
+ "root": "asset",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Asset Creation Time": {
+ "complex": {
+ "filters": [],
+ "root": "asset.createdAt",
+ "transformers": [
+ {
+ "args": {
+ "format": {
+ "value": {
+ "simple": "02 Jan 2006 15:04 MST"
+ }
+ }
+ },
+ "operator": "toString"
+ }
+ ]
+ }
+ },
+ "DarktraceASM Asset ID": {
+ "complex": {
+ "accessor": "id",
+ "filters": [],
+ "root": "asset",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Asset Security Rating": {
+ "complex": {
+ "accessor": "securityrating",
+ "filters": [],
+ "root": "asset",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Asset State": {
+ "complex": {
+ "accessor": "state",
+ "filters": [],
+ "root": "asset",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Asset Updated Time": {
+ "complex": {
+ "filters": [],
+ "root": "asset.updatedAt",
+ "transformers": [
+ {
+ "args": {
+ "format": {
+ "value": {
+ "simple": "02 Jan 2006 15:04 MST"
+ }
+ }
+ },
+ "operator": "toString"
+ }
+ ]
+ }
+ },
+ "DarktraceASM Asset is Malicious": {
+ "complex": {
+ "filters": [],
+ "root": "asset.isMalicious",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk Description": {
+ "complex": {
+ "filters": [],
+ "root": "description",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk End Time": {
+ "complex": {
+ "filters": [],
+ "root": "endedAt",
+ "transformers": [
+ {
+ "args": {
+ "format": {
+ "value": {
+ "simple": "02 Jan 2006 15:04 MST"
+ }
+ }
+ },
+ "operator": "toString"
+ }
+ ]
+ }
+ },
+ "DarktraceASM Risk Evidence": {
+ "complex": {
+ "filters": [],
+ "root": "evidence",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk ID": {
+ "complex": {
+ "filters": [],
+ "root": "id",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk Proposed Action": {
+ "complex": {
+ "filters": [],
+ "root": "proposedAction",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk Security Rating": {
+ "complex": {
+ "filters": [],
+ "root": "securityRating",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk Start Time": {
+ "complex": {
+ "filters": [],
+ "root": "startedAt",
+ "transformers": [
+ {
+ "args": {
+ "format": {
+ "value": {
+ "simple": "02 Jan 2006 15:04 MST"
+ }
+ }
+ },
+ "operator": "toString"
+ }
+ ]
+ }
+ },
+ "DarktraceASM Risk Title": {
+ "complex": {
+ "filters": [],
+ "root": "title",
+ "transformers": []
+ }
+ },
+ "DarktraceASM Risk Type": {
+ "complex": {
+ "filters": [],
+ "root": "type",
+ "transformers": []
+ }
+ }
+ }
+ },
+ "dbot_classification_incident_type_all": {
+ "dontMapEventToLabels": false,
+ "internalMapping": {}
+ }
+ },
+ "name": "Darktrace ASM Risk Mapper",
+ "id": "Darktrace ASM Risk Mapper",
+ "type": "mapping-incoming",
+ "fromVersion": "6.6.0",
+ "version": -1
+}
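
A side note on the `format` value repeated throughout the mapper above: `02 Jan 2006 15:04 MST` appears to be a Go-style reference-time layout (the pattern is written as that specific reference date rather than with `%` codes), which is how XSOAR date transformers such as `toString` are typically configured. Purely for comparison, a rough Python equivalent of that layout might look like this (assumption, not taken from the patch):

```python
# Sketch only: approximate Python strftime counterpart of "02 Jan 2006 15:04 MST".
from datetime import datetime, timezone

ts = datetime(2024, 2, 28, 19, 37, tzinfo=timezone.utc)
print(ts.strftime("%d %b %Y %H:%M %Z"))  # -> "28 Feb 2024 19:37 UTC"
```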
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Brand.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Brand.json
new file mode 100644
index 000000000000..c295430db1d9
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Brand.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetbrand",
+ "version": -1,
+ "modified": "2024-01-30T19:57:21.352753259Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset Brand",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetbrand",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Creation_Time.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Creation_Time.json
new file mode 100644
index 000000000000..c861865a2957
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Creation_Time.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetcreationtime",
+ "version": -1,
+ "modified": "2024-02-28T00:42:54.112345906Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset Creation Time",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetcreationtime",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Id.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Id.json
new file mode 100644
index 000000000000..aa9986c138a7
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Id.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetid",
+ "version": -1,
+ "modified": "2024-01-30T19:49:44.878013767Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset ID",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetid",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Is_Malicious.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Is_Malicious.json
new file mode 100644
index 000000000000..365b12e1c0ef
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Is_Malicious.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetismalicious",
+ "version": -1,
+ "modified": "2024-02-28T00:44:13.350507021Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset is Malicious",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetismalicious",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Security_Rating.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Security_Rating.json
new file mode 100644
index 000000000000..5ee3db0f708f
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Security_Rating.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetsecurityrating",
+ "version": -1,
+ "modified": "2024-01-30T19:52:54.955022061Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset Security Rating",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetsecurityrating",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_State.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_State.json
new file mode 100644
index 000000000000..bb61b992382d
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_State.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetstate",
+ "version": -1,
+ "fromVersion": "6.6.0",
+ "modified": "2024-01-30T20:48:20.201728561Z",
+ "name": "DarktraceASM Asset State",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetstate",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Updated_Time.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Updated_Time.json
new file mode 100644
index 000000000000..625b90935975
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_Asset_Updated_Time.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmassetupdatedtime",
+ "version": -1,
+ "modified": "2024-02-28T00:43:18.396209487Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Asset Updated Time",
+ "ownerOnly": false,
+ "cliName": "darktraceasmassetupdatedtime",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Description.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Description.json
new file mode 100644
index 000000000000..fea78d154111
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Description.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskdescription",
+ "version": -1,
+ "modified": "2024-01-30T19:47:04.470842128Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Description",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskdescription",
+ "type": "longText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_End_Time.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_End_Time.json
new file mode 100644
index 000000000000..5989f5a3a892
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_End_Time.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskendtime",
+ "version": -1,
+ "modified": "2024-02-28T19:37:31.951385914Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk End Time",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskendtime",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Evidence.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Evidence.json
new file mode 100644
index 000000000000..38cbc64649e4
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Evidence.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskevidence",
+ "version": -1,
+ "modified": "2024-01-30T19:47:23.87179797Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Evidence",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskevidence",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Id.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Id.json
new file mode 100644
index 000000000000..b4e0c537e752
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Id.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskid",
+ "version": -1,
+ "modified": "2024-01-26T22:28:09.656199895Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk ID",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskid",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": false,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Proposed_Action.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Proposed_Action.json
new file mode 100644
index 000000000000..ed8866a504b5
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Proposed_Action.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskproposedaction",
+ "version": -1,
+ "modified": "2024-01-30T19:48:21.30183839Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Proposed Action",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskproposedaction",
+ "type": "longText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Security_Rating.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Security_Rating.json
new file mode 100644
index 000000000000..ec1c97182ce0
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Security_Rating.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmrisksecurityrating",
+ "version": -1,
+ "modified": "2024-01-30T20:02:35.371217706Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Security Rating",
+ "ownerOnly": false,
+ "cliName": "darktraceasmrisksecurityrating",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Start_Time.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Start_Time.json
new file mode 100644
index 000000000000..028d256f9545
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Start_Time.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmriskstarttime",
+ "version": -1,
+ "modified": "2024-02-28T19:37:16.244341931Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Start Time",
+ "ownerOnly": false,
+ "cliName": "darktraceasmriskstarttime",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Title.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Title.json
new file mode 100644
index 000000000000..f809be7984e2
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Title.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmrisktitle",
+ "version": -1,
+ "modified": "2024-01-30T19:46:36.027968293Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Title",
+ "ownerOnly": false,
+ "cliName": "darktraceasmrisktitle",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Type.json b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Type.json
new file mode 100644
index 000000000000..552c4678833a
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentFields/incidentfield-DarktraceASM_ASM_RISK_Type.json
@@ -0,0 +1,31 @@
+{
+ "id": "incident_darktraceasmrisktype",
+ "version": -1,
+ "modified": "2024-01-26T22:29:02.714905738Z",
+ "fromVersion": "6.6.0",
+ "name": "DarktraceASM Risk Type",
+ "ownerOnly": false,
+ "cliName": "darktraceasmrisktype",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Darktrace ASM Risk"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/IncidentTypes/incidenttype-DarktraceASM_ASM_Risk.json b/Packs/DarktraceASM/IncidentTypes/incidenttype-DarktraceASM_ASM_Risk.json
new file mode 100644
index 000000000000..4fd090cbef1e
--- /dev/null
+++ b/Packs/DarktraceASM/IncidentTypes/incidenttype-DarktraceASM_ASM_Risk.json
@@ -0,0 +1,30 @@
+{
+ "id": "Darktrace ASM Risk",
+ "version": -1,
+ "fromVersion": "6.10.0",
+ "vcShouldIgnore": false,
+ "locked": false,
+ "name": "Darktrace ASM Risk",
+ "prevName": "Darktrace ASM Risk",
+ "color": "#D4E8F1",
+ "playbookId": "Darktrace ASM Basic Risk Handler",
+ "hours": 0,
+ "days": 0,
+ "weeks": 0,
+ "hoursR": 0,
+ "daysR": 0,
+ "weeksR": 0,
+ "system": false,
+ "readonly": false,
+ "default": false,
+ "autorun": false,
+ "disabled": false,
+ "reputationCalc": 0,
+ "onChangeRepAlg": 0,
+ "layout": "Darktrace ASM Risk Layout",
+ "detached": false,
+ "extractSettings": {
+ "mode": "Specific",
+ "fieldCliNameToExtractSettings": {}
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.py b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.py
new file mode 100644
index 000000000000..b82d5c3c0722
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.py
@@ -0,0 +1,1031 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+'''Imports'''
+
+import json
+import traceback
+from datetime import datetime
+from typing import Any
+import dateparser
+import urllib3
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+'''CONSTANTS'''
+ASM_URI = "/graph/v1.0/api"
+ASM_RISK_QUERY = '''
+ id
+ type
+ startedAt
+ endedAt
+ title
+ description
+ evidence
+ proposedAction
+ securityRating
+ mitigatedAt
+ asset {
+ id
+ state
+ brand
+ createdAt
+ updatedAt
+ securityrating
+ isMalicious
+ tags
+ }
+ comments {
+ edges {
+ node {
+ id
+ text
+ }
+ }
+ }
+ '''
+ASM_ASSET_QUERY = '''
+ id
+ state
+ brand
+ createdAt
+ updatedAt
+ securityrating
+ isMalicious
+ tags
+ comments {
+ id
+ text
+ }
+ discoverySources {
+ id
+ description
+ }
+ risks {
+ id
+ title
+ }
+ '''
+ASM_ASSET_QUERY_DICT = {
+ 'application': ASM_ASSET_QUERY + '''
+ protocol
+ uri
+ fqdns {
+ id
+ name
+ }
+ ipaddresses {
+ id
+ address
+ }
+ screenshot
+ technologies {
+ id
+ name
+ }
+ ''', 'fqdn': ASM_ASSET_QUERY + '''
+ name
+ dnsRecords
+ resolvesTo {
+ id
+ address
+ }
+ whois
+ registeredDomain {
+ id
+ name
+ }
+ ''', 'ipaddress': ASM_ASSET_QUERY + '''
+ lat
+ lon
+ geoCity
+ geoCountry
+ address
+ netblock {
+ id
+ netname
+ }
+ ''', 'netblock': ASM_ASSET_QUERY + '''
+ netname
+ ipAddresses {
+ id
+ address
+ }
+ '''
+}
+
+SEVERITY_MAP = {"Low": 1,
+ "Medium": 2,
+ "High": 3,
+ "Critical": 4
+ }
+
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
+MIN_SEVERITY_TO_FETCH = 1
+MAX_INCIDENTS_TO_FETCH = 50
+ALERT_TYPES = ['gdpr', 'informational', 'misconfiguration', 'reported', 'ssl', 'vulnerable software']
+PLEASE_CONTACT = "Please contact your Darktrace representative."
+
+DARKTRACE_API_ERRORS = {
+ 'SIGNATURE_ERROR': 'API Signature Error. You have invalid credentials in your config.',
+ 'DATE_ERROR': 'API Date Error. Check that the time on this machine matches that of the Darktrace instance.',
+ 'ENDPOINT_ERROR': f'Invalid Endpoint. - {PLEASE_CONTACT}',
+ 'PRIVILEGE_ERROR': 'User has insufficient permissions to access the API endpoint.',
+ 'UNDETERMINED_ERROR': f'Darktrace was unable to process your request - {PLEASE_CONTACT}',
+ 'FAILED_TO_PARSE': 'N/A'
+}
+
+"""*****CUSTOM EXCEPTIONS*****"""
+
+
+class InvalidAssetStateError(Exception):
+ def __init__(self, state: str):
+ super().__init__(f'{state} is not a valid state. Valid states include "Confirmed" and "Unconfirmed".')
+
+
+class InvalidAssetID(Exception):
+ def __init__(self, asset_id: Optional[str] = None):
+ super().__init__(f"ASM Asset ID \"{asset_id}\" is not a valid ID.")
+
+
+class AssetNotFound(Exception):
+ def __init__(self, asset_type: str, id: Optional[str] = None, message: Optional[str] = None):
+ super().__init__(f"ASM {asset_type} Asset with id \"{id}\" not found. {message}")
+
+
+class MitigationError(Exception):
+ def __init__(self, risk_id: str, message: Optional[str] = ""):
+ super().__init__(f"Could not mitigate ASM Risk \"{risk_id}\" due to the following:\n{message}")
+
+
+class TagError(Exception):
+ def __init__(self, action: str, name: str, id: Optional[str] = "", message: Optional[str] = ""):
+ if action == 'create':
+ super().__init__(f"Could not create ASM Tag \"{name}\" due to the following:\n{message}")
+ if action == 'assign':
+ super().__init__(f"Could not assign ASM Tag \"{name}\" to ASM object \"{id}\" due to the following:\n{message}")
+ if action == 'unassign':
+ super().__init__(f"Could not unassign ASM Tag \"{name}\" from ASM object \"{id}\" due to the following:\n{message}")
+
+
+class CommentError(Exception):
+ def __init__(self, action: str, id: Optional[str] = "", message: Optional[str] = ""):
+ if action == 'post':
+ super().__init__(f"Could not post comment to ASM object \"{id}\" due to the following:\n{message}")
+ elif action == 'edit':
+ super().__init__(f"Could not edit comment \"{id}\" due to the following:\n{message}")
+ elif action == 'delete':
+ super().__init__(f"Could not delete comment \"{id}\" due to the following:\n{message}")
+
+
+"""*****CLIENT CLASS*****
+Wraps all the code that interacts with the Darktrace API."""
+
+
+class Client(BaseClient):
+ """Client class to interact with the Darktrace API
+ This Client implements API calls, and does not contain any Demisto logic.
+ Should only do requests and return data.
+ It inherits from BaseClient defined in CommonServer Python.
+ Most calls use _http_request() that handles proxy, SSL verification, etc.
+ """
+
+ def asm_post(self, query_uri: str, json: Optional[dict] = None):
+ headers = self._headers
+ return self._asm_api_call(query_uri, method='POST', json=json, headers=headers)
+
+ def _asm_api_call(
+ self,
+ query_uri: str,
+ method: str,
+ params: Optional[dict] = None,
+ data: Optional[dict] = None,
+ json: Optional[dict] = None,
+ headers: Optional[dict[str, str]] = None,
+ ):
+ """Handles Darktrace API calls"""
+ try:
+ res = self._http_request(
+ method,
+ url_suffix=query_uri,
+ params=params,
+ data=data,
+ json_data=json,
+ resp_type='response',
+ headers=headers,
+ error_handler=self.error_handler,
+ )
+
+ if res.status_code not in [200, 204]:
+ raise Exception('Your request failed with the following error: ' + str(res.content)
+ + '. Response Status code: ' + str(res.status_code))
+ except Exception as e:
+ raise Exception(e)
+ try:
+ return res.json()
+ except Exception as e:
+ raise ValueError(
+ f'Failed to process the API response - {str(e)}'
+ )
+
+ def error_handler(self, res: requests.Response):
+ """Handles authentication errors"""
+ if res.status_code == 400:
+ raise_message = 'Invalid field names in query:\n'
+ for error in res.json()['errors']:
+ error_message = error['message']
+ error_location = f"Line {error['locations'][0]['line']}, column {error['locations'][0]['column']}."
+ raise_message += f"{error_message} {error_location}\n"
+ raise ValueError(raise_message)
+ elif res.status_code == 401:
+ raise PermissionError(f"Authentication issue: {res.json()['detail']}")
+ elif res.status_code == 404:
+ raise ValueError("Error 404. Page not found." + PLEASE_CONTACT)
+ elif res.status_code == 500:
+ raise ValueError("Error 500. Possibly malformed GraphQL query.")
+
+ def get_asm_risk(self, risk_id: str):
+ """Function to query for specific Risk using a Risk ID.
+ :type risk_id: ``str``
+ :param risk_id: Unique ID of Risk to query for.
+ :return: dict containing Risk info.
+ :rtype: ``Dict[str, Any]``
+ """
+ query = f'''query risk {{
+ risk(id:"{risk_id}") {{
+ id
+ type
+ startedAt
+ endedAt
+ title
+ description
+ evidence
+ proposedAction
+ securityRating
+ mitigatedAt
+ asset {{
+ id
+ state
+ brand
+ createdAt
+ updatedAt
+ securityrating
+ isMalicious
+ tags
+ }}
+ comments {{
+ edges {{
+ node {{
+ id
+ text
+ }}
+ }}
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": query}
+ response = self.asm_post(ASM_URI, payload)
+ return response["data"]["risk"]
+
+ def mitigate_asm_risk(self, risk_id: str):
+ """Function to manually mitigate a Risk given a specific Risk ID.
+ :type risk_id: ``str``
+ :param risk_id: Unique ID of Risk to mitigate.
+ :return: dict containing status of mitigation.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ closeRisk(id:"{risk_id}") {{
+ success
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response["data"]["closeRisk"]:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise MitigationError(risk_id, errors_string)
+ return response["data"]["closeRisk"]
+
+ def get_asm_asset(self, asset_id: str):
+ """Function to query for a specific Asset using the Asset's ID.
+ :type asset_id: ``str``
+ :param asset_id: Unique ID of Asset to query for.
+ :return: dict containing Asset info.
+ :rtype: ``Dict[str, Any]``
+ """
+ if asset_id[0] == 'Q':
+ asset_type = 'application'
+ elif asset_id[0] == 'R':
+ asset_type = 'fqdn'
+ elif asset_id[0] == 'T':
+ asset_type = 'netblock'
+ elif asset_id[0] == 'S':
+ asset_type = 'ipaddress'
+ else:
+ raise InvalidAssetID(asset_id)
+ query_type = ASM_ASSET_QUERY_DICT[asset_type]
+ query = f'''query {asset_type} {{ {asset_type}(id:"{asset_id}") {{
+ {query_type}
+ }}
+ }}'''
+ payload = {"query": query}
+ response = self.asm_post(ASM_URI, payload)
+ if not response["data"][asset_type]:
+ message = response["errors"][0]["message"]
+ raise AssetNotFound(asset_type, asset_id, message)
+ asset = response["data"][asset_type]
+ asset["type"] = asset_type
+ return asset
+
+ def post_asm_comment(self, id: str, comment: str):
+ """Function to post a comment to a Risk or Asset given a specific ID and comment string.
+ :type id: ``str``
+ :param id: Unique ID of Risk or Asset to comment on.
+ :type comment: ``str``
+ :param comment: Text of comment.
+ :return: dict containing status of comment.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ placeComment(text:"{comment}", objectId:"{id}") {{
+ success
+ comment {{
+ id
+ text
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['placeComment']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise CommentError('post', id, errors_string)
+ return response["data"]["placeComment"]
+
+ def edit_asm_comment(self, comment_id: str, comment: str):
+ """Function to edit an existing comment given the comment's ID and a new comment string.
+ :type comment_id: ``str``
+ :param comment_id: Unique ID of Comment to edit.
+ :type comment: ``str``
+ :param comment: New text to replace old Comment.
+ :return: dict containing status of comment edit.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ editComment(text:"{comment}", id:"{comment_id}") {{
+ success
+ comment {{
+ id
+ text
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['editComment']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise CommentError('edit', comment_id, errors_string)
+ return response["data"]["editComment"]
+
+ def delete_asm_comment(self, comment_id: str):
+ """Function to delete an existing comment given the comment's ID.
+ :type comment_id: ``str``
+ :param comment_id: Unique ID of Comment to delete.
+ :return: dict containing status of comment deletion.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ deleteComment(id:"{comment_id}") {{
+ success
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['deleteComment']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise CommentError('delete', comment_id, errors_string)
+ return response["data"]["deleteComment"]
+
+ def create_asm_tag(self, tag_name: str):
+ """Function to create a new Tag.
+ :type tag_name: ``str``
+ :param tag_name: Label of new Tag.
+ :return: dict including the status and info of the new Tag.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ createTag(name:"{tag_name}") {{
+ success
+ tag {{
+ id
+ name
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['createTag']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise TagError('create', name=tag_name, message=errors_string)
+ return response['data']['createTag']
+
+ def assign_asm_tag(self, tag_name: str, asset_id: str):
+ """Function to assign an existing tag to an Asset.
+ :type tag_name: ``str``
+ :param tag_name: Label of Tag to assign.
+ :type asset_id: ``str``
+ :param asset_id: Unique ID of Asset to apply Tag to.
+ :return: dict including the status of assignment and info on Asset.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ assignTag(id:"{asset_id}", tagName:"{tag_name}") {{
+ success
+ asset {{
+ id
+ tags
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['assignTag']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise TagError('assign', id=asset_id, name=tag_name, message=errors_string)
+ return response['data']['assignTag']
+
+ def unassign_asm_tag(self, tag_name: str, asset_id: str):
+ """Function to unassign an existing tag from an Asset.
+ :type tag_name: ``str``
+ :param tag_name: Label of Tag to unassign.
+ :type asset_id: ``str``
+ :param asset_id: Unique ID of Asset to remove Tag from.
+ :return: dict including the status of unassignment and info on Asset.
+ :rtype: ``Dict[str, Any]``
+ """
+ mutation = f'''mutation MyMutation {{
+ unassignTag(id:"{asset_id}", tagName:"{tag_name}") {{
+ success
+ asset {{
+ id
+ tags
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": mutation}
+ response = self.asm_post(ASM_URI, payload)
+ if not response['data']['unassignTag']:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ raise TagError('unassign', id=asset_id, name=tag_name, message=errors_string)
+ return response['data']['unassignTag']
+
+ def get_asm_risks(self, start_time) -> List[Dict[str, Any]]:
+ """Function to pull all Risks after a given start time.
+ :type start_time: ``datetime``
+ :param start_time: Date to start pulling Risks from.
+ :return: list of Risk dicts.
+ :rtype: ``List[Dict[str, Any]]``
+ """
+ start_string = start_time.strftime(DATE_FORMAT)
+ query = f'''query allRisks {{
+ allRisks(startedAt:"{start_string}", orderBy:"startedAt") {{
+ edges {{
+ node {{
+ {ASM_RISK_QUERY}
+ }}
+ }}
+ }}
+ }}
+ '''
+ payload = {"query": query}
+ response = self.asm_post(ASM_URI, payload)
+ return response["data"]["allRisks"]["edges"]
+
+
+"""*****HELPER FUNCTIONS****"""
+
+
+def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> float:
+ """Converts an XSOAR argument to a timestamp (seconds from epoch)
+ This function is used to quickly validate an argument provided to XSOAR
+ via ``demisto.args()`` into a ``float`` containing a timestamp (seconds
+ since epoch). It will throw a ValueError if the input is invalid.
+ If the input is None, a ValueError is raised regardless of whether
+ ``required`` is ``True`` or ``False`` (only the error message differs).
+ :type arg: ``Any``
+ :param arg: argument to convert
+ :type arg_name: ``str``
+ :param arg_name: argument name
+ :type required: ``bool``
+ :param required:
+ throws exception if ``True`` and argument provided is None
+ :return:
+ returns a ``float`` containing a timestamp (seconds from epoch) if conversion works
+ otherwise throws an Exception
+ :rtype: ``float``
+ """
+
+ if arg is None:
+ if required is True:
+ raise ValueError(f'Missing \'{arg_name}\'')
+ raise ValueError(f"'{arg_name}' cannot be None.")
+
+ if isinstance(arg, str) and arg.isdigit():
+ # timestamp is a str containing only digits - convert it to a float
+ return float(arg)
+ if isinstance(arg, str):
+ # we use dateparser to handle strings either in ISO8601 format, or
+ # relative time stamps.
+ # For example: format 2019-10-23T00:00:00 or "3 days", etc
+ date = dateparser.parse(arg, settings={'TIMEZONE': 'UTC'})
+ if date is None:
+ # if date is None it means dateparser failed to parse it
+ raise ValueError(f'Invalid date: {arg_name}')
+
+ return float(date.timestamp())
+ if isinstance(arg, int):
+ # Convert to float if the input is an int
+ return float(arg)
+ raise ValueError(f'Invalid date: \'{arg_name}\'')
+
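+# Illustrative usage of arg_to_timestamp (not called directly here; the values are hypothetical):
+#   arg_to_timestamp('1618880000', 'since')          -> the digit string cast to a number
+#   arg_to_timestamp('3 days', 'first_fetch')        -> epoch seconds for three days ago (via dateparser)
+#   arg_to_timestamp(None, 'since', required=True)   -> raises ValueError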
+
+def check_required_fields(args, *fields):
+ """Checks that required fields are found, raises a value error otherwise.
+ :type args: ``Dict[str, Any]``
+ :param args: dict of arguments to search for given fields within.
+ :type fields: ``str``
+ :param fields: Required fields to check for.
+ :raise: ValueError if any fields not in args.
+ """
+ for field in fields:
+ if field not in args:
+ raise ValueError(f'Argument error: could not find {field} in {args}')
+
+
+def format_JSON_for_risk(risk: dict[str, Any]) -> dict[str, Any]:
+ """Formats JSON for get_risk command, specifically reformat comments from API response.
+ :type risk: ``Dict[str, Any]``
+ :param risk: JSON risk as returned by API for fetch incident.
+ :return: Reformatted JSON risk.
+ :rtype: ``Dict[str, Any]``
+ """
+ new_json: Dict[str, Any] = {}
+ for key in risk:
+ if key == 'comments':
+ if risk[key] is None:
+ new_json[key] = {}
+ else:
+ comments = {comment['node']['id']: comment['node']['text'] for comment in risk[key]['edges']}
+ new_json[key] = comments
+ else:
+ new_json[key] = risk[key]
+ return new_json
+
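+# Illustrative reshaping done by format_JSON_for_risk (IDs and text below are made up):
+#   in:  {'comments': {'edges': [{'node': {'id': 'Q29t...', 'text': 'triaged'}}]}, 'title': '...'}
+#   out: {'comments': {'Q29t...': 'triaged'}, 'title': '...'}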
+
+def format_JSON_for_asset(asset: dict[str, Any]) -> dict[str, Any]:
+ """Formats JSON for get_asm command, specifically lists of dicts.
+ :type asset: ``Dict[str, Any]``
+ :param asset: JSON asset as returned by API.
+ :return: Reformatted JSON asset.
+ :rtype: ``Dict[str, Any]``
+ """
+ new_json: Dict[str, Any] = {}
+ for key in asset:
+ if key == 'comments':
+ if asset[key] is None:
+ new_json[key] = {}
+ else:
+ comments = {comment['id']: comment['text'] for comment in asset[key]}
+ new_json[key] = comments
+ elif key == 'discoverySources':
+ if asset[key] is None:
+ new_json[key] = {}
+ else:
+ sources = {source["id"]: source["description"] for source in asset[key]}
+ new_json[key] = sources
+ elif key == 'risks':
+ if asset[key] is None:
+ new_json[key] = {}
+ else:
+ risks = {risk["id"]: risk["title"] for risk in asset[key]}
+ new_json[key] = risks
+ elif key in ['fqdns', 'technologies', 'registeredDomain']:
+ if asset[key] is None:
+ new_json[key] = {}
+ else:
+ values = {value["id"]: value["name"] for value in asset[key]}
+ new_json[key] = values
+ elif key in ['ipaddresses', 'ipAddresses', 'resolvesTo']:
+ if asset[key] is None:
+ new_json[key] = {}
+ else:
+ addresses = {ip["id"]: ip["address"] for ip in asset[key]}
+ new_json[key] = addresses
+ else:
+ new_json[key] = asset[key]
+ return new_json
+
+
+def _compute_xsoar_severity(security_rating: str) -> int:
+ """Translates Darktrace ASM security rating into XSOAR Severity.
+ :type security_rating: ``str``
+ :param security_rating: ASM security rating to convert.
+ :return: Integer equivalent of XSOAR severity scores.
+ :rtype: ``int``
+ """
+ if security_rating in ['c', 'd']:
+ return 2
+ if security_rating in ['e']:
+ return 3
+ if security_rating in ['f']:
+ return 4
+ return 1
+
+
+"""*****COMMAND FUNCTIONS****"""
+
+
+def test_module(client: Client, first_fetch_time: float) -> str:
+ """
+ Returning 'ok' indicates that the integration works like it is supposed to. Connection to the service is successful.
+
+ :type client: ``Client``
+ :param client:
+ Darktrace Client.
+ :type first_fetch_time: ``float``
+ :param first_fetch_time:
+ First fetch time.
+ :return:
+ A message to indicate the integration works as it is supposed to.
+ :rtype: ``str``
+ """
+ try:
+ first_fetch_datetime = datetime.fromtimestamp(first_fetch_time)
+ client.get_asm_risks(start_time=first_fetch_datetime)
+
+ except DemistoException as e:
+ if 'Forbidden' in str(e):
+ return 'Authorization Error: make sure API Key is correctly set'
+ else:
+ raise e
+ return 'ok'
+
+
+def fetch_incidents(client: Client,
+ last_run: dict[str, str],
+ first_fetch_time: float,
+ max_alerts: int,
+ min_severity: int,
+ alert_types: list[str]) -> tuple[dict[str, Any], list[dict]]:
+ """Function used to pull incidents into XSOAR every few minutes. """
+ # Get the last fetch time, if exists
+ # last_run is a dict with a single key, called last_fetch
+ last_fetch = last_run.get('last_fetch', None)
+ first_fetch_datetime = datetime.fromtimestamp(first_fetch_time)
+ # Handle first fetch time
+ if last_fetch is None:
+ last_fetch_datetime = first_fetch_datetime
+ else:
+ last_fetch_datetime = datetime.strptime(last_fetch, DATE_FORMAT)
+
+ latest_created_time = last_fetch_datetime
+
+ # Each incident is a dict with a string as a key
+ incidents: List[Dict[str, Any]] = []
+
+ asm_risks: List[Dict[str, Any]] = client.get_asm_risks(start_time=last_fetch_datetime)
+
+ for alert in asm_risks:
+ # Convert startedAt time to datetime object and add to alert
+ # grabbing first 26 characters from start time since that provides ms resolution
+ incident_created_time = datetime.strptime(alert['node']['startedAt'][:26], DATE_FORMAT)
+
+ # to prevent duplicates, we are only adding incidents with creation_time > last fetched incident
+ if last_fetch_datetime and incident_created_time <= last_fetch_datetime:
+ demisto.debug(
+ f'''Incident created time: {incident_created_time} was part of a previous poll cycle.
+ Last fetch time: {last_fetch_datetime}''')
+ continue
+
+ brand = alert.get('node', {}).get('asset', {}).get('brand')
+ title = alert['node']['title']
+ incident_name = f'Darktrace ASM | Risk Title: {title} | Brand: {brand}'
+
+ xsoar_severity = _compute_xsoar_severity(alert['node']['securityRating'])
+
+ # Skip incidents with a lower severity score than the desired minimum
+ if xsoar_severity < min_severity:
+ demisto.debug(f"Incident severity: {xsoar_severity} is lower than chosen minimum threshold: {min_severity}")
+ continue
+
+ incident_type = alert['node']['type'].lower()
+
+ # Skip incidents with a type not included in the chosen alert types to ingest
+ if incident_type not in alert_types:
+ demisto.debug(f"Alert type {incident_type} is not part of chosen alerts: {alert_types}")
+ continue
+
+ incident = {
+ 'name': incident_name,
+ 'occurred': alert['node']['startedAt'],
+ 'rawJSON': json.dumps(alert['node']),
+ 'severity': xsoar_severity
+ }
+
+ incidents.append(incident)
+
+ # Update last run and add incident if the incident is newer than last fetch
+ if incident_created_time > latest_created_time:
+ latest_created_time = incident_created_time
+
+ if len(incidents) >= max_alerts:
+ break
+
+ # Save the next_run as a dict with the last_fetch key to be stored
+ next_run = {'last_fetch': latest_created_time.strftime(DATE_FORMAT)}
+ return next_run, incidents
+
+
+def get_asm_risk_command(client: Client, args: dict[str, Any]) -> CommandResults:
+ check_required_fields(args, 'risk_id')
+ risk_id = str(args.get('risk_id', None))
+
+ response = client.get_asm_risk(risk_id)
+
+ formatted_response = format_JSON_for_risk(response)
+
+ readable_output = tableToMarkdown('Darktrace ASM Risk', formatted_response)
+
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix='Darktrace.risk',
+ outputs_key_field='id',
+ outputs=response
+ )
+
+
+def mitigate_asm_risk_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'risk_id')
+ risk_id = str(args.get('risk_id', None))
+
+ client.mitigate_asm_risk(risk_id)
+
+ readable_output = f'Successfully mitigated risk. Risk ID: {risk_id}'
+ return readable_output
+
+
+def get_asm_asset_command(client: Client, args: dict[str, Any]) -> CommandResults:
+ check_required_fields(args, 'asset_id')
+ asset_id = str(args.get('asset_id', None))
+
+ response = client.get_asm_asset(asset_id)
+
+ formatted_response = format_JSON_for_asset(response)
+
+ readable_output = tableToMarkdown('Darktrace ASM Asset', formatted_response)
+
+ return CommandResults(
+ readable_output=readable_output,
+ outputs_prefix='Darktrace.asset',
+ outputs_key_field='id',
+ outputs=response
+ )
+
+
+def post_asm_comment_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'id', 'comment')
+ id = str(args.get('id', None))
+ comment = str(args.get('comment', None))
+
+ response = client.post_asm_comment(id, comment)
+
+ if response.get("success", False):
+ comment_dict = response.get("comment", {})
+ readable_output = f'Comment successful. Comment ID: {comment_dict.get("id", "Failed to get comment ID.")}'
+ else:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ readable_output = f'Comment failed due to following errors:\n{errors_string}'
+ return readable_output
+
+
+def edit_asm_comment_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'comment_id', 'comment')
+ comment_id = str(args.get('comment_id', None))
+ comment = str(args.get('comment', None))
+
+ response = client.edit_asm_comment(comment_id, comment)
+
+ if response.get("success", False):
+ comment_dict = response.get("comment", {})
+ readable_output = f'Comment successfully edited. Comment ID: {comment_dict.get("id", "Failed to get comment ID.")}'
+ else:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ readable_output = f'Failed to edit comment due to following errors:\n{errors_string}'
+ return readable_output
+
+
+def delete_asm_comment_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'comment_id')
+ comment_id = str(args.get('comment_id', None))
+
+ response = client.delete_asm_comment(comment_id)
+
+ if response.get("success", False):
+ readable_output = f'Comment successfully deleted. Comment ID: {comment_id}'
+ else:
+ errors = [error.get("message", '') for error in response.get("errors", {})]
+ errors_string = '\n'.join(errors)
+ readable_output = f'Comment deletion failed due to following errors:\n{errors_string}'
+ return readable_output
+
+
+def create_asm_tag_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'tag_name')
+ tag_name = str(args.get('tag_name', None))
+
+ client.create_asm_tag(tag_name)
+
+ # TODO: add error handling in line with XSOAR best practices for API responses
+
+ readable_output = f'Successfully created tag {tag_name}.'
+ return readable_output
+
+
+def assign_asm_tag_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'tag_name', 'asset_id')
+ tag_name = str(args.get('tag_name', None))
+ asset_id = str(args.get('asset_id', None))
+
+ response = client.assign_asm_tag(tag_name, asset_id)
+
+ asset = response.get("asset")
+ tags = asset.get("tags")
+ tags_string = "\n".join(tags)
+
+ readable_output = f'Successfully assigned tag {tag_name} to asset {asset_id}. Tags applied to asset:\n{tags_string}'
+ return readable_output
+
+
+def unassign_asm_tag_command(client: Client, args: dict[str, Any]) -> str:
+ check_required_fields(args, 'tag_name', 'asset_id')
+ tag_name = str(args.get('tag_name', None))
+ asset_id = str(args.get('asset_id', None))
+
+ response = client.unassign_asm_tag(tag_name, asset_id)
+
+ asset = response.get("asset")
+ tags = asset.get("tags")
+ tags_string = "\n".join(tags)
+
+ readable_output = f'Successfully unassigned tag {tag_name} from asset {asset_id}. Tags applied to asset:\n{tags_string}'
+ return readable_output
+
+
+"""*****MAIN FUNCTIONS****
+Takes care of reading the integration parameters via
+the ``demisto.params()`` function, initializes the Client class and checks the
+different options provided to ``demisto.commands()``, to invoke the correct
+command function passing to it ``demisto.args()`` and returning the data to
+``return_results()``. If implemented, ``main()`` also invokes the function
+``fetch_incidents()``with the right parameters and passes the outputs to the
+``demisto.incidents()`` function. ``main()`` also catches exceptions and
+returns an error message via ``return_error()``.
+"""
+
+
+def main() -> None: # pragma: no cover
+ """main function, parses params and runs command functions
+ :return:
+ :rtype:
+ """
+
+ # Collect Darktrace URL
+ base_url = demisto.params().get('url')
+
+ # API key
+ api_token = (demisto.params().get('apikey', ''))
+ headers = {"Authorization": f"Token {api_token}"}
+
+ # Client class inherits from BaseClient, so SSL verification is
+ # handled out of the box by it. Pass ``verify_certificate`` to
+ # the Client constructor.
+ verify_certificate = not demisto.params().get('insecure', False)
+
+ # How far back in time to retrieve incidents on the first fetch
+ first_fetch_time = arg_to_timestamp(
+ arg=demisto.params().get('first_fetch', '1 day'),
+ arg_name='First fetch time',
+ required=True
+ )
+
+ # Client class inherits from BaseClient, so system proxy is handled
+ # out of the box by it, just pass ``proxy`` to the Client constructor
+ proxy = demisto.params().get('proxy', False)
+
+ # ``demisto.debug()``, ``demisto.info()``, prints information in the XSOAR server log.
+ demisto.debug(f'Command being called is {demisto.command()}')
+
+ try:
+ client = Client(
+ base_url=base_url,
+ verify=verify_certificate,
+ proxy=proxy,
+ headers=headers
+ )
+
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration Test button.
+ return_results(test_module(client, first_fetch_time))
+
+ elif demisto.command() == 'fetch-incidents':
+ # Set and define the fetch incidents command to run after activated via integration settings.
+
+ # Convert the argument to an int using helper map or set to MIN_SEVERITY_TO_FETCH
+ min_severity = SEVERITY_MAP.get(demisto.params().get('min_severity', None), None)
+ if not min_severity or min_severity < MIN_SEVERITY_TO_FETCH:
+ min_severity = MIN_SEVERITY_TO_FETCH
+
+ # Get the list of alert types to ingest and make sure each item is all lower case or set to ALERT_TYPES
+ alert_types = demisto.params().get('alert_type', None)
+ if not alert_types:
+ alert_types = ALERT_TYPES
+ else:
+ alert_types = [item.lower() for item in alert_types]
+
+ # Convert the argument to an int using helper function or set to MAX_INCIDENTS_TO_FETCH
+ max_alerts = arg_to_number(
+ arg=demisto.params().get('max_fetch', MAX_INCIDENTS_TO_FETCH),
+ arg_name='max_fetch',
+ required=False
+ )
+ if not max_alerts or max_alerts > MAX_INCIDENTS_TO_FETCH:
+ max_alerts = MAX_INCIDENTS_TO_FETCH
+
+ next_run, incidents = fetch_incidents(
+ client=client,
+ max_alerts=max_alerts,
+ min_severity=min_severity,
+ alert_types=alert_types,
+ last_run=demisto.getLastRun(), # getLastRun() gets the last run dict
+ first_fetch_time=first_fetch_time
+ )
+
+ # Use the variables defined above as the outputs of fetch_incidents to set up the next call and create incidents:
+ # saves next_run for the time fetch-incidents is invoked
+ demisto.setLastRun(next_run)
+ # fetch-incidents calls ``demisto.incidents()`` to provide the list
+ # of incidents to create
+ demisto.incidents(incidents)
+
+ elif demisto.command() == 'darktrace-asm-get-risk':
+ return_results(get_asm_risk_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-mitigate-risk':
+ return_results(mitigate_asm_risk_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-post-comment':
+ return_results(post_asm_comment_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-edit-comment':
+ return_results(edit_asm_comment_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-delete-comment':
+ return_results(delete_asm_comment_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-get-asset':
+ return_results(get_asm_asset_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-create-tag':
+ return_results(create_asm_tag_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-assign-tag':
+ return_results(assign_asm_tag_command(client, demisto.args()))
+
+ elif demisto.command() == 'darktrace-asm-unassign-tag':
+ return_results(unassign_asm_tag_command(client, demisto.args()))
+
+ except Exception as e:
+ demisto.error(traceback.format_exc()) # print the traceback
+ return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
+
+
+"""*****ENTRY POINT****"""
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.yml b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.yml
new file mode 100644
index 000000000000..043a5e2af083
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk.yml
@@ -0,0 +1,301 @@
+commonfields:
+ id: Darktrace ASM
+ version: -1
+name: Darktrace ASM
+display: Darktrace ASM
+category: Network Security
+description: "This pack includes configurations to combine the world-class threat
+ detection of Darktrace with the synchrony and automation abilities of XSOAR, allowing
+ security teams to monitor their attack surface for risks, high-impact vulnerabilities
+ and external threats.\nTo configure the connection to your Darktrace Attack Surface
+ Management instance, you will provide:\n- Server URL of Darktrace ASM instance (ex:
+ darktrace.yourcompany.com) and any necessary proxy information\n- The API Token from the Darktrace ASM instance."
+detaileddescription: "### Partner Contributed Integration\n#### Integration Author:
+ Darktrace\nSupport and maintenance for this integration are provided by the author. Please create a
+ Darktrace Customer Portal support ticket for any questions or concerns [here](https://customerportal.darktrace.com).
+ \n***\n## Darktrace ASM \nTo configure the integration with your
+ Darktrace PREVENT/ASM instance, you will need:\n- URL of your Darktrace PREVENT/ASM instance
+ (ex: example-asm.darktrace.com)\n- Darktrace PREVENT/ASM API token which can be retrieved by making
+ a [Darktrace Customer Portal support ticket](https://customerportal.darktrace.com). \n\n"
+fromversion: 6.6.0
+defaultmapperin: Darktrace ASM Risk Mapper
+configuration:
+- display: Server URL (e.g. https://example-asm.darktrace.com)
+ name: url
+ type: 0
+ required: true
+- display: Fetch incidents
+ name: isFetch
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ type: 13
+ required: false
+- display: Maximum number of incidents per fetch
+ name: max_fetch
+ defaultvalue: "100"
+ type: 0
+ required: false
+- display: API Key
+ name: apikey
+ type: 4
+ required: true
+- display: Fetch alerts with type
+ name: alert_type
+ defaultvalue: GDPR, Informational, Misconfiguration, Reported, SSL, Vulnerable Software
+ type: 16
+ required: false
+ options:
+ - GDPR
+ - Informational
+ - Misconfiguration
+ - Reported
+ - SSL
+ - Vulnerable Software
+ additionalinfo: Comma-separated list of types of alerts to fetch. Types might change
+ over time. Some examples are 'Misconfiguration' and 'Vulnerable Software'.
+- display: Minimum severity of alerts to fetch
+ name: min_severity
+ defaultvalue: Medium
+ type: 15
+ required: false
+ options:
+ - Low
+ - Medium
+ - High
+ - Critical
+- display: First fetch time
+ name: first_fetch
+ defaultvalue: 3 days
+ type: 0
+ required: false
+- display: Incidents Fetch Interval
+ name: incidentFetchInterval
+ defaultvalue: "1"
+ type: 19
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: darktrace-asm-get-risk
+ arguments:
+ - name: risk_id
+ required: true
+ description: Unique ID of Risk.
+ outputs:
+ - contextPath: Darktrace.risk.id
+ description: Risk ID.
+ type: string
+ - contextPath: Darktrace.risk.type
+ description: Risk type.
+ type: string
+ - contextPath: Darktrace.risk.startedAt
+ description: Risk creation time.
+ type: date
+ - contextPath: Darktrace.risk.endedAt
+ description: Risk end time, no user mitigation.
+ type: date
+ - contextPath: Darktrace.risk.title
+ description: Risk title.
+ type: string
+ - contextPath: Darktrace.risk.description
+ description: Risk description.
+ type: string
+ - contextPath: Darktrace.risk.evidence
+ description: Evidence of risk.
+ type: string
+ - contextPath: Darktrace.risk.proposedAction
+ description: Proposed action to mitigate risk.
+ type: string
+ - contextPath: Darktrace.risk.securityRating
+ description: Security rating of the risk, A to F.
+ type: string
+ - contextPath: Darktrace.risk.mitigatedAt
+ description: Risk user mitigation time.
+ type: date
+ - contextPath: Darktrace.risk.asset.id
+ description: Associated asset ID.
+ type: string
+ - contextPath: Darktrace.risk.asset.state
+ description: State of asset.
+ type: string
+ - contextPath: Darktrace.risk.asset.brand
+ description: Brand asset is associated with.
+ type: string
+ - contextPath: Darktrace.risk.asset.createdAt
+ description: Asset creation time.
+ type: date
+ - contextPath: Darktrace.risk.asset.updatedAt
+ description: Time of asset's last update.
+ type: date
+ - contextPath: Darktrace.risk.asset.securityrating
+ description: Security rating of the asset, A to F.
+ type: string
+ - contextPath: Darktrace.risk.asset.isMalicious
+ description: Whether or not the asset is malicious.
+ type: boolean
+ - contextPath: Darktrace.risk.asset.tags
+ description: Tags applied to the asset.
+ - contextPath: Darktrace.risk.comments
+ description: Comments posted to the risk.
+ - contextPath: Darktrace.risk
+ description: Risk object dictionary.
+ - contextPath: Darktrace.risk.asset
+ description: Asset object dictionary.
+ description: Get a specific Darktrace ASM Risk.
+ - name: darktrace-asm-get-asset
+ arguments:
+ - name: asset_id
+ required: true
+ description: Unique ID of Asset.
+ outputs:
+ - contextPath: Darktrace.asset
+ description: Asset object dictionary.
+ - contextPath: Darktrace.asset.id
+ description: Asset ID.
+ type: string
+ - contextPath: Darktrace.asset.state
+ description: State of asset.
+ type: string
+ - contextPath: Darktrace.asset.brand
+ description: Brand asset is associated with.
+ type: string
+ - contextPath: Darktrace.asset.createdAt
+ description: Asset creation time.
+ - contextPath: Darktrace.asset.updatedAt
+ description: Time of asset's last update.
+ - contextPath: Darktrace.asset.securityrating
+ description: Security rating of the asset, A to F.
+ - contextPath: Darktrace.asset.isMalicious
+ description: Whether or not the asset is malicious.
+ - contextPath: Darktrace.asset.tags
+ description: Tags applied to the asset.
+ - contextPath: Darktrace.asset.type
+ description: Type of Asset.
+ - contextPath: Darktrace.asset.comments
+ description: Comments posted to the risk.
+ - contextPath: Darktrace.asset.discoverySources
+ description: Sources used in discovery of asset.
+ - contextPath: Darktrace.asset.risks
+ description: Risks associated with asset.
+ description: Get a specific Darktrace ASM Asset.
+ - name: darktrace-asm-post-comment
+ arguments:
+ - name: id
+ required: true
+ description: Can be a Risk ID or an Asset ID.
+ - name: comment
+ required: true
+ description: Text of the comment to post.
+ outputs:
+ - contextPath: Darktrace.comment.success
+ description: Whether or not comment was posted.
+ type: boolean
+ - contextPath: Darktrace.comment.comment.id
+ description: Comment ID.
+ type: string
+ - contextPath: Darktrace.comment.comment.text
+ description: Comment text.
+ type: string
+ description: Post a comment to a Darktrace ASM risk or asset within the Darktrace
+ UI.
+ - name: darktrace-asm-edit-comment
+ arguments:
+ - name: comment_id
+ required: true
+ description: Unique ID of Comment.
+ - name: comment
+ required: true
+ description: Enter comment here.
+ outputs:
+ - contextPath: Darktrace.comment.success
+ description: Whether or not comment was edited.
+ type: boolean
+ - contextPath: Darktrace.comment.comment.id
+ description: Comment ID.
+ type: string
+ - contextPath: Darktrace.comment.comment.text
+ description: Comment text.
+ type: string
+ description: Edit a comment within the Darktrace UI.
+ - name: darktrace-asm-delete-comment
+ arguments:
+ - name: comment_id
+ required: true
+ description: Unique ID of Comment.
+ outputs:
+ - contextPath: Darktrace.comment.success
+ description: Whether or not comment was deleted.
+ description: Delete a comment within the Darktrace UI.
+ - name: darktrace-asm-mitigate-risk
+ arguments:
+ - name: risk_id
+ required: true
+ description: Unique ID of Risk.
+ outputs:
+ - contextPath: Darktrace.risk.success
+ description: Whether or not risk was mitigated.
+ type: boolean
+ description: Mitigate Darktrace ASM Risk within the Darktrace UI.
+ execution: true
+ - name: darktrace-asm-create-tag
+ arguments:
+ - name: tag_name
+ required: true
+ description: Name of Tag being created.
+ outputs:
+ - contextPath: Darktrace.tag.success
+ description: Whether or not the tag was created.
+ type: boolean
+ - contextPath: Darktrace.tag.tag.id
+ description: Tag ID.
+ type: string
+ - contextPath: Darktrace.tag.tag.name
+ description: Tag name.
+ type: string
+ description: Create a tag to assign to Darktrace ASM Assets within the Darktrace
+ UI.
+ - name: darktrace-asm-assign-tag
+ arguments:
+ - name: tag_name
+ required: true
+ description: Name of tag to be assigned.
+ - name: asset_id
+ required: true
+ description: ID of asset the tag should be assigned to.
+ outputs:
+ - contextPath: Darktrace.tag.success
+ description: Whether or not the tag was successfully assigned.
+ type: boolean
+ - contextPath: Darktrace.tag.asset.id
+ description: Asset ID.
+ type: string
+ - contextPath: Darktrace.tag.asset.tags
+ description: All tags assigned to asset.
+ description: Assign an existing tag to a Darktrace ASM Asset within the Darktrace
+ UI.
+ - name: darktrace-asm-unassign-tag
+ arguments:
+ - name: tag_name
+ required: true
+ description: Name of Tag to be unassigned.
+ - name: asset_id
+ required: true
+ description: ID of asset the tag should be removed from.
+ outputs:
+ - contextPath: Darktrace.tag.success
+ description: Whether or not the tag was successfully unassigned.
+ type: boolean
+ - contextPath: Darktrace.tag.asset.id
+ description: Asset ID.
+ type: string
+ - contextPath: Darktrace.tag.asset.tags
+ description: All tags assigned to asset.
+ description: Unassign an existing tag from a Darktrace ASM Asset within the Darktrace
+ UI.
+ dockerimage: demisto/python3:3.10.14.95663
+ isfetch: true
+ runonce: false
+ subtype: python3
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_image.png b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_image.png
new file mode 100644
index 000000000000..6a87598726d7
Binary files /dev/null and b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_image.png differ
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_test.py b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_test.py
new file mode 100644
index 000000000000..218600a99e6c
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/DarktraceASMRisk_test.py
@@ -0,0 +1,371 @@
+import json
+import pytest
+from DarktraceASMRisk import (Client,
+ TagError,
+ CommentError,
+ MitigationError,
+ fetch_incidents,
+ get_asm_risk_command,
+ get_asm_asset_command,
+ mitigate_asm_risk_command,
+ post_asm_comment_command,
+ edit_asm_comment_command,
+ delete_asm_comment_command,
+ create_asm_tag_command,
+ assign_asm_tag_command,
+ unassign_asm_tag_command)
+
+"""*****CONSTANTS****"""
+
+command_dict = {"get_asm_risk": {"command": get_asm_risk_command,
+ "args": {"risk_id": "Umlza1R5cGU6MTE5Nzc="},
+ },
+ "get_asm_asset": {"command": get_asm_asset_command,
+ "args": {"asset_id": "QXBwbGljYXRpb25UeXBlOjI2NjI4"},
+ },
+ "mitigate_asm_risk": {"command": mitigate_asm_risk_command,
+ "args": {"risk_id": "Umlza1R5cGU6MTE5Nzc="}
+ },
+ "post_asm_comment": {"command": post_asm_comment_command,
+ "args": {"id": "Umlza1R5cGU6MTE5Nzc=",
+ "comment": "API Test Comment"}
+ },
+ "edit_asm_comment": {"command": edit_asm_comment_command,
+ "args": {"comment_id": "Q29tbWVudFR5cGU6OTg=",
+ "comment": "API Test Comment Edited"}
+ },
+ "delete_asm_comment": {"command": delete_asm_comment_command,
+ "args": {"comment_id": "Q29tbWVudFR5cGU6OTg="}
+ },
+ "create_asm_tag": {"command": create_asm_tag_command,
+ "args": {"tag_name": "API TEST"}
+ },
+ "assign_asm_tag": {"command": assign_asm_tag_command,
+ "args": {"asset_id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tag_name": "API TEST"}
+ },
+ "unassign_asm_tag": {"command": unassign_asm_tag_command,
+ "args": {"asset_id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tag_name": "API TEST"}
+ },
+ }
+
+"""*****HELPER FUNCTIONS****"""
+
+
+def util_load_json(path):
+ with open(path, encoding='utf-8') as f:
+ return json.loads(f.read())
+
+
+def func_template(requests_mock, command):
+ """
+ Tests a given Darktrace ASM command function for functions that return CommandResults types.
+ Mainly for GET requests.
+
+ Configures requests_mock instance to generate the appropriate
+ API response, loaded from a local JSON file. Checks
+ the output of the command function with the expected output.
+ """
+
+ # GIVEN an integration is configured to Darktrace
+ mock_api_response = util_load_json(f'test_data/{command}.json')
+ requests_mock.post('https://mock.darktrace.com/graph/v1.0/api', json=mock_api_response)
+
+ client = Client(
+ base_url='https://mock.darktrace.com',
+ verify=False,
+ headers={"Authorization": "Token example_token"}
+ )
+
+ args = command_dict[command]['args']
+
+ integration_response = command_dict[command]["command"](client, args)
+ expected_response = util_load_json(f'test_data/formatted_{command}.json')
+
+ prefix = command.split('_')[-1]
+
+ # THEN the response should be returned and formatted
+ assert integration_response.outputs == expected_response
+ assert integration_response.outputs_prefix == f'Darktrace.{prefix}'
+
+
+def func_template_post(requests_mock, command):
+ """
+ Tests a given Darktrace ASM command function that returns a string.
+ Mainly for POST requests.
+
+ Configures requests_mock instance to generate the appropriate
+ API response, loaded from a local JSON file. Checks
+ the output of the command function with the expected output.
+ """
+
+ # GIVEN an integration is configured to Darktrace
+ mock_api_response = util_load_json(f'test_data/{command}.json')
+ requests_mock.post('https://mock.darktrace.com/graph/v1.0/api', json=mock_api_response)
+
+ client = Client(
+ base_url='https://mock.darktrace.com',
+ verify=False,
+ headers={"Authorization": "Token example_token"}
+ )
+
+ args = command_dict[command]['args']
+
+ integration_response = command_dict[command]["command"](client, args)
+ expected_response = util_load_json(f'test_data/formatted_{command}.json').get("readable_output")
+
+ # THEN the response should be returned and formatted
+ assert integration_response == expected_response
+
+
+def func_template_error(requests_mock, command):
+ """
+ Tests a given Darktrace ASM command function to ensure it raises the correct errors.
+
+ Configures requests_mock instance to generate the appropriate
+ API response, loaded from a local JSON file. Verifies it raises the expected error.
+ """
+
+ # GIVEN an integration is configured to Darktrace
+ mock_api_response = util_load_json(f'test_data/{command}_error.json')
+ requests_mock.post('https://mock.darktrace.com/graph/v1.0/api', json=mock_api_response)
+
+ client = Client(
+ base_url='https://mock.darktrace.com',
+ verify=False,
+ headers={"Authorization": "Token example_token"}
+ )
+
+ args = command_dict[command]['args']
+
+ if 'tag' in command:
+ with pytest.raises(TagError):
+ command_dict[command]["command"](client, args)
+ elif 'comment' in command:
+ with pytest.raises(CommentError):
+ command_dict[command]["command"](client, args)
+ elif 'mitigate' in command:
+ with pytest.raises(MitigationError):
+ command_dict[command]["command"](client, args)
+
+
+"""*****TEST FUNCTIONS****"""
+
+
+def test_fetch_incidents(requests_mock):
+ """
+ Given
+ Integration pulls in incidents from ASM
+ When
+ Regular interval defined by user, default is one minute
+ Then
+ Incident info will be formatted for XSOAR UI and required info for next call will be returned
+ """
+ mock_api_response = util_load_json('test_data/fetch_incidents.json')
+ requests_mock.post('https://mock.darktrace.com/graph/v1.0/api', json=mock_api_response)
+
+ client = Client(
+ base_url='https://mock.darktrace.com',
+ verify=False,
+ headers={"Authorization": "Token example_token"}
+ )
+
+ integration_response = fetch_incidents(client, last_run={}, first_fetch_time=0, max_alerts=50, min_severity=1, alert_types=[
+ 'gdpr', 'informational', 'misconfiguration', 'reported', 'ssl', 'vulnerable software'])
+ expected_response = util_load_json('test_data/formatted_fetch_incidents.json')
+
+ assert integration_response[0]['last_fetch'] == expected_response['last_fetch']
+ assert integration_response[1] == expected_response['incidents']
+
+
+def test_get_asm_risk(requests_mock):
+ """
+ Given
+ You want to pull a risk from ASM
+ When
+ Calling the darktrace-asm-get-risk command with a specified risk id
+ Then
+ The context will be updated with information pertaining to that risk id
+ """
+ func_template(requests_mock, 'get_asm_risk')
+
+
+def test_get_asm_asset(requests_mock):
+ """
+ Given
+ You want to get an asset's information
+ When
+ Calling the darktrace-asm-get-asset command with a specified asset id
+ Then
+ The context will be updated with information pertaining to that asset id
+ """
+ func_template(requests_mock, 'get_asm_asset')
+
+
+def test_mitigate_risk(requests_mock):
+ """
+ Given
+ You want to mitigate a risk on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-mitigate-risk command with a specified risk id
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'mitigate_asm_risk')
+
+
+def test_post_comment(requests_mock):
+ """
+ Given
+ You want to post a comment on a risk or asset on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-post-comment command with a specified risk id
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'post_asm_comment')
+
+
+def test_edit_comment(requests_mock):
+ """
+ Given
+ You want to edit a comment on a risk or asset on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-edit-comment command with a specified comment id and new comment text
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'edit_asm_comment')
+
+
+def test_delete_comment(requests_mock):
+ """
+ Given
+ You want to delete a comment on a risk or asset on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-delete-comment command with a specified comment id
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'delete_asm_comment')
+
+
+def test_create_tag(requests_mock):
+ """
+ Given
+ You want to create a tag on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-create-tag command with a specified tag name
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'create_asm_tag')
+
+
+def test_assign_tag(requests_mock):
+ """
+ Given
+ You want to assign a tag to an asset on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-assign-tag command with a specified tag name and asset id
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'assign_asm_tag')
+
+
+def test_unassign_tag(requests_mock):
+ """
+ Given
+ You want to unassign a tag from an asset on Darktrace PREVENT/ASM
+ When
+ Calling the darktrace-asm-unassign-tag command with a specified tag name and asset id
+ Then
+ The context will be updated to indicate a success or failure
+ """
+ func_template_post(requests_mock, 'unassign_asm_tag')
+
+
+def test_assign_tag_error(requests_mock):
+ """
+ Given
+ An error when assigning a Tag
+ When
+ Calling the darktrace-asm-assign-tag command with a specified tag name and asset id
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'assign_asm_tag')
+
+
+def test_unassign_tag_error(requests_mock):
+ """
+ Given
+ An error when unassigning a Tag
+ When
+ Calling the darktrace-asm-unassign-tag command with a specified tag name and asset id
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'unassign_asm_tag')
+
+
+def test_create_tag_error(requests_mock):
+ """
+ Given
+ An error when creating a Tag
+ When
+ Calling the darktrace-asm-create-tag command with a specified tag name
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'create_asm_tag')
+
+
+def test_post_comment_error(requests_mock):
+ """
+ Given
+ An error when posting a comment
+ When
+ Calling the darktrace-asm-post-comment command with a specified risk id
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'post_asm_comment')
+
+
+def test_edit_comment_error(requests_mock):
+ """
+ Given
+ An error when editing a comment
+ When
+ Calling the darktrace-asm-edit-comment command with a specified comment id and new comment text
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'edit_asm_comment')
+
+
+def test_delete_comment_error(requests_mock):
+ """
+ Given
+ An error when deleting a comment
+ When
+ Calling the darktrace-asm-delete-comment command with a specified comment id
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'delete_asm_comment')
+
+
+def test_mitigate_risk_error(requests_mock):
+ """
+ Given
+ An error when mitigating a risk
+ When
+ Calling the darktrace-asm-mitigate-risk command with a specified risk id
+ Then
+ The proper error will be raised
+ """
+ func_template_error(requests_mock, 'mitigate_asm_risk')
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/README.md b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/README.md
new file mode 100644
index 000000000000..b66ea92a9ab5
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/README.md
@@ -0,0 +1,619 @@
+Darktrace is a Cyber AI platform for threat detection and response across cloud, email, industrial, and the network.
+This integration was integrated and tested with version 6.0.0 of Darktrace
+
+## Configure Darktrace ASM on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Darktrace ASM.
+3. Click **Add instance** to create and configure a new integration instance.
+
+| **Parameter** | **Description** | **Required** |
+| --- | --- | --- |
+| url | Server URL \(e.g. https://example.net\) | True |
+| isFetch | Fetch incidents | False |
+| insecure | Trust any certificate \(not secure\) | False |
+| apikey | API Key | True |
+| alert_type | Incident types to fetch | False |
+| min_severity | Minimum Risk severity to fetch | False |
+| max_fetch | Maximum Risks per fetch | False |
+| first_fetch | First fetch time | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### darktrace-asm-get-risk
+
+***
+Returns the Risk object associated with the given Risk ID.
+
+#### Base Command
+
+`darktrace-asm-get-risk`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| risk_id | Darktrace ASM Risk ID | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.risk | dict | Darktrace Risk object. |
+| Darktrace.risk.asset | dict | Darktrace ASM Asset object associated with the given Risk. |
+| Darktrace.risk.asset.brand | string | Brand of associated Asset. |
+| Darktrace.risk.asset.tags | list | List of Tags associated with Asset. |
+| Darktrace.risk.asset.id | string | Asset ID. |
+| Darktrace.risk.asset.updatedAt | timestamp | Last time Asset was updated. |
+| Darktrace.risk.asset.securityrating | string | Security rating of Asset. |
+| Darktrace.risk.asset.isMalicious | boolean | Malicious state of the Asset. |
+| Darktrace.risk.asset.createdAt | timestamp | Time Asset was created. |
+| Darktrace.risk.asset.state | string | State of Asset. |
+| Darktrace.risk.comments | dict | Dictionary of comments by comment ID. |
+| Darktrace.risk.description | string | Description of Risk. |
+| Darktrace.risk.endedAt | timestamp | End time of Risk. |
+| Darktrace.risk.evidence | string | Evidence gathered indicating the Risk. |
+| Darktrace.risk.id | string | Risk ID. |
+| Darktrace.risk.mitigatedAt | timestamp | Mitigation time of Risk. |
+| Darktrace.risk.proposedAction | string | Recommended action to solve Risk. |
+| Darktrace.risk.securityRating | string | Security rating of Risk. |
+| Darktrace.risk.startedAt | timestamp | Start time of Risk. |
+| Darktrace.risk.title | string | Name of Risk. |
+| Darktrace.risk.type | string | Type of Risk. |
+
+#### Command Example
+
+```!darktrace-asm-get-risk risk_id=Umlza1R5cGU6MTE5Nzc=```
+
+#### Context Example
+
+```
+"risk": {
+ "id": "Umlza1R5cGU6MTE5Nzc=",
+ "type": "SSL",
+ "startedAt": "2022-05-27T18:38:45.439551+00:00",
+ "endedAt": "2023-06-07T09:59:49.344739+00:00",
+ "title": "HSTS header missing",
+ "description": "The HSTS header enforces users to always visit your website through SSL, after their first visit.",
+ "evidence": "No HSTS header present.",
+ "proposedAction": "Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html",
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjIyNjg0",
+ "state": "Unconfirmed",
+ "brand": "Darktrace",
+ "createdAt": "2022-05-27 14:18:24.264958+00:00",
+ "updatedAt": "2023-06-29 06:40:41.007652+00:00",
+ "securityrating": "f",
+ "isMalicious": true,
+ "tags": []
+ },
+ "securityRating": "b",
+ "mitigatedAt": 2023-06-06T09:59:49.344739+00:00,
+ "comments": {
+ "edges": [
+ {
+ "node": {
+ "id": "Q29tbWVudFR5cGU6ODM=",
+ "text": "API TEST EDIT"
+ }
+ }
+ ]
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| asset | id: QXBwbGljYXRpb25UeXBlOjIyNjg0<br>state: Unconfirmed<br>brand: Darktrace<br>createdAt: 2022-05-27 14:18:24.264958+00:00<br>updatedAt: 2023-06-29 06:40:41.007652+00:00<br>securityrating: f<br>isMalicious: true<br>tags: EXAMPLE_TAG |
+>| comments | Q29tbWVudFR5cGU6ODM=: "XSOAR Test Comment"<br>Q29tbWVudFR5cGU6ODN=: "XSOAR Test Comment 2" |
+>| description | The HSTS header enforces users to always visit your website through SSL, after their first visit. |
+>| endedAt | 2023-06-07T09:59:49.344739+00:00 |
+>| evidence | No HSTS header present. |
+>| id | Umlza1R5cGU6MTE5Nzc= |
+>| mitigatedAt | 2023-06-06T09:59:49.344739+00:00 |
+>| proposedAction | Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html |
+>| securityRating | b |
+>| startedAt | 2022-05-27T18:38:45.439551+00:00 |
+>| title | HSTS header missing |
+>| type | SSL |
+
+### darktrace-asm-get-asset
+
+***
+Returns the Asset object associated with the given Asset ID. The output will depend on the type of Asset (IP Address, Netblock, FQDN, or Application).
+
+#### Base Command
+
+`darktrace-asm-get-asset`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| asset_id | Darktrace ASM Asset ID | Required |
+
+#### Context Output: All Asset types
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.asset | dict | Darktrace ASM Asset object. |
+| Darktrace.asset.brand | string | Brand that the Asset is associated with. |
+| Darktrace.asset.comments | list | List of comments by comment ID. |
+| Darktrace.asset.createdAt | timestamp | Creation time of Asset. |
+| Darktrace.asset.discoverySources | list | List of discovery sources. |
+| Darktrace.asset.id | string | Asset ID. |
+| Darktrace.asset.isMalicious | bool | Malicious state of Asset. |
+| Darktrace.asset.risks | list | List of Risks associated with Asset. |
+| Darktrace.asset.securityrating | string | Security rating of Asset. |
+| Darktrace.asset.state | string | State of Asset. |
+| Darktrace.asset.tags | list | List of tags applied to Asset within Darktrace UI. |
+| Darktrace.asset.type | string | Type of Asset. |
+| Darktrace.asset.updatedAt | timestamp | Last time Asset was updated. |
+
+
+#### Context Output: Application Asset type
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.asset.fqdns | list | List of FQDNS associated with Asset. |
+| Darktrace.asset.ipaddresses | list | List of IPs associated with Asset. |
+| Darktrace.asset.protocol | string | Protocol associated with the Asset. |
+| Darktrace.asset.screenshot | string | Screenshot of webpage associated with Asset. |
+| Darktrace.asset.technologies | list | List of technologies associated with Asset. |
+| Darktrace.asset.uri | string | URI associated with Asset. |
+
+#### Context Output: IP Address Asset type
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.asset.lat | string | Latitude of the Asset. |
+| Darktrace.asset.lon | string | Longitude of the Asset. |
+| Darktrace.asset.geoCity | string | City where the Asset is located. |
+| Darktrace.asset.geoCountry | string | Country where the Asset is located. |
+| Darktrace.asset.address | string | IP address of the Asset. |
+| Darktrace.asset.netblock | string | Netblock of the Asset. |
+
+#### Context Output: FQDN Asset type
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.asset.name | string | Hostname associated with Asset. |
+| Darktrace.asset.dnsRecords | string | DNS records associated with Asset. |
+| Darktrace.asset.resolvesTo | list | List of IPs the Asset hostname resolves to. |
+| Darktrace.asset.whois | string | WhoIs information associated with Asset. |
+| Darktrace.asset.registeredDomain | string | Domain associated with Asset. |
+
+#### Context Output: Netblock Asset type
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.asset.netname | string | Name of the Asset. |
+| Darktrace.asset.ipAddresses | list | List of IP addresses associated with Asset. |
+
+#### Command Example
+
+```!darktrace-asm-get-asset asset_id=QXBwbGljYXRpb25UeXBlOjI2NjI4```
+
+#### Context Example
+
+```
+"application": {
+ "brand": "Darktrace",
+ "comments": [
+ {
+ "id": "Q29tbWVudFR5cGU6OTc=",
+ "text": "Test comment"
+ }
+ ],
+ "createdAt": "2022-06-27 18:34:50.473256+00:00",
+ "discoverySources": [
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc0Ng==",
+ "description": "Record retrieved from FQDN careers.darktrace.com"
+ },
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc1Nw==",
+ "description": "Application from https://careers.darktrace.com/"
+ }
+ ],
+ "fqdns": [
+ {
+ "id": "RnFkblR5cGU6MjY2Mjc=",
+ "name": "careers.darktrace.com"
+ }
+ ],
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "ipaddresses": [
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njc=",
+ "address": "1.1.1.1"
+ },
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njg=",
+ "address": "1.1.1.1"
+ }
+ ],
+ "isMalicious": false,
+ "risks": [
+ {
+ "id": "Umlza1R5cGU6NjYzNjA=",
+ "title": "Vulnerable software found - jquery ui/1.13.0 (highest CVE score 4.3)"
+ },
+ {
+ "id": "Umlza1R5cGU6MTU1ODQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ },
+ {
+ "id": "Umlza1R5cGU6MzQ4MzQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ }
+ ],
+ "screenshot": "https://storage.googleapis.com/asm-prod-1931-z5b5n7ow5w-copy/http_screenshot/screenshot_155822.jpg?Expires=1710617440&GoogleAccessId=asm-prod-1931-cyberweb%40dt-asm-prod.iam.gserviceaccount.com&Signature=Vbz1hBo%2Bo3ZYTRvg5p%2F%2F%2FTFFf4PHRgPaVUrcpaDG8Kp%2BOT2dSm8O2NC1HFJXQW420yD2zppJ5IbOCt46vJ6LZMvx5kcdm7IY1U6yKbedRGACfbpUQaXEjmXN1gLhVawnoET94CYqnmlYue6%2Fy4B6cS4fZwvH6sllm2OnbDZ%2FZacoSw9Xmf214R0M%2FgY3OjKuXapaAnu779r5c8fkjL8cSvX8E8PzkxToGF9ysTNuWVqZc46H05xxUtb8QSauiggAijBeSLg%2Blol1wVj0ZuMP%2Fb1kJvXNpCr6x0Dem6ITe4C%2FPrbiqcNMvwSZChptiDBhgoXGRAm%2FRJokWqktST19Nw%3D%3D",
+ "securityrating": "b",
+ "state": "Confirmed",
+ "tags": [
+ "MANAGED BY INTERNAL DEV"
+ ],
+ "updatedAt": "2023-08-21 00:31:57.299904+00:00",
+ "uri": "https://careers.darktrace.com",
+ "technologies": [
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY2",
+ "name": "Amazon ALB"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY3",
+ "name": "Amazon Web Services"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTE1MjU3",
+ "name": "Bootstrap"
+ }
+ ],
+ "protocol": "HTTP"
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| brand | Darktrace |
+>| comments | Q29tbWVudFR5cGU6OTc=: "Test comment" |
+>| createdAt | 2022-06-27 18:34:50.473256+00:00 |
+>| discoverySources | RGlzY292ZXJ5U291cmNlVHlwZTo1NDc0Ng==: Record retrieved from FQDN careers.darktrace.com<br>RGlzY292ZXJ5U291cmNlVHlwZTo1NDc1Nw==: Application from https://careers.darktrace.com/ |
+>| fqdns | RnFkblR5cGU6MjY2Mjc=: careers.darktrace.com |
+>| id | QXBwbGljYXRpb25UeXBlOjI2NjI4 |
+>| ipaddresses | SVBBZGRyZXNzVHlwZToxNTU3Njc=: 1.1.1.1<br>SVBBZGRyZXNzVHlwZToxNTU3Njg=: 1.1.1.1 |
+>| isMalicious | false |
+>| protocol | HTTP |
+>| risks | Umlza1R5cGU6NjYzNjA=: Vulnerable software found - jquery ui/1.13.0 (highest CVE score 4.3)<br>Umlza1R5cGU6MTU1ODQ=: Excessive cookie lifetime (> 1 year)<br>Umlza1R5cGU6MzQ4MzQ=: Excessive cookie lifetime (> 1 year) |
+>| screenshot | https://storage.googleapis.com/asm-prod-1931-z5b5n7ow5w-copy/http_screenshot/screenshot_155822.jpg?Expires=1710617295&GoogleAccessId=asm-prod-1931-cyberweb%40dt-asm-prod.iam.gserviceaccount.com&Signature=HjT83fw4EV%2F6notDq7tQB24oAr049F4UZ8OUDJ3hiuAaD%2F3y7xFOniBLDyZNtZBMlUDDJgrG6%2BhXbuJ0Sdobhsk%2Bj6KZknqa6xao0eyv%2BT%2FQGysZSxol8YHn%2BykRBkX8Umajs%2F5KRR8GRWc46o7m%2FnW1Rdop4qUuGKPy82UUOWwbyfcI7yYOGH8nky2b0o95QyfvR4%2Fa4GeCEHL8cz8RksGh4imWICWcTDu18OlGNruI%2F0sAiivHVbzPnOnBBFwFunAIXez9THr5oItqIoTzV%2FrNdwIFHc0rRIvtvNpuUVcrQo7%2FqaDunYZSmPu0Hf6eaL7cR6ZbYbXuKchlr2eAOQ%3D%3D |
+>| securityrating | b |
+>| state | Confirmed |
+>| tags | MANAGED BY INTERNAL DEV |
+>| technologies | VGVjaG5vbG9neVR5cGU6MTU4MjY2: Amazon ALB<br>VGVjaG5vbG9neVR5cGU6MTU4MjY3: Amazon Web Services<br>VGVjaG5vbG9neVR5cGU6MTE1MjU3: Bootstrap |
+>| type | application |
+>| updatedAt | 2023-08-21 00:31:57.299904+00:00 |
+>| uri | https://careers.darktrace.com |
+
+### darktrace-asm-mitigate-risk
+
+***
+Mitigates a Risk within the Darktrace UI. **Warning: Mitigating a Risk without taking action to resolve it means you accept the Risk and it will no longer appear within the Darktrace UI.**
+
+#### Base Command
+
+`darktrace-asm-mitigate-risk`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| risk_id | Darktrace ASM Risk ID | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.risk.success | boolean | Status of mitigation. |
+
+#### Command Example
+
+```!darktrace-asm-mitigate-risk risk_id=Umlza1R5cGU6MTE5Nzc=```
+
+#### Context Example
+
+```
+"closeRisk": {
+ "success": true,
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| success | true |
+
+### darktrace-asm-post-comment
+
+***
+Post a comment to a Risk or an Asset within the Darktrace UI.
+
+#### Base Command
+
+`darktrace-asm-post-comment`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | Darktrace ASM Risk or Asset ID | Required |
+| comment | Text of comment to be applied | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.comment.comment.id | string | Unique ID of Comment. |
+| Darktrace.comment.comment.text | string | Text of Comment. |
+| Darktrace.comment.success | boolean | Status of post. |
+
+#### Command Example
+
+```!darktrace-asm-post-comment id=QXBwbGljYXRpb25UeXBlOjI2NjI4 comment="API Test Comment"```
+
+#### Context Example
+
+```
+"placeComment": {
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment"
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| comment | id: Q29tbWVudFR5cGU6OTg=
text: API Test Comment |
+>| success | true |
+
+### darktrace-asm-edit-comment
+
+***
+Edit an existing comment within the Darktrace UI.
+
+#### Base Command
+
+`darktrace-asm-edit-comment`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment_id | ID of comment to be edited | Required |
+| comment | Text of comment to be applied | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.comment.comment.id | string | Unique ID of Comment. |
+| Darktrace.comment.comment.text | string | Text of Comment. |
+| Darktrace.comment.success | boolean | Status of edit. |
+
+#### Command Example
+
+```!darktrace-asm-edit-comment comment_id=Q29tbWVudFR5cGU6OTg= comment="API Test Comment Edited"```
+
+#### Context Example
+
+```
+"editComment": {
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment Edited"
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| comment | id: Q29tbWVudFR5cGU6OTg=
text: API Test Comment Edited |
+>| success | true |
+
+### darktrace-asm-delete-comment
+
+***
+Delete an existing comment within the Darktrace UI.
+
+#### Base Command
+
+`darktrace-asm-delete-comment`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| comment_id | ID of comment to be deleted | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.comment.success | boolean | Status of deletion. |
+
+#### Command Example
+
+```!darktrace-asm-delete-comment comment_id=Q29tbWVudFR5cGU6OTg=```
+
+#### Context Example
+
+```
+"deleteComment": {
+ "success": true
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| success | true |
+
+### darktrace-asm-create-tag
+
+***
+Create a new Tag within the Darktrace UI. Tags can be applied to Assets.
+
+#### Base Command
+
+`darktrace-asm-create-tag`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| tag_name | Name of Tag to create | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.tag.success | boolean | Status of creation. |
+| Darktrace.tag.tag.id | string | Tag ID. |
+| Darktrace.tag.tag.name | string | Name of Tag. |
+
+#### Command Example
+
+```!darktrace-asm-create-tag tag_name="API TEST"```
+
+#### Context Example
+
+```
+"createTag": {
+ "success": true,
+ "tag": {
+ "id": "VGFnVHlwZTo1Mg==",
+ "name": "API TEST"
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| success | true |
+>| tag | id: VGFnVHlwZTo1Mg==
name: API TEST |
+
+### darktrace-asm-assign-tag
+
+***
+Assign an existing Tag to an Asset within the Darktrace UI.
+
+#### Base Command
+
+`darktrace-asm-assign-tag`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| tag_name | Name of Tag to apply to Asset | Required |
+| asset_id | Asset ID to apply Tag to | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.tag.success | boolean | Status of assignment. |
+| Darktrace.tag.asset.id | string | Asset ID. |
+| Darktrace.tag.asset.tags | list | List of Tags assigned to Asset. |
+
+#### Command Example
+
+```!darktrace-asm-assign-tag tag_name="API TEST" asset_id=SVBBZGRyZXNzVHlwZTox```
+
+#### Context Example
+
+```
+"assignTag": {
+ "success": true,
+ "asset": {
+ "id": "SVBBZGRyZXNzVHlwZTox",
+ "tags": [
+ "API TEST"
+ ]
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| asset | id: SVBBZGRyZXNzVHlwZTox
tags: API TEST |
+>| success | true |
+
+### darktrace-asm-unassign-tag
+
+***
+Unassign an existing Tag from an Asset within the Darktrace UI.
+
+#### Base Command
+
+`darktrace-asm-unassign-tag`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| tag_name | Name of Tag to remove from Asset | Required |
+| asset_id | Asset ID to remove Tag from | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Darktrace.tag.success | boolean | Status of unassignment. |
+| Darktrace.tag.asset.id | string | Asset ID. |
+| Darktrace.tag.asset.tags | list | List of Tags assigned to Asset. |
+
+#### Command Example
+
+```!darktrace-asm-unassign-tag tag_name="API TEST" asset_id=SVBBZGRyZXNzVHlwZTox```
+
+#### Context Example
+
+```
+"unassignTag": {
+ "success": true,
+ "asset": {
+ "id": "SVBBZGRyZXNzVHlwZTox",
+ "tags": []
+ }
+ }
+```
+
+#### Human Readable Output
+
+>| Field | Value |
+>| --- | --- |
+>| asset | id: SVBBZGRyZXNzVHlwZTox
tags: |
+>| success | true |
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag.json
new file mode 100644
index 000000000000..b5092f814246
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag.json
@@ -0,0 +1,12 @@
+{"data": {
+ "assignTag": {
+ "success": true,
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tags": [
+ "API TEST"
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag_error.json
new file mode 100644
index 000000000000..8d087d297e38
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/assign_asm_tag_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Tag matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "assignTag"
+ ]
+ }
+ ],
+ "data": {
+ "assignTag": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag.json
new file mode 100644
index 000000000000..49a2c05d2063
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag.json
@@ -0,0 +1,10 @@
+{"data": {
+ "createTag": {
+ "success": true,
+ "tag": {
+ "id": "VGFnVHlwZTo1Mg==",
+ "name": "API TEST"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag_error.json
new file mode 100644
index 000000000000..59b5fe851460
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/create_asm_tag_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Tag matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "createTag"
+ ]
+ }
+ ],
+ "data": {
+ "createTag": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment.json
new file mode 100644
index 000000000000..c65960b33648
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment.json
@@ -0,0 +1,7 @@
+{"data":
+ {
+ "deleteComment": {
+ "success": true
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment_error.json
new file mode 100644
index 000000000000..9ab25e7caeee
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/delete_asm_comment_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Comment matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "deleteComment"
+ ]
+ }
+ ],
+ "data": {
+ "deleteComment": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment.json
new file mode 100644
index 000000000000..e341794bb58e
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment.json
@@ -0,0 +1,11 @@
+{"data":
+ {
+ "editComment": {
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment Edited"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment_error.json
new file mode 100644
index 000000000000..008106bf71a7
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/edit_asm_comment_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Comment matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "editComment"
+ ]
+ }
+ ],
+ "data": {
+ "editComment": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/fetch_incidents.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/fetch_incidents.json
new file mode 100644
index 000000000000..7e2c46e82e23
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/fetch_incidents.json
@@ -0,0 +1,76 @@
+{"data":
+ {"allRisks":
+ {"edges":
+ [
+ {"node":
+ {
+ "id": "Umlza1R5cGU6MTE5Nzc=",
+ "type": "SSL",
+ "startedAt": "2022-05-27T18:38:45.439551+00:00",
+ "endedAt": "2023-06-07T09:59:49.344739+00:00",
+ "title": "HSTS header missing",
+ "description": "The HSTS header enforces users to always visit your website through SSL, after their first visit.",
+ "evidence": "No HSTS header present.",
+ "proposedAction": "Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html",
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjIyNjg0",
+ "state": "Unconfirmed",
+ "brand": "Darktrace",
+ "createdAt": "2022-05-27 14:18:24.264958+00:00",
+ "updatedAt": "2023-06-29 06:40:41.007652+00:00",
+ "securityrating": "f",
+ "isMalicious": true,
+ "tags": []
+ },
+ "securityRating": "b",
+ "mitigatedAt": "2023-06-06T09:59:49.344739+00:00",
+ "comments": {
+ "edges": [
+ {
+ "node": {
+ "id": "Q29tbWVudFR5cGU6ODM=",
+ "text": "API TEST EDIT"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {"node":
+ {
+ "id": "Umlza1R5cGU6MTE5Nzc=",
+ "type": "SSL",
+ "startedAt": "2022-05-27T18:38:45.439551+00:00",
+ "endedAt": "2023-06-07T09:59:49.344739+00:00",
+ "title": "HSTS header missing",
+ "description": "The HSTS header enforces users to always visit your website through SSL, after their first visit.",
+ "evidence": "No HSTS header present.",
+ "proposedAction": "Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html",
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjIyNjg0",
+ "state": "Unconfirmed",
+ "brand": "Darktrace",
+ "createdAt": "2022-05-27 14:18:24.264958+00:00",
+ "updatedAt": "2023-06-29 06:40:41.007652+00:00",
+ "securityrating": "f",
+ "isMalicious": true,
+ "tags": []
+ },
+ "securityRating": "b",
+ "mitigatedAt": "2023-06-06T09:59:49.344739+00:00",
+ "comments": {
+ "edges": [
+ {
+ "node": {
+ "id": "Q29tbWVudFR5cGU6ODM=",
+ "text": "API TEST EDIT"
+ }
+ }
+ ]
+ }
+ }
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_assign_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_assign_asm_tag.json
new file mode 100644
index 000000000000..9af34746c906
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_assign_asm_tag.json
@@ -0,0 +1,10 @@
+{
+ "success": true,
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tags": [
+ "API TEST"
+ ]
+ },
+ "readable_output": "Successfully assigned tag API TEST to asset QXBwbGljYXRpb25UeXBlOjI2NjI4. Tags applied to asset:\nAPI TEST"
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_create_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_create_asm_tag.json
new file mode 100644
index 000000000000..fbaab0d1a560
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_create_asm_tag.json
@@ -0,0 +1,8 @@
+{
+ "success": true,
+ "tag": {
+ "id": "VGFnVHlwZTo1Mg==",
+ "name": "API TEST"
+ },
+ "readable_output": "Successfully created tag API TEST."
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_delete_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_delete_asm_comment.json
new file mode 100644
index 000000000000..c63a5e5de970
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_delete_asm_comment.json
@@ -0,0 +1,4 @@
+{
+ "success": true,
+ "readable_output": "Comment successfully deleted. Comment ID: Q29tbWVudFR5cGU6OTg="
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_edit_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_edit_asm_comment.json
new file mode 100644
index 000000000000..23bcbd5536ac
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_edit_asm_comment.json
@@ -0,0 +1,8 @@
+{
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment Edited"
+ },
+ "readable_output": "Comment successfully edited. Comment ID: Q29tbWVudFR5cGU6OTg="
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_fetch_incidents.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_fetch_incidents.json
new file mode 100644
index 000000000000..fe0bf342ab80
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_fetch_incidents.json
@@ -0,0 +1,13 @@
+{"last_fetch": "2022-05-27T18:38:45.439551",
+ "incidents":
+ [
+ {"name": "Darktrace ASM | Risk Title: HSTS header missing | Brand: Darktrace",
+ "occurred": "2022-05-27T18:38:45.439551+00:00",
+ "rawJSON": "{\"id\": \"Umlza1R5cGU6MTE5Nzc=\", \"type\": \"SSL\", \"startedAt\": \"2022-05-27T18:38:45.439551+00:00\", \"endedAt\": \"2023-06-07T09:59:49.344739+00:00\", \"title\": \"HSTS header missing\", \"description\": \"The HSTS header enforces users to always visit your website through SSL, after their first visit.\", \"evidence\": \"No HSTS header present.\", \"proposedAction\": \"Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html\", \"asset\": {\"id\": \"QXBwbGljYXRpb25UeXBlOjIyNjg0\", \"state\": \"Unconfirmed\", \"brand\": \"Darktrace\", \"createdAt\": \"2022-05-27 14:18:24.264958+00:00\", \"updatedAt\": \"2023-06-29 06:40:41.007652+00:00\", \"securityrating\": \"f\", \"isMalicious\": true, \"tags\": []}, \"securityRating\": \"b\", \"mitigatedAt\": \"2023-06-06T09:59:49.344739+00:00\", \"comments\": {\"edges\": [{\"node\": {\"id\": \"Q29tbWVudFR5cGU6ODM=\", \"text\": \"API TEST EDIT\"}}]}}",
+ "severity": 1},
+ {"name": "Darktrace ASM | Risk Title: HSTS header missing | Brand: Darktrace",
+ "occurred": "2022-05-27T18:38:45.439551+00:00",
+ "rawJSON": "{\"id\": \"Umlza1R5cGU6MTE5Nzc=\", \"type\": \"SSL\", \"startedAt\": \"2022-05-27T18:38:45.439551+00:00\", \"endedAt\": \"2023-06-07T09:59:49.344739+00:00\", \"title\": \"HSTS header missing\", \"description\": \"The HSTS header enforces users to always visit your website through SSL, after their first visit.\", \"evidence\": \"No HSTS header present.\", \"proposedAction\": \"Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html\", \"asset\": {\"id\": \"QXBwbGljYXRpb25UeXBlOjIyNjg0\", \"state\": \"Unconfirmed\", \"brand\": \"Darktrace\", \"createdAt\": \"2022-05-27 14:18:24.264958+00:00\", \"updatedAt\": \"2023-06-29 06:40:41.007652+00:00\", \"securityrating\": \"f\", \"isMalicious\": true, \"tags\": []}, \"securityRating\": \"b\", \"mitigatedAt\": \"2023-06-06T09:59:49.344739+00:00\", \"comments\": {\"edges\": [{\"node\": {\"id\": \"Q29tbWVudFR5cGU6ODM=\", \"text\": \"API TEST EDIT\"}}]}}",
+ "severity": 1}
+ ]
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_asset.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_asset.json
new file mode 100644
index 000000000000..84fb46b6a863
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_asset.json
@@ -0,0 +1,76 @@
+{
+ "brand": "Darktrace",
+ "comments": [
+ {
+ "id": "Q29tbWVudFR5cGU6OTc=",
+ "text": "Test comment"
+ }
+ ],
+ "createdAt": "2022-06-27 18:34:50.473256+00:00",
+ "discoverySources": [
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc0Ng==",
+ "description": "Record retrieved from FQDN careers.darktrace.com"
+ },
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc1Nw==",
+ "description": "Application from https://careers.darktrace.com/"
+ }
+ ],
+ "fqdns": [
+ {
+ "id": "RnFkblR5cGU6MjY2Mjc=",
+ "name": "careers.darktrace.com"
+ }
+ ],
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "ipaddresses": [
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njc=",
+ "address": "104.16.61.2"
+ },
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njg=",
+ "address": "104.16.60.2"
+ }
+ ],
+ "isMalicious": false,
+ "risks": [
+ {
+ "id": "Umlza1R5cGU6NjYzNjA=",
+ "title": "Vulnerable software found - jquery ui/1.13.0 (highest CVE score 4.3)"
+ },
+ {
+ "id": "Umlza1R5cGU6MTU1ODQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ },
+ {
+ "id": "Umlza1R5cGU6MzQ4MzQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ }
+ ],
+ "screenshot": "https://storage.googleapis.com/asm-prod-1931-z5b5n7ow5w-copy/http_screenshot/screenshot_155822.jpg?Expires=1710617440&GoogleAccessId=asm-prod-1931-cyberweb%40dt-asm-prod.iam.gserviceaccount.com&Signature=Vbz1hBo%2Bo3ZYTRvg5p%2F%2F%2FTFFf4PHRgPaVUrcpaDG8Kp%2BOT2dSm8O2NC1HFJXQW420yD2zppJ5IbOCt46vJ6LZMvx5kcdm7IY1U6yKbedRGACfbpUQaXEjmXN1gLhVawnoET94CYqnmlYue6%2Fy4B6cS4fZwvH6sllm2OnbDZ%2FZacoSw9Xmf214R0M%2FgY3OjKuXapaAnu779r5c8fkjL8cSvX8E8PzkxToGF9ysTNuWVqZc46H05xxUtb8QSauiggAijBeSLg%2Blol1wVj0ZuMP%2Fb1kJvXNpCr6x0Dem6ITe4C%2FPrbiqcNMvwSZChptiDBhgoXGRAm%2FRJokWqktST19Nw%3D%3D",
+ "securityrating": "b",
+ "state": "Confirmed",
+ "tags": [
+ "MANAGED BY INTERNAL DEV"
+ ],
+ "updatedAt": "2023-08-21 00:31:57.299904+00:00",
+ "uri": "https://careers.darktrace.com",
+ "technologies": [
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY2",
+ "name": "Amazon ALB"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY3",
+ "name": "Amazon Web Services"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTE1MjU3",
+ "name": "Bootstrap"
+ }
+ ],
+ "protocol": "HTTP",
+ "type": "application"
+}
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_risk.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_risk.json
new file mode 100644
index 000000000000..b41acd700e6b
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_get_asm_risk.json
@@ -0,0 +1,32 @@
+{
+ "id": "Umlza1R5cGU6MTE5Nzc=",
+ "type": "SSL",
+ "startedAt": "2022-05-27T18:38:45.439551+00:00",
+ "endedAt": "2023-06-07T09:59:49.344739+00:00",
+ "title": "HSTS header missing",
+ "description": "The HSTS header enforces users to always visit your website through SSL, after their first visit.",
+ "evidence": "No HSTS header present.",
+ "proposedAction": "Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html",
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjIyNjg0",
+ "state": "Unconfirmed",
+ "brand": "Darktrace",
+ "createdAt": "2022-05-27 14:18:24.264958+00:00",
+ "updatedAt": "2023-06-29 06:40:41.007652+00:00",
+ "securityrating": "f",
+ "isMalicious": true,
+ "tags": []
+ },
+ "securityRating": "b",
+ "mitigatedAt": "2023-06-06T09:59:49.344739+00:00",
+ "comments": {
+ "edges": [
+ {
+ "node": {
+ "id": "Q29tbWVudFR5cGU6ODM=",
+ "text": "API TEST EDIT"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_mitigate_asm_risk.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_mitigate_asm_risk.json
new file mode 100644
index 000000000000..856f1541c9fb
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_mitigate_asm_risk.json
@@ -0,0 +1,3 @@
+{
+ "readable_output": "Successfully mitigated risk. Risk ID: Umlza1R5cGU6MTE5Nzc="
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_post_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_post_asm_comment.json
new file mode 100644
index 000000000000..7f0d2635c678
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_post_asm_comment.json
@@ -0,0 +1,8 @@
+{
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment"
+ },
+ "readable_output": "Comment successful. Comment ID: Q29tbWVudFR5cGU6OTg="
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_unassign_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_unassign_asm_tag.json
new file mode 100644
index 000000000000..4878dc5857ba
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/formatted_unassign_asm_tag.json
@@ -0,0 +1,8 @@
+{
+ "success": true,
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tags": []
+ },
+ "readable_output": "Successfully unassigned tag API TEST from asset QXBwbGljYXRpb25UeXBlOjI2NjI4. Tags applied to asset:\n"
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_asset.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_asset.json
new file mode 100644
index 000000000000..a9ad1866e509
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_asset.json
@@ -0,0 +1,78 @@
+{"data": {
+ "application": {
+ "brand": "Darktrace",
+ "comments": [
+ {
+ "id": "Q29tbWVudFR5cGU6OTc=",
+ "text": "Test comment"
+ }
+ ],
+ "createdAt": "2022-06-27 18:34:50.473256+00:00",
+ "discoverySources": [
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc0Ng==",
+ "description": "Record retrieved from FQDN careers.darktrace.com"
+ },
+ {
+ "id": "RGlzY292ZXJ5U291cmNlVHlwZTo1NDc1Nw==",
+ "description": "Application from https://careers.darktrace.com/"
+ }
+ ],
+ "fqdns": [
+ {
+ "id": "RnFkblR5cGU6MjY2Mjc=",
+ "name": "careers.darktrace.com"
+ }
+ ],
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "ipaddresses": [
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njc=",
+ "address": "104.16.61.2"
+ },
+ {
+ "id": "SVBBZGRyZXNzVHlwZToxNTU3Njg=",
+ "address": "104.16.60.2"
+ }
+ ],
+ "isMalicious": false,
+ "risks": [
+ {
+ "id": "Umlza1R5cGU6NjYzNjA=",
+ "title": "Vulnerable software found - jquery ui/1.13.0 (highest CVE score 4.3)"
+ },
+ {
+ "id": "Umlza1R5cGU6MTU1ODQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ },
+ {
+ "id": "Umlza1R5cGU6MzQ4MzQ=",
+ "title": "Excessive cookie lifetime (> 1 year)"
+ }
+ ],
+ "screenshot": "https://storage.googleapis.com/asm-prod-1931-z5b5n7ow5w-copy/http_screenshot/screenshot_155822.jpg?Expires=1710617440&GoogleAccessId=asm-prod-1931-cyberweb%40dt-asm-prod.iam.gserviceaccount.com&Signature=Vbz1hBo%2Bo3ZYTRvg5p%2F%2F%2FTFFf4PHRgPaVUrcpaDG8Kp%2BOT2dSm8O2NC1HFJXQW420yD2zppJ5IbOCt46vJ6LZMvx5kcdm7IY1U6yKbedRGACfbpUQaXEjmXN1gLhVawnoET94CYqnmlYue6%2Fy4B6cS4fZwvH6sllm2OnbDZ%2FZacoSw9Xmf214R0M%2FgY3OjKuXapaAnu779r5c8fkjL8cSvX8E8PzkxToGF9ysTNuWVqZc46H05xxUtb8QSauiggAijBeSLg%2Blol1wVj0ZuMP%2Fb1kJvXNpCr6x0Dem6ITe4C%2FPrbiqcNMvwSZChptiDBhgoXGRAm%2FRJokWqktST19Nw%3D%3D",
+ "securityrating": "b",
+ "state": "Confirmed",
+ "tags": [
+ "MANAGED BY INTERNAL DEV"
+ ],
+ "updatedAt": "2023-08-21 00:31:57.299904+00:00",
+ "uri": "https://careers.darktrace.com",
+ "technologies": [
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY2",
+ "name": "Amazon ALB"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTU4MjY3",
+ "name": "Amazon Web Services"
+ },
+ {
+ "id": "VGVjaG5vbG9neVR5cGU6MTE1MjU3",
+ "name": "Bootstrap"
+ }
+ ],
+ "protocol": "HTTP"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_risk.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_risk.json
new file mode 100644
index 000000000000..216e5bc57b4a
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/get_asm_risk.json
@@ -0,0 +1,36 @@
+{
+ "data": {
+ "risk": {
+ "id": "Umlza1R5cGU6MTE5Nzc=",
+ "type": "SSL",
+ "startedAt": "2022-05-27T18:38:45.439551+00:00",
+ "endedAt": "2023-06-07T09:59:49.344739+00:00",
+ "title": "HSTS header missing",
+ "description": "The HSTS header enforces users to always visit your website through SSL, after their first visit.",
+ "evidence": "No HSTS header present.",
+ "proposedAction": "Turn on the HSTS header, read more on https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Strict_Transport_Security_Cheat_Sheet.html",
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjIyNjg0",
+ "state": "Unconfirmed",
+ "brand": "Darktrace",
+ "createdAt": "2022-05-27 14:18:24.264958+00:00",
+ "updatedAt": "2023-06-29 06:40:41.007652+00:00",
+ "securityrating": "f",
+ "isMalicious": true,
+ "tags": []
+ },
+ "securityRating": "b",
+ "mitigatedAt": "2023-06-06T09:59:49.344739+00:00",
+ "comments": {
+ "edges": [
+ {
+ "node": {
+ "id": "Q29tbWVudFR5cGU6ODM=",
+ "text": "API TEST EDIT"
+ }
+ }
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk.json
new file mode 100644
index 000000000000..037e24ed3548
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk.json
@@ -0,0 +1,6 @@
+{"data": {
+ "closeRisk": {
+ "success": true
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk_error.json
new file mode 100644
index 000000000000..c5ed80c01672
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/mitigate_asm_risk_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "ID is not a Risk",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "closeRisk"
+ ]
+ }
+ ],
+ "data": {
+ "closeRisk": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment.json
new file mode 100644
index 000000000000..b5f9c86e2b03
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment.json
@@ -0,0 +1,11 @@
+{"data":
+ {
+ "placeComment": {
+ "success": true,
+ "comment": {
+ "id": "Q29tbWVudFR5cGU6OTg=",
+ "text": "API Test Comment"
+ }
+ }
+ }
+}
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment_error.json
new file mode 100644
index 000000000000..9ca3c81ef7c6
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/post_asm_comment_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Comment matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "placeComment"
+ ]
+ }
+ ],
+ "data": {
+ "placeComment": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag.json
new file mode 100644
index 000000000000..985ffa8ebe14
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag.json
@@ -0,0 +1,10 @@
+{"data": {
+ "unassignTag": {
+ "success": true,
+ "asset": {
+ "id": "QXBwbGljYXRpb25UeXBlOjI2NjI4",
+ "tags": []
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag_error.json b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag_error.json
new file mode 100644
index 000000000000..a0f0362acccc
--- /dev/null
+++ b/Packs/DarktraceASM/Integrations/DarktraceASMRisk/test_data/unassign_asm_tag_error.json
@@ -0,0 +1,19 @@
+{
+ "errors": [
+ {
+ "message": "Tag matching query does not exist.",
+ "locations": [
+ {
+ "line": 1,
+ "column": 24
+ }
+ ],
+ "path": [
+ "unassignTag"
+ ]
+ }
+ ],
+ "data": {
+ "unassignTag": null
+ }
+ }
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Layouts/layoutscontainer-DarktraceASM_ASM_Layout.json b/Packs/DarktraceASM/Layouts/layoutscontainer-DarktraceASM_ASM_Layout.json
new file mode 100644
index 000000000000..355495d3489c
--- /dev/null
+++ b/Packs/DarktraceASM/Layouts/layoutscontainer-DarktraceASM_ASM_Layout.json
@@ -0,0 +1,570 @@
+{
+ "close": {},
+ "definitionId": "",
+ "description": "",
+ "details": {},
+ "detailsV2": {
+ "tabs": [
+ {
+ "id": "summary",
+ "name": "Legacy Summary",
+ "type": "summary"
+ },
+ {
+ "id": "caseinfoid",
+ "name": "Incident Info",
+ "sections": [
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-fce71720-98b0-11e9-97d7-ed26ef9e46c8",
+ "isVisible": true,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "type",
+ "height": 22,
+ "id": "incident-type-field",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "severity",
+ "height": 22,
+ "id": "incident-severity-field",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "owner",
+ "height": 22,
+ "id": "incident-owner-field",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "sourcebrand",
+ "height": 22,
+ "id": "incident-sourceBrand-field",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "sourceinstance",
+ "height": 22,
+ "id": "incident-sourceInstance-field",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "playbookid",
+ "height": 22,
+ "id": "incident-playbookId-field",
+ "index": 5,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Case Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "h": 2,
+ "i": "caseinfoid-61263cc0-98b1-11e9-97d7-ed26ef9e46c8",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Notes",
+ "static": false,
+ "type": "notes",
+ "w": 1,
+ "x": 2,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-6aabad20-98b1-11e9-97d7-ed26ef9e46c8",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Work Plan",
+ "static": false,
+ "type": "workplan",
+ "w": 1,
+ "x": 1,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-770ec200-98b1-11e9-97d7-ed26ef9e46c8",
+ "isVisible": true,
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Linked Incidents",
+ "static": false,
+ "type": "linkedIncidents",
+ "w": 1,
+ "x": 1,
+ "y": 8
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-842632c0-98b1-11e9-97d7-ed26ef9e46c8",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Child Incidents",
+ "static": false,
+ "type": "childInv",
+ "w": 1,
+ "x": 2,
+ "y": 6
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-4a31afa0-98ba-11e9-a519-93a53c759fe0",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Evidence",
+ "static": false,
+ "type": "evidence",
+ "w": 1,
+ "x": 2,
+ "y": 4
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "caseinfoid-7717e580-9bed-11e9-9a3f-8b4b2158e260",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Team Members",
+ "static": false,
+ "type": "team",
+ "w": 1,
+ "x": 2,
+ "y": 8
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-7ce69dd0-a07f-11e9-936c-5395a1acf11e",
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Indicators",
+ "query": "",
+ "queryType": "input",
+ "static": false,
+ "type": "indicators",
+ "w": 2,
+ "x": 0,
+ "y": 6
+ },
+ {
+ "displayType": "CARD",
+ "h": 2,
+ "i": "caseinfoid-ac32f620-a0b0-11e9-b27f-13ae1773d289",
+ "items": [
+ {
+ "endCol": 1,
+ "fieldId": "occurred",
+ "height": 53,
+ "id": "incident-occurred-field",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 1,
+ "fieldId": "dbotmodified",
+ "height": 53,
+ "id": "incident-modified-field",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "dbotduedate",
+ "height": 53,
+ "id": "incident-dueDate-field",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "dbotcreated",
+ "height": 53,
+ "id": "incident-created-field",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 1
+ },
+ {
+ "endCol": 2,
+ "fieldId": "dbotclosed",
+ "height": 53,
+ "id": "incident-closed-field",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 1
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Timeline Information",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 4
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-88e6bf70-a0b1-11e9-b27f-13ae1773d289",
+ "isVisible": true,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "dbotclosed",
+ "height": 22,
+ "id": "incident-dbotClosed-field",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "closereason",
+ "height": 22,
+ "id": "incident-closeReason-field",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "closenotes",
+ "height": 22,
+ "id": "incident-closeNotes-field",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Closing Information",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 8
+ },
+ {
+ "displayType": "CARD",
+ "h": 2,
+ "i": "caseinfoid-e54b1770-a0b1-11e9-b27f-13ae1773d289",
+ "isVisible": true,
+ "items": [
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "details",
+ "height": 106,
+ "id": "incident-details-field",
+ "index": 0,
+ "listId": "caseinfoid-e54b1770-a0b1-11e9-b27f-13ae1773d289",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetupdatedtime",
+ "height": 22,
+ "id": "d4c64310-d668-11ee-ac49-b5c578bfaa5e",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Investigation Data",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 4
+ },
+ {
+ "displayType": "CARD",
+ "h": 2,
+ "hideName": false,
+ "i": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmrisktitle",
+ "height": 53,
+ "id": "4d273f20-bfa9-11ee-8d09-e1ab63b33f26",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "darktraceasmrisksecurityrating",
+ "height": 53,
+ "id": "8fb8c2e0-bfaa-11ee-8d09-e1ab63b33f26",
+ "index": 1,
+ "listId": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "darktraceasmriskstarttime",
+ "height": 53,
+ "id": "8c674250-d671-11ee-b7a5-e93984ad002a",
+ "index": 2,
+ "listId": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmriskendtime",
+ "height": 53,
+ "id": "98b1c620-d671-11ee-b7a5-e93984ad002a",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmriskdescription",
+ "height": 106,
+ "id": "93039c90-bfb4-11ee-8d09-e1ab63b33f26",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 4,
+ "fieldId": "darktraceasmriskid",
+ "height": 53,
+ "id": "e52efe90-bfa7-11ee-8d09-e1ab63b33f26",
+ "index": 0,
+ "listId": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 2
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 4,
+ "fieldId": "darktraceasmrisktype",
+ "height": 53,
+ "id": "e6cd4540-bfa7-11ee-8d09-e1ab63b33f26",
+ "index": 1,
+ "listId": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 2
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 4,
+ "fieldId": "darktraceasmriskevidence",
+ "height": 53,
+ "id": "a0add870-bfa9-11ee-8d09-e1ab63b33f26",
+ "index": 2,
+ "listId": "caseinfoid-c76012f0-bfa7-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 2
+ },
+ {
+ "endCol": 4,
+ "fieldId": "darktraceasmriskproposedaction",
+ "height": 106,
+ "id": "b01b6d30-bfb4-11ee-8d09-e1ab63b33f26",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 2
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Incident - Darktrace ASM Risk Details",
+ "static": false,
+ "w": 2,
+ "x": 0,
+ "y": 2
+ },
+ {
+ "displayType": "CARD",
+ "h": 2,
+ "hideName": false,
+ "i": "caseinfoid-6c947170-bfa9-11ee-8d09-e1ab63b33f26",
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetid",
+ "height": 53,
+ "id": "b62f1060-bfa9-11ee-8d09-e1ab63b33f26",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetsecurityrating",
+ "height": 53,
+ "id": "b8004df0-bfa9-11ee-8d09-e1ab63b33f26",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetbrand",
+ "height": 53,
+ "id": "5cb315d0-bfaa-11ee-8d09-e1ab63b33f26",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetismalicious",
+ "height": 53,
+ "id": "c81c3a20-d668-11ee-ac49-b5c578bfaa5e",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetstate",
+ "height": 53,
+ "id": "cd4b48b0-d668-11ee-ac49-b5c578bfaa5e",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "darktraceasmassetcreationtime",
+ "height": 53,
+ "id": "d158e1b0-d668-11ee-ac49-b5c578bfaa5e",
+ "index": 5,
+ "listId": "caseinfoid-6c947170-bfa9-11ee-8d09-e1ab63b33f26",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "darktraceasmassetupdatedtime",
+ "height": 53,
+ "id": "d9a97b90-d668-11ee-ac49-b5c578bfaa5e",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxH": null,
+ "maxW": 3,
+ "minH": 1,
+ "moved": false,
+ "name": "Darktrace ASM Asset Details",
+ "static": false,
+ "w": 1,
+ "x": 2,
+ "y": 2
+ }
+ ],
+ "type": "custom"
+ },
+ {
+ "id": "warRoom",
+ "name": "War Room",
+ "type": "warRoom"
+ },
+ {
+ "id": "workPlan",
+ "name": "Work Plan",
+ "type": "workPlan"
+ },
+ {
+ "id": "evidenceBoard",
+ "name": "Evidence Board",
+ "type": "evidenceBoard"
+ },
+ {
+ "id": "relatedIncidents",
+ "name": "Related Incidents",
+ "type": "relatedIncidents"
+ },
+ {
+ "id": "canvas",
+ "name": "Canvas",
+ "type": "canvas"
+ }
+ ]
+ },
+ "edit": {},
+ "fromVersion": "6.6.0",
+ "group": "incident",
+ "id": "Darktrace ASM Risk Layout",
+ "indicatorsDetails": {},
+ "indicatorsQuickView": {},
+ "mobile": {},
+ "name": "Darktrace ASM Risk Layout",
+ "quickView": {},
+ "system": false,
+ "version": -1,
+ "marketplaces": ["xsoar"]
+}
diff --git a/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler.yml b/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler.yml
new file mode 100644
index 000000000000..885cee73d59c
--- /dev/null
+++ b/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler.yml
@@ -0,0 +1,315 @@
+id: Darktrace ASM Basic Risk Handler
+version: -1
+name: Darktrace ASM Basic Risk Handler
+starttaskid: "0"
+description: Runs a common ASM Risk workflow for fetched ASM Risk alerts.
+fromversion: 6.6.0
+tasks:
+ "0":
+ id: "0"
+ taskid: abfb3c79-b29b-4580-8bc8-04756ce78cc1
+ type: start
+ task:
+ id: abfb3c79-b29b-4580-8bc8-04756ce78cc1
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 100,
+ "y": -180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: 32ca0da3-2d46-458a-8d07-18c3723984cf
+ type: condition
+ task:
+ id: 32ca0da3-2d46-458a-8d07-18c3723984cf
+ version: -1
+ name: Apply a comment on Darktrace?
+ type: condition
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ "No":
+ - "6"
+ "Yes":
+ - "5"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 100,
+ "y": 80
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to: null
+ subject: null
+ body: null
+ methods: []
+ format: ""
+ bcc: null
+ cc: null
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - "Yes"
+ - "No"
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 55a51166-2813-4aab-89c9-247beea7a3fa
+ type: regular
+ task:
+ id: 55a51166-2813-4aab-89c9-247beea7a3fa
+ version: -1
+ name: Apply a comment
+ description: Post a comment to a Darktrace ASM risk or asset within the Darktrace
+ UI.
+ script: Darktrace ASM|||darktrace-asm-post-comment
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "6"
+ scriptarguments:
+ comment:
+ simple: ${comment.Answers.0}
+ id:
+ simple: ${incident.darktraceasmriskid}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 360,
+ "y": 480
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: d17145e1-2676-4096-8d03-1dec34660c84
+ type: collection
+ task:
+ id: d17145e1-2676-4096-8d03-1dec34660c84
+ version: -1
+ name: Specify a comment message
+ type: collection
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ '#none#':
+ - "4"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 360,
+ "y": 260
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to: null
+ subject: null
+ body: null
+ methods: []
+ format: ""
+ bcc: null
+ cc: null
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: What comment would you like to post to this risk?
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: shortText
+ options: []
+ optionsarg: []
+ fieldassociated: ""
+ placeholder: ""
+ tooltip: ""
+ readonly: false
+ title: comment
+ description: ""
+ sender: ""
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: f341f404-45ac-495d-883e-b80609fb65c3
+ type: condition
+ task:
+ id: f341f404-45ac-495d-883e-b80609fb65c3
+ version: -1
+ name: Accept risk on Darktrace?
+ type: condition
+ iscommand: false
+ brand: ""
+ description: ""
+ nexttasks:
+ "No":
+ - "7"
+ "Yes":
+ - "8"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 100,
+ "y": 700
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to: null
+ subject: null
+ body:
+ simple: 'Warning: manually accepting a risk without taking action to actually
+ mitigate the risk means it will still be present, but it will no longer
+ appear within the Darktrace UI.'
+ methods: []
+ format: ""
+ bcc: null
+ cc: null
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - "Yes"
+ - "No"
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: d7936c6d-c8d5-4a0a-8a8a-5b56a441362e
+ type: regular
+ task:
+ id: d7936c6d-c8d5-4a0a-8a8a-5b56a441362e
+ version: -1
+ name: Close investigation
+ description: commands.local.cmd.close.inv
+ script: Builtin|||closeInvestigation
+ type: regular
+ iscommand: true
+ brand: Builtin
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 100,
+ "y": 1250
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 581a0edf-f0b6-4392-8e13-fed6dd68c17c
+ type: regular
+ task:
+ id: 581a0edf-f0b6-4392-8e13-fed6dd68c17c
+ version: -1
+ name: Mitigate risk
+ description: Mitigate Darktrace ASM Risk within the Darktrace UI.
+ script: Darktrace ASM|||darktrace-asm-mitigate-risk
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "7"
+ scriptarguments:
+ risk_id:
+ simple: ${incident.darktraceasmriskid}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 380,
+ "y": 980
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 1525,
+ "width": 660,
+ "x": 100,
+ "y": -180
+ }
+ }
+ }
+inputs: []
+outputs: []
\ No newline at end of file
diff --git a/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler_README.md b/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler_README.md
new file mode 100644
index 000000000000..648e03163ead
--- /dev/null
+++ b/Packs/DarktraceASM/Playbooks/playbook-DarktraceASM_Basic_ASM_Risk_Handler_README.md
@@ -0,0 +1,35 @@
+Handles each fetched Darktrace ASM Risk by offering the user the ability to comment on Risks and take actions on Risks from XSOAR to your Darktrace deployment.
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+
+### Integrations
+
+* DarktraceASM
+
+### Scripts
+
+
+### Commands
+
+* darktrace-asm-post-comment
+* darktrace-asm-mitigate-risk
+
+## Playbook Inputs
+
+---
+There are no inputs for this playbook.
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+![Darktrace](../doc_files/Darktrace_ASM_Basic_Risk_Handler.png)
diff --git a/Packs/DarktraceASM/README.md b/Packs/DarktraceASM/README.md
new file mode 100644
index 000000000000..00767a0f4f90
--- /dev/null
+++ b/Packs/DarktraceASM/README.md
@@ -0,0 +1,9 @@
+As organizations continue to mature their security stack and adopt defense-in-depth practices, it is increasingly important for security operation teams to have their data available in one place, rather than spread across multiple tools.
+
+This integration pack enriches your Cortex XSOAR playbooks with information from Darktrace’s self-learning AI in Darktrace PREVENT. Specifically, this integration pulls data from Darktrace PREVENT /Attack Surface Management. Darktrace PREVENT /ASM continuously monitors your attack surface for risks, high-impact vulnerabilities and external threats.
+
+Together with XSOAR, this pack can speed up your triage workflow, boost SOC efficiency and ensure all your security coverage can be found in one place.
+
+What does this pack do?
+
+This pack pulls Darktrace PREVENT /ASM risks, including evidence of the risk and proposed actions for remediation. Asset information pertaining to the risk is also included. Customers can decide which types of risks are integrated, ensuring visibility is bespoke to your individual SOC’s needs. Risks from the connector will populate in the XSOAR ‘Incidents’ tab.
\ No newline at end of file
diff --git a/Packs/DarktraceASM/doc_files/Darktrace_ASM_Basic_Risk_Handler.png b/Packs/DarktraceASM/doc_files/Darktrace_ASM_Basic_Risk_Handler.png
new file mode 100644
index 000000000000..d5d10654b2df
Binary files /dev/null and b/Packs/DarktraceASM/doc_files/Darktrace_ASM_Basic_Risk_Handler.png differ
diff --git a/Packs/DarktraceASM/pack_metadata.json b/Packs/DarktraceASM/pack_metadata.json
new file mode 100644
index 000000000000..72cafc78f3fc
--- /dev/null
+++ b/Packs/DarktraceASM/pack_metadata.json
@@ -0,0 +1,32 @@
+{
+ "name": "DarktraceASM",
+ "description": "Populates Darktrace ASM Risks in Cortex XSOAR, allowing for cross-platform automated investigation and response.",
+ "support": "partner",
+ "currentVersion": "1.0.0",
+ "fromVersion": "6.6.0",
+ "author": "Darktrace",
+ "githubUser": "",
+ "url": "https://customerportal.darktrace.com",
+ "email": "integrationsupport@darktrace.com",
+ "created": "",
+ "categories": [
+ "Vulnerability Management"
+ ],
+ "tags": [
+ "Compliance",
+ "Attack",
+ "Alerts",
+ "Malware",
+ "Network"
+ ],
+ "useCases": [],
+ "keywords": [
+ "Darktrace",
+ "Attack Surface Management"
+ ],
+ "certification": "certified",
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
diff --git a/Packs/DatadogCloudSIEM/Author_image.png b/Packs/DatadogCloudSIEM/Author_image.png
deleted file mode 100644
index 8775fc06c1f4..000000000000
Binary files a/Packs/DatadogCloudSIEM/Author_image.png and /dev/null differ
diff --git a/Packs/DataminrPulse/.pack-ignore b/Packs/DataminrPulse/.pack-ignore
index 5238a26b7abf..733d62e25c34 100644
--- a/Packs/DataminrPulse/.pack-ignore
+++ b/Packs/DataminrPulse/.pack-ignore
@@ -1,6 +1,3 @@
-[file:DataminrPulse.yml]
-ignore=IN124
-
[file:incidentfield-Dataminr-Pulse-Alert-Type.json]
ignore=IF115
diff --git a/Packs/DataminrPulse/Integrations/DataminrPulse/DataminrPulse.yml b/Packs/DataminrPulse/Integrations/DataminrPulse/DataminrPulse.yml
index bcdf0efe6671..135d4e312e86 100644
--- a/Packs/DataminrPulse/Integrations/DataminrPulse/DataminrPulse.yml
+++ b/Packs/DataminrPulse/Integrations/DataminrPulse/DataminrPulse.yml
@@ -709,7 +709,7 @@ script:
- contextPath: DataminrPulse.Alerts.userTopHashtags
description: User's top hashtags.
type: Unknown
- dockerimage: demisto/python3:3.10.14.91134
+ dockerimage: demisto/python3:3.10.14.99865
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/DataminrPulse/ReleaseNotes/1_0_11.md b/Packs/DataminrPulse/ReleaseNotes/1_0_11.md
new file mode 100644
index 000000000000..4f54e39fc398
--- /dev/null
+++ b/Packs/DataminrPulse/ReleaseNotes/1_0_11.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Dataminr Pulse
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/DataminrPulse/pack_metadata.json b/Packs/DataminrPulse/pack_metadata.json
index 32b42d68cb41..710489105915 100644
--- a/Packs/DataminrPulse/pack_metadata.json
+++ b/Packs/DataminrPulse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Dataminr Pulse",
"description": "Dataminr Pulse's AI-powered, real-time intelligence integrates into Cortex XSOAR workflows for faster detection and response.",
"support": "partner",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.0.11",
"author": "Dataminr",
"url": "https://www.dataminr.com/dataminr-support#support",
"categories": [
diff --git a/Packs/DeepL/Integrations/DeepL/DeepL.yml b/Packs/DeepL/Integrations/DeepL/DeepL.yml
index 7d79e1957a77..56b2851f1be0 100644
--- a/Packs/DeepL/Integrations/DeepL/DeepL.yml
+++ b/Packs/DeepL/Integrations/DeepL/DeepL.yml
@@ -271,7 +271,7 @@ script:
description: File Size
- contextPath: InfoFile.Type
description: File type e.g. "PDF"
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: ''
subtype: python3
diff --git a/Packs/DeepL/ReleaseNotes/1_2_4.md b/Packs/DeepL/ReleaseNotes/1_2_4.md
new file mode 100644
index 000000000000..c0a72f8db8d8
--- /dev/null
+++ b/Packs/DeepL/ReleaseNotes/1_2_4.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### DeepL
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/DeepL/pack_metadata.json b/Packs/DeepL/pack_metadata.json
index 9298f7265311..9b3622ee5a1f 100644
--- a/Packs/DeepL/pack_metadata.json
+++ b/Packs/DeepL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DeepL",
"description": "Uses DeepL (https://www.deepl.com/) to translate text or files",
"support": "community",
- "currentVersion": "1.2.3",
+ "currentVersion": "1.2.4",
"author": "Harri Ruuttila",
"url": "",
"email": "",
diff --git a/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md b/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md
new file mode 100644
index 000000000000..76646814e98c
--- /dev/null
+++ b/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md
@@ -0,0 +1,3 @@
+## Default
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DefaultPlaybook/pack_metadata.json b/Packs/DefaultPlaybook/pack_metadata.json
index 1cddfa74b745..cce5cb18d078 100644
--- a/Packs/DefaultPlaybook/pack_metadata.json
+++ b/Packs/DefaultPlaybook/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Default",
"description": "Got a unique incident? This Content Pack helps you automate the core steps of enrichment and severity calculation for any kind of incident.",
"support": "xsoar",
- "currentVersion": "2.0.10",
+ "currentVersion": "2.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DelineaDSV/Integrations/README.md b/Packs/DelineaDSV/Integrations/README.md
deleted file mode 100644
index acf1a401df63..000000000000
--- a/Packs/DelineaDSV/Integrations/README.md
+++ /dev/null
@@ -1,67 +0,0 @@
-Manage credentials for applications, databases, CI/CD tools, and services without causing friction in the development process.
-This integration was integrated and tested with version 1.37.0 of DelineaDSV
-
-## Configure DelineaDSV on Cortex XSOAR
-
-1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
-2. Search for DelineaDSV.
-3. Click **Add instance** to create and configure a new integration instance.
-
- | **Parameter** | **Required** |
- | --- | --- |
- | Server URL (e.g. https://example.com) | True |
- | Trust any certificate (not secure) | False |
- | Use system proxy settings | False |
- | Client ID | True |
- | Client Secret | True |
-
-4. Click **Test** to validate the URLs, token, and connection.
-## Commands
-You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
-After you successfully execute a command, a DBot message appears in the War Room with the command details.
-### dsv-secret-get
-***
-Getting a secret fom DSV
-
-
-#### Base Command
-
-`dsv-secret-get`
-#### Input
-
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| name | Secret name for DSV. | Required |
-
-
-#### Context Output
-
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| secret | String | Received JSON object secret |
-
-#### Command Example
-```!dsv-secret-get name="accounts/xsoar"```
-
-#### Context Example
-```json
-{
- "DSV": {
- "Secret": {
- "attributes": {},
- "created": "2022-05-17T10:55:41Z",
- "createdBy": "users:thy-one:testuser@accessecm.com",
- "data": {
- "password": "XSOARPassword",
- "username": "xsoar"
- },
- "description": "",
- "id": "e88f725b-ff1c-4902-961e-fcdf3c7f712f",
- "lastModified": "2022-05-17T10:55:41Z",
- "lastModifiedBy": "users:thy-one:testuser@accessecm.com",
- "path": "accounts:xsoar",
- "version": "1"
- }
- }
-}
-```
diff --git a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
index f23b9411481f..ea71acca0f20 100644
--- a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
+++ b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
@@ -31,6 +31,14 @@ function setLock(guid, info, version) {
return [integrationContext[lockName], null];
}
}
+function attemptToAcquireLock(guid, lockInfo, version) {
+ logDebug("Attempting to acquire lock");
+ try {
+ setLock(guid, lockInfo, version);
+ } catch (err) {
+ logDebug(err.message);
+ }
+}
var lockName = args.name || 'Default';
switch (command) {
@@ -44,12 +52,12 @@ switch (command) {
var guid = args.guid || guid();
var time = 0;
- var lock, version;
+ var lock, version, lock_candidate;
if (isDemistoVersionGE('8.0.0')) { // XSOAR 8 lock implementation with polling.
logDebug('Running on XSOAR version 8');
- // check if the process already holds the lock
+ // check if a lock already exists in the integration context
[lock, version] = getLock();
if (typeof version === "object") {
@@ -57,23 +65,21 @@ switch (command) {
}
logDebug('Task guid: ' + guid + ' | Current lock is: ' + JSON.stringify(lock) + ', version: ' + version);
- if (lock.guid === guid) {
+ // if no lock found, try to acquire a new lock
+ if (!lock.guid) {
+            attemptToAcquireLock(guid, lockInfo, version);
+ lock_candidate = getLock();
+ }
+
+ // stopping condition - the lock is acquired successfully
+ if (lock_candidate && lock_candidate[0].guid === guid) {
var md = '### Demisto Locking Mechanism\n';
md += 'Lock acquired successfully\n';
md += 'GUID: ' + guid;
logDebug(md)
return { ContentsFormat: formats.markdown, Type: entryTypes.note, Contents: md };
}
- else {
- // attempt to acquire the lock
- if (!lock.guid) {
- logDebug("Attempting to acquire lock")
- try {
- setLock(guid, lockInfo, version);
- } catch (err) {
- logDebug(err.message);
- }
- }
+        else { // polling condition - the lock acquisition attempt failed (another lock already exists)
var timeout_err_msg = 'Timeout waiting for lock\n';
timeout_err_msg += 'Lock name: ' + lockName + '\n';
timeout_err_msg += 'Lock info: ' + lock.info + '\n';
@@ -82,7 +88,7 @@ switch (command) {
Type: entryTypes.note,
Contents: 'Lock was not acquired, Polling.',
PollingCommand: 'demisto-lock-get',
- NextRun: '30',
+ NextRun: '20',
PollingArgs: { name: lockName, info: args.info, timeout: args.timeout, guid: guid, timeout_err_msg: timeout_err_msg },
Timeout: String(lockTimeout)
}
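To summarize the revised DemistoLock.js flow above in one place: a polling round now only attempts a write when no lock is recorded, and success is confirmed by re-reading the integration context rather than assuming the write landed. Below is a minimal Python sketch of that acquire-or-poll decision; `get_lock`, `set_lock`, and `log_debug` are hypothetical stand-ins for the JavaScript helpers in the hunk, not the shipped code.

```python
def acquire_or_poll(lock_name, guid, get_lock, set_lock, log_debug):
    """One polling round: try to take the lock once, then report 'acquired' or 'poll'."""
    lock, version = get_lock(lock_name)            # current lock object and its context version
    log_debug('Current lock: {}, version: {}'.format(lock, version))

    candidate = None
    if not lock.get('guid'):                       # no holder recorded -> attempt to acquire
        try:
            set_lock(lock_name, guid, version)     # optimistic write guarded by the version
        except Exception as err:                   # another task won the race; keep polling
            log_debug(str(err))
        candidate, _ = get_lock(lock_name)         # re-read to see who actually holds the lock

    if candidate and candidate.get('guid') == guid:
        return 'acquired'                          # stopping condition reached
    return 'poll'                                  # reschedule demisto-lock-get (NextRun)
```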
diff --git a/Packs/DemistoLocking/ReleaseNotes/1_1_1.md b/Packs/DemistoLocking/ReleaseNotes/1_1_1.md
new file mode 100644
index 000000000000..ff25b5e075c6
--- /dev/null
+++ b/Packs/DemistoLocking/ReleaseNotes/1_1_1.md
@@ -0,0 +1,3 @@
+## Cortex Lock
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DemistoLocking/ReleaseNotes/1_1_2.md b/Packs/DemistoLocking/ReleaseNotes/1_1_2.md
new file mode 100644
index 000000000000..7f099e62558e
--- /dev/null
+++ b/Packs/DemistoLocking/ReleaseNotes/1_1_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Demisto Lock
+
+- Fixed an issue where the ***demisto-lock-get*** command performed unnecessary polling while trying to acquire a lock.
diff --git a/Packs/DemistoLocking/pack_metadata.json b/Packs/DemistoLocking/pack_metadata.json
index 481496b2f9b5..c38d89efa906 100644
--- a/Packs/DemistoLocking/pack_metadata.json
+++ b/Packs/DemistoLocking/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Lock",
"description": "Locking mechanism that prevents concurrent execution of different tasks",
"support": "xsoar",
- "currentVersion": "1.1.0",
+ "currentVersion": "1.1.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DemistoRESTAPI/.pack-ignore b/Packs/DemistoRESTAPI/.pack-ignore
index 8a1e89ab7ac3..77458078a67d 100644
--- a/Packs/DemistoRESTAPI/.pack-ignore
+++ b/Packs/DemistoRESTAPI/.pack-ignore
@@ -1,5 +1,5 @@
[file:DemistoRESTAPI.yml]
-ignore=IN139,DS107,IN124
+ignore=IN139,DS107
[file:DemistoUploadFileV2.yml]
ignore=SC105
diff --git a/Packs/DemistoRESTAPI/Integrations/CoreRESTAPI/CoreRESTAPI.js b/Packs/DemistoRESTAPI/Integrations/CoreRESTAPI/CoreRESTAPI.js
index 481ec85e75e9..beede75f20da 100644
--- a/Packs/DemistoRESTAPI/Integrations/CoreRESTAPI/CoreRESTAPI.js
+++ b/Packs/DemistoRESTAPI/Integrations/CoreRESTAPI/CoreRESTAPI.js
@@ -32,6 +32,9 @@ getTenantAccountName = function () {
account_name = 'acc_' + tenant_name
}
}
+    else {
+ logDebug('getTenantAccountName: The server url ' + server_url + ' does not contain the expected tenant prefix acc_');
+ }
return account_name
}
@@ -120,7 +123,7 @@ sendMultipart = function (uri, entryID, body) {
'file'
);
tries++;
- } while (tries < 3 && res.Status.startsWith('timeout while waiting for answer'));
+ } while (tries < 3 && res.Status.startsWith('timeout'));
logDebug("Ran httpMultipart() " + tries + " time(s)")
if (res.StatusCode < 200 || res.StatusCode >= 300) {
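The retry loop in sendMultipart now re-sends while the returned status merely starts with 'timeout', instead of matching one exact message. A hedged Python sketch of the same pattern; `do_upload` is a hypothetical callable standing in for the integration's httpMultipart call:

```python
MAX_TRIES = 3

def upload_with_retry(do_upload):
    """Re-run the upload while its reported status begins with 'timeout', up to MAX_TRIES."""
    tries = 0
    while True:
        res = do_upload()                          # assumed to return a dict with a 'Status' key
        tries += 1
        # Matching the 'timeout' prefix (rather than one exact message) also covers
        # other timeout variants the server may report.
        if tries >= MAX_TRIES or not res.get('Status', '').startswith('timeout'):
            return res
```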
diff --git a/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md
new file mode 100644
index 000000000000..56b8f0d70667
--- /dev/null
+++ b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md
@@ -0,0 +1,3 @@
+## Cortex REST API
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DemistoRESTAPI/ReleaseNotes/1_3_57.md b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_57.md
new file mode 100644
index 000000000000..ac36f8e1148f
--- /dev/null
+++ b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_57.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Core REST API
+
+- Added logs for debugging purposes.
diff --git a/Packs/DemistoRESTAPI/ReleaseNotes/1_3_58.md b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_58.md
new file mode 100644
index 000000000000..2238d2aced72
--- /dev/null
+++ b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_58.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Core REST API
+
+- Fixed an issue where the `core-api-install-packs` command reached a timeout when installing large packs.
diff --git a/Packs/DemistoRESTAPI/pack_metadata.json b/Packs/DemistoRESTAPI/pack_metadata.json
index c7ea29f9cf43..40132a42afc8 100644
--- a/Packs/DemistoRESTAPI/pack_metadata.json
+++ b/Packs/DemistoRESTAPI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex REST API",
"description": "Use Demisto REST APIs",
"support": "xsoar",
- "currentVersion": "1.3.55",
+ "currentVersion": "1.3.58",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic.png
new file mode 100644
index 000000000000..b208d74d13ec
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic_v2.png b/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic_v2.png
new file mode 100644
index 000000000000..c46e40d1f602
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Account_Enrichment_Generic_v2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Add_Indicator_to_Miner_Palo_Alto_MineMeld.png b/Packs/DeprecatedContent/doc_files/Add_Indicator_to_Miner_Palo_Alto_MineMeld.png
new file mode 100644
index 000000000000..489c092dd495
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Add_Indicator_to_Miner_Palo_Alto_MineMeld.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Block_File_Generic.png b/Packs/DeprecatedContent/doc_files/Block_File_Generic.png
new file mode 100644
index 000000000000..9ecd6c06a2d0
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Block_File_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Block_IP_Generic.png b/Packs/DeprecatedContent/doc_files/Block_IP_Generic.png
new file mode 100644
index 000000000000..e83a7c369c50
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Block_IP_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Block_Indicators_Generic.png b/Packs/DeprecatedContent/doc_files/Block_Indicators_Generic.png
new file mode 100644
index 000000000000..adde1e6a0bb3
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Block_Indicators_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/C_and_C_Communication_Hunting.png b/Packs/DeprecatedContent/doc_files/C_and_C_Communication_Hunting.png
new file mode 100644
index 000000000000..fa0ab8d5aee2
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/C_and_C_Communication_Hunting.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Calculate_Severity_Critical_assets.png b/Packs/DeprecatedContent/doc_files/Calculate_Severity_Critical_assets.png
new file mode 100644
index 000000000000..9a06f6ddaf0a
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Calculate_Severity_Critical_assets.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Calculate_Severity_Generic.png b/Packs/DeprecatedContent/doc_files/Calculate_Severity_Generic.png
new file mode 100644
index 000000000000..daab25f23d1d
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Calculate_Severity_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Carbon_Black_Rapid_IOC_Hunting.png b/Packs/DeprecatedContent/doc_files/Carbon_Black_Rapid_IOC_Hunting.png
new file mode 100644
index 000000000000..9fa8f8472604
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Carbon_Black_Rapid_IOC_Hunting.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Checkpoint_Firewall_Configuration_Backup.png b/Packs/DeprecatedContent/doc_files/Checkpoint_Firewall_Configuration_Backup.png
new file mode 100644
index 000000000000..0e0f5b371f5f
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Checkpoint_Firewall_Configuration_Backup.png differ
diff --git a/Packs/DeprecatedContent/doc_files/CrowdStrike_Rapid_IOC_Hunting.png b/Packs/DeprecatedContent/doc_files/CrowdStrike_Rapid_IOC_Hunting.png
new file mode 100644
index 000000000000..a439db48a04b
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/CrowdStrike_Rapid_IOC_Hunting.png differ
diff --git a/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier.png b/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier.png
new file mode 100644
index 000000000000..d6289b9c1a51
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier.png differ
diff --git a/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier_Job.png b/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier_Job.png
new file mode 100644
index 000000000000..41aa37f73f79
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/DBot_Create_Phishing_Classifier_Job.png differ
diff --git a/Packs/DeprecatedContent/doc_files/DeDup_incidents.png b/Packs/DeprecatedContent/doc_files/DeDup_incidents.png
new file mode 100644
index 000000000000..c4d7661db86f
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/DeDup_incidents.png differ
diff --git a/Packs/DeprecatedContent/doc_files/DeDup_incidents_-_ML.png b/Packs/DeprecatedContent/doc_files/DeDup_incidents_-_ML.png
new file mode 100644
index 000000000000..90941e8292d9
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/DeDup_incidents_-_ML.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Dedup_Generic.png b/Packs/DeprecatedContent/doc_files/Dedup_Generic.png
new file mode 100644
index 000000000000..bae6572c207c
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Dedup_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Demisto_Self-Defense_Account_policy_monitoring_playbook.png b/Packs/DeprecatedContent/doc_files/Demisto_Self-Defense_Account_policy_monitoring_playbook.png
new file mode 100644
index 000000000000..57d944cd6db4
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Demisto_Self-Defense_Account_policy_monitoring_playbook.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Domain_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/Domain_Enrichment_Generic.png
new file mode 100644
index 000000000000..0e4ccc563467
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Domain_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic.png
new file mode 100644
index 000000000000..f3cdd986264d
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic_v2.png b/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic_v2.png
new file mode 100644
index 000000000000..fec14b025a33
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Email_Address_Enrichment_Generic_v2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic.png
new file mode 100644
index 000000000000..656f1ca04f87
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic_v2.png b/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic_v2.png
new file mode 100644
index 000000000000..a2805410a1b8
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Endpoint_Enrichment_Generic_v2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Endpoint_data_collection.png b/Packs/DeprecatedContent/doc_files/Endpoint_data_collection.png
new file mode 100644
index 000000000000..0d43190c8796
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Endpoint_data_collection.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Enrich_DXL_with_ATD_verdict.png b/Packs/DeprecatedContent/doc_files/Enrich_DXL_with_ATD_verdict.png
new file mode 100644
index 000000000000..c15d34789726
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Enrich_DXL_with_ATD_verdict.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Enrich_McAfee_DXL_using_3rd_party_sandbox.png b/Packs/DeprecatedContent/doc_files/Enrich_McAfee_DXL_using_3rd_party_sandbox.png
new file mode 100644
index 000000000000..78f164bbd743
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Enrich_McAfee_DXL_using_3rd_party_sandbox.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Entity_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/Entity_Enrichment_Generic.png
new file mode 100644
index 000000000000..cff52c621995
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Entity_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Extract_Indicators_From_File_Generic.png b/Packs/DeprecatedContent/doc_files/Extract_Indicators_From_File_Generic.png
new file mode 100644
index 000000000000..752d45db7f57
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Extract_Indicators_From_File_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Extract_Indicators_Generic.png b/Packs/DeprecatedContent/doc_files/Extract_Indicators_Generic.png
new file mode 100644
index 000000000000..e288dbb7b3e0
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Extract_Indicators_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Failed_Login_Playbook_Slack_v2.png b/Packs/DeprecatedContent/doc_files/Failed_Login_Playbook_Slack_v2.png
new file mode 100644
index 000000000000..3a397b7f8a70
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Failed_Login_Playbook_Slack_v2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/File_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/File_Enrichment_Generic.png
new file mode 100644
index 000000000000..050d4f488a94
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/File_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Get_File_Sample_By_Hash_Generic.png b/Packs/DeprecatedContent/doc_files/Get_File_Sample_By_Hash_Generic.png
new file mode 100644
index 000000000000..9215c591e597
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Get_File_Sample_By_Hash_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Hunt_Extracted_Hashes.png b/Packs/DeprecatedContent/doc_files/Hunt_Extracted_Hashes.png
new file mode 100644
index 000000000000..8f498984ebae
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Hunt_Extracted_Hashes.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Hunt_for_bad_IOCs.png b/Packs/DeprecatedContent/doc_files/Hunt_for_bad_IOCs.png
new file mode 100644
index 000000000000..c7c730026764
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Hunt_for_bad_IOCs.png differ
diff --git a/Packs/DeprecatedContent/doc_files/IP_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/IP_Enrichment_Generic.png
new file mode 100644
index 000000000000..fc4ca130339c
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/IP_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic.png b/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic.png
new file mode 100644
index 000000000000..fb61a33fc640
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic_Setup.png b/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic_Setup.png
new file mode 100644
index 000000000000..6a1263197f48
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Malware_Investigation_Generic_Setup.png differ
diff --git a/Packs/DeprecatedContent/doc_files/McAfee_ePO_Endpoint_Compliance_Playbook.png b/Packs/DeprecatedContent/doc_files/McAfee_ePO_Endpoint_Compliance_Playbook.png
new file mode 100644
index 000000000000..aff0f407633b
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/McAfee_ePO_Endpoint_Compliance_Playbook.png differ
diff --git a/Packs/DeprecatedContent/doc_files/McAfee_ePO_Repository_Compliance_Playbook.png b/Packs/DeprecatedContent/doc_files/McAfee_ePO_Repository_Compliance_Playbook.png
new file mode 100644
index 000000000000..8432b5045add
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/McAfee_ePO_Repository_Compliance_Playbook.png differ
diff --git a/Packs/DeprecatedContent/doc_files/PAN-OS_Block_IP_and_URL_External_Dynamic_List.png b/Packs/DeprecatedContent/doc_files/PAN-OS_Block_IP_and_URL_External_Dynamic_List.png
new file mode 100644
index 000000000000..2038b2feabfb
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/PAN-OS_Block_IP_and_URL_External_Dynamic_List.png differ
diff --git a/Packs/DeprecatedContent/doc_files/PAN-OS_EDL_Setup.png b/Packs/DeprecatedContent/doc_files/PAN-OS_EDL_Setup.png
new file mode 100644
index 000000000000..06df4873916b
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/PAN-OS_EDL_Setup.png differ
diff --git a/Packs/DeprecatedContent/doc_files/PANW_Hunting_and_threat_detection_by_indicator_type_V2.png b/Packs/DeprecatedContent/doc_files/PANW_Hunting_and_threat_detection_by_indicator_type_V2.png
new file mode 100644
index 000000000000..1759348bc79b
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/PANW_Hunting_and_threat_detection_by_indicator_type_V2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Palo_Alto_Networks_-_Endpoint_Malware_Investigation_v2.png b/Packs/DeprecatedContent/doc_files/Palo_Alto_Networks_-_Endpoint_Malware_Investigation_v2.png
new file mode 100644
index 000000000000..81b0f5367093
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Palo_Alto_Networks_-_Endpoint_Malware_Investigation_v2.png differ
diff --git a/Packs/DeprecatedContent/doc_files/PanoramaQueryTrafficLogs.png b/Packs/DeprecatedContent/doc_files/PanoramaQueryTrafficLogs.png
new file mode 100644
index 000000000000..c24c4dbafac4
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/PanoramaQueryTrafficLogs.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Phishing_Investigation_Generic.png b/Packs/DeprecatedContent/doc_files/Phishing_Investigation_Generic.png
new file mode 100644
index 000000000000..ac18d0480c0a
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Phishing_Investigation_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Process_Email_Add_custom_fields.png b/Packs/DeprecatedContent/doc_files/Process_Email_Add_custom_fields.png
new file mode 100644
index 000000000000..f4c69bbfe622
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Process_Email_Add_custom_fields.png differ
diff --git a/Packs/DeprecatedContent/doc_files/QRadar_Get_offense_correlations.png b/Packs/DeprecatedContent/doc_files/QRadar_Get_offense_correlations.png
new file mode 100644
index 000000000000..45d4cc773fe6
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/QRadar_Get_offense_correlations.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Rapid_IOC_Hunting_Playbook.png b/Packs/DeprecatedContent/doc_files/Rapid_IOC_Hunting_Playbook.png
new file mode 100644
index 000000000000..6121424e1859
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Rapid_IOC_Hunting_Playbook.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Response.png b/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Response.png
new file mode 100644
index 000000000000..e0a0f37558f3
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Carbon_Black_Response.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Generic.png b/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Generic.png
new file mode 100644
index 000000000000..e68da7cf77df
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Search_Endpoints_By_Hash_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/URL_Enrichment_Generic.png b/Packs/DeprecatedContent/doc_files/URL_Enrichment_Generic.png
new file mode 100644
index 000000000000..05d61ed3c352
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/URL_Enrichment_Generic.png differ
diff --git a/Packs/DeprecatedContent/doc_files/VulnerabilityHandling_Qualys_AddCustomFieldsToDefaultLayout.png b/Packs/DeprecatedContent/doc_files/VulnerabilityHandling_Qualys_AddCustomFieldsToDefaultLayout.png
new file mode 100644
index 000000000000..daaa2ac747af
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/VulnerabilityHandling_Qualys_AddCustomFieldsToDefaultLayout.png differ
diff --git a/Packs/DeprecatedContent/doc_files/Vulnerability_Handling_Qualys.png b/Packs/DeprecatedContent/doc_files/Vulnerability_Handling_Qualys.png
new file mode 100644
index 000000000000..4f50f205e59d
Binary files /dev/null and b/Packs/DeprecatedContent/doc_files/Vulnerability_Handling_Qualys.png differ
diff --git a/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.py b/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.py
index 2fcbfb31d0a8..c763df4eeaf8 100644
--- a/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.py
+++ b/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.py
@@ -16,7 +16,7 @@ def __init__(self, server_url, verify, proxy, headers, client_cert, client_key,
self._verify = verify
self._base_url = server_url
self._proxy = proxy
- self._headers = headers if headers else dict()
+ self._headers = headers if headers else {}
self._client_cert = client_cert
self._client_key = client_key
diff --git a/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.yml b/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.yml
index 0c8af8bd10ba..cc8eb032138c 100644
--- a/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.yml
+++ b/Packs/DevSecOps/Integrations/DockerEngine/DockerEngine.yml
@@ -107,7 +107,7 @@ script:
- description: ID or name of the container
name: id
required: true
- description: Get changes on a container’s filesystem
+ description: Get changes on a container's filesystem
name: docker-container-changes
outputs:
- contextPath: Docker.ContainerChangeResponseItem.Path
@@ -981,7 +981,7 @@ script:
description: 'The network endpoint that the Engine advertises for the purpose of node discovery. ClusterAdvertise is a `host:port` combination on which the daemon is reachable by other hosts. p / /p Deprecated : This field is only propagated when using standalone Swarm mode, and overlay networking using an external k/v store. Overlay networks with Swarm mode enabled use the built-in raft store, and this field will be empty. '
type: String
- contextPath: Docker.SystemInfo.Runtimes.path
- description: "Name and, optional, path, of the OCI executable binary. If the path is omitted, the daemon searches the host's `$PATH` for the binary and uses the first result. "
+ description: "Name and, optional, path of the OCI executable binary. If the path is omitted, the daemon searches the host's `$PATH` for the binary and uses the first result. "
type: String
- contextPath: Docker.SystemInfo.DefaultRuntime
description: 'Name of the default OCI runtime that is used when starting containers. The default can be overridden per-container at create time. '
@@ -1234,7 +1234,7 @@ script:
- contextPath: Docker.ImageTag.Status Code
description: Image Tag Result
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/DevSecOps/Integrations/LGTM/LGTM.yml b/Packs/DevSecOps/Integrations/LGTM/LGTM.yml
index 7d530b2c15ac..d7f6cafa3006 100644
--- a/Packs/DevSecOps/Integrations/LGTM/LGTM.yml
+++ b/Packs/DevSecOps/Integrations/LGTM/LGTM.yml
@@ -29,10 +29,10 @@ script:
- description: The URL of the repository to analyze. LGTM tests this against the [repository providers](https://lgtm.com/admin/help/adding-repository-providers) defined for the system. If it doesn't match any of them, the request fails.
name: repository
required: true
- - description: 'Optional, a [language code](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported) to specify which language to analyze. To request the analysis of more than one language, specify a query meter for each language. By default, LGTM tries to analyze all supported languages.'
+  - description: Optional, a [language code](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported) to specify which language to analyze. To request the analysis of more than one language, specify a query parameter for each language. By default, LGTM tries to analyze all supported languages.
isArray: true
name: language
- - description: 'The analysis mode of the new project. When set to `full` all commits of the project are analyzed; when set to `sparse` the latest commit of the project is analyzed periodically; when set to `upload`, no automatic analysis is performed, instead externally-generated databases should be uploaded. For new projects the default value is `full`. The `mode` meter cannot be used to change the analysis mode of existing projects. Therefore, for existing projects, it should either be left blank or set to match the analysis mode of the project.'
+  - description: The analysis mode of the new project. When set to `full` all commits of the project are analyzed; when set to `sparse` the latest commit of the project is analyzed periodically; when set to `upload`, no automatic analysis is performed; instead, externally-generated databases should be uploaded. For new projects the default value is `full`. The `mode` parameter cannot be used to change the analysis mode of existing projects. Therefore, for existing projects, it should either be left blank or set to match the analysis mode of the project.
name: mode
- description: Required when `mode=upload`, specify the identifier of the commit used to generate the database.
name: commit
@@ -71,7 +71,7 @@ script:
description: The analysis identifier.
type: String
- contextPath: LGTM.analysis_summary.commit-id
- description: 'The commit identifier. The commit identifier is included only if the same commit was successfully analyzed for all languages. A detailed eakdown of which commit was analyzed for each language is provided in the `languages` property.'
+      description: The commit identifier. The commit identifier is included only if the same commit was successfully analyzed for all languages. A detailed breakdown of which commit was analyzed for each language is provided in the `languages` property.
type: String
- contextPath: LGTM.analysis_summary.languages.language
description: The short name for the language.
@@ -114,7 +114,7 @@ script:
description: The analysis identifier.
type: String
- contextPath: LGTM.analysis_summary.commit-id
- description: 'The commit identifier. The commit identifier is included only if the same commit was successfully analyzed for all languages. A detailed eakdown of which commit was analyzed for each language is provided in the `languages` property.'
+      description: The commit identifier. The commit identifier is included only if the same commit was successfully analyzed for all languages. A detailed breakdown of which commit was analyzed for each language is provided in the `languages` property.
type: String
- contextPath: LGTM.analysis_summary.languages.language
description: The short name for the language.
@@ -265,7 +265,7 @@ script:
- description: The identifier of the commit to analyze.
name: commit
required: true
- - description: 'The language codes of the languages to analyze. For a list of available languages, see [Supported languages](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported). To specify more than one language, this meter can be repeated. If no language is specified, all the project''s languages will be analyzed.'
+  - description: The language codes of the languages to analyze. For a list of available languages, see [Supported languages](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported). To specify more than one language, this parameter can be repeated. If no language is specified, all the project's languages will be analyzed.
isArray: true
name: language
description: Run analysis of a specific commit
@@ -293,9 +293,9 @@ script:
- description: Your reference number for the code review.
name: external-id
required: true
- - description: 'The callback URL for LGTM to post to on completion of the review. When the code review is complete, the API sends an HTTP POST request to the callback URL with the result of the code review in the request body. The code review results in the request body are identical to the results accessed through the [`/codereviews/{review-id}`](https://lgtm.com/help/lgtm/api/api-v1#opIdgetCodeReview) end-point. If you specify a `callback-secret`, the request also includes an `x-lgtm-signature` header with a digital signature of the request''s contents.'
+ - description: The callback URL for LGTM to post to on completion of the review. When the code review is complete, the API sends an HTTP POST request to the callback URL with the result of the code review in the request body. The code review results in the request body are identical to the results accessed through the [`/codereviews/{review-id}`](https://lgtm.com/help/lgtm/api/api-v1#opIdgetCodeReview) end-point. If you specify a `callback-secret`, the request also includes an `x-lgtm-signature` header with a digital signature of the request's contents.
name: callback-url
- - description: 'The `callback-secret` is used to compute a signature which is included in the `x-lgtm-signature` header of the callback response. The receiver of the callback can check the validity of the response by computing the signature using HMAC-SHA1 and verifying that it matches the `x-lgtm-signature` header value. The HMAC algorithm requires byte sequences as inputs for both the secret and the message. The callback secret string must be converted to bytes using UTF-8 encoding. The response body should ideally be read as a plain byte sequence. Conversion to, for example a JSON object, and back to a byte sequence might change the formatting, and would invalidate the signature.'
+ - description: The `callback-secret` is used to compute a signature which is included in the `x-lgtm-signature` header of the callback response. The receiver of the callback can check the validity of the response by computing the signature using HMAC-SHA1 and verifying that it matches the `x-lgtm-signature` header value. The HMAC algorithm requires byte sequences as inputs for both the secret and the message. The callback secret string must be converted to bytes using UTF-8 encoding. The response body should ideally be read as a plain byte sequence. Conversion to, for example a JSON object, and back to a byte sequence might change the formatting, and would invalidate the signature.
name: callback-secret
- description: Entry ID of the Patch File , you can use git diff --binary to generate patch file
name: patch-entry-id
@@ -313,7 +313,7 @@ script:
description: LGTM Code Review request task id
type: Unknown
- arguments:
- - description: 'The [language](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported) you want to analyze.'
+ - description: The [language](https://lgtm.com/help/lgtm/analysis-faqs#which-languages-are-supported) you want to analyze.
name: language
required: true
- description: The identifier of the project to analyze. Either `project-id` or `projects-list` must be specified.
@@ -384,7 +384,7 @@ script:
description: Describes whether the query was sucessfully executed against the project.
type: String
- contextPath: LGTM.queryjob-results-overview.data.total
- description: 'Number of results returned by the query. This is oken down further into `internal` and `external` results. Only applies if `status` is `success`.'
+      description: Number of results returned by the query. This is broken down further into `internal` and `external` results. Only applies if `status` is `success`.
type: Number
- contextPath: LGTM.queryjob-results-overview.data.internal
description: Number of results that refer to elements within the source tree. Only applies if `status` is `success`.
@@ -398,7 +398,7 @@ script:
- contextPath: LGTM.queryjob-results-overview.next
description: LGTM Query Job Results Overview Next
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/DevSecOps/Integrations/MinIO/MinIO.yml b/Packs/DevSecOps/Integrations/MinIO/MinIO.yml
index 8b58c5f54733..ce835172c3eb 100644
--- a/Packs/DevSecOps/Integrations/MinIO/MinIO.yml
+++ b/Packs/DevSecOps/Integrations/MinIO/MinIO.yml
@@ -216,7 +216,7 @@ script:
- contextPath: MinIO.Objects.tags
description: MinIO Object Tags.
description: Set tags configuration to an object.
- dockerimage: demisto/py3-tools:1.0.0.77497
+ dockerimage: demisto/py3-tools:1.0.0.102774
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/DevSecOps/ReleaseNotes/1_1_10.md b/Packs/DevSecOps/ReleaseNotes/1_1_10.md
new file mode 100644
index 000000000000..178625bd778c
--- /dev/null
+++ b/Packs/DevSecOps/ReleaseNotes/1_1_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### MinIO
+
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.102774*.
diff --git a/Packs/DevSecOps/ReleaseNotes/1_1_9.md b/Packs/DevSecOps/ReleaseNotes/1_1_9.md
new file mode 100644
index 000000000000..33a9cf163984
--- /dev/null
+++ b/Packs/DevSecOps/ReleaseNotes/1_1_9.md
@@ -0,0 +1,9 @@
+
+#### Integrations
+
+##### Docker Engine API
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### LGTM
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/DevSecOps/doc_files/DevOps_Services_image_link.png b/Packs/DevSecOps/doc_files/DevOps_Services_image_link.png
new file mode 100644
index 000000000000..1e905747596a
Binary files /dev/null and b/Packs/DevSecOps/doc_files/DevOps_Services_image_link.png differ
diff --git a/Packs/DevSecOps/pack_metadata.json b/Packs/DevSecOps/pack_metadata.json
index 3b1718cb947a..6c852456037c 100644
--- a/Packs/DevSecOps/pack_metadata.json
+++ b/Packs/DevSecOps/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DevSecOps",
"description": "DevSecOps CI/CD Orchestration Integration Pack.",
"support": "community",
- "currentVersion": "1.1.8",
+ "currentVersion": "1.1.10",
"author": "Ayman Mahmoud",
"githubUser": [
"ayman-m"
@@ -20,4 +20,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/DeveloperTools/Integrations/CreateIncidents/README.md b/Packs/DeveloperTools/Integrations/CreateIncidents/README.md
index d8923018c8bc..76a9cb034a6c 100644
--- a/Packs/DeveloperTools/Integrations/CreateIncidents/README.md
+++ b/Packs/DeveloperTools/Integrations/CreateIncidents/README.md
@@ -2,7 +2,7 @@ CreateIncidents fetches incident created manually.
## Configure Create Test Incidents on Cortex XSOAR
-1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+1. Navigate to **Settings** > **Integrations** > **Instances**.
2. Search for Create Test Incidents.
3. Click **Add instance** to create and configure a new integration instance.
@@ -65,12 +65,12 @@ Creates incidents from json file, and stores it in the instance context.
There is no context output for this command.
#### Command example
-```!create-test-incident-from-raw-json incident_entry_id=12@12"```
+```!create-test-incident-from-raw-json incident_entry_id="12@12"```
#### Human Readable Output
>Loaded 1 incidents from file.
-```!create-test-incident-from-raw-json incident_raw_json={'name': 'test_incident'}"```
+```!create-test-incident-from-raw-json incident_raw_json="{'name': 'test_incident'}"```
#### Human Readable Output
>Loaded 1 incidents from file.
diff --git a/Packs/DeveloperTools/Integrations/CreateMockFeed/CreateMockFeed.yml b/Packs/DeveloperTools/Integrations/CreateMockFeed/CreateMockFeed.yml
index de4e71a74b1c..290474df0e00 100644
--- a/Packs/DeveloperTools/Integrations/CreateMockFeed/CreateMockFeed.yml
+++ b/Packs/DeveloperTools/Integrations/CreateMockFeed/CreateMockFeed.yml
@@ -18,7 +18,7 @@ configuration:
- URL
- display: Amount of indicators
name: amount_indicators
- defaultvalue: "100000"
+ defaultvalue: '100000'
type: 0
required: false
- display: Incremental Feed
@@ -29,22 +29,22 @@ configuration:
required: false
- display: Indicators custom field length
name: indicators_custom_field_length
- defaultvalue: "200"
+ defaultvalue: '200'
type: 0
required: false
- display: Amount of relationships per indicator
name: amount_relationships
- defaultvalue: "1"
+ defaultvalue: '1'
type: 0
required: false
- display: Relationship description length
name: relationship_description_length
- defaultvalue: "1"
+ defaultvalue: '1'
type: 0
required: false
- display: Size of batches
name: batch_size
- defaultvalue: "30000"
+ defaultvalue: '30000'
type: 0
required: false
- display: Feed Fetch Interval
@@ -66,7 +66,7 @@ configuration:
type: 0
additionalinfo: Make sure to have the key "indicator" in the appropriate CSV column where the value of the indicator is expected to be
required: false
-- display: ""
+- display: ''
name: feedExpirationInterval
type: 1
required: false
@@ -92,7 +92,7 @@ configuration:
- E - Unreliable
- F - Reliability cannot be judged
additionalinfo: Reliability of the source providing the intelligence data
-- display: ""
+- display: ''
name: feedExpirationPolicy
type: 17
options:
@@ -124,7 +124,7 @@ configuration:
script:
script: ''
type: python
- dockerimage: demisto/feed-performance-test:1.0.46565
+ dockerimage: demisto/feed-performance-test:1.0.99137
feed: true
subtype: python3
tests:
diff --git a/Packs/DeveloperTools/ReleaseNotes/1_3_16.md b/Packs/DeveloperTools/ReleaseNotes/1_3_16.md
new file mode 100644
index 000000000000..f8e4e4b2819f
--- /dev/null
+++ b/Packs/DeveloperTools/ReleaseNotes/1_3_16.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### Hey
+
+- Fixed an issue where the script failed when passing a value containing spaces to the ***headers*** argument.
+- Updated the Docker image to: *demisto/rakyll-hey:1.0.0.98212*.
diff --git a/Packs/DeveloperTools/ReleaseNotes/1_3_17.md b/Packs/DeveloperTools/ReleaseNotes/1_3_17.md
new file mode 100644
index 000000000000..f12f5b79705f
--- /dev/null
+++ b/Packs/DeveloperTools/ReleaseNotes/1_3_17.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Create-Mock-Feed-Relationships
+
+- Updated the Docker image to: *demisto/feed-performance-test:1.0.99137*.
diff --git a/Packs/DeveloperTools/ReleaseNotes/1_3_18.md b/Packs/DeveloperTools/ReleaseNotes/1_3_18.md
new file mode 100644
index 000000000000..cd34bd2ea380
--- /dev/null
+++ b/Packs/DeveloperTools/ReleaseNotes/1_3_18.md
@@ -0,0 +1,18 @@
+
+#### Scripts
+
+##### VerifyObjectFieldsList
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### FetchFromInstance
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### VerifyEnoughIncidents
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### GetInstanceName
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
+##### VerifyEnoughIndicators
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.py b/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.py
index b718d2f02c87..239961da0c2a 100644
--- a/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.py
+++ b/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.py
@@ -30,22 +30,22 @@ def main():
instance_name = get_instance_name(args)
instance_name = instance_name.replace(" ", "_")
- command = '!{0}-fetch'.format(instance_name)
+ command = f'!{instance_name}-fetch'
response = demisto.executeCommand(command, {})
try:
if not response and expect_data:
- raise Exception("Error occurred while fetching incidents from {}".format(instance_name))
+ raise Exception(f"Error occurred while fetching incidents from {instance_name}")
for inc in response:
contents = inc.get('Contents', '')
error_msg_in_incident = demisto.args().get('error_msg_in_incident')
if error_msg_in_incident and error_msg_in_incident in str(contents):
- return_error("Error message '{0}' encountered while fetching incidents from {1}: {2}".format(
+ return_error("Error message '{}' encountered while fetching incidents from {}: {}".format(
error_msg_in_incident, instance_name, str(contents)))
if re.match("invalid character \'[a-zA-Z]\' looking for beginning of value", str(contents), re.IGNORECASE):
- return_error("Error occurred while fetching incidents from {0}: {1}".format(instance_name, str(contents)))
+ return_error(f"Error occurred while fetching incidents from {instance_name}: {str(contents)}")
if add_to_context:
try:
for entry in contents:
diff --git a/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.yml b/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.yml
index bdd01a1c0995..78ef2f3bf328 100644
--- a/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.yml
+++ b/Packs/DeveloperTools/Scripts/FetchFromInstance/FetchFromInstance.yml
@@ -14,19 +14,19 @@ args:
description: The name of the instance you want to fetch from
- name: expect_data
description: Whether to expect data to return from the fetch
- defaultValue: "true"
+ defaultValue: 'true'
- name: brand_name
description: The name of the brand you want to fetch from
- name: add_to_context
auto: PREDEFINED
predefined:
- - "true"
- - "false"
+ - 'true'
+ - 'false'
description: Add the raw JSON of incidents to context
- defaultValue: "false"
+ defaultValue: 'false'
- name: error_msg_in_incident
description: If this error message appears in the contents of the fetched incidents, will return error
- defaultValue: "Error"
+ defaultValue: Error
outputs:
- contextPath: FetchedIncidents
description: All incidents entries (raw JSON)
@@ -34,4 +34,4 @@ scripttarget: 0
runonce: false
tests:
- No test - cannot create a dummy integration inorder to test that
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
diff --git a/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.py b/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.py
index e8d0e8bae919..e07cf0d9bd4a 100644
--- a/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.py
+++ b/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.py
@@ -1,10 +1,10 @@
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
-from typing import Dict, Any, List
+from typing import Any
-def instance_check(instances, integration_name: str) -> List:
+def instance_check(instances, integration_name: str) -> list:
instance_names = []
for instance_name, details in instances.items():
if details.get('brand') == integration_name:
@@ -13,7 +13,7 @@ def instance_check(instances, integration_name: str) -> List:
return instance_names
-def get_instance_name_command(args: Dict[str, Any]) -> CommandResults:
+def get_instance_name_command(args: dict[str, Any]) -> CommandResults:
integration_name = args.get('integration_name', '')
instances = demisto.getModules()
diff --git a/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.yml b/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.yml
index e8e91919bfd5..4e9ee03a605b 100644
--- a/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.yml
+++ b/Packs/DeveloperTools/Scripts/GetInstanceName/GetInstanceName.yml
@@ -26,7 +26,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
fromversion: 6.0.0
tests:
- No test
diff --git a/Packs/DeveloperTools/Scripts/Hey/Hey.py b/Packs/DeveloperTools/Scripts/Hey/Hey.py
index 469faf99fc32..f7e89720a577 100644
--- a/Packs/DeveloperTools/Scripts/Hey/Hey.py
+++ b/Packs/DeveloperTools/Scripts/Hey/Hey.py
@@ -2,13 +2,6 @@
from CommonServerPython import * # noqa: F401
import re
import subprocess
-from typing import Tuple
-
-import urllib3
-
-
-# disable insecure warnings
-urllib3.disable_warnings()
# ---------- CONSTANTS ---------- #
@@ -28,7 +21,7 @@ def try_re(pattern: str, string: str, i: int = 0) -> Optional[Any]:
def name_value_arg_to_dict(arg: Optional[str]):
- parsed_input: Dict[str, str] = {}
+ parsed_input: dict[str, str] = {}
if arg:
args = argToList(arg)
for item in args:
@@ -64,19 +57,23 @@ def construct_hey_query(url: str,
d=body,
x=proxy
)
- hey_query = "hey "
+
+ query_args = ["hey"]
+
if disable_compression == 'true':
- hey_query += '--disable-compression '
+ query_args.append('--disable-compression')
if enable_http2 == 'true':
- hey_query += ' -h2 '
+ query_args.append('-h2')
if disable_redirects == 'true':
- hey_query += ' -disable-redirects '
+ query_args.append('-disable-redirects')
if headers:
for header_key, header_val in name_value_arg_to_dict(headers).items():
- hey_query += f' -H {header_key}:{header_val} '
- hey_query += " ".join(f"-{k} {v}" for k, v in hey_map.items()) + f' {url}'
- hey_query = hey_query.replace(" ", " ") # remove double spaces
- return hey_map, hey_query
+ query_args.extend(('-H', f'{header_key}:{header_val}'))
+ for k, v in hey_map.items():
+ query_args.extend([f"-{k}", v])
+ query_args.append(url)
+
+ return hey_map, query_args
# ---------- CLASSES ---------- #
@@ -99,7 +96,7 @@ def __init__(self,
self._result = result or ''
self._ext_outputs = name_value_arg_to_dict(results_map)
- def _get_summary(self, result: List[str]) -> Tuple[dict, int]:
+ def _get_summary(self, result: List[str]) -> tuple[dict, int]:
"""Returns summary dictionary and index after the summary"""
summary = {}
i = 0
@@ -183,7 +180,7 @@ def run_hey_test(url: str,
proxy,
enable_http2,
disable_redirects)
- result = subprocess.check_output(hey_query.split(), stderr=subprocess.STDOUT, text=True)
+ result = subprocess.check_output(hey_query, stderr=subprocess.STDOUT, text=True)
return HeyPerformanceResult(result=result, results_map=results_map, **hey_map).to_results()
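The core of the Hey.py fix above is switching from one whitespace-split command string to an argument list, so header values that contain spaces survive as single tokens (this is also what the updated Hey_test.py assertions below expect). A small illustrative Python sketch, not the shipped script:

```python
import subprocess  # only needed for the commented-out call at the end

header = "c:3 4"  # a header value that contains a space

# Old approach: build one string and split on whitespace.
# The space inside the value splits it into two unrelated tokens.
broken = "hey -H {} http://mock.com".format(header).split()
assert broken == ["hey", "-H", "c:3", "4", "http://mock.com"]

# New approach: pass an argument list, so each value stays a single token
# and no shell quoting is needed.
args = ["hey", "-H", header, "http://mock.com"]
assert args == ["hey", "-H", "c:3 4", "http://mock.com"]
# subprocess.check_output(args, stderr=subprocess.STDOUT, text=True)
```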
diff --git a/Packs/DeveloperTools/Scripts/Hey/Hey.yml b/Packs/DeveloperTools/Scripts/Hey/Hey.yml
index ef080e42f144..bf8fac3e1b0a 100644
--- a/Packs/DeveloperTools/Scripts/Hey/Hey.yml
+++ b/Packs/DeveloperTools/Scripts/Hey/Hey.yml
@@ -31,7 +31,7 @@ args:
- OPTIONS
description: HTTP method.
- name: headers
- description: Custom HTTP header. Comma separated list of "key=value". e.g. User-Agent=curl/7.54.0,Accept=*/*
+    description: Custom HTTP header. Comma-separated list of "key=value". e.g. User-Agent=curl/7.54.0,Accept=*/*.
- name: disable_compression
defaultValue: "true"
auto: PREDEFINED
@@ -80,11 +80,11 @@ outputs:
description: The average time it took for a request to finish.
type: number
- contextPath: Hey.Result
- description: The full result in text format when output is set to "human_readable"
+ description: The full result in text format when output is set to "human_readable".
type: number
scripttarget: 0
subtype: python3
-dockerimage: demisto/rakyll-hey:1.0.0.49364
+dockerimage: demisto/rakyll-hey:1.0.0.98212
runas: DBotWeakRole
tests:
- No test
diff --git a/Packs/DeveloperTools/Scripts/Hey/Hey_test.py b/Packs/DeveloperTools/Scripts/Hey/Hey_test.py
index 615f2f15f62e..723bb0bb96d5 100644
--- a/Packs/DeveloperTools/Scripts/Hey/Hey_test.py
+++ b/Packs/DeveloperTools/Scripts/Hey/Hey_test.py
@@ -1,4 +1,3 @@
-import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import Hey as hey
import pytest
@@ -35,70 +34,69 @@ def test_try_name_value_arg_to_dict():
def test_construct_hey_query():
- url = 'http://mock.com'
+ url = "http://mock.com"
res = hey.construct_hey_query(url)
assert res[0] == {}
- assert res[1] == f'hey {url}'
+ assert res[1] == ["hey", url]
- requests_number = '2'
+ requests_number = "2"
res = hey.construct_hey_query(url, requests_number=requests_number)
- assert res[0] == {'n': '2'}
- assert res[1] == f'hey -n 2 {url}'
+ assert res[0] == {"n": "2"}
+ assert res[1] == ["hey", "-n", "2", url]
- timeout = '2'
+ timeout = "2"
res = hey.construct_hey_query(url, timeout=timeout)
- assert res[0] == {'t': '2'}
- assert res[1] == f'hey -t 2 {url}'
+ assert res[0] == {"t": "2"}
+ assert res[1] == ["hey", "-t", "2", url]
- concurrency = '2'
+ concurrency = "2"
res = hey.construct_hey_query(url, concurrency=concurrency)
- assert res[0] == {'c': '2'}
- assert res[1] == f'hey -c 2 {url}'
+ assert res[0] == {"c": "2"}
+ assert res[1] == ["hey", "-c", "2", url]
- duration = '2'
+ duration = "2"
res = hey.construct_hey_query(url, duration=duration)
- assert res[0] == {'z': '2s'}
- assert res[1] == f'hey -z 2s {url}'
+ assert res[0] == {"z": "2s"}
+ assert res[1] == ["hey", "-z", "2s", url]
- method = 'POST'
+ method = "POST"
res = hey.construct_hey_query(url, method=method)
- assert res[0] == {'m': method}
- assert res[1] == f'hey -m {method} {url}'
+ assert res[0] == {"m": method}
+ assert res[1] == ["hey", "-m", method, url]
- disable_compression = 'false'
+ disable_compression = "false"
res = hey.construct_hey_query(url, disable_compression=disable_compression)
assert res[0] == {}
- assert res[1] == f'hey {url}'
+ assert res[1] == ["hey", url]
- disable_compression = 'true'
+ disable_compression = "true"
res = hey.construct_hey_query(url, disable_compression=disable_compression)
assert res[0] == {}
- assert res[1] == f'hey --disable-compression {url}'
-
- headers = 'a=1,b=2'
+ assert res[1] == ["hey", "--disable-compression", url]
+ headers = "a=1,b=2,c=3 4"
res = hey.construct_hey_query(url, headers=headers)
assert res[0] == {}
- assert res[1] == f'hey -H a:1 -H b:2 {url}'
+ assert res[1] == ["hey", "-H", "a:1", "-H", "b:2", "-H", "c:3 4", url]
- body = '{}'
+ body = "{}"
res = hey.construct_hey_query(url, body=body)
- assert res[0] == {'d': body}
- assert res[1] == f'hey -d {body} {url}'
+ assert res[0] == {"d": body}
+ assert res[1] == ["hey", "-d", body, url]
proxy = "a:1"
res = hey.construct_hey_query(url, proxy=proxy)
- assert res[0] == {'x': proxy}
- assert res[1] == f'hey -x {proxy} {url}'
+ assert res[0] == {"x": proxy}
+ assert res[1] == ["hey", "-x", proxy, url]
enable_http2 = "true"
res = hey.construct_hey_query(url, enable_http2=enable_http2)
assert res[0] == {}
- assert res[1] == f'hey -h2 {url}'
+ assert res[1] == ["hey", "-h2", url]
disable_redirects = "true"
res = hey.construct_hey_query(url, disable_redirects=disable_redirects)
assert res[0] == {}
- assert res[1] == f'hey -disable-redirects {url}'
+ assert res[1] == ["hey", "-disable-redirects", url]
res = hey.construct_hey_query(
url=url,
@@ -112,19 +110,44 @@ def test_construct_hey_query():
body=body,
proxy=proxy,
enable_http2=enable_http2,
- disable_redirects=disable_redirects
+ disable_redirects=disable_redirects,
)
assert res[0] == {
- 't': timeout,
- 'n': requests_number,
- 'c': concurrency,
- 'm': method,
- 'z': duration + 's',
- 'd': body,
- 'x': proxy
+ "t": timeout,
+ "n": requests_number,
+ "c": concurrency,
+ "m": method,
+ "z": duration + "s",
+ "d": body,
+ "x": proxy,
}
- assert res[1] == 'hey --disable-compression -h2 -disable-redirects -H a:1 -H b:2 -t 2 -n 2 -c 2 -m POST -z' \
- ' 2s -d {} -x a:1 http://mock.com'
+ assert res[1] == [
+ "hey",
+ "--disable-compression",
+ "-h2",
+ "-disable-redirects",
+ "-H",
+ "a:1",
+ "-H",
+ "b:2",
+ "-H",
+ "c:3 4",
+ "-t",
+ "2",
+ "-n",
+ "2",
+ "-c",
+ "2",
+ "-m",
+ "POST",
+ "-z",
+ "2s",
+ "-d",
+ "{}",
+ "-x",
+ "a:1",
+ "http://mock.com",
+ ]
def test_run_hey_test(mocker):
diff --git a/Packs/DeveloperTools/Scripts/VerifyEnoughIncidents/VerifyEnoughIncidents.yml b/Packs/DeveloperTools/Scripts/VerifyEnoughIncidents/VerifyEnoughIncidents.yml
index 50efdceb7335..baca1191bb83 100644
--- a/Packs/DeveloperTools/Scripts/VerifyEnoughIncidents/VerifyEnoughIncidents.yml
+++ b/Packs/DeveloperTools/Scripts/VerifyEnoughIncidents/VerifyEnoughIncidents.yml
@@ -27,7 +27,7 @@ outputs:
type: boolean
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
runas: DBotWeakRole
tests:
- No test
diff --git a/Packs/DeveloperTools/Scripts/VerifyEnoughIndicators/VerifyEnoughIndicators.yml b/Packs/DeveloperTools/Scripts/VerifyEnoughIndicators/VerifyEnoughIndicators.yml
index 726d2be4b1dc..edd36b61737c 100644
--- a/Packs/DeveloperTools/Scripts/VerifyEnoughIndicators/VerifyEnoughIndicators.yml
+++ b/Packs/DeveloperTools/Scripts/VerifyEnoughIndicators/VerifyEnoughIndicators.yml
@@ -27,7 +27,7 @@ outputs:
type: boolean
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
runas: DBotWeakRole
tests:
- No test
diff --git a/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.py b/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.py
index 628d85cca0e4..d271c61099f1 100644
--- a/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.py
+++ b/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.py
@@ -1,7 +1,7 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from typing import Dict, Any, Tuple
+from typing import Any
def check_components(components: list, context: Any):
@@ -21,7 +21,7 @@ def check_components(components: list, context: Any):
raise KeyError
-def check_fields(fields_to_search_array: list, context_json) -> Tuple[bool, Any]:
+def check_fields(fields_to_search_array: list, context_json) -> tuple[bool, Any]:
"""
Args:
fields_to_search_array(list): list of fields to search
@@ -42,7 +42,7 @@ def check_fields(fields_to_search_array: list, context_json) -> Tuple[bool, Any]
return True, None
-def check_fields_command(args: Dict[str, Any]) -> CommandResults:
+def check_fields_command(args: dict[str, Any]) -> CommandResults:
"""
Args:
args(dict): args from demisto
diff --git a/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.yml b/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.yml
index 510e9c209e05..547b07f3bef7 100644
--- a/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.yml
+++ b/Packs/DeveloperTools/Scripts/VerifyObjectFieldsList/VerifyObjectFieldsList.yml
@@ -22,7 +22,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.100715
fromversion: 6.0.0
tests:
- No test
diff --git a/Packs/DeveloperTools/pack_metadata.json b/Packs/DeveloperTools/pack_metadata.json
index e744c79853ca..76172ac21d0c 100644
--- a/Packs/DeveloperTools/pack_metadata.json
+++ b/Packs/DeveloperTools/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Developer Tools",
"description": "Basic tools for content development.",
"support": "community",
- "currentVersion": "1.3.15",
+ "currentVersion": "1.3.18",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Devo/.pack-ignore b/Packs/Devo/.pack-ignore
index 0d5b5238bc86..730fa10e0152 100644
--- a/Packs/Devo/.pack-ignore
+++ b/Packs/Devo/.pack-ignore
@@ -1,5 +1,5 @@
[file:Devo_v2.yml]
-ignore=IN124,IN126,BA108,BA109
+ignore=IN126,BA108,BA109
[file:README.md]
ignore=RM106,RM108
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.py b/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.py
index a52e0aab8d74..43ebcd403d3a 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.py
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.py
@@ -2,7 +2,7 @@
import json
import requests
import urllib3
-from typing import Dict, Any, Union
+from typing import Any
# Disable insecure warnings
urllib3.disable_warnings()
@@ -38,7 +38,7 @@ def request_api_token():
r = requests.post(url=AUTH_URL, headers=AUTH_HEADERS, data=payload, verify=VERIFY_CERT)
response_json = r.json()
if 200 <= r.status_code <= 299:
- api_key = response_json['access_token']
+ api_key = response_json.get('access_token')
CLIENT_HEADERS['Authorization'] = 'Bearer ' + api_key
else:
return_error(f'Error in request_api_token [{r.status_code}] - {r.reason}')
@@ -70,7 +70,7 @@ def get_watchlist_id(watchlist_name: str) -> str:
list_id = None
if 200 <= r.status_code <= 299:
for item in json_text:
- if item.get('display_name').lower() == watchlist_name.lower():
+ if item.get('display_name', '').lower() == watchlist_name.lower():
list_id = item.get('name')
else:
return_error(f'Error retrieving watchlist_id for {watchlist_name}, {r.status_code}: {r.text}')
@@ -91,8 +91,8 @@ def get_list_id(list_name: str, list_type: str) -> str:
list_id = None
if 200 <= r.status_code <= 299:
for jText in json_text:
- if str(jText['name']).lower() == list_name.lower():
- list_id = jText['id']
+ if str(jText.get('name', '')).lower() == list_name.lower():
+ list_id = jText.get('id')
else:
return_error(f'Error retrieving list_id for {list_name}, {r.status_code}: {r.text}')
@@ -125,8 +125,8 @@ def get_watchlist_entry_id(watchlist_name: str, watchlist_entry: str) -> str:
if r.status_code != requests.codes.ok:
return_error('Unable to retrieve watchlist entries')
for jText in json_text:
- if str(jText['value_name']).lower() == watchlist_entry.lower():
- watchlist_entry_id = jText['value_id']
+ if str(jText.get('value_name', '')).lower() == watchlist_entry.lower():
+ watchlist_entry_id = jText.get('value_id')
return str(watchlist_entry_id)
@@ -166,9 +166,9 @@ def check_componentlist_entry():
Sets DigitalGuardian.Componentlist.Found flag.
"""
- componentlist_name = demisto.args().get('componentlist_name', None)
- componentlist_entry = demisto.args().get('componentlist_entry', None)
- if componentlist_name is None or componentlist_entry is None:
+ componentlist_name = demisto.args().get('componentlist_name', '')
+ componentlist_entry = demisto.args().get('componentlist_entry', '')
+ if not componentlist_name or not componentlist_entry:
return_error('Please provide both componentlist_name and componentlist_entry')
componentlist = None
@@ -180,8 +180,8 @@ def check_componentlist_entry():
if 200 <= r.status_code <= 299:
for jText in json_text:
- if str(jText['content_value']).lower() == componentlist_entry.lower():
- componentlist = jText['content_value']
+ if str(jText.get('content_value', '')).lower() == componentlist_entry.lower():
+ componentlist = jText.get('content_value')
else:
return_error(f'Unable to find componentlist named {componentlist_name}, {r.status_code}')
@@ -233,7 +233,7 @@ def add_entry_to_watchlist():
r = requests.post(url=full_url + watchlist_id + '/values/', data=watchlist_entry_json,
headers=CLIENT_HEADERS, verify=VERIFY_CERT)
if 200 <= r.status_code <= 299:
- demisto.results('added watchlist entry ({}) to watchlist name ({})'.format(watchlist_entry, watchlist_name))
+ demisto.results(f'added watchlist entry ({watchlist_entry}) to watchlist name ({watchlist_name})')
else:
return_error(
'Failed to add watchlist entry({}) to watchlist name ({}). The response failed with status code {}. '
@@ -277,7 +277,7 @@ def rm_entry_from_watchlist():
headers=CLIENT_HEADERS, verify=VERIFY_CERT)
if 200 <= r.status_code <= 299:
demisto.results(
- 'removed watchlist entry ({}) from watchlist name ({})'.format(watchlist_entry, watchlist_name))
+ f'removed watchlist entry ({watchlist_entry}) from watchlist name ({watchlist_name})')
else:
return_error(
'Failed to remove watchlist entry({}) from watchlist name ({}). The response failed with status code {}. '
@@ -303,15 +303,16 @@ def get_items_request():
if r.status_code == 200:
header_field = []
- for field in json_text['fields']:
- header_field.append(field['name'])
+ for field in json_text.get('fields'):
+ header_field.append(field.get('name'))
exportdata = []
- if json_text['total_hits'] == 0:
+ if json_text.get('total_hits') == 0:
DEBUG('found no data')
+ return None
else:
DEBUG('found data')
- for data in json_text['data']:
+ for data in json_text.get('data'):
entry_line = {}
header_position = 0
@@ -321,11 +322,11 @@ def get_items_request():
exportdata.append(entry_line)
for items in exportdata:
- if not (items['dg_alert.dg_detection_source']) == 'alert' and items['dg_tags']:
- comm = items['dg_alarm_name'].find(',')
+ if items.get('dg_alert.dg_detection_source') != 'alert' and items.get('dg_tags'):
+ comm = items.get('dg_alarm_name', "").find(',')
if comm == -1:
comm = 100
- name = '{alarm_name}-{id}'.format(alarm_name=items['dg_alarm_name'][0:comm], id=items['dg_guid'])
+ name = '{alarm_name}-{id}'.format(alarm_name=items.get('dg_alarm_name', "")[0:comm], id=items.get('dg_guid'))
DEBUG(name + " != " + oldname)
if name != oldname:
DEBUG("create_artifacts...")
@@ -338,6 +339,7 @@ def get_items_request():
return_error('DigitalGuardian ARC Export Failed '
'Please check authentication related parameters. ' + json.dumps(r.json(), indent=4,
sort_keys=True))
+ return None
def convert_to_demisto_severity(dg_severity: str) -> int:
@@ -458,13 +460,13 @@ def create_artifacts(alert):
DEBUG("before alert")
DEBUG(json.dumps(alert))
if CATEGORY in specific_alert_mapping:
- temp_dict: Dict[Union[str, Any], Union[Union[str, int], Any]] = {}
- cef: Dict[Union[str, Any], Union[Union[str, int], Any]] = {}
+ temp_dict: dict[str | Any, str | int | Any] = {}
+ cef: dict[str | Any, str | int | Any] = {}
cef_types = {}
cef['Vendor ID'] = 'DG'
cef['Vendor Product'] = 'Digital Guardian'
- cef['severity'] = convert_to_demisto_severity(alert['dg_alarm_sev'])
- cef['sensitivity'] = convert_to_demisto_sensitivity(alert['dg_class.dg_name'])
+ cef['severity'] = convert_to_demisto_severity(alert.get('dg_alarm_sev'))
+ cef['sensitivity'] = convert_to_demisto_sensitivity(alert.get('dg_class.dg_name'))
DEBUG("cef: " + json.dumps(cef))
for artifact_key, artifact_tuple in specific_alert_mapping.get(CATEGORY).items(): # type: ignore
@@ -472,13 +474,13 @@ def create_artifacts(alert):
cef[artifact_key] = alert[artifact_tuple[0]]
cef_types[artifact_key] = artifact_tuple[1]
if cef:
- comm = alert['dg_alarm_name'].find(',')
+ comm = alert.get('dg_alarm_name', '').find(',')
if comm == -1:
comm = 100
- name = '{alarm_name}-{id}'.format(alarm_name=alert['dg_alarm_name'][0:comm], id=alert['dg_guid'])
+ name = '{alarm_name}-{id}'.format(alarm_name=alert.get('dg_alarm_name', '')[0:comm], id=alert.get('dg_guid'))
temp_dict['name'] = name
- temp_dict['severity'] = convert_to_demisto_severity(alert['dg_alarm_sev'])
- temp_dict['type'] = alert['dg_tags']
+ temp_dict['severity'] = convert_to_demisto_severity(alert.get('dg_alarm_sev'))
+ temp_dict['type'] = alert.get('dg_tags')
temp_dict['occurred'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
temp_dict['rawJSON'] = json.dumps(cef)
artifacts_list.update(temp_dict)
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.yml b/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.yml
index aa47c49b1e44..f95cae475df8 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.yml
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.yml
@@ -119,7 +119,7 @@ script:
required: true
description: Remove Componentlist Entry.
name: digitalguardian-remove-componentlist-entry
- dockerimage: demisto/python3:3.10.14.92207
+ dockerimage: demisto/python3:3.10.14.98471
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.py b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.py
index 0c15d5bd42b6..e12478258adf 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.py
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.py
@@ -118,12 +118,13 @@ def get_raw_events(client: Client, time_of_last_event: str) -> list:
time_of_last_event_str = temp_time.isoformat(sep=' ', timespec='milliseconds')
current_time = datetime_to_string(datetime.now())
events = client.get_events(time_of_last_event_str, current_time)
- for field_names in events["fields"]:
- outcome.append(field_names['name'])
- for event in events["data"]:
+ events = {} if events is None else events
+ for field_names in events.get("fields", []):
+ outcome.append(field_names.get('name'))
+ for event in events.get("data", []):
result = dict(zip(outcome, event))
event_list.append(result)
- event_list.sort(key=lambda item: (item["inc_mtime"], item["dg_guid"]))
+ event_list.sort(key=lambda item: (item.get("inc_mtime"), item.get("dg_guid")))
return event_list
@@ -141,7 +142,10 @@ def get_events_command(client: Client, args: dict) -> Tuple[list, CommandResults
limit = int(args.get("limit", 1000))
if limit:
event_list = event_list[:limit]
- hr = tableToMarkdown(name='Test Event', t=event_list)
+ if not event_list:
+ hr = "No events found."
+ else:
+ hr = tableToMarkdown(name='Test Event', t=event_list)
demisto.debug(f'get events command that ran with the limit: {limit}')
return event_list, CommandResults(readable_output=hr)
@@ -163,19 +167,21 @@ def create_events_for_push(event_list: list, last_time: str, id_list: list, limi
event_list_for_push = []
demisto.debug('Checking duplications and creating events for pushing to XSIAM')
for event in event_list:
+ inc_time = event.get("inc_mtime")
if last_time:
last_time_date = arg_to_datetime(arg=last_time, required=True).date() # type: ignore[union-attr]
- event_date = arg_to_datetime(arg=event.get("inc_mtime"), required=True).date() # type: ignore[union-attr]
- if event.get("inc_mtime") < last_time or event.get("dg_guid") in id_list:
+ event_date = arg_to_datetime(arg=inc_time, required=True).date() if inc_time else None # type: ignore[union-attr]
+ if (inc_time and inc_time < last_time) or event.get("dg_guid") in id_list:
continue
- if last_time_date == event_date:
+ if last_time_date == event_date and event.get("dg_guid"):
id_list.append(event.get("dg_guid"))
- else:
+ elif event.get("dg_guid"):
id_list = [event.get("dg_guid")]
else:
- id_list.append(event.get("dg_guid"))
+ if event.get("dg_guid"):
+ id_list.append(event.get("dg_guid"))
event_list_for_push.append(event)
- last_time = event.get("inc_mtime")
+ last_time = inc_time if inc_time else last_time
index += 1
if index == limit:
break
@@ -216,7 +222,7 @@ def add_time_to_events(events: list[dict]) -> None:
"""
if events:
for event in events:
- create_time = arg_to_datetime(arg=event.get('inc_mtime'))
+ create_time = arg_to_datetime(arg=event.get('inc_mtime')) if event.get('inc_mtime') else None
event['_time'] = create_time.strftime(DATE_FORMAT) if create_time else None
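The collector changes above consistently guard against events whose `inc_mtime` or `dg_guid` fields are empty. A self-contained sketch of the `_time` guard introduced in `add_time_to_events`, using `datetime.strptime` as a stand-in for XSOAR's `arg_to_datetime` helper and assuming the timestamp format shown in the test data:

```python
from datetime import datetime

DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"  # target format asserted in the unit test

def add_time_to_events(events):
    # Parse the timestamp only when it is present and non-empty;
    # otherwise store None instead of raising on "".
    for event in events or []:
        raw = event.get("inc_mtime")
        parsed = datetime.strptime(raw, "%Y-%m-%d %H:%M:%S") if raw else None
        event["_time"] = parsed.strftime(DATE_FORMAT) if parsed else None

events = [{"inc_mtime": ""}, {"inc_mtime": "2023-05-23 11:53:11"}]
add_time_to_events(events)
print([e["_time"] for e in events])  # [None, '2023-05-23T11:53:11Z']
```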
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.yml b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.yml
index 781508e76673..efb87eed194f 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.yml
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector.yml
@@ -70,7 +70,7 @@ script:
description: Gets events from Hello World.
execution: false
name: digital-guardian-get-events
- dockerimage: demisto/python3:3.10.13.78960
+ dockerimage: demisto/python3:3.10.14.98471
isfetchevents: true
runonce: false
script: '-'
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector_test.py b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector_test.py
index 68a340cbceb0..d3f3c6a5f3ab 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector_test.py
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/DigitalGuardianARCEventCollector_test.py
@@ -21,7 +21,7 @@ def test_add_time_key_to_events():
events = util_load_json('test_data/events.json')
add_time_to_events(events)
- assert events[0]['_time'] == "2023-05-23T06:56:39Z"
+ assert events[0]['_time'] is None
assert events[1]['_time'] == "2023-05-23T11:53:11Z"
@@ -120,4 +120,4 @@ def test_fetch_events_command(mocker):
mock_events = util_load_json('test_data/events.json')
assert events == mock_events
assert next_run == {'start_time': '2023-05-23 11:53:11',
- 'id_list': ['1dc3c1fa-5474-4fc0-a7c3-74ff42d28e5e', 'c742c377-b429-428a-b0c9-515cbbf143be']}
+ 'id_list': ['1dc3c1fa-5474-4fc0-a7c3-74ff42d28e5e']}
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events.json b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events.json
index 8124ad0f5057..8a96989d4d20 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events.json
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events.json
@@ -7,7 +7,7 @@
"dg_description": "This file outlook.exe was going to [demo.digitalg@gmail.com]",
"inc_id": "230523-WIQHA",
"dg_comment": "-",
- "inc_mtime": "2023-05-23 06:56:39",
+ "inc_mtime": "",
"dg_guid": "1dc3c1fa-5474-4fc0-a7c3-74ff42d28e5e",
"inc_sev": "Critical",
"dg_utype": "Incident",
@@ -22,7 +22,7 @@
"inc_id": "230523-RG0AB",
"dg_comment": "-",
"inc_mtime": "2023-05-23 11:53:11",
- "dg_guid": "c742c377-b429-428a-b0c9-515cbbf143be",
+ "dg_guid": "",
"inc_sev": "Critical",
"dg_utype": "Incident",
"dg_tenant": "279b59f3-02f3-44ea-a7c3-9bac2eb0224d"
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_1_response.json b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_1_response.json
index 7c201bdf9e60..d0bc5581965c 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_1_response.json
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_1_response.json
@@ -7,7 +7,7 @@
"dg_description": "This file outlook.exe was going to [demo.digitalg@gmail.com]",
"inc_id": "230523-WIQHA",
"dg_comment": "-",
- "inc_mtime": "2023-05-23 06:56:39",
+ "inc_mtime": "",
"dg_guid": "1dc3c1fa-5474-4fc0-a7c3-74ff42d28e5e",
"inc_sev": "Critical",
"dg_utype": "Incident",
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_request.json b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_request.json
index 04a8d3ba7985..d0e45f3665bb 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_request.json
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/test_data/events_mock_request.json
@@ -106,7 +106,7 @@
"This file outlook.exe was going to [demo.digitalg@gmail.com]",
"230523-WIQHA",
"-",
- "2023-05-23 06:56:39",
+ "",
"1dc3c1fa-5474-4fc0-a7c3-74ff42d28e5e",
"Critical",
"Incident",
@@ -121,7 +121,7 @@
"230523-RG0AB",
"-",
"2023-05-23 11:53:11",
- "c742c377-b429-428a-b0c9-515cbbf143be",
+ "",
"Critical",
"Incident",
"279b59f3-02f3-44ea-a7c3-9bac2eb0224d"
diff --git a/Packs/DigitalGuardian/ReleaseNotes/1_1_5.md b/Packs/DigitalGuardian/ReleaseNotes/1_1_5.md
new file mode 100644
index 000000000000..6d8e1d98d2fd
--- /dev/null
+++ b/Packs/DigitalGuardian/ReleaseNotes/1_1_5.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### Digital Guardian
+
+- Fixed an issue where the ***fetch-incidents*** command did not handle empty responses gracefully.
+- Updated the Docker image to *demisto/python3:3.10.14.98471*.
+
+##### Digital Guardian ARC Event Collector
+
+- Fixed an issue where the ***fetch-events*** command did not handle empty responses gracefully.
+- Updated the Docker image to *demisto/python3:3.10.14.98471*.
\ No newline at end of file
diff --git a/Packs/DigitalGuardian/pack_metadata.json b/Packs/DigitalGuardian/pack_metadata.json
index 7744841d42e8..879699b60a07 100644
--- a/Packs/DigitalGuardian/pack_metadata.json
+++ b/Packs/DigitalGuardian/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Digital Guardian",
"description": "Digital Guardian ARC Watchlist Integration",
"support": "partner",
- "currentVersion": "1.1.4",
+ "currentVersion": "1.1.5",
"author": "Digital Guardian",
"url": "https://digitalguardian.com",
"email": "support@digitalguardian.com",
diff --git a/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Old_Vulnerabilities_Found.png b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Old_Vulnerabilities_Found.png
new file mode 100644
index 000000000000..f5aca499b9fa
Binary files /dev/null and b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Old_Vulnerabilities_Found.png differ
diff --git a/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_PAN-OS_block_assets.png b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_PAN-OS_block_assets.png
new file mode 100644
index 000000000000..67a9c391db97
Binary files /dev/null and b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_PAN-OS_block_assets.png differ
diff --git a/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Scan_Asset_Not_Recently_Scanned.png b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Scan_Asset_Not_Recently_Scanned.png
new file mode 100644
index 000000000000..014060d12074
Binary files /dev/null and b/Packs/Digital_Defense_FrontlineVM/doc_files/Digital_Defense_FrontlineVM_Scan_Asset_Not_Recently_Scanned.png differ
diff --git a/Packs/DomainTools/.pack-ignore b/Packs/DomainTools/.pack-ignore
index c8d92c9ec03d..3f0405d75d4a 100644
--- a/Packs/DomainTools/.pack-ignore
+++ b/Packs/DomainTools/.pack-ignore
@@ -2,5 +2,5 @@
ignore=RM104
[file:DomainTools.yml]
-ignore=IN144,IN124,IN153
+ignore=IN144,IN153
diff --git a/Packs/DomainTools/Integrations/DomainTools/DomainTools.js b/Packs/DomainTools/Integrations/DomainTools/DomainTools.js
index a46bc9add26e..4628630988ff 100644
--- a/Packs/DomainTools/Integrations/DomainTools/DomainTools.js
+++ b/Packs/DomainTools/Integrations/DomainTools/DomainTools.js
@@ -51,8 +51,10 @@ var changeKeys = function(conv, obj){
return output;
};
-var callWhoIs = function(query, parsed,url){
- var res = sendRequest(url + query + '/whois/parsed/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key}));
+var callWhoIs = function(url, query, parsed){
+ var whois_endpoint = `${url}/v1/${query}/whois/parsed/${encodeToURLQuery(DOMAINTOOLS_PARAMS)}`
+ var res = sendRequest(whois_endpoint)
+
var error = res.response.error;
if(error && error.code === 206){
parsed = false;
@@ -101,7 +103,9 @@ var scoreConv = function(score, threshold){
};
var callDomain = function(url, domain, threshold){
- var repRes = sendRequest(url + 'reputation/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key, 'domain' : domain}));
+ var api_params = Object.assign(DOMAINTOOLS_PARAMS, {"domain": domain})
+ var domain_reputation_endpoint = `${url}/v1/reputation/${encodeToURLQuery(api_params)}`
+ var repRes = sendRequest(domain_reputation_endpoint)
var md = 'Domain '+repRes.response.domain+' found with risk score of '+ repRes.response.risk_score +'.';
var context = {
'DBotScore' : {
@@ -127,7 +131,8 @@ var callDomain = function(url, domain, threshold){
};
var callProfile= function(url, domain){
- var domRes = sendRequest(url + domain + '/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key}));
+ var domain_profile_endpoint = `${url}/v1/${domain}/${encodeToURLQuery(DOMAINTOOLS_PARAMS)}`
+ var domRes = sendRequest(domain_profile_endpoint)
return {
Type: entryTypes.note,
Contents: domRes,
@@ -155,7 +160,9 @@ var callDomainSearch = function(url, args){
args = changeKeys(argToUrlParam,args);
args.api_username = params.username;
args.api_key = params.key;
- var res = sendRequest(url + 'domain-search/'+encodeToURLQuery(args));
+
+ var api_params = Object.assign(DOMAINTOOLS_PARAMS, args)
+ var res = sendRequest(`${url}/v2/domain-search/${encodeToURLQuery(api_params)}`);
var results = res.response.results;
var md = '';
@@ -188,12 +195,15 @@ var callReverseIP = function(url, args){
var context = {'Domain' : []};
var res;
var addresses;
- if(args.domain){
- res = sendRequest(url + args.domain+'/reverse-ip/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key, 'limit': args.limit? args.limit : 50}));
+ var api_params = Object.assign(DOMAINTOOLS_PARAMS, {"limit": args.limit? args.limit : 50})
+ if(args.domain){
+ var reverse_ip_endpoint = `${url}/v1/${args.domain}/reverse-ip/${encodeToURLQuery(api_params)}`
+ res = sendRequest(reverse_ip_endpoint)
}
else if(args.ip){
- res = sendRequest(url + args.ip+'/host-domains/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key, 'limit': args.limit? args.limit : 50}));
+ var host_domains_endpoint = `${url}/v1/${args.ip}/host-domains/${encodeToURLQuery(api_params)}`
+ res = sendRequest(host_domains_endpoint)
}
addresses = res.response.ip_addresses;
if(!Array.isArray(addresses)){
@@ -219,7 +229,9 @@ var callReverseIP = function(url, args){
}
var callReverseNameServer = function(url, server, limit){
- var res = sendRequest(url +server + '/name-server-domains/' + encodeToURLQuery({'api_username':params.username,'api_key':params.key, 'limit': limit? limit : 50}));
+ var api_params = Object.assign(DOMAINTOOLS_PARAMS, {"limit": limit? limit : 50})
+ var reverse_ns_endpoint = `${url}/v1/${server}/name-server-domains/${encodeToURLQuery(api_params)}`
+ var res = sendRequest(reverse_ns_endpoint)
var md = 'Found ' + res.response.primary_domains.length + ' domains\n';
var context = {'Domain' : []};
res.response.primary_domains.forEach(function(domain){
@@ -248,7 +260,10 @@ var callReverseWhoIs = function(url, args){
delete args.quoteModel
delete args.onlyHistoricScope;
- var res = sendRequest(url + encodeToURLQuery(args));
+ var api_params = Object.assign(DOMAINTOOLS_PARAMS, args)
+ var reverse_whois_endpoint = `${url}/v1/reverse-whois/${encodeToURLQuery(api_params)}`
+
+ var res = sendRequest(reverse_whois_endpoint);
var context = {'Domain' : []};
var md = 'Found '+res.response.domains.length+ ' domains: \n';
res.response.domains.forEach(function(domain){
@@ -268,7 +283,8 @@ var callReverseWhoIs = function(url, args){
/*http://api.domaintools.com/v1/domaintools.com/whois/history/*/
var callWhoisHistory = function(url, domain){
- var res = sendRequest(url+domain+'/whois/history/'+ encodeToURLQuery({'api_username':params.username,'api_key':params.key}));
+ var whois_history_endpoint = `${url}/v1/${domain}/whois/history/${encodeToURLQuery(DOMAINTOOLS_PARAMS)}`
+ var res = sendRequest(whois_history_endpoint)
var splitRecord;
var context = {'Domain' : {'Name' : domain, 'WhoisHistory' : []}};
var md = '';
@@ -306,27 +322,37 @@ params.username = params.username || params.credentials.identifier
if (!params.key || !params.username) {
throw 'Username and API key must be provided.'
}
+
+const DOMAINTOOLS_PARAMS = {
+ "api_username": params.username,
+ "api_key": params.key,
+ "app_partner": "cortex_xsoar",
+ "app_name": "enterprise_for_xsoar",
+ "app_version": "1"
+}
+
switch (command) {
case 'test-module':
- var res = sendRequest(url + '/v1/demisto.com/whois/parsed/'+encodeToURLQuery({'api_username':params.username,'api_key':params.key}));
+ var account_url = `${url}/v1/account/${encodeToURLQuery(DOMAINTOOLS_PARAMS)}`
+ var res = sendRequest(account_url)
if(res.response.error){
log('Something went wrong - error code ' + error.code);
}
return 'ok';
case 'domain':
- return callDomain(url+'/v1/', args.domain, args.threshold);
+ return callDomain(url, args.domain, args.threshold);
case 'domainSearch':
- return callDomainSearch(url+'/v2/', args);
+ return callDomainSearch(url, args);
case 'reverseIP':
- return callReverseIP(url+'/v1/', args);
+ return callReverseIP(url, args);
case 'reverseNameServer':
- return callReverseNameServer(url+'/v1/', args.nameServer, args.limit);
+ return callReverseNameServer(url, args.nameServer, args.limit);
case 'reverseWhois':
- return callReverseWhoIs(url + '/v1/reverse-whois/', args);
+ return callReverseWhoIs(url, args);
case 'whois':
- return callWhoIs(args.query, args.parsed, url+'/v1/');
+ return callWhoIs(url, args.query, args.parsed);
case 'whoisHistory':
- return callWhoisHistory(url+'/v1/', args.domain);
+ return callWhoisHistory(url, args.domain);
case 'domainProfile':
- return callProfile(url+'/v1/', args.domain);
+ return callProfile(url, args.domain);
}
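The JavaScript changes above collect the credentials plus the new `app_partner`, `app_name`, and `app_version` reporting fields into a single `DOMAINTOOLS_PARAMS` object and merge it into every endpoint's query string. A rough Python sketch of the same pattern (values and the helper name are illustrative, not taken from the integration):

```python
from urllib.parse import urlencode

# Illustrative values; the real ones come from the instance parameters.
DOMAINTOOLS_PARAMS = {
    "api_username": "user",
    "api_key": "secret",
    "app_partner": "cortex_xsoar",
    "app_name": "enterprise_for_xsoar",
    "app_version": "1",
}

def build_endpoint(base_url, path, **extra):
    # Copy the shared params and layer per-call arguments (domain, limit, ...) on top.
    query = {**DOMAINTOOLS_PARAMS, **extra}
    return "{}/v1/{}/?{}".format(base_url, path, urlencode(query))

print(build_endpoint("https://api.domaintools.com", "reputation", domain="example.com"))
```

Copying the shared dict, rather than mutating it as `Object.assign(DOMAINTOOLS_PARAMS, args)` does, keeps one call's extra arguments from leaking into the next request.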
diff --git a/Packs/DomainTools/Integrations/DomainTools/DomainTools.yml b/Packs/DomainTools/Integrations/DomainTools/DomainTools.yml
index 3cf454f1093c..c3dca6830071 100644
--- a/Packs/DomainTools/Integrations/DomainTools/DomainTools.yml
+++ b/Packs/DomainTools/Integrations/DomainTools/DomainTools.yml
@@ -6,7 +6,7 @@ name: DomainTools
fromversion: 5.0.0
display: DomainTools
category: Data Enrichment & Threat Intelligence
-description: Domain name, DNS and Internet OSINT-based cyber threat intelligence and cybercrime forensics products and data
+description: Domain name, DNS and Internet OSINT-based cyber threat intelligence and cybercrime forensics products and data.
configuration:
- display: DomainTools API URL
name: server
@@ -75,24 +75,24 @@ script:
arguments:
- name: domain
required: true
- description: Domain name to check reputation
+ description: Domain name to check reputation.
- name: long
- description: Should we return full response with detected URLs
+ description: Should we return full response with detected URLs.
- name: sampleSize
- description: The number of samples from each type (resolutions, detections, etc.) to display for long format
+ description: The number of samples from each type (resolutions, detections, etc.) to display for long format.
- name: threshold
- description: 'If number of positive detected domains is bigger than the threshold we will consider it malicious'
+ description: 'If the number of positive detected domains is greater than the threshold, we will consider it malicious.'
- name: wait
- description: Wait time between tries if we reach the API rate limit in seconds
+ description: Wait time, in seconds, between tries if we reach the API rate limit.
- name: retries
- description: Number of retries for API rate limit
+ description: Number of retries for API rate limit.
outputs:
- contextPath: Domain.Name
- description: The tested domain
+ description: The tested domain.
- contextPath: Domain.RiskScore
- description: The reputation returned from DomainTools
+ description: The reputation returned from DomainTools.
- contextPath: Domain.Malicious.Vendor
- description: For malicious domains, the vendor that made the decision
+ description: For malicious domains, the vendor that made the decision.
- contextPath: DBotScore.Indicator
description: The indicator that was tested.
- contextPath: DBotScore.Type
@@ -107,86 +107,86 @@ script:
- name: query
required: true
default: true
- description: (mandatory and default) Query strings. Each term in the query string must be at least three characters long. Use spaces to separate multiple terms
+ description: (mandatory and default) Query strings. Each term in the query string must be at least three characters long. Use spaces to separate multiple terms.
- name: pageNumber
- description: 'Sets the page of results to retrieve from the server. Each page is limited to 100 results. Default: 1'
+ description: 'Sets the page of results to retrieve from the server. Each page is limited to 100 results. Default: 1.'
defaultValue: "1"
- name: maxLength
- description: 'Limit the maximum domain character count. Default: 25'
+ description: 'Limit the maximum domain character count. Default: 25.'
defaultValue: "25"
- name: minLength
- description: 'Limit the minimum domain character count. Default: 1'
+ description: 'Limit the minimum domain character count. Default: 1.'
defaultValue: "1"
- name: hesHyphen
- description: '(true or false) Return results with hyphens in the domain name. Default: true'
+ description: '(true or false) Return results with hyphens in the domain name. Default: true.'
- name: exclude
- description: Terms to exclude from matching
+ description: Terms to exclude from matching.
- name: activeOnly
auto: PREDEFINED
predefined:
- "true"
- "false"
- description: '(true or false) Return only domains currently registered.Default: false'
+ description: '(true or false) Return only domains currently registered. Default: false.'
defaultValue: "false"
- name: deletedOnly
auto: PREDEFINED
predefined:
- "true"
- "false"
- description: '(true or false) Return only domains previously registered but not currently registered. Default: false'
+ description: '(true or false) Return only domains previously registered but not currently registered. Default: false.'
defaultValue: "false"
- name: anchorLeft
auto: PREDEFINED
predefined:
- "true"
- "false"
- description: '(true or false) Return only domains that start with the query term. Default: false'
+ description: '(true or false) Return only domains that start with the query term. Default: false.'
defaultValue: "false"
- name: anchorRight
auto: PREDEFINED
predefined:
- "true"
- "false"
- description: '(true or false) Return only domains that end with the query term. Default: false'
+ description: '(true or false) Return only domains that end with the query term. Default: false.'
defaultValue: "false"
- name: hasNumber
auto: PREDEFINED
predefined:
- "false"
- "true"
- description: '(true or false) Return results with numbers in the domain name. Default: true'
+ description: '(true or false) Return results with numbers in the domain name. Default: true.'
defaultValue: "true"
outputs:
- contextPath: Domain.Name
- description: Domain found by command
- description: Search for domain based on the given parameters
+ description: Domain found by command.
+ description: Search for domain based on the given parameters.
- name: reverseIP
arguments:
- name: ip
default: true
- description: (default) specify IP address
+ description: (default) specify IP address.
- name: domain
- description: 'If you provide a domain name, DomainTools will respond with the list of other domains that share the same IP'
+ description: 'If you provide a domain name, DomainTools will respond with the list of other domains that share the same IP.'
- name: limit
description: Limits the size of the domain list than can appear in a response. The limit is applied per-IP address, not for the entire request.
outputs:
- contextPath: Domain.Name
- description: Domain name
+ description: Domain name.
- contextPath: Domain.DNS.Address
- description: IP address
- description: Reverse loopkup of an IP address
+ description: IP address.
+ description: Reverse lookup of an IP address.
- name: reverseNameServer
arguments:
- name: nameServer
required: true
default: true
- description: '(default and mandatory) specify the name of the primary or secondary name server'
+ description: '(default and mandatory) specify the name of the primary or secondary name server.'
- name: limit
- description: Limit the size of the domain list than can appear in a response
+ description: Limit the size of the domain list that can appear in a response.
outputs:
- contextPath: Domain.Name
- description: Name of domain
- description: Reverse nameserver lookup
+ description: Name of domain.
+ description: Reverse nameserver lookup.
- name: reverseWhois
arguments:
- name: terms
@@ -200,23 +200,23 @@ script:
predefined:
- "true"
- "false"
- description: Show only historic records
+ description: Show only historic records.
defaultValue: "false"
- name: quoteMode
- description: 'Only lists the size and retail price of the query if you have per-domain pricing access purchase : includes the complete list of domain names that match the query'
+ description: 'quote: only lists the size and retail price of the query if you have per-domain pricing access. purchase: includes the complete list of domain names that match the query.'
defaultValue: purchase
outputs:
- contextPath: Domain.Name
- description: Name of domain
- description: Reverse lookup of whois information
+ description: Name of domain.
+ description: Reverse lookup of whois information.
- name: whois
arguments:
- name: query
required: true
default: true
- description: '(mandatory and default) enter domain (do not use full URL). e.g. !whois [query=]demisto.com'
+ description: '(mandatory and default) enter domain (do not use full URL). e.g. !whois [query=]demisto.com.'
- name: parsed
- description: Should return parsed or raw response. Default is true
+ description: Should return parsed or raw response. Default is true.
auto: PREDEFINED
predefined:
- "true"
@@ -224,24 +224,24 @@ script:
defaultValue: "true"
outputs:
- contextPath: Domain.Name
- description: Requested domain name
+ description: Requested domain name.
- contextPath: Domain.Whois
- description: Whois data
- description: Provides registration details about a domain
+ description: Whois data.
+ description: Provides registration details about a domain.
- name: whoisHistory
arguments:
- name: domain
required: true
default: true
- description: Specify domain e.g. mycompany.com
+ description: Specify domain e.g. mycompany.com.
outputs:
- contextPath: Domain.Name
- description: Name of domain
+ description: Name of domain.
- contextPath: Domain.WhoisHistory
- description: Domain Whois history data
- description: Display a history of whois for a given domain
+ description: Domain Whois history data.
+ description: Display a history of whois for a given domain.
- name: domainProfile
- description: Display profile for a given domain
+ description: Display profile for a given domain.
arguments:
- name: domain
- description: Specify domain e.g. mycompany.com
+ description: Specify domain e.g. mycompany.com.
diff --git a/Packs/DomainTools/ReleaseNotes/1_1_6.md b/Packs/DomainTools/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..dff8a342e9e4
--- /dev/null
+++ b/Packs/DomainTools/ReleaseNotes/1_1_6.md
@@ -0,0 +1,7 @@
+#### Integrations
+
+##### DomainTools
+- Added the following parameters to the API requests for better reporting:
+ - *app_partner*
+ - *app_name*
+ - *app_version*
\ No newline at end of file
diff --git a/Packs/DomainTools/pack_metadata.json b/Packs/DomainTools/pack_metadata.json
index 6eb2814e7094..c7340ab340d3 100644
--- a/Packs/DomainTools/pack_metadata.json
+++ b/Packs/DomainTools/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DomainTools Enterprise",
"description": "Provides DomainTools market-leading Whois data including hosting history and parsed IP records within Cortex XSOAR. Requires a DomainTools Enterprise API key.",
"support": "partner",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "DomainTools",
"url": "https://www.domaintools.com/support/",
"email": "memberservices@domaintools.com",
diff --git a/Packs/DomainToolsIrisDetect/.pack-ignore b/Packs/DomainToolsIrisDetect/.pack-ignore
index b27b26e8f36c..e69de29bb2d1 100644
--- a/Packs/DomainToolsIrisDetect/.pack-ignore
+++ b/Packs/DomainToolsIrisDetect/.pack-ignore
@@ -1,2 +0,0 @@
-[file:DomainToolsIrisDetect.yml]
-ignore=IN124
\ No newline at end of file
diff --git a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.py b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.py
index ce2f0db0792b..40ff62f199e8 100644
--- a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.py
+++ b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.py
@@ -1,10 +1,11 @@
"""
DomainTools Iris Detect XSOAR Integration
"""
+
from hashlib import sha256
from hmac import new
from math import ceil
-from typing import Callable, Tuple
+from collections.abc import Callable
from urllib.parse import urlencode, urlunparse
from urllib3 import disable_warnings
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
@@ -12,7 +13,7 @@
# Disable insecure warnings
disable_warnings() # pylint: disable=no-member
-''' CONSTANTS '''
+""" CONSTANTS """
INTEGRATION_CONTEXT_NAME = "DomainToolsIrisDetect"
DOMAINTOOLS_PARAMS: Dict[str, Any] = {
@@ -21,18 +22,33 @@
"app_version": "1",
}
-DEFAULT_HEADERS: Dict[str, str] = {"accept": "application/json", "Content-Type": "application/json"}
+DEFAULT_HEADERS: Dict[str, str] = {
+ "accept": "application/json",
+ "Content-Type": "application/json",
+}
TIMEOUT = 60.0
RETRY = 3
DOMAINTOOLS_API_BASE_URL = "api.domaintools.com"
DOMAINTOOLS_API_VERSION = "v1"
-DOMAINTOOLS_MANAGE_WATCHLIST_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/"
-DOMAINTOOLS_NEW_DOMAINS_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/new/"
-DOMAINTOOLS_WATCHED_DOMAINS_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/watched/"
-DOMAINTOOLS_IGNORED_DOMAINS_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/ignored/"
-DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/monitors/"
-DOMAINTOOLS_ESCALATE_DOMAINS_ENDPOINT = f"/{DOMAINTOOLS_API_VERSION}/iris-detect/escalations/"
+DOMAINTOOLS_MANAGE_WATCHLIST_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/"
+)
+DOMAINTOOLS_NEW_DOMAINS_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/new/"
+)
+DOMAINTOOLS_WATCHED_DOMAINS_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/watched/"
+)
+DOMAINTOOLS_IGNORED_DOMAINS_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/domains/ignored/"
+)
+DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/monitors/"
+)
+DOMAINTOOLS_ESCALATE_DOMAINS_ENDPOINT = (
+ f"/{DOMAINTOOLS_API_VERSION}/iris-detect/escalations/"
+)
DOMAINTOOLS_ESCALATE_DOMAINS_HEADER = "Escalated Domains"
DOMAINTOOLS_WATCHED_DOMAINS_HEADER = "Watched Domains"
@@ -71,7 +87,7 @@
DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # ISO8601 format with UTC, default in XSOAR
NO_DOMAINS_FOUND = "No Domains Found."
LIMIT_ERROR_MSG = "Invalid Input Error: limit should be greater than zero."
-DEFAULT_DAYS_BACK = '3 days'
+DEFAULT_DAYS_BACK = "3 days"
MAX_DAYS_BACK = 30
DATE_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
DEFAULT_PAGE_SIZE = 50
@@ -150,18 +166,18 @@ class Client(BaseClient):
"""
def __init__(
- self,
- username: str,
- api_key: str,
- new_domains: str,
- changed_domains: str,
- blocked_domains: str,
- risk_score_ranges: List,
- include_domain_data: Optional[bool] = None,
- first_fetch: str = '3 days',
- fetch_limit: Optional[int] = 50,
- verify=None,
- proxy=None,
+ self,
+ username: str,
+ api_key: str,
+ new_domains: str,
+ changed_domains: str,
+ blocked_domains: str,
+ risk_score_ranges: List,
+ include_domain_data: Optional[bool] = None,
+ first_fetch: str = "3 days",
+ fetch_limit: Optional[int] = 50,
+ verify=None,
+ proxy=None,
):
super().__init__(
DOMAINTOOLS_API_BASE_URL,
@@ -199,10 +215,11 @@ def query_dt_api(self, end_point: str, method: str, **kwargs):
query = {
"api_username": self.username,
"signature": signer.sign(timestamp, end_point),
- "timestamp": timestamp
+ "timestamp": timestamp,
}
- full_url = urlunparse(("https", DOMAINTOOLS_API_BASE_URL, end_point, "", urlencode(query), None))
-
+ full_url = urlunparse(
+ ("https", DOMAINTOOLS_API_BASE_URL, end_point, "", urlencode(query), None)
+ )
return self._http_request(
method=method,
full_url=full_url,
@@ -214,7 +231,9 @@ def query_dt_api(self, end_point: str, method: str, **kwargs):
error_handler=dt_error_handler,
)
- def create_indicator_from_detect_domain(self, item: Dict, term: Dict) -> Dict[str, Any]:
+ def create_indicator_from_detect_domain(
+ self, item: Dict, term: Dict
+ ) -> Dict[str, Any]:
"""Return the indicator object for the given DomainTools Iris Detect domain object.
Args:
@@ -224,7 +243,9 @@ def create_indicator_from_detect_domain(self, item: Dict, term: Dict) -> Dict[st
Returns:
Dict: The indicator object containing various fields and values.
"""
- risk_score_components = flatten_nested_dict(item.get("risk_score_components", {}))
+ risk_score_components = flatten_nested_dict(
+ item.get("risk_score_components", {})
+ )
return {
"name": "DomainTools Iris Detect",
@@ -241,9 +262,14 @@ def create_indicator_from_detect_domain(self, item: Dict, term: Dict) -> Dict[st
"irisdetectdiscovereddate": item.get("discovered_date", ""),
"irisdetectchangeddate": item.get("changed_date", ""),
"irisdetectdomainstatus": item.get("status", ""),
- "irisdetectdomainstate": "blocked" if any(result.get("escalation_type", "") == "blocked" for result in
- item.get("escalations", [])) else item.get("state", ""),
-
+ "irisdetectdomainstate": (
+ "blocked"
+ if any(
+ result.get("escalation_type", "") == "blocked"
+ for result in item.get("escalations", [])
+ )
+ else item.get("state", "")
+ ),
"domaintoolsriskscore": item.get("risk_score", ""),
"domaintoolsriskscorestatus": item.get("risk_score_status", ""),
"irisdetectdomainid": item.get("id", ""),
@@ -282,8 +308,14 @@ def create_indicator_from_detect_domain(self, item: Dict, term: Dict) -> Dict[st
},
}
- def process_dt_domains_into_xsoar(self, domains_list: List[Dict[str, Any]], incident_name: str, last_run: str,
- term: Dict[str, Any], enable_incidents: bool = True) -> List[Any]:
+ def process_dt_domains_into_xsoar(
+ self,
+ domains_list: List[Dict[str, Any]],
+ incident_name: str,
+ last_run: str,
+ term: Dict[str, Any],
+ enable_incidents: bool = True,
+ ) -> List[Any]:
"""
Create indicators and, optionally, an incident in XSOAR for a list of
DomainTools Iris Detect domains.
@@ -300,8 +332,13 @@ def process_dt_domains_into_xsoar(self, domains_list: List[Dict[str, Any]], inci
otherwise an empty list.
"""
for domain in domains_list:
- domain['monitor_term'] = join_dict_values_for_keys(domain.get("monitor_ids", []), term)
- indicators = [self.create_indicator_from_detect_domain(item, term) for item in domains_list]
+ domain["monitor_term"] = join_dict_values_for_keys(
+ domain.get("monitor_ids", []), term
+ )
+ indicators = [
+ self.create_indicator_from_detect_domain(item, term)
+ for item in domains_list
+ ]
if not indicators:
return []
@@ -310,13 +347,19 @@ def process_dt_domains_into_xsoar(self, domains_list: List[Dict[str, Any]], inci
demisto.info(f"Added {len(indicators)} indicators to demisto")
if enable_incidents:
- last_run_dt_without_ms = datetime.strptime(get_last_run(last_run), DATE_FORMAT).replace(
- microsecond=0) if get_last_run(last_run) else None
- first_run_dt_without_ms = (datetime.now() - timedelta(days=validate_first_fetch(self.first_fetch))).replace(
- microsecond=0)
+ last_run_dt_without_ms = (
+ datetime.strptime(get_last_run(last_run), DATE_FORMAT).replace(
+ microsecond=0
+ )
+ if get_last_run(last_run)
+ else None
+ )
+ first_run_dt_without_ms = (
+ datetime.now() - timedelta(days=validate_first_fetch(self.first_fetch))
+ ).replace(microsecond=0)
incident = {
"name": f"{incident_name} "
- f"{last_run_dt_without_ms or first_run_dt_without_ms}",
+ f"{last_run_dt_without_ms or first_run_dt_without_ms}",
"details": json.dumps(domains_list),
"rawJSON": json.dumps({"incidents": domains_list}),
"type": INCIDENT_TYPE[incident_name],
@@ -325,7 +368,9 @@ def process_dt_domains_into_xsoar(self, domains_list: List[Dict[str, Any]], inci
return []
- def fetch_dt_domains_from_api(self, end_point: str, last_run: str) -> Tuple[List[Dict], str]:
+ def fetch_dt_domains_from_api(
+ self, end_point: str, last_run: str
+ ) -> tuple[List[Dict], str]:
"""
Makes an API call to the Domain Tools API endpoint and retrieves domain data based on the provided
parameters.
@@ -344,16 +389,20 @@ def fetch_dt_domains_from_api(self, end_point: str, last_run: str) -> Tuple[List
if last_run_value:
params = DOMAINTOOLS_PARAMS | {
DT_TIMESTAMP_DICT[last_run]: last_run_value,
- "include_domain_data": INCLUDE_DOMAIN_DATA_VALUE if self.include_domain_data else 0,
+ "include_domain_data": (
+ INCLUDE_DOMAIN_DATA_VALUE if self.include_domain_data else 0
+ ),
}
- demisto.info(f'Found last run, fetching domains from {last_run_value}')
+ demisto.info(f"Found last run, fetching domains from {last_run_value}")
else:
days_back = validate_first_fetch(self.first_fetch)
params = DOMAINTOOLS_PARAMS | {
DT_TIMESTAMP_DICT[last_run]: datetime.now() - timedelta(days=days_back),
- "include_domain_data": INCLUDE_DOMAIN_DATA_VALUE if self.include_domain_data else 0,
+ "include_domain_data": (
+ INCLUDE_DOMAIN_DATA_VALUE if self.include_domain_data else 0
+ ),
}
- demisto.info(f'First run, fetching domains from last {days_back} days')
+ demisto.info(f"First run, fetching domains from last {days_back} days")
if self.risk_score_ranges:
params["risk_score_ranges[]"] = self.risk_score_ranges
@@ -371,11 +420,13 @@ def fetch_and_process_domains(self) -> None:
"""Fetches DomainTools domain information and creates incidents in XSOAR."""
def process_domains(
- process_endpoint: str,
- process_timestamp_key: str,
- process_incident_name: str,
- import_only: bool,
- process_filter_func: Optional[Callable[[List[Dict[str, Any]]], List[Dict[str, Any]]]] = None,
+ process_endpoint: str,
+ process_timestamp_key: str,
+ process_incident_name: str,
+ import_only: bool,
+ process_filter_func: Optional[
+ Callable[[List[Dict[str, Any]]], List[Dict[str, Any]]]
+ ] = None,
) -> str:
"""
Process domains by calling DomainTools API, filtering results, and converting them into XSOAR incidents.
@@ -407,7 +458,9 @@ def process_domains(
)
return last_run
- def filter_blocked_domains(domains: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ def filter_blocked_domains(
+ domains: List[Dict[str, Any]]
+ ) -> List[Dict[str, Any]]:
"""
Filters the list of domains to return only the blocked domains.
@@ -420,12 +473,15 @@ def filter_blocked_domains(domains: List[Dict[str, Any]]) -> List[Dict[str, Any]
return [
domain
for domain in domains
- if domain.get("escalations") and any(
- escalation.get("escalation_type") == "blocked" for escalation in domain["escalations"])
+ if domain.get("escalations")
+ and any(
+ escalation.get("escalation_type") == "blocked"
+ for escalation in domain["escalations"]
+ )
]
monitor_result = self.query_dt_api(
- DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT, "GET"
+ DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT, "GET", params=DOMAINTOOLS_PARAMS
)
term = {
results.get("id"): results.get("term")
@@ -459,11 +515,21 @@ def filter_blocked_domains(domains: List[Dict[str, Any]]) -> List[Dict[str, Any]
last_runs = {CHANGED_DOMAIN_TIMESTAMP: "", NEW_DOMAIN_TIMESTAMP: ""}
- for endpoint, timestamp_key, incident_name, domain_setting, filter_func in domains_to_process:
+ for (
+ endpoint,
+ timestamp_key,
+ incident_name,
+ domain_setting,
+ filter_func,
+ ) in domains_to_process:
if domain_setting:
- last_runs[timestamp_key] = process_domains(endpoint, timestamp_key, incident_name,
- domain_setting == "Import Indicators Only", filter_func,
- )
+ last_runs[timestamp_key] = process_domains(
+ endpoint,
+ timestamp_key,
+ incident_name,
+ domain_setting == "Import Indicators Only",
+ filter_func,
+ )
demisto.setIntegrationContext(last_runs)
demisto.info(f"Adding {len(incidents)} incidents to demisto")
@@ -561,17 +627,23 @@ def dt_error_handler(response: requests.Response) -> None:
403: "Forbidden: The request is understood, but it has been refused or access is not allowed.",
404: "Not Found: The requested resource could not be found.",
500: "Internal Server Error: An error occurred on the server side.",
- 206: "Partial Content: The requested resource has been partially returned."
+ 206: "Partial Content: The requested resource has been partially returned.",
}
if response.status_code in {206} | set(range(400, 600)):
try:
error_json = response.json().get("error", {})
- error_message = (error_json.get("message") or " ".join(
- error_json.get("messages", [])) or specific_error_messages.get(response.status_code,
- "An unknown error occurred."))
+ error_message = (
+ error_json.get("message")
+ or " ".join(error_json.get("messages", []))
+ or specific_error_messages.get(
+ response.status_code, "An unknown error occurred."
+ )
+ )
except ValueError:
- error_message = specific_error_messages.get(response.status_code, "An unknown error occurred.")
+ error_message = specific_error_messages.get(
+ response.status_code, "An unknown error occurred."
+ )
raise DemistoException(error_message, res=response)
@@ -671,10 +743,10 @@ def format_watchlist_fields(result: Dict[Any, Any]) -> Dict[str, Any]:
def format_data(
- result: Dict[str, List[Dict[str, Any]]],
- field: str,
- output_prefix: str,
- data_key: str,
+ result: Dict[str, List[Dict[str, Any]]],
+ field: str,
+ output_prefix: str,
+ data_key: str,
) -> Dict[str, Any]:
"""
Extracts and formats data.
@@ -763,17 +835,22 @@ def create_common_api_arguments(args: Dict[str, Any]) -> Dict[str, Any]:
return {
"monitor_id": args.get("monitor_id"),
"tlds[]": argToList(args.get("tlds")),
- "include_domain_data": argToBoolean(args.get("include_domain_data")) if args.get(
- "include_domain_data") else None,
+ "include_domain_data": (
+ argToBoolean(args.get("include_domain_data"))
+ if args.get("include_domain_data")
+ else None
+ ),
"risk_score_ranges[]": argToList(args.get("risk_score_ranges")),
"sort[]": argToList(args.get("sort")),
"order": args.get("order"),
- "mx_exists": argToBoolean(args.get("mx_exists")) if args.get("mx_exists") else None,
+ "mx_exists": (
+ argToBoolean(args.get("mx_exists")) if args.get("mx_exists") else None
+ ),
"preview": argToBoolean(args.get("preview")) if args.get("preview") else None,
"search": args.get("search"),
"limit": arg_to_number(args.get("limit")),
"page": arg_to_number(args.get("page")),
- "page_size": arg_to_number(args.get("page_size"))
+ "page_size": arg_to_number(args.get("page_size")),
}
@@ -794,21 +871,23 @@ def create_escalated_api_arguments(args: Dict[str, Any]) -> Dict[str, Any]:
return {
"escalated_since": args.get("escalated_since"),
"escalation_types[]": args.get("escalation_types"),
- "changed_since": args.get("changed_since")
+ "changed_since": args.get("changed_since"),
}
-def pagination(page: Optional[int], page_size: Optional[int], limit: Optional[int]) -> Tuple[int, int]:
+def pagination(
+ page: Optional[int], page_size: Optional[int], limit: Optional[int]
+) -> tuple[int, int]:
+ """
+ Define pagination.
+ Args:
+ limit: Records per page.
+ page: The page number.
+ page_size: The number of requested results per page.
+ Returns:
+ limit (int): Records per page.
+ offset (int): The number of records to be skipped.
"""
- Define pagination.
- Args:
- limit: Records per page.
- page: The page number.
- page_size: The number of requested results per page.
- Returns:
- limit (int): Records per page.
- offset (int): The number of records to be skipped.
- """
if page is not None and page <= 0:
raise DemistoException(PAGE_NUMBER_ERROR_MSG)
@@ -818,11 +897,14 @@ def pagination(page: Optional[int], page_size: Optional[int], limit: Optional[in
raise DemistoException(LIMIT_ERROR_MSG)
if page_size and limit:
limit = page_size
- return limit or page_size or DEFAULT_PAGE_SIZE, (page - 1 if page else DEFAULT_OFFSET) * (page_size or DEFAULT_PAGE_SIZE)
+ return limit or page_size or DEFAULT_PAGE_SIZE, (
+ page - 1 if page else DEFAULT_OFFSET
+ ) * (page_size or DEFAULT_PAGE_SIZE)
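For reference, the limit/offset arithmetic in the reformatted `pagination` helper above works out as in this small sketch (DEFAULT_PAGE_SIZE = 50 is shown earlier in the file; DEFAULT_OFFSET = 0 is assumed, and the validation branches are omitted):

```python
DEFAULT_PAGE_SIZE = 50  # defined earlier in this file
DEFAULT_OFFSET = 0      # assumed value

def pagination(page, page_size, limit):
    # Same arithmetic as the helper above, without the input validation.
    if page_size and limit:
        limit = page_size
    return (
        limit or page_size or DEFAULT_PAGE_SIZE,
        (page - 1 if page else DEFAULT_OFFSET) * (page_size or DEFAULT_PAGE_SIZE),
    )

print(pagination(page=3, page_size=25, limit=None))     # (25, 50): fetch 25 records, skipping the first 50
print(pagination(page=None, page_size=None, limit=10))  # (10, 0)
```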
-def get_command_title_string(sub_context: str, page: Optional[int], page_size: Optional[int],
- hits: Optional[int]) -> str:
+def get_command_title_string(
+ sub_context: str, page: Optional[int], page_size: Optional[int], hits: Optional[int]
+) -> str:
"""
Generates a command title string based on the provided context and pagination information.
@@ -837,8 +919,10 @@ def get_command_title_string(sub_context: str, page: Optional[int], page_size: O
"""
if page and page_size and hits is not None and (page > 0 and page_size > 0):
total_page = ceil(hits / page_size) if hits > 0 else 1
- return f'{sub_context} \nCurrent page size: {page_size}\n' \
- f'Showing page {page} out of {total_page}'
+ return (
+ f"{sub_context} \nCurrent page size: {page_size}\n"
+ f"Showing page {page} out of {total_page}"
+ )
return f"{sub_context}"
@@ -867,16 +951,23 @@ def get_max_limit(end_point: str, dt_args: Dict[str, Any]) -> int:
include_domain_data = dt_args.get("include_domain_data", False)
return (
- MONITOR_DOMAINS_LIMIT if end_point == DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT and not include_counts else
- INCLUDE_COUNTS_LIMIT if include_counts else
- INCLUDE_DOMAIN_DATA_LIMIT if include_domain_data else
- DEFAULT_LIMIT
+ MONITOR_DOMAINS_LIMIT
+ if end_point == DOMAINTOOLS_MONITOR_DOMAINS_ENDPOINT and not include_counts
+ else (
+ INCLUDE_COUNTS_LIMIT
+ if include_counts
+ else INCLUDE_DOMAIN_DATA_LIMIT if include_domain_data else DEFAULT_LIMIT
+ )
)
def get_results_helper(
- client: Client, end_point: str, dt_args: Dict[str, Any], result_key: str, tb_header_name: str
-) -> Tuple[List[Any], str]:
+ client: Client,
+ end_point: str,
+ dt_args: Dict[str, Any],
+ result_key: str,
+ tb_header_name: str,
+) -> tuple[List[Any], str]:
"""
Helper function to get results for the given endpoint and result_key.
@@ -902,12 +993,17 @@ def get_results_helper(
total_count = 0
while True:
- fetch_size = min(limit - len(results), max_limit) if limit is not None else max_limit
+ fetch_size = (
+ min(limit - len(results), max_limit) if limit is not None else max_limit
+ )
if fetch_size <= 0:
break
dt_args.update({"offset": offset, "limit": fetch_size})
- response = client.query_dt_api(end_point, "GET", params=DOMAINTOOLS_PARAMS | dt_args)
+
+ response = client.query_dt_api(
+ end_point, "GET", params=DOMAINTOOLS_PARAMS | dt_args
+ )
total_count = response.get("total_count", 0)
new_results = response.get(result_key, [])
@@ -920,11 +1016,13 @@ def get_results_helper(
if len(new_results) < fetch_size:
break
- return results, get_command_title_string(tb_header_name, page, page_size, total_count)
+ return results, get_command_title_string(
+ tb_header_name, page, page_size, total_count
+ )
def fetch_domain_tools_api_results(
- client: Client, end_point: str, tb_header_name: str, dt_args: Dict[str, Any]
+ client: Client, end_point: str, tb_header_name: str, dt_args: Dict[str, Any]
) -> CommandResults:
"""
Gets the results for a DomainTools API endpoint.
@@ -939,7 +1037,10 @@ def fetch_domain_tools_api_results(
CommandResults: The results of the command.
"""
- results, title = get_results_helper(client, end_point, dt_args, "watchlist_domains", tb_header_name)
+
+ results, title = get_results_helper(
+ client, end_point, dt_args, "watchlist_domains", tb_header_name
+ )
indicator_list: List[Dict] = []
if results:
@@ -964,14 +1065,16 @@ def fetch_domain_tools_api_results(
outputs=results,
outputs_prefix=f"{INTEGRATION_CONTEXT_NAME}.{CONTEXT_PATH_KEY[tb_header_name]}",
outputs_key_field="domain",
- readable_output=tableToMarkdown(name=title, t=indicator_list)
- if indicator_list
- else NO_DOMAINS_FOUND,
+ readable_output=(
+ tableToMarkdown(name=title, t=indicator_list)
+ if indicator_list
+ else NO_DOMAINS_FOUND
+ ),
)
def domaintools_iris_detect_get_watched_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
domaintools_iris_detect_get_watched_domains_command: Get the watched domains list.
@@ -992,7 +1095,7 @@ def domaintools_iris_detect_get_watched_domains_command(
def domaintools_iris_detect_get_new_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
domaintools_iris_detect_get_new_domains_command: Get the new domains list.
@@ -1009,14 +1112,12 @@ def domaintools_iris_detect_get_new_domains_command(
DOMAINTOOLS_NEW_DOMAINS_ENDPOINT,
DOMAINTOOLS_NEW_DOMAINS_HEADER,
create_common_api_arguments(args)
- | {
- "discovered_since": args.get("discovered_since")
- },
+ | {"discovered_since": args.get("discovered_since")},
)
def domaintools_iris_detect_get_ignored_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
domaintools_iris_detect_get_ignored_domains_command: Get the ignored domains list.
@@ -1037,7 +1138,7 @@ def domaintools_iris_detect_get_ignored_domains_command(
def domaintools_iris_detect_get_blocklist_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
domaintools_iris_detect_get_blocklist_domains_command: Get the blocked domains list.
@@ -1053,12 +1154,14 @@ def domaintools_iris_detect_get_blocklist_domains_command(
client,
DOMAINTOOLS_WATCHED_DOMAINS_ENDPOINT,
DOMAINTOOLS_BLOCKED_DOMAINS_HEADER,
- create_common_api_arguments(args) | create_escalated_api_arguments(args) | {"escalation_types[]": "blocked"},
+ create_common_api_arguments(args)
+ | create_escalated_api_arguments(args)
+ | {"escalation_types[]": "blocked"},
)
def domaintools_iris_detect_get_escalated_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
domaintools_iris_detect_get_escalated_domains_command: Get the escalated domains
@@ -1075,13 +1178,14 @@ def domaintools_iris_detect_get_escalated_domains_command(
client,
DOMAINTOOLS_WATCHED_DOMAINS_ENDPOINT,
DOMAINTOOLS_ESCALATE_DOMAINS_HEADER,
- create_common_api_arguments(args) | create_escalated_api_arguments(args) | {
- "escalation_types[]": "google_safe"},
+ create_common_api_arguments(args)
+ | create_escalated_api_arguments(args)
+ | {"escalation_types[]": "google_safe"},
)
def domaintools_iris_detect_get_monitors_list_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
Get the monitor domains list.
@@ -1103,13 +1207,16 @@ def domaintools_iris_detect_get_monitors_list_command(
}
| create_common_api_arguments(args)
| create_escalated_api_arguments(args),
- "monitors", DOMAINTOOLS_MONITORS_HEADER
+ "monitors",
+ DOMAINTOOLS_MONITORS_HEADER,
)
+
if results:
monitor_data = [format_monitor_fields(result) for result in results]
headers = list(monitor_data[0].keys())
- readable_output = tableToMarkdown(name=title, t=monitor_data,
- removeNull=True, headers=headers)
+ readable_output = tableToMarkdown(
+ name=title, t=monitor_data, removeNull=True, headers=headers
+ )
else:
readable_output = NO_DOMAINS_FOUND
return CommandResults(
@@ -1121,7 +1228,7 @@ def domaintools_iris_detect_get_monitors_list_command(
def handle_domain_action(
- client: Client, args: Dict[str, Any], action: str
+ client: Client, args: Dict[str, Any], action: str
) -> CommandResults:
"""
Performs the specified action on one or more watchlist domains.
@@ -1141,33 +1248,35 @@ def handle_domain_action(
DOMAINTOOLS_MANAGE_WATCHLIST_ENDPOINT,
DOMAINTOOLS_WATCHED_DOMAINS_HEADER,
format_watchlist_fields,
- "WatchedDomain"
+ "WatchedDomain",
),
"ignored": (
"PATCH",
DOMAINTOOLS_MANAGE_WATCHLIST_ENDPOINT,
DOMAINTOOLS_IGNORE_DOMAINS_HEADER,
format_watchlist_fields,
- "IgnoredDomain"
+ "IgnoredDomain",
),
"google_safe": (
"POST",
DOMAINTOOLS_ESCALATE_DOMAINS_ENDPOINT,
DOMAINTOOLS_ESCALATE_DOMAINS_HEADER,
format_blocklist_fields,
- "EscalatedDomain"
+ "EscalatedDomain",
),
"blocked": (
"POST",
DOMAINTOOLS_ESCALATE_DOMAINS_ENDPOINT,
DOMAINTOOLS_BLOCKED_DOMAINS_HEADER,
format_blocklist_fields,
- "BlockedDomain"
+ "BlockedDomain",
),
}
method, endpoint, header, format_func, context_output_string = action_params[action]
- data = {"watchlist_domain_ids": argToList(args.get("watchlist_domain_ids"))} | DOMAINTOOLS_PARAMS
+ data = {
+ "watchlist_domain_ids": argToList(args.get("watchlist_domain_ids"))
+ } | DOMAINTOOLS_PARAMS
if action in ["watched", "ignored"]:
data |= {"state": action}
@@ -1175,24 +1284,28 @@ def handle_domain_action(
data |= {"escalation_type": action}
indicators_list = [
- dict(format_func(result)) for result in
- client.query_dt_api(endpoint, method, json_data=data).get(
- "watchlist_domains" if action in ["watched", "ignored"] else "escalations", [])
+ dict(format_func(result))
+ for result in client.query_dt_api(endpoint, method, json_data=data).get(
+ "watchlist_domains" if action in ["watched", "ignored"] else "escalations",
+ [],
+ )
]
return CommandResults(
outputs=indicators_list,
outputs_prefix=f"{INTEGRATION_CONTEXT_NAME}.{context_output_string}",
outputs_key_field="",
- readable_output=tableToMarkdown(name=header, t=indicators_list)
- if indicators_list
- else NO_DOMAINS_FOUND,
+ readable_output=(
+ tableToMarkdown(name=header, t=indicators_list)
+ if indicators_list
+ else NO_DOMAINS_FOUND
+ ),
raw_response=indicators_list,
)
def domaintools_iris_detect_watch_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
Watch domains for changes using DomainTools Iris API.
@@ -1209,7 +1322,7 @@ def domaintools_iris_detect_watch_domains_command(
def domaintools_iris_detect_ignore_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
Ignore domains using DomainTools Iris API.
@@ -1226,7 +1339,7 @@ def domaintools_iris_detect_ignore_domains_command(
def domaintools_iris_detect_escalate_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
Escalate domains to Google Safe Browsing using DomainTools Iris API.
@@ -1243,7 +1356,7 @@ def domaintools_iris_detect_escalate_domains_command(
def domaintools_iris_detect_blocklist_domains_command(
- client: Client, args: Dict[str, Any]
+ client: Client, args: Dict[str, Any]
) -> CommandResults:
"""
Blocklist domains using DomainTools Iris API.
@@ -1272,15 +1385,15 @@ def main() -> None:
command = demisto.command()
args = demisto.args()
params = demisto.params()
- username = params.get('credentials', {}).get('identifier')
- api_key = params.get('credentials', {}).get('password')
+ username = params.get("credentials", {}).get("identifier")
+ api_key = params.get("credentials", {}).get("password")
verify_certificate = not params.get("insecure", False)
proxy = params.get("proxy", False)
handle_proxy()
risk_score_ranges = argToList(params.get("risk_score_ranges"))
include_domain_data = params.get("include_domain_data")
- first_fetch_time = params.get('first_fetch', DEFAULT_DAYS_BACK).strip()
- fetch_limit = arg_to_number(params.get('max_fetch', 50))
+ first_fetch_time = params.get("first_fetch", DEFAULT_DAYS_BACK).strip()
+ fetch_limit = arg_to_number(params.get("max_fetch", 50))
new_domains = params.get("new_domains")
changed_domains = params.get("changed_domains")
blocked_domains = params.get("blocked_domains")
diff --git a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.yml b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.yml
index 5023c88f8637..fac966d9cb89 100644
--- a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.yml
+++ b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect.yml
@@ -104,7 +104,7 @@ script:
commands:
- arguments:
- description: |-
- List of Iris Detect domain IDs to escalate. The domain ID can be
+ List of Iris Detect domain IDs to escalate. The domain ID can be
found using 'domaintools-iris-detect-get-new-domains' command.
isArray: true
name: watchlist_domain_ids
@@ -132,8 +132,8 @@ script:
type: String
- arguments:
- description: |-
- List of Iris Detect domain IDs to escalate. The domain ID can be
- found using 'domaintools-iris-detect-get-new-domains,
+ List of Iris Detect domain IDs to escalate. The domain ID can be
+ found using 'domaintools-iris-detect-get-new-domains,
domaintools-iris-detect-get-watched-domains' commands.
isArray: true
name: watchlist_domain_ids
@@ -161,7 +161,7 @@ script:
type: String
- arguments:
- description: |-
- List of Iris Detect domain IDs to escalate. The domain ID can be
+ List of Iris Detect domain IDs to escalate. The domain ID can be
found using 'domaintools-iris-detect-get-new-domains' command.
isArray: true
name: watchlist_domain_ids
@@ -192,7 +192,7 @@ script:
type: String
- arguments:
- description: |-
- List of Iris Detect domain IDs to escalate. The domain ID can be
+ List of Iris Detect domain IDs to escalate. The domain ID can be
found using 'domaintools-iris-detect-get-new-domains, domaintools-iris-detect-get-watched-domains' command.
isArray: true
name: watchlist_domain_ids
@@ -282,8 +282,8 @@ script:
- description: Filter domains by when they were discovered. Provide a datetime in ISO 8601 format, for example 2022-05-18T12:19:51.685496.
name: discovered_since
- description: |-
- Monitor ID is used when requesting domains for a specific monitor.
- The monitor ID can be found using the
+ Monitor ID is used when requesting domains for a specific monitor.
+ The monitor ID can be found using the
'domaintools-iris-detect-get-monitors-list' command.
name: monitor_id
- auto: PREDEFINED
@@ -436,8 +436,8 @@ script:
- blocked
- google_safe
- description: |-
- Monitor ID is used when requesting domains for a specific monitor.
- The monitor ID can be found using the
+ Monitor ID is used when requesting domains for a specific monitor.
+ The monitor ID can be found using the
'domaintools-iris-detect-get-monitors-list' command.
name: monitor_id
- auto: PREDEFINED
@@ -597,8 +597,8 @@ script:
- 70-99
- 100-100
- description: |-
- Monitor ID is used when requesting domains for a specific monitor.
- The monitor ID can be found using the
+ Monitor ID is used when requesting domains for a specific monitor.
+ The monitor ID can be found using the
'domaintools-iris-detect-get-monitors-list' command.
name: monitor_id
- auto: PREDEFINED
@@ -749,8 +749,8 @@ script:
- 70-99
- 100-100
- description: |-
- Monitor ID is used when requesting domains for a specific monitor.
- The monitor ID can be found using the
+ Monitor ID is used when requesting domains for a specific monitor.
+ The monitor ID can be found using the
'domaintools-iris-detect-get-monitors-list' command.
name: monitor_id
- auto: PREDEFINED
@@ -892,8 +892,8 @@ script:
type: String
- arguments:
- description: |-
- Monitor ID is used when requesting domains for a specific monitor.
- The monitor ID can be found using the
+ Monitor ID is used when requesting domains for a specific monitor.
+ The monitor ID can be found using the
'domaintools-iris-detect-get-monitors-list' command.
name: monitor_id
- auto: PREDEFINED
@@ -1044,7 +1044,7 @@ script:
type: String
- description: This command will reset your fetch history.
name: domaintools-iris-detect-reset-fetch-indicators
- dockerimage: demisto/python3:3.10.14.91134
+ dockerimage: demisto/python3:3.10.14.99865
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect_test.py b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect_test.py
index 50f2abee06b7..695db4be34b1 100644
--- a/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect_test.py
+++ b/Packs/DomainToolsIrisDetect/Integrations/DomainToolsIrisDetect/DomainToolsIrisDetect_test.py
@@ -2,11 +2,10 @@
DomainTools Iris Detect Test Cases
"""
import hmac
-import io
import json
import time
from hashlib import sha256
-from typing import Any, Dict, Optional, Tuple
+from typing import Any
import pytest
import requests
@@ -213,7 +212,7 @@ def load_json(path):
JSONDecodeError: If the file at the specified path contains invalid JSON.
"""
- with io.open(path, mode="r", encoding="utf-8") as file:
+ with open(path, encoding="utf-8") as file:
return json.loads(file.read())
@@ -1012,7 +1011,7 @@ def test_validate_first_fetch_parametrized(value, expected):
(1, 10, 10, (10, 0)),
],
)
-def test_pagination(page: Optional[int], page_size: Optional[int], limit: Optional[int], expected: Tuple[int, int]):
+def test_pagination(page: int | None, page_size: int | None, limit: int | None, expected: tuple[int, int]):
"""
Test the pagination function with various input cases, including when page, page_size, and limit are None,
when only page is provided, when page and page_size are provided, and when all parameters are provided.
@@ -1038,7 +1037,7 @@ def test_pagination(page: Optional[int], page_size: Optional[int], limit: Option
(1, 10, 0, LIMIT_ERROR_MSG),
],
)
-def test_pagination_errors(page: Optional[int], page_size: Optional[int], limit: Optional[int], error_msg: str):
+def test_pagination_errors(page: int | None, page_size: int | None, limit: int | None, error_msg: str):
"""
Test the pagination function with invalid input cases that should raise exceptions.
@@ -1061,7 +1060,7 @@ def test_pagination_errors(page: Optional[int], page_size: Optional[int], limit:
("Test Context", 1, 10, 0, "Test Context \nCurrent page size: 10\nShowing page 1 out of 1"),
],
)
-def test_get_command_title_string(sub_context: str, page: Optional[int], page_size: Optional[int], hits: Optional[int],
+def test_get_command_title_string(sub_context: str, page: int | None, page_size: int | None, hits: int | None,
expected_output: str):
"""
Test the get_command_title_string function with various input cases.
@@ -1086,7 +1085,7 @@ def test_get_command_title_string(sub_context: str, page: Optional[int], page_si
("some_other_endpoint", {"include_counts": False, "include_domain_data": False}, DEFAULT_LIMIT),
],
)
-def test_get_max_limit(end_point: str, dt_args: Dict[str, Any], expected_max_limit: int):
+def test_get_max_limit(end_point: str, dt_args: dict[str, Any], expected_max_limit: int):
"""
Test the get_max_limit function with various input cases, including different endpoints and argument combinations.
diff --git a/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_13.md b/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_13.md
new file mode 100644
index 000000000000..bf3954ba7bb3
--- /dev/null
+++ b/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_13.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### DomainTools Iris Detect
+
+- Updated the Docker image to: *demisto/python3:3.10.14.99865*.
diff --git a/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_14.md b/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_14.md
new file mode 100644
index 000000000000..ba26b37f9ef9
--- /dev/null
+++ b/Packs/DomainToolsIrisDetect/ReleaseNotes/1_0_14.md
@@ -0,0 +1,9 @@
+
+#### Integrations
+
+##### DomainTools Iris Detect
+
+- Fixed an issue where the **fetch_incident** command called the monitor domains API without the following default API parameters:
+ - *app_partner*
+ - *app_name*
+ - *app_version*
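Reviewer note: the fix described above lines up with the `DOMAINTOOLS_PARAMS | dt_args` pattern visible in the integration diff earlier in this patch. A rough sketch of that merge follows, with placeholder values for the app-identification defaults (the real constants live in the integration module):

```python
# Placeholder defaults; the integration defines the real values.
DOMAINTOOLS_PARAMS = {
    "app_partner": "example_partner",
    "app_name": "example_app",
    "app_version": "1.0",
}


def build_query_params(dt_args: dict) -> dict:
    # Dict union keeps the defaults and lets request-specific keys override them.
    return DOMAINTOOLS_PARAMS | dt_args


params = build_query_params({"offset": 0, "limit": 50})
assert params["app_partner"] == "example_partner" and params["limit"] == 50
```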
diff --git a/Packs/DomainToolsIrisDetect/pack_metadata.json b/Packs/DomainToolsIrisDetect/pack_metadata.json
index 4b8b06e6f5a2..aa32d0a80909 100644
--- a/Packs/DomainToolsIrisDetect/pack_metadata.json
+++ b/Packs/DomainToolsIrisDetect/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DomainTools Iris Detect",
"description": "Iris Detect protects against malicious domains impersonating your brands and supply chain.",
"support": "partner",
- "currentVersion": "1.0.12",
+ "currentVersion": "1.0.14",
"author": "DomainTools Integrations",
"url": "http://www.domaintools.com",
"email": "enterprisesupport@domaintools.com",
diff --git a/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/DomainTools_Iris.yml b/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/DomainTools_Iris.yml
index b659570c02e0..2de198e3f7d0 100644
--- a/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/DomainTools_Iris.yml
+++ b/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/DomainTools_Iris.yml
@@ -458,10 +458,10 @@ script:
description: The server type.
type: Number
- contextPath: DBotScore.Indicator
- description: The indicator of the DBotScore.
+ description: The indicator that was tested.
type: String
- contextPath: DBotScore.Type
- description: The indicator type of the DBotScore.
+ description: The indicator type.
type: String
- contextPath: DBotScore.Vendor
description: The vendor used to calculate the score.
@@ -2410,7 +2410,7 @@ script:
description: Parsed Whois data.
- contextPath: Domain.WhoisRecords
description: Full Whois record.
- dockerimage: demisto/vendors-sdk:1.0.0.92984
+ dockerimage: demisto/vendors-sdk:1.0.0.100383
runonce: false
script: '-'
type: python
diff --git a/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/README.md b/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/README.md
index 8baaf16c6614..71c814919261 100644
--- a/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/README.md
+++ b/Packs/DomainTools_Iris/Integrations/DomainTools_Iris/README.md
@@ -9,7 +9,7 @@ This integration was integrated and tested with version 1.0 of DomainTools Iris.
| **Parameter** | **Description** | **Required** |
| --- | --- | --- |
- | DomainTools API URL | Change to https://api.domaintools.com in order to use DomainTool's https endpoint. | True |
    + | DomainTools API URL | Change to `https://api.domaintools.com` in order to use the DomainTools HTTPS endpoint. | True |
| API Username | | True |
| API Key | | True |
| High-Risk Threshold | | True |
@@ -164,7 +164,7 @@ Provides data enrichment for domains.
| DomainTools.WebsiteTitle | Number | The website title. |
| DomainTools.FirstSeen | Number | The date the domain was first seen. |
| DomainTools.ServerType | Number | The server type. |
-| DBotScore.Indicator | String | The indicator of the DBotScore. |
+| DBotScore.Indicator | String | The indicator that was tested. |
| DBotScore.Type | String | The indicator type of the DBotScore. |
| DBotScore.Vendor | String | The vendor used to calculate the score. |
| DBotScore.Score | Number | The actual score. |
diff --git a/Packs/DomainTools_Iris/ReleaseNotes/2_0_3.md b/Packs/DomainTools_Iris/ReleaseNotes/2_0_3.md
new file mode 100644
index 000000000000..18d9c54a759f
--- /dev/null
+++ b/Packs/DomainTools_Iris/ReleaseNotes/2_0_3.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### DomainTools Iris
+- Updated the Docker image to: *demisto/vendors-sdk:1.0.0.100383*.
+
+- Documentation and metadata improvements.
diff --git a/Packs/DomainTools_Iris/doc_files/Indicator_Pivoting_DomainTools_Iris.png b/Packs/DomainTools_Iris/doc_files/Indicator_Pivoting_DomainTools_Iris.png
new file mode 100644
index 000000000000..d60f6f20cb75
Binary files /dev/null and b/Packs/DomainTools_Iris/doc_files/Indicator_Pivoting_DomainTools_Iris.png differ
diff --git a/Packs/DomainTools_Iris/pack_metadata.json b/Packs/DomainTools_Iris/pack_metadata.json
index 3255031c21c7..656c3e02a9bf 100644
--- a/Packs/DomainTools_Iris/pack_metadata.json
+++ b/Packs/DomainTools_Iris/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DomainTools Iris Investigate",
"description": "Facilitates automation of key infrastructure characterization and hunting portions of the incident response process. Organizations will have access to essential domain profile, web crawl, SSL, and infrastructure data from within Cortex XSOAR. Requires a DomainTools Iris Investigate API key.",
"support": "partner",
- "currentVersion": "2.0.2",
+ "currentVersion": "2.0.3",
"author": "DomainTools",
"url": "https://www.domaintools.com/support/",
"email": "memberservices@domaintools.com",
diff --git a/Packs/Drift/Integrations/Drift/Drift.py b/Packs/Drift/Integrations/Drift/Drift.py
index 9a0401373462..0d6f78a2a280 100644
--- a/Packs/Drift/Integrations/Drift/Drift.py
+++ b/Packs/Drift/Integrations/Drift/Drift.py
@@ -18,7 +18,7 @@ def post_contact(self, email: dict = None):
def get_contact(self, contact_id: str = None, email: str = None):
url_suffix = '/contacts'
- params = dict()
+ params = {}
if contact_id:
url_suffix = f"{url_suffix}/{contact_id}"
elif email:
diff --git a/Packs/Drift/Integrations/Drift/Drift.yml b/Packs/Drift/Integrations/Drift/Drift.yml
index d1d4046e3b12..04403a98fd25 100644
--- a/Packs/Drift/Integrations/Drift/Drift.yml
+++ b/Packs/Drift/Integrations/Drift/Drift.yml
@@ -86,7 +86,7 @@ script:
- contextPath: Drift.Contacts.Attributes
description: Attributes of the contact (JSON dict).
description: 'Post New Contact using a new contact Email '
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
subtype: python3
fromversion: 6.2.0
diff --git a/Packs/Drift/ReleaseNotes/1_0_4.md b/Packs/Drift/ReleaseNotes/1_0_4.md
new file mode 100644
index 000000000000..b826959d263a
--- /dev/null
+++ b/Packs/Drift/ReleaseNotes/1_0_4.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Drift
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/Drift/pack_metadata.json b/Packs/Drift/pack_metadata.json
index c7a23263de4c..9c4d60da4805 100644
--- a/Packs/Drift/pack_metadata.json
+++ b/Packs/Drift/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Drift",
"description": "Drift Pack containing integrations with the Drift API",
"support": "community",
- "currentVersion": "1.0.3",
+ "currentVersion": "1.0.4",
"author": "Adriana Rose Diaz",
"url": "https://devdocs.drift.com/docs/using-drift-apis",
"email": "",
diff --git a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.py b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.py
index 4785627c81e9..841483b6b2e4 100644
--- a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.py
+++ b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.py
@@ -42,7 +42,7 @@ def __init__(
def set_request_filter(self, cursor: str):
if 'continue' not in str(self.request.url):
- self.request.url = AnyUrl(f'{str(self.request.url).removesuffix("/")}/continue')
+ self.request.url = parse_obj_as(AnyUrl, f'{str(self.request.url).removesuffix("/")}/continue')
self.request.data = json.dumps({'cursor': cursor})
@@ -56,7 +56,7 @@ def get_access_token(self):
)
response = self.call(request)
self.request.headers['Authorization'] = f'Bearer {response.json()["access_token"]}'
- self.request.url = AnyUrl(f'{str(self.request.url).removesuffix("/")}/2/team_log/get_events')
+ self.request.url = parse_obj_as(AnyUrl, f'{str(self.request.url).removesuffix("/")}/2/team_log/get_events')
class DropboxEventsGetter(IntegrationGetEvents):
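Reviewer note: the `AnyUrl(...)` to `parse_obj_as(AnyUrl, ...)` change reads like the usual pydantic v1 idiom for building a validated URL object from a plain string. A small sketch under that assumption (the URL value is illustrative only):

```python
from pydantic import AnyUrl, parse_obj_as  # pydantic v1 API

raw = "https://api.dropboxapi.com/2/team_log/get_events"

# parse_obj_as validates the string and returns a ready AnyUrl instance,
# whereas constructing AnyUrl(raw) directly skips that parsing step.
url = parse_obj_as(AnyUrl, raw)
print(url.host, url.path)  # api.dropboxapi.com /2/team_log/get_events
```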
diff --git a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.yml b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.yml
index e820ce5550d7..5b46f4dda329 100644
--- a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.yml
+++ b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector.yml
@@ -78,7 +78,7 @@ script:
defaultValue: 3 days
description: Get events.
name: dropbox-get-events
- dockerimage: demisto/fastapi:1.0.0.70530
+ dockerimage: demisto/py3-tools:1.0.0.94051
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector_test.py b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector_test.py
index a6d30c3dd929..a79d40587ed8 100644
--- a/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector_test.py
+++ b/Packs/Dropbox/Integrations/DropboxEventCollector/DropboxEventCollector_test.py
@@ -2,7 +2,6 @@
import requests_mock
from freezegun import freeze_time
import demistomock as demisto
-from pydantic import parse_obj_as # noqa: F401
DEMISTO_PARAMS = {
diff --git a/Packs/Dropbox/ReleaseNotes/1_1_0.md b/Packs/Dropbox/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..104a9f977689
--- /dev/null
+++ b/Packs/Dropbox/ReleaseNotes/1_1_0.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Dropbox Event Collector
+
+- Changed the Docker image to `py3-tools`.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.94051*.
diff --git a/Packs/Dropbox/pack_metadata.json b/Packs/Dropbox/pack_metadata.json
index 54a01397bf9a..cf589a5523dc 100644
--- a/Packs/Dropbox/pack_metadata.json
+++ b/Packs/Dropbox/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Dropbox",
"description": "Use the Dropbox integration to fetch events",
"support": "xsoar",
- "currentVersion": "1.0.29",
+ "currentVersion": "1.1.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
index eed495558930..fe366e9099a5 100644
--- a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
+++ b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
@@ -325,7 +325,7 @@ def main():
raw_response=events,
)
return_results(command_results)
- if argToBoolean(demisto_params.get('push_events', 'false')):
+ if argToBoolean(demisto_params.get('should_push_events', 'false')):
demisto.debug(f'Sending {len(events)} events to XSIAM')
send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT)
else:
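Reviewer note: a hedged sketch of what the corrected gate does. `argToBoolean` (from CommonServerPython) turns the `should_push_events` value into a real boolean before the collector decides whether to ship the fetched events; the simplified helper below stands in for it and is not the library implementation.

```python
# Simplified stand-in for CommonServerPython's argToBoolean.
def arg_to_boolean(value) -> bool:
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ("true", "yes", "1")


def maybe_push(params_and_args: dict, events: list) -> bool:
    # Reads the renamed key, defaulting to "false" exactly as in the diff above.
    if arg_to_boolean(params_and_args.get("should_push_events", "false")):
        print(f"Sending {len(events)} events to XSIAM")
        return True
    return False


assert maybe_push({"should_push_events": "true"}, [{"id": 1}]) is True
assert maybe_push({}, [{"id": 1}]) is False
```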
diff --git a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
index 6f2994f696d9..9673407a42f4 100644
--- a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
+++ b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
@@ -59,7 +59,7 @@ script:
required: true
description: Manual command to fetch events and display them.
name: duo-get-events
- dockerimage: demisto/vendors-sdk:1.0.0.87491
+ dockerimage: demisto/vendors-sdk:1.0.0.96124
isfetchevents: true
subtype: python3
marketplaces:
diff --git a/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md b/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md
new file mode 100644
index 000000000000..932ecb97e70a
--- /dev/null
+++ b/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md
@@ -0,0 +1,5 @@
+
+#### Integrations
+##### Duo Event Collector
+- Fixed an issue where the *should_push_events* argument was not passed correctly to the **duo-get-events** command.
+- Updated the Docker image to: *demisto/vendors-sdk:1.0.0.96124*.
\ No newline at end of file
diff --git a/Packs/DuoAdminApi/pack_metadata.json b/Packs/DuoAdminApi/pack_metadata.json
index ef16ff50bf05..9916b1b8e293 100644
--- a/Packs/DuoAdminApi/pack_metadata.json
+++ b/Packs/DuoAdminApi/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DUO Admin",
"description": "DUO for admins.\nMust have access to the admin api in order to use this",
"support": "xsoar",
- "currentVersion": "4.0.18",
+ "currentVersion": "4.0.19",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DynamicSectionReports/doc_files/incident_report.png b/Packs/DynamicSectionReports/doc_files/incident_report.png
new file mode 100644
index 000000000000..d94af69acb06
Binary files /dev/null and b/Packs/DynamicSectionReports/doc_files/incident_report.png differ
diff --git a/Packs/DynamicSectionReports/doc_files/indicator_report.png b/Packs/DynamicSectionReports/doc_files/indicator_report.png
new file mode 100644
index 000000000000..60bf341fcebe
Binary files /dev/null and b/Packs/DynamicSectionReports/doc_files/indicator_report.png differ
diff --git a/Packs/DynamicSectionReports/doc_files/tag-playbook.png b/Packs/DynamicSectionReports/doc_files/tag-playbook.png
new file mode 100644
index 000000000000..48e5deaf92b0
Binary files /dev/null and b/Packs/DynamicSectionReports/doc_files/tag-playbook.png differ
diff --git a/Packs/DynamicSectionReports/doc_files/tag-warroom.png b/Packs/DynamicSectionReports/doc_files/tag-warroom.png
new file mode 100644
index 000000000000..7f31dc7daeb2
Binary files /dev/null and b/Packs/DynamicSectionReports/doc_files/tag-warroom.png differ
diff --git a/Packs/EDL/Playbooks/Modify_EDL.yml b/Packs/EDL/Playbooks/Modify_EDL.yml
index 8743b5ca57ba..d09bcb0c54d6 100644
--- a/Packs/EDL/Playbooks/Modify_EDL.yml
+++ b/Packs/EDL/Playbooks/Modify_EDL.yml
@@ -79,10 +79,10 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 64f17c95-4126-441f-8e9f-c15bdb612756
+ taskid: 2a97257f-f9bb-40f2-8d6f-b476961da61f
type: regular
task:
- id: 64f17c95-4126-441f-8e9f-c15bdb612756
+ id: 2a97257f-f9bb-40f2-8d6f-b476961da61f
version: -1
name: Save input list of indicators as array (Split transformer)
description: Set a value in context under the key you entered.
@@ -99,7 +99,7 @@ tasks:
value:
complex:
root: incident
- accessor: edlindicatorslist
+ accessor: genericexportindicatorsserviceindicatorslist
transformers:
- operator: StripChars
args:
@@ -182,9 +182,9 @@ tasks:
- "26"
scriptarguments:
tags:
- simple: ${incident.edltag}
+ simple: ${incident.genericexportindicatorsservicetag}
type:
- simple: ${incident.edlindicatortype}
+ simple: ${incident.genericexportindicatorsserviceindicatortype}
value:
complex:
root: InputIndicatorsArray
@@ -263,7 +263,7 @@ tasks:
field:
simple: tags
fieldValue:
- simple: ${incident.edltag}
+ simple: ${incident.genericexportindicatorsservicetag}
indicatorsValues:
complex:
root: InputIndicatorsArray
@@ -299,7 +299,8 @@ tasks:
id: 0c815fb4-5ee4-4f72-8c83-6e559d73928a
version: -1
name: Are there new indicators to create?
- description: Check if there are new indicators to create, which do not already exist in the Cortex XSOAR database.
+ description: Check if there are new indicators to create, which do not already
+ exist in the Cortex XSOAR database.
type: condition
iscommand: false
brand: ""
@@ -351,7 +352,8 @@ tasks:
id: 63c89c44-1914-427f-8590-328c861ba7ae
version: -1
name: Are there pre-existing indicators to update?
- description: Check if there are indicators that already exist in the Cortex XSOAR database. IF so, the tag needs to be added to add them to the EDL.
+    description: Check if there are indicators that already exist in the Cortex
+      XSOAR database. If so, the tag needs to be added to them so they are included in the EDL.
type: condition
iscommand: false
brand: ""
@@ -397,10 +399,10 @@ tasks:
isautoswitchedtoquietmode: false
"26":
id: "26"
- taskid: b61606d4-3222-422c-8d5e-c05416c729bb
+ taskid: e58007a8-290f-43b3-85f2-280a16526440
type: regular
task:
- id: b61606d4-3222-422c-8d5e-c05416c729bb
+ id: e58007a8-290f-43b3-85f2-280a16526440
version: -1
name: Close incident
description: commands.local.cmd.close.inv
@@ -428,13 +430,14 @@ tasks:
isautoswitchedtoquietmode: false
"27":
id: "27"
- taskid: 2b108cd4-dd69-4f2f-8e94-fa56238d6791
+ taskid: 8ff5b7b0-f350-47dc-85ae-a8674691f6d0
type: condition
task:
- id: 2b108cd4-dd69-4f2f-8e94-fa56238d6791
+ id: 8ff5b7b0-f350-47dc-85ae-a8674691f6d0
version: -1
name: Adding or removing indicators?
- description: Check whether to add or remove indicators from the EDL, according to the value of the `EDL Action`input field.
+      description: Check whether to add or remove indicators from the EDL, according
+        to the value of the `EDL Action` input field.
type: condition
iscommand: false
brand: ""
@@ -450,7 +453,7 @@ tasks:
- - operator: isEqualString
left:
value:
- simple: incident.edlaction
+ simple: incident.genericexportindicatorsserviceaction
iscontext: true
right:
value:
@@ -460,7 +463,7 @@ tasks:
- - operator: isEqualString
left:
value:
- simple: incident.edlaction
+ simple: incident.genericexportindicatorsserviceaction
iscontext: true
right:
value:
@@ -511,10 +514,10 @@ tasks:
isautoswitchedtoquietmode: false
"29":
id: "29"
- taskid: de93e06f-1ebe-48e2-84a2-2637ede4f7c5
+ taskid: ffc0eb39-23e0-48ba-87be-37c73242c37e
type: regular
task:
- id: de93e06f-1ebe-48e2-84a2-2637ede4f7c5
+ id: ffc0eb39-23e0-48ba-87be-37c73242c37e
version: -1
name: Remove EDL tag from indicators
description: commands.local.cmd.remove.values.to.indicator.multi.select.field
@@ -529,9 +532,7 @@ tasks:
field:
simple: tags
fieldValue:
- complex:
- root: incident
- accessor: edltag
+ simple: ${incident.genericexportindicatorsservicetag}
indicatorsValues:
simple: ${InputIndicatorsArray}
separatecontext: false
@@ -551,13 +552,14 @@ tasks:
isautoswitchedtoquietmode: false
"30":
id: "30"
- taskid: cb238dc6-b905-4e82-844b-42c29320cf24
+ taskid: b4dc98ea-e45d-412a-899a-b4f454ff33bc
type: condition
task:
- id: cb238dc6-b905-4e82-844b-42c29320cf24
+ id: b4dc98ea-e45d-412a-899a-b4f454ff33bc
version: -1
name: Playbook inputs provided instead of incident fields?
- description: Check if playbook is being run as a sub-playbook. (Check if values were passed as playbook inputs instead of incident fields.)
+    description: Check if the playbook is being run as a sub-playbook. (Check if
+      values were passed as playbook inputs instead of incident fields.)
type: condition
iscommand: false
brand: ""
@@ -590,17 +592,17 @@ tasks:
- - operator: isEmpty
left:
value:
- simple: incident.edlaction
+ simple: incident.genericexportindicatorsserviceaction
iscontext: true
- - operator: isEmpty
left:
value:
- simple: incident.edltag
+ simple: incident.genericexportindicatorsservicetag
iscontext: true
- - operator: isEmpty
left:
value:
- simple: incident.edlindicatorslist
+ simple: incident.genericexportindicatorsserviceindicatorslist
iscontext: true
view: |-
{
@@ -618,10 +620,10 @@ tasks:
isautoswitchedtoquietmode: false
"31":
id: "31"
- taskid: 4dccfd34-d082-419b-86df-5ffd31660d0d
+ taskid: 3680f381-35df-4f13-850f-5f178d4f5499
type: regular
task:
- id: 4dccfd34-d082-419b-86df-5ffd31660d0d
+ id: 3680f381-35df-4f13-850f-5f178d4f5499
version: -1
name: Set inputs to indicator fields
description: commands.local.cmd.set.incident
@@ -633,9 +635,10 @@ tasks:
'#none#':
- "4"
scriptarguments:
- edlaction:
+
+ genericexportindicatorsserviceaction:
simple: ${inputs.EDL Action}
- edlindicatorslist:
+ genericexportindicatorsserviceindicatorslist:
complex:
root: inputs.EDL Indicators List
transformers:
@@ -644,9 +647,9 @@ tasks:
separator:
value:
simple: ','
- edlindicatortype:
+ genericexportindicatorsserviceindicatortype:
simple: ${inputs.EDL Indicator Type}
- edltag:
+ genericexportindicatorsservicetag:
simple: ${inputs.EDL Tag}
separatecontext: false
view: |-
@@ -663,6 +666,7 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+system: true
view: |-
{
"linkLabelsPosition": {},
@@ -680,23 +684,25 @@ inputs:
value: {}
required: true
description: 'Action to perform on EDL. Possible values: "Add", "Remove".'
- playbookInputQuery:
+ playbookInputQuery: null
- key: EDL Tag
value: {}
required: true
- description: Tag that adds indicators to EDL. Must match tag value used in EDL query in the instance configuration.
- playbookInputQuery:
+ description: Tag that adds indicators to EDL. Must match tag value used in EDL query
+ in the instance configuration.
+ playbookInputQuery: null
- key: EDL Indicator Type
value: {}
required: false
description: Required only if adding to EDL. Type of indicators to add to EDL.
- playbookInputQuery:
+ playbookInputQuery: null
- key: EDL Indicators List
value: {}
required: true
- description: List of IOCs to add to or remove from EDL (according to value of EDL Action). May be newline or comma-delimited.
- playbookInputQuery:
+ description: List of IOCs to add to or remove from EDL (according to value of EDL
+ Action). May be newline or comma-delimited.
+ playbookInputQuery: null
outputs: []
tests:
- No tests (auto formatted)
-fromversion: 6.6.0
+fromversion: 6.6.0
\ No newline at end of file
diff --git a/Packs/EDL/ReleaseNotes/3_3_2.md b/Packs/EDL/ReleaseNotes/3_3_2.md
new file mode 100644
index 000000000000..c7f30b22f05b
--- /dev/null
+++ b/Packs/EDL/ReleaseNotes/3_3_2.md
@@ -0,0 +1,3 @@
+## Generic Export Indicators Service
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/EDL/ReleaseNotes/3_3_3.md b/Packs/EDL/ReleaseNotes/3_3_3.md
new file mode 100644
index 000000000000..c9e0fc425e90
--- /dev/null
+++ b/Packs/EDL/ReleaseNotes/3_3_3.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Modify EDL
+
+- Fixed an issue where the playbook failed due to missing inputs.
diff --git a/Packs/EDL/doc_files/159843984-16859c01-eb4e-4ebc-93a4-bed625b793e1.png b/Packs/EDL/doc_files/159843984-16859c01-eb4e-4ebc-93a4-bed625b793e1.png
new file mode 100644
index 000000000000..30b12cb53da0
Binary files /dev/null and b/Packs/EDL/doc_files/159843984-16859c01-eb4e-4ebc-93a4-bed625b793e1.png differ
diff --git a/Packs/EDL/pack_metadata.json b/Packs/EDL/pack_metadata.json
index 20877c402bed..0f1f931e59b9 100644
--- a/Packs/EDL/pack_metadata.json
+++ b/Packs/EDL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Generic Export Indicators Service",
"description": "Use this pack to generate a list based on your Threat Intel Library, and export it to ANY other product in your network, such as your firewall, agent or SIEM. This pack is built for ongoing distribution of indicators from XSOAR to other products in the network, by creating an endpoint with a list of indicators that can be pulled by external vendors.",
"support": "xsoar",
- "currentVersion": "3.3.1",
+ "currentVersion": "3.3.3",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.py b/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.py
index a3d728932c71..f5fa48f53e3e 100644
--- a/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.py
+++ b/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.py
@@ -172,6 +172,7 @@ def check_edl(cmd, start_time, EDL, edl_user, edl_pwd, verify_certificate, email
return ["Success"]
elif cmd == "get-edl-contents":
return [csv_string, pull_time]
+ return None
''' MAIN FUNCTION '''
diff --git a/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.yml b/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.yml
index b3801e01ac18..9828366d2b25 100644
--- a/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.yml
+++ b/Packs/EDLMonitor/Integrations/EDLMonitor/EDLMonitor.yml
@@ -4,7 +4,7 @@ commonfields:
version: -1
configuration:
- additionalinfo: Timeout (in seconds) for how long to wait for EDL response before detecting as down (default 2 minutes)
- defaultvalue: "120"
+ defaultvalue: '120'
display: 'Timeout:'
name: timeout
type: 0
@@ -86,7 +86,7 @@ script:
- contextPath: ResponseCode
description: The response code.
type: number
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.100715
runonce: false
script: ''
subtype: python3
diff --git a/Packs/EDLMonitor/ReleaseNotes/1_0_2.md b/Packs/EDLMonitor/ReleaseNotes/1_0_2.md
new file mode 100644
index 000000000000..78dfffacf08c
--- /dev/null
+++ b/Packs/EDLMonitor/ReleaseNotes/1_0_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### EDL Monitor
+
+- Updated the Docker image to: *demisto/python3:3.10.14.100715*.
diff --git a/Packs/EDLMonitor/pack_metadata.json b/Packs/EDLMonitor/pack_metadata.json
index 9dca0081ae30..bf568652e756 100644
--- a/Packs/EDLMonitor/pack_metadata.json
+++ b/Packs/EDLMonitor/pack_metadata.json
@@ -2,12 +2,14 @@
"name": "EDL Monitor",
"description": "This content pack can monitor EDL contents a by emailing the content of an EDL as a zipped file to a specified user at an interval (simply configure a job to run the playbook included), and/or simply monitor the EDL for availability and email the user if the EDL is not available",
"support": "community",
- "currentVersion": "1.0.1",
+ "currentVersion": "1.0.2",
"author": "Andrew Murret",
"url": "",
"email": "",
"created": "2023-04-03T14:42:50Z",
- "categories": ["Utilities"],
+ "categories": [
+ "Utilities"
+ ],
"tags": [],
"useCases": [],
"keywords": [],
diff --git a/Packs/EWS/.pack-ignore b/Packs/EWS/.pack-ignore
index 0073c266cf91..2267b536f969 100644
--- a/Packs/EWS/.pack-ignore
+++ b/Packs/EWS/.pack-ignore
@@ -8,7 +8,7 @@ ignore=BA101
ignore=PB118
[file:EWSO365.yml]
-ignore=IN126,DS107,IN124
+ignore=IN126,DS107
[file:EWSv2.yml]
ignore=IN126,IN135
@@ -16,9 +16,6 @@ ignore=IN126,IN135
[file:SecurityAndCompliance.yml]
ignore=IN128
-[file:SecurityAndComplianceV2.yml]
-ignore=IN124
-
[file:README.md]
ignore=RM102,RM106
diff --git a/Packs/EclecticIQ/ReleaseNotes/3_0_1.md b/Packs/EclecticIQ/ReleaseNotes/3_0_1.md
index cf71fd6bd2d8..061ddb269a54 100644
--- a/Packs/EclecticIQ/ReleaseNotes/3_0_1.md
+++ b/Packs/EclecticIQ/ReleaseNotes/3_0_1.md
@@ -3,4 +3,4 @@
##### EclecticIQ Intelligence Center v3
-Fixed an issue in `Base` pack (Version `1.33.52`) so now EclecticIQ Intelligence Center v3 will correctly input email addresses into context under `Accounts.[x].Email` and not under `Email`.
+Fixed an issue in the `Base` pack (Version `1.33.52`) so that EclecticIQ Intelligence Center v3 now correctly outputs email addresses to context under `Account.Email` rather than `Email`.
diff --git a/Packs/EmailCommunication/ReleaseNotes/2_0_28.md b/Packs/EmailCommunication/ReleaseNotes/2_0_28.md
new file mode 100644
index 000000000000..f48c83d2c83a
--- /dev/null
+++ b/Packs/EmailCommunication/ReleaseNotes/2_0_28.md
@@ -0,0 +1,14 @@
+
+#### Scripts
+
+##### SendEmailReply
+
+- Updated the send-mail, reply-mail, get-attachments, and fetch-incidents commands to handle inline images sent to and from Cortex XSOAR.
+- Updated the Docker image to: *demisto/bs4-py3:1.0.0.98602*.
+
+##### PreprocessEmail
+
+- Updated the send-mail, reply-mail, get-attachments, and fetch-incidents commands to handle inline images sent to and from Cortex XSOAR.
+- Updated the Docker image to: *demisto/python3:3.10.14.98471*.
+
+
diff --git a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py
index 5926b2f069ea..b5c87fe9af97 100644
--- a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py
+++ b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py
@@ -44,6 +44,15 @@ def get_query_window():
return '60 days'
+def remove_html_conversation_history(email_html):
+ # Removing the conversation's history
+ for marker in QUOTE_MARKERS:
+ index = email_html.find(marker)
+ if index != -1:
+ email_html = f'{email_html[:index]}