Skip to content

Commit

Permalink
archer v2: fixed fetch times and timezone (demisto#10247)
Browse files Browse the repository at this point in the history
* archer v2: fixed fetch times and timezone

* fixed fetch times seconds and format

* fixed cr

* Update 1_1_6.md

* fixed double fetch bug

* fixed flake8 unittest

* aren't fix

Co-authored-by: roysagi <50295826+roysagi@users.noreply.github.com>
  • Loading branch information
jochman and roysagi committed Dec 6, 2020
1 parent d90b8b3 commit c49eabe
Show file tree
Hide file tree
Showing 5 changed files with 200 additions and 29 deletions.
62 changes: 37 additions & 25 deletions Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def format_time(datetime_object: datetime, use_european_time: bool) -> str:
A string formatted:
7/22/2017 3:58 PM (American) or 22/7/2017 3:58 PM (European)
"""
time_format = '%d/%m/%Y %I:%M %p' if use_european_time else '%m/%d/%Y %I:%M %p'
time_format = '%d/%m/%Y %I:%M:%S %p' if use_european_time else '%m/%d/%Y %I:%M:%S %p'
return datetime_object.strftime(time_format)


Expand All @@ -57,8 +57,10 @@ def parse_date_to_datetime(date: str, day_first: bool = False) -> datetime:
Returns:
a datetime object
"""
date_order = {'DATE_ORDER': 'DMY' if day_first else 'MDY'}
date_obj = parser(date, settings=date_order)
date_obj = parser(date)
if date_obj.tzinfo is None or date_obj.tzinfo.utcoffset(date_obj) is None: # if no timezone provided
date_order = {'DATE_ORDER': 'DMY' if day_first else 'MDY'}
date_obj = parser(date, settings=date_order)  # may raise `AssertionError` if the timestamp could not be parsed
return date_obj


Expand Down Expand Up @@ -388,16 +390,30 @@ def get_record(self, app_id, record_id):
def record_to_incident(
self, record_item, app_id, date_field, day_first: bool = False, offset: int = 0
) -> Tuple[dict, datetime]:
"""Transform a recotrd to incident
Args:
record_item: The record item dict
app_id: ID of the app
date_field: what is the date field
day_first: should the day be first in the day field (european date)
offset: what is the offset to the server
Returns:
incident, incident created time (in Archer's local time)
"""
labels = []
raw_record = record_item['raw']
record_item = record_item['record']
incident_created_time = datetime(1, 1, 1)
if record_item.get(date_field):
occurred_time = incident_created_time
if date_field := record_item.get(date_field):
incident_created_time = parse_date_to_datetime(
record_item[date_field], day_first=day_first
).replace(tzinfo=None)
# fix occurred by offset
incident_created_time = incident_created_time + timedelta(minutes=offset)
date_field, day_first=day_first
).replace(tzinfo=timezone.utc)
# fix occurred time. If the offset is -120 minutes (Archer is two hours behind
# Cortex XSOAR), we should add 120 minutes to the occurred time, i.e. subtract the offset from incident_created_time
occurred_time = incident_created_time - timedelta(minutes=offset)

# Will convert value to strs
for k, v in record_item.items():
Expand All @@ -415,15 +431,13 @@ def record_to_incident(
labels.append({'type': 'ModuleId', 'value': app_id})
labels.append({'type': 'ContentId', 'value': record_item.get("Id")})
labels.append({'type': 'rawJSON', 'value': json.dumps(raw_record)})

incident = {
'name': f'RSA Archer Incident: {record_item.get("Id")}',
'details': json.dumps(record_item),
'occurred': incident_created_time.strftime(OCCURRED_FORMAT),
'occurred': occurred_time.strftime(OCCURRED_FORMAT),
'labels': labels,
'rawJSON': json.dumps(raw_record)
}
demisto.debug(f'Going out with a new incident. occurred={incident["occurred"]}')
return incident, incident_created_time

def search_records(
Expand Down Expand Up @@ -1078,8 +1092,8 @@ def print_cache_command(client: Client, args: Dict[str, str]):


def fetch_incidents(
client: Client, params: dict, from_time: str
) -> Tuple[list, str]:
client: Client, params: dict, from_time: datetime
) -> Tuple[list, datetime]:
"""Fetches incidents.
Args:
Expand All @@ -1088,7 +1102,7 @@ def fetch_incidents(
from_time: Time to start the fetch from
Returns:
next_run object, incidents
incidents, next_run datetime in archer's local time
"""
# Not using get method as those params are a must
app_id = params['applicationId']
Expand All @@ -1098,9 +1112,7 @@ def fetch_incidents(
fields_to_display = argToList(params.get('fields_to_fetch'))
fields_to_display.append(date_field)
day_first = argToBoolean(params.get('useEuropeanTime', False))

from_time_utc_obj = parser(from_time).replace(tzinfo=timezone.utc)
from_time_utc = format_time(from_time_utc_obj, day_first)
from_time_utc = format_time(from_time, day_first)
# API Call
records, raw_res = client.search_records(
app_id, fields_to_display, date_field,
Expand All @@ -1111,7 +1123,7 @@ def fetch_incidents(

# Build incidents
incidents = list()
next_fetch = from_time_utc_obj
next_fetch = from_time
for record in records:
incident, incident_created_time = client.record_to_incident(
record, app_id, date_field, day_first=day_first, offset=offset
Expand All @@ -1120,10 +1132,10 @@ def fetch_incidents(
next_fetch = incident_created_time
incidents.append(incident)

return incidents, next_fetch.strftime(OCCURRED_FORMAT)
return incidents, next_fetch


def get_fetch_time(last_fetch: dict, first_fetch_time: str, offset: int = 0) -> str:
def get_fetch_time(last_fetch: dict, first_fetch_time: str, offset: int = 0) -> datetime:
"""Gets lastRun object and first fetch time (str, 3 days) and returns
a datetime object of the last run if exists, else datetime of the first fetch time
Expand All @@ -1134,15 +1146,15 @@ def get_fetch_time(last_fetch: dict, first_fetch_time: str, offset: int = 0) ->
Returns:
Time to start fetch from
"""
if next_run := last_fetch.get('last_fetch'):
start_fetch = parser(next_run)
else:
start_fetch, _ = parse_date_range(first_fetch_time)
if offset:
start_fetch = start_fetch - timedelta(minutes=offset)
start_fetch = start_fetch.replace(tzinfo=None)
return start_fetch.strftime(OCCURRED_FORMAT)
start_fetch += timedelta(minutes=offset)
start_fetch = start_fetch.replace(tzinfo=timezone.utc)
return start_fetch


def main():
Expand Down Expand Up @@ -1200,7 +1212,7 @@ def main():
from_time=from_time
)
demisto.debug(f'Setting next run to {next_fetch}')
demisto.setLastRun({'last_fetch': next_fetch})
demisto.setLastRun({'last_fetch': next_fetch.strftime(OCCURRED_FORMAT)})
demisto.incidents(incidents)
elif command == 'test-module':
demisto.results(test_module(client, params))
Expand Down
159 changes: 157 additions & 2 deletions Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2_test.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
import copy
from datetime import datetime, timezone

import pytest

import demistomock as demisto
from ArcherV2 import Client, extract_from_xml, generate_field_contents, get_errors_from_res, generate_field_value
from ArcherV2 import Client, extract_from_xml, generate_field_contents, get_errors_from_res, generate_field_value, \
fetch_incidents, get_fetch_time, parser

BASE_URL = 'https://test.com/'

Expand Down Expand Up @@ -388,9 +392,160 @@ def test_record_to_incident_american_time(self):
"""
client = Client(BASE_URL, '', '', '', '')
incident = INCIDENT_RECORD.copy()
incident['record']['Date/Time Reported'] = "03/26/2018 10:03 AM"
incident['record']['Date/Time Reported'] = '03/26/2018 10:03 AM'
incident, incident_created_time = client.record_to_incident(
INCIDENT_RECORD, 75, 'Date/Time Reported', day_first=False
)
assert incident_created_time.strftime('%Y-%m-%dT%H:%M:%SZ') == '2018-03-26T10:03:00Z'
assert incident['occurred'] == '2018-03-26T10:03:00Z'

@pytest.mark.parametrize('date_time_reported, use_european_time, occurred', [
('2018-04-03T10:03:00.000Z', False, '2018-04-03T10:03:00Z'),
('2018-04-03T10:03:00.000Z', True, '2018-04-03T10:03:00Z'),
('03/04/2018 10:03 AM', True, '2018-04-03T10:03:00Z'),
('04/03/2018 10:03 AM', False, '2018-04-03T10:03:00Z')
])
def test_fetch_time_change(
self, mocker, date_time_reported: str, use_european_time: bool, occurred: str
):
"""
Given:
incident with date/time reported
european time (day first) - True or false
When:
Fetching incidents
Then:
Check that the new next fetch is greater than last_fetch
Check the wanted next_fetch is true
Assert occurred time
"""
client = Client(BASE_URL, '', '', '', '')
params = {
'applicationId': '75',
'applicationDateField': 'Date/Time Reported',
'time_zone': 0,
'useEuropeanTime': use_european_time
}
record = copy.deepcopy(INCIDENT_RECORD)
record['record']['Date/Time Reported'] = date_time_reported
last_fetch = get_fetch_time(
{'last_fetch': '2018-03-01T10:03:00Z'}, params.get('fetch_time', '3 days'),
0
)
mocker.patch.object(client, 'search_records', return_value=([record], {}))
incidents, next_fetch = fetch_incidents(client, params, last_fetch)
assert last_fetch < next_fetch
assert next_fetch == datetime(2018, 4, 3, 10, 3, tzinfo=timezone.utc)
assert incidents[0]['occurred'] == occurred

@pytest.mark.parametrize('date_time_reported, use_european_time, occurred', [
('11/29/2018 10:03 AM', False, '2018-11-29T10:03:00Z'),
('29/11/2018 10:03 AM', True, '2018-11-29T10:03:00Z')
])
def test_fetch_times_with_impossible_date(
self, mocker, date_time_reported: str, use_european_time: bool, occurred: str
):
"""
Given:
incident with date/time reported. The day/months can't be misplaced (29-11, 11-29)
european time (day first) - True or false
When:
Fetching incidents
Then:
Check that the new next fetch is greater than last_fetch
Check the wanted next_fetch is true
Assert occurred time
"""
client = Client(BASE_URL, '', '', '', '')
params = {
'applicationId': '75',
'applicationDateField': 'Date/Time Reported',
'time_zone': 0,
'useEuropeanTime': use_european_time
}
record = copy.deepcopy(INCIDENT_RECORD)
record['record']['Date/Time Reported'] = date_time_reported
last_fetch = get_fetch_time(
{'last_fetch': '2018-03-01T10:03:00Z'}, params.get('fetch_time', '3 days'),
0
)
mocker.patch.object(client, 'search_records', return_value=([record], {}))
incidents, next_fetch = fetch_incidents(client, params, last_fetch)
assert last_fetch < next_fetch
assert next_fetch == datetime(2018, 11, 29, 10, 3, tzinfo=timezone.utc)
assert incidents[0]['occurred'] == occurred

def test_fetch_time_change_with_offset(self, mocker):
"""
Given:
offset of -120 (2 hours)
When:
Fetching incidents
Then:
Check that the new last fetch is equals to record reported time (no delta) and is after the last_fetch
Assert occurred time
"""
client = Client(BASE_URL, '', '', '', '')
record = copy.deepcopy(INCIDENT_RECORD)
record['record']['Date/Time Reported'] = '03/04/2018 10:03 AM'
params = {
'applicationId': '75',
'applicationDateField': 'Date/Time Reported',
'time_zone': -120,
'useEuropeanTime': 'true'
}
last_fetch = get_fetch_time(
{'last_fetch': '2018-03-24T10:03:00Z'}, params.get('fetch_time', '3 days'),
0
)
mocker.patch.object(client, 'search_records', return_value=([record], {}))
incidents, next_fetch = fetch_incidents(client, params, last_fetch)
assert last_fetch < next_fetch
assert next_fetch == datetime(2018, 4, 3, 10, 3, tzinfo=timezone.utc)
assert incidents[0]['occurred'] == '2018-04-03T12:03:00Z'

def test_two_fetches(self, mocker):
"""
Given:
2 incident with date/time reported
european time (day first) - True
running two fetches.
When:
Fetching incidents
Then:
Check that the new next fetch is greater than last_fetch on both calls.
Check the wanted next_fetch is equals to the date in the incident in both calls.
Assert occurred time
"""
client = Client(BASE_URL, '', '', '', '')
params = {
'applicationId': '75',
'applicationDateField': 'Date/Time Reported',
'time_zone': 0,
'useEuropeanTime': True
}
record1, record2 = copy.deepcopy(INCIDENT_RECORD), copy.deepcopy(INCIDENT_RECORD)
record1['record']['Date/Time Reported'] = '18/03/2020 10:30 AM'
record2['record']['Date/Time Reported'] = '18/03/2020 03:30 PM'
last_fetch = parser('2020-18-03T09:00:00Z').replace(tzinfo=timezone.utc)
mocker.patch.object(
client, 'search_records', side_effect=[
([record1], {}),
([record2], {})
]
)
incidents, next_fetch = fetch_incidents(client, params, last_fetch)
assert last_fetch < next_fetch
assert next_fetch == datetime(2020, 3, 18, 10, 30, tzinfo=timezone.utc)
assert incidents[0]['occurred'] == '2020-03-18T10:30:00Z'
incidents, next_fetch = fetch_incidents(client, params, next_fetch)
assert last_fetch < next_fetch
assert next_fetch == datetime(2020, 3, 18, 15, 30, tzinfo=timezone.utc)
assert incidents[0]['occurred'] == '2020-03-18T15:30:00Z'
2 changes: 1 addition & 1 deletion Packs/ArcherRSA/Integrations/ArcherV2/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ This integration was integrated and tested with version xx of RSA Archer v2

## Clarifications
The timezone (offset) parameter should be used if the Cortex XSOAR server and Archer's server aren't in the same time zone.
If the Cortex XSOAR server time is 00:00 and the Archer server time is 01:00, the timezone parameter should be -60 (minutes).
If the Cortex XSOAR server time is 00:00 and the Archer server time is 01:00, the timezone parameter should be +60 (minutes).

| **Parameter** | **Description** | **Required** |
| --- | --- | --- |
Expand Down
4 changes: 4 additions & 0 deletions Packs/ArcherRSA/ReleaseNotes/1_1_6.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@

#### Integrations
##### RSA Archer v2
- Fixed an issue in the **fetch-incident** command where there was a mismatch between timezones.
2 changes: 1 addition & 1 deletion Packs/ArcherRSA/pack_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "RSA Archer",
"description": "The RSA Archer GRC Platform provides a common foundation for managing policies, controls, risks, assessments and deficiencies across lines of business.",
"support": "xsoar",
"currentVersion": "1.1.5",
"currentVersion": "1.1.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
Expand Down

0 comments on commit c49eabe

Please sign in to comment.