Import from old repository
This commit is contained in:
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains an import statement for each SQLite plugin."""
|
||||
|
||||
from plaso.parsers.sqlite_plugins import android_calls
|
||||
from plaso.parsers.sqlite_plugins import android_sms
|
||||
from plaso.parsers.sqlite_plugins import appusage
|
||||
from plaso.parsers.sqlite_plugins import chrome
|
||||
from plaso.parsers.sqlite_plugins import chrome_cookies
|
||||
from plaso.parsers.sqlite_plugins import chrome_extension_activity
|
||||
from plaso.parsers.sqlite_plugins import firefox
|
||||
from plaso.parsers.sqlite_plugins import firefox_cookies
|
||||
from plaso.parsers.sqlite_plugins import gdrive
|
||||
from plaso.parsers.sqlite_plugins import ls_quarantine
|
||||
from plaso.parsers.sqlite_plugins import mac_document_versions
|
||||
from plaso.parsers.sqlite_plugins import mackeeper_cache
|
||||
from plaso.parsers.sqlite_plugins import skype
|
||||
from plaso.parsers.sqlite_plugins import zeitgeist
|
||||
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Android contacts2 Call History.
|
||||
|
||||
Android Call History is stored in SQLite database files named contacts2.db.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class AndroidCallEvent(time_events.JavaTimeEvent):
  """Convenience class for an Android Call History event."""

  DATA_TYPE = 'android:event:call'

  def __init__(
      self, java_time, usage, identifier, number, name, duration, call_type):
    """Initializes the event object.

    Args:
      java_time: The Java time value.
      usage: The description of the usage of the time value.
      identifier: The row identifier.
      number: The phone number associated to the remote party.
      name: The name associated to the remote party.
      duration: The number of seconds the call lasted.
      call_type: Incoming, Outgoing, or Missed.
    """
    super(AndroidCallEvent, self).__init__(java_time, usage)
    # The row identifier doubles as the event offset within the database.
    self.offset = identifier
    self.number = number
    self.name = name
    self.duration = duration
    self.call_type = call_type
|
||||
|
||||
|
||||
class AndroidCallPlugin(interface.SQLitePlugin):
  """Parse Android contacts2 database."""

  NAME = 'android_calls'
  DESCRIPTION = u'Parser for Android calls SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      ('SELECT _id AS id, date, number, name, duration, type FROM calls',
       'ParseCallsRow')]

  # The required tables. Declared for consistency with the other SQLite
  # plugins (e.g. AndroidSmsPlugin) so the plugin is only run against
  # databases that actually contain the calls table.
  REQUIRED_TABLES = frozenset(['calls'])

  # Mapping of the numeric type column value to a call type description.
  CALL_TYPE = {
      1: u'INCOMING',
      2: u'OUTGOING',
      3: u'MISSED'}

  def ParseCallsRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a Call record row.

    Produces a 'Call Started' event and, when a non-zero duration is
    available, a corresponding 'Call Ended' event.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # Extract and lookup the call type.
    call_type = self.CALL_TYPE.get(row['type'], u'UNKNOWN')

    event_object = AndroidCallEvent(
        row['date'], u'Call Started', row['id'], row['number'], row['name'],
        row['duration'], call_type)
    parser_context.ProduceEvent(
        event_object, parser_chain=parser_chain, file_entry=file_entry,
        query=query)

    # The duration column may be stored as a string; fall back to 0 when it
    # cannot be converted.
    duration = row['duration']
    if isinstance(duration, basestring):
      try:
        duration = int(duration, 10)
      except ValueError:
        duration = 0

    if duration:
      # The duration is in seconds and the date value in milliseconds.
      duration *= 1000
      event_object = AndroidCallEvent(
          row['date'] + duration, u'Call Ended', row['id'], row['number'],
          row['name'], row['duration'], call_type)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is picked up automatically.
sqlite.SQLiteParser.RegisterPlugin(AndroidCallPlugin)
|
||||
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Android call history plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import android_calls as android_calls_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import android_calls
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class AndroidCallSQLitePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Android Call History database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = android_calls.AndroidCallPlugin()

  def testProcess(self):
    """Test the Process function on an Android contacts2.db file."""
    test_file = self._GetTestFilePath(['contacts2.db'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The contacts2 database file contains 5 events (MISSED/OUTGOING/INCOMING).
    # Note: assertEqual is used instead of the deprecated assertEquals alias.
    self.assertEqual(len(event_objects), 5)

    # Check the first event.
    event_object = event_objects[0]

    self.assertEqual(event_object.timestamp_desc, u'Call Started')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-11-06 21:17:16.690')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_number = u'5404561685'
    self.assertEqual(event_object.number, expected_number)

    expected_type = u'MISSED'
    self.assertEqual(event_object.call_type, expected_type)

    expected_call = (
        u'MISSED '
        u'Number: 5404561685 '
        u'Name: Barney '
        u'Duration: 0 seconds')
    expected_short = u'MISSED Call'
    self._TestGetMessageStrings(event_object, expected_call, expected_short)

    # Run some tests on the last 2 events.
    event_object_3 = event_objects[3]
    event_object_4 = event_objects[4]

    # Check the timestamp_desc of the last event.
    self.assertEqual(event_object_4.timestamp_desc, u'Call Ended')

    expected_timestamp3 = timelib_test.CopyStringToTimestamp(
        '2013-11-07 00:03:36.690')
    self.assertEqual(event_object_3.timestamp, expected_timestamp3)

    expected_timestamp4 = timelib_test.CopyStringToTimestamp(
        '2013-11-07 00:14:15.690')
    self.assertEqual(event_object_4.timestamp, expected_timestamp4)

    # Ensure the difference in btw. events 3 and 4 equals the duration.
    expected_duration = (
        (expected_timestamp4 - expected_timestamp3) / 1000000)
    self.assertEqual(event_object_4.duration, expected_duration)
|
||||
|
||||
|
||||
# Allow the test module to be run directly.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Android SMS database.
|
||||
|
||||
Android SMS messages are stored in SQLite database files named mmssms.db.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class AndroidSmsEvent(time_events.JavaTimeEvent):
  """Convenience class for an Android SMS event."""

  DATA_TYPE = 'android:messaging:sms'

  def __init__(self, java_time, identifier, address, sms_read, sms_type, body):
    """Initializes the event object.

    Args:
      java_time: The Java time value.
      identifier: The row identifier.
      address: The phone number associated to the sender/receiver.
      sms_read: Read or Unread.
      sms_type: Sent or Received.
      body: Content of the SMS text message.
    """
    super(AndroidSmsEvent, self).__init__(
        java_time, eventdata.EventTimestamp.CREATION_TIME)
    # The row identifier doubles as the event offset within the database.
    self.offset = identifier
    self.address = address
    self.sms_read = sms_read
    self.sms_type = sms_type
    self.body = body
|
||||
|
||||
|
||||
class AndroidSmsPlugin(interface.SQLitePlugin):
  """Parse Android SMS database."""

  NAME = 'android_sms'
  DESCRIPTION = u'Parser for Android text messages SQLite database files.'

  # Query tuples of (SQL statement, callback method name).
  QUERIES = [
      ('SELECT _id AS id, address, date, read, type, body FROM sms',
       'ParseSmsRow')]

  # Tables that must be present for this plugin to run.
  REQUIRED_TABLES = frozenset(['sms'])

  # Lookup tables mapping the numeric column values to descriptions.
  SMS_TYPE = {
      1: u'RECEIVED',
      2: u'SENT'}
  SMS_READ = {
      0: u'UNREAD',
      1: u'READ'}

  def ParseSmsRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses an SMS row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # Map the read status and SMS type columns to their descriptions,
    # falling back to UNKNOWN for unexpected values.
    sms_read = self.SMS_READ.get(row['read'], u'UNKNOWN')
    sms_type = self.SMS_TYPE.get(row['type'], u'UNKNOWN')

    event_object = AndroidSmsEvent(
        row['date'], row['id'], row['address'], sms_read, sms_type, row['body'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is picked up automatically.
sqlite.SQLiteParser.RegisterPlugin(AndroidSmsPlugin)
|
||||
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Android SMS plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import android_sms as android_sms_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import android_sms
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class AndroidSmsTest(test_lib.SQLitePluginTestCase):
  """Tests for the Android SMS database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = android_sms.AndroidSmsPlugin()

  def testProcess(self):
    """Test the Process function on an Android SMS mmssms.db file."""
    test_file = self._GetTestFilePath(['mmssms.db'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The SMS database file contains 9 events (5 SENT, 4 RECEIVED messages).
    # Note: assertEqual is used instead of the deprecated assertEquals alias.
    self.assertEqual(len(event_objects), 9)

    # Check the first SMS sent.
    event_object = event_objects[0]

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-10-29 16:56:28.038')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_address = u'1 555-521-5554'
    self.assertEqual(event_object.address, expected_address)

    expected_body = u'Yo Fred this is my new number.'
    self.assertEqual(event_object.body, expected_body)

    expected_msg = (
        u'Type: SENT '
        u'Address: 1 555-521-5554 '
        u'Status: READ '
        u'Message: Yo Fred this is my new number.')
    expected_short = u'Yo Fred this is my new number.'
    self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
|
||||
# Allow the test module to be run directly.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Mac OS X application usage.
|
||||
|
||||
The application usage is stored in SQLite database files named
|
||||
/var/db/application_usage.sqlite
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class MacOSXApplicationUsageEvent(time_events.PosixTimeEvent):
  """Convenience class for a Mac OS X application usage event."""

  DATA_TYPE = 'macosx:application_usage'

  def __init__(
      self, posix_time, usage, application_name, application_version,
      bundle_id, number_of_times):
    """Initializes the event object.

    Args:
      posix_time: The POSIX time value.
      usage: The description of the usage of the time value.
      application_name: The name of the application.
      application_version: The version of the application.
      bundle_id: The bundle identifier of the application.
      number_of_times: The number of times the application was launched,
                       i.e. the number_times column of the
                       application_usage table.
    """
    super(MacOSXApplicationUsageEvent, self).__init__(posix_time, usage)

    self.application = application_name
    self.app_version = application_version
    self.bundle_id = bundle_id
    self.count = number_of_times
|
||||
|
||||
|
||||
class ApplicationUsagePlugin(interface.SQLitePlugin):
  """Parse Application Usage history files.

  Application usage is a SQLite database that logs down entries
  triggered by NSWorkspaceWillLaunchApplicationNotification and
  NSWorkspaceDidTerminateApplicationNotification NSWorkspace notifications by
  crankd.

  See the code here:
  http://code.google.com/p/google-macops/source/browse/trunk/crankd/\
  ApplicationUsage.py

  Default installation: /var/db/application_usage.sqlite
  """

  NAME = 'appusage'
  DESCRIPTION = u'Parser for Mac OS X application usage SQLite database files.'

  # Query tuples of (SQL statement, callback method name).
  QUERIES = [(
      ('SELECT last_time, event, bundle_id, app_version, app_path, '
       'number_times FROM application_usage ORDER BY last_time'),
      'ParseApplicationUsageRow')]

  # Tables that must be present for this plugin to run.
  REQUIRED_TABLES = frozenset(['application_usage'])

  def ParseApplicationUsageRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses an application usage row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # TODO: replace usage by definition(s) in eventdata. Not sure which values
    # it will hold here.
    usage = u'Application {0:s}'.format(row['event'])

    # The app_path column supplies the application name attribute.
    event_object = MacOSXApplicationUsageEvent(
        row['last_time'], usage, row['app_path'], row['app_version'],
        row['bundle_id'], row['number_times'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is picked up automatically.
sqlite.SQLiteParser.RegisterPlugin(ApplicationUsagePlugin)
|
||||
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Mac OS X application usage database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import appusage as appusage_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
from plaso.parsers.sqlite_plugins import appusage
|
||||
|
||||
|
||||
class ApplicationUsagePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Mac OS X application usage activity database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = appusage.ApplicationUsagePlugin()

  def testProcess(self):
    """Tests the Process function."""
    test_file = self._GetTestFilePath(['application_usage.sqlite'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The sqlite database contains 5 events.
    # Note: assertEqual is used instead of the deprecated assertEquals alias.
    self.assertEqual(len(event_objects), 5)

    # Check the first event.
    event_object = event_objects[0]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2014-05-07 18:52:02')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.application, u'/Applications/Safari.app')
    self.assertEqual(event_object.app_version, u'9537.75.14')
    self.assertEqual(event_object.bundle_id, u'com.apple.Safari')
    self.assertEqual(event_object.count, 1)

    expected_msg = (
        u'/Applications/Safari.app v.9537.75.14 '
        u'(bundle: com.apple.Safari). '
        u'Launched: 1 time(s)')

    expected_msg_short = u'/Applications/Safari.app (1 time(s))'

    self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
|
||||
|
||||
|
||||
# Allow the test module to be run directly.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,337 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parser for the Google Chrome History files.
|
||||
|
||||
The Chrome History is stored in SQLite database files named History
|
||||
and Archived History. Where the Archived History does not contain
|
||||
the downloads table.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import timelib
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class ChromeHistoryFileDownloadedEvent(time_events.TimestampEvent):
  """Convenience class for a Chrome History file downloaded event."""

  DATA_TYPE = 'chrome:history:file_downloaded'

  def __init__(
      self, timestamp, row_id, url, full_path, received_bytes, total_bytes):
    """Initializes the event object.

    Args:
      timestamp: The timestamp value.
      row_id: The identifier of the corresponding row.
      url: The URL of the downloaded file.
      full_path: The full path where the file was downloaded to.
      received_bytes: The number of bytes received while downloading.
      total_bytes: The total number of bytes to download.
    """
    super(ChromeHistoryFileDownloadedEvent, self).__init__(
        timestamp, eventdata.EventTimestamp.FILE_DOWNLOADED)

    # The row identifier doubles as the event offset within the database.
    self.offset = row_id
    self.url = url
    self.full_path = full_path
    self.received_bytes = received_bytes
    self.total_bytes = total_bytes
|
||||
|
||||
|
||||
class ChromeHistoryPageVisitedEvent(time_events.WebKitTimeEvent):
  """Convenience class for a Chrome History page visited event."""

  DATA_TYPE = 'chrome:history:page_visited'

  # TODO: refactor extra to be conditional arguments.
  def __init__(
      self, webkit_time, row_id, url, title, hostname, typed_count, from_visit,
      extra, visit_source):
    """Initializes the event object.

    Args:
      webkit_time: The WebKit time value.
      row_id: The identifier of the corresponding row.
      url: The URL of the visited page.
      title: The title of the visited page.
      hostname: The visited hostname.
      typed_count: The typed_count column of the urls table.
                   NOTE(review): per the Chrome history schema this is
                   presumably the number of times the URL was typed rather
                   than a character count - confirm against the schema.
      from_visit: The URL where the visit originated from.
      extra: String containing extra event data.
      visit_source: The source of the page visit, if defined.
    """
    super(ChromeHistoryPageVisitedEvent, self).__init__(
        webkit_time, eventdata.EventTimestamp.PAGE_VISITED)

    # The row identifier doubles as the event offset within the database.
    self.offset = row_id
    self.url = url
    self.title = title
    self.host = hostname
    self.typed_count = typed_count
    self.from_visit = from_visit
    self.extra = extra
    # Only set the attribute when a source was found in the visit_source
    # table; most locally browsed records have none.
    if visit_source is not None:
      self.visit_source = visit_source
|
||||
|
||||
|
||||
class ChromeHistoryPlugin(interface.SQLitePlugin):
|
||||
"""Parse Chrome Archived History and History files."""
|
||||
|
||||
NAME = 'chrome_history'
|
||||
DESCRIPTION = u'Parser for Chrome history SQLite database files.'
|
||||
|
||||
# Define the needed queries.
|
||||
QUERIES = [
|
||||
(('SELECT urls.id, urls.url, urls.title, urls.visit_count, '
|
||||
'urls.typed_count, urls.last_visit_time, urls.hidden, visits.'
|
||||
'visit_time, visits.from_visit, visits.transition, visits.id '
|
||||
'AS visit_id FROM urls, visits WHERE urls.id = visits.url ORDER '
|
||||
'BY visits.visit_time'), 'ParseLastVisitedRow'),
|
||||
(('SELECT downloads.id AS id, downloads.start_time,'
|
||||
'downloads.target_path, downloads_url_chains.url, '
|
||||
'downloads.received_bytes, downloads.total_bytes FROM downloads,'
|
||||
' downloads_url_chains WHERE downloads.id = '
|
||||
'downloads_url_chains.id'), 'ParseNewFileDownloadedRow'),
|
||||
(('SELECT id, full_path, url, start_time, received_bytes, '
|
||||
'total_bytes,state FROM downloads'), 'ParseFileDownloadedRow')]
|
||||
|
||||
# The required tables common to Archived History and History.
|
||||
REQUIRED_TABLES = frozenset([
|
||||
'keyword_search_terms', 'meta', 'urls', 'visits', 'visit_source'])
|
||||
|
||||
# Queries for cache building.
|
||||
URL_CACHE_QUERY = (
|
||||
'SELECT visits.id AS id, urls.url, urls.title FROM '
|
||||
'visits, urls WHERE urls.id = visits.url')
|
||||
SYNC_CACHE_QUERY = 'SELECT id, source FROM visit_source'
|
||||
|
||||
# The following definition for values can be found here:
|
||||
# http://src.chromium.org/svn/trunk/src/content/public/common/ \
|
||||
# page_transition_types_list.h
|
||||
PAGE_TRANSITION = {
|
||||
0: u'LINK',
|
||||
1: u'TYPED',
|
||||
2: u'AUTO_BOOKMARK',
|
||||
3: u'AUTO_SUBFRAME',
|
||||
4: u'MANUAL_SUBFRAME',
|
||||
5: u'GENERATED',
|
||||
6: u'START_PAGE',
|
||||
7: u'FORM_SUBMIT',
|
||||
8: u'RELOAD',
|
||||
9: u'KEYWORD',
|
||||
10: u'KEYWORD_GENERATED '
|
||||
}
|
||||
|
||||
TRANSITION_LONGER = {
|
||||
0: u'User clicked a link',
|
||||
1: u'User typed the URL in the URL bar',
|
||||
2: u'Got through a suggestion in the UI',
|
||||
3: (u'Content automatically loaded in a non-toplevel frame - user may '
|
||||
u'not realize'),
|
||||
4: u'Subframe explicitly requested by the user',
|
||||
5: (u'User typed in the URL bar and selected an entry from the list - '
|
||||
u'such as a search bar'),
|
||||
6: u'The start page of the browser',
|
||||
7: u'A form the user has submitted values to',
|
||||
8: (u'The user reloaded the page, eg by hitting the reload button or '
|
||||
u'restored a session'),
|
||||
9: (u'URL what was generated from a replaceable keyword other than the '
|
||||
u'default search provider'),
|
||||
10: u'Corresponds to a visit generated from a KEYWORD'
|
||||
}
|
||||
|
||||
# The following is the values for the source enum found in the visit_source
|
||||
# table and describes where a record originated from (if it originates from a
|
||||
# different storage than locally generated).
|
||||
# The source can be found here:
|
||||
# http://src.chromium.org/svn/trunk/src/chrome/browser/history/\
|
||||
# history_types.h
|
||||
VISIT_SOURCE = {
|
||||
0: u'SOURCE_SYNCED',
|
||||
1: u'SOURCE_BROWSED',
|
||||
2: u'SOURCE_EXTENSION',
|
||||
3: u'SOURCE_FIREFOX_IMPORTED',
|
||||
4: u'SOURCE_IE_IMPORTED',
|
||||
5: u'SOURCE_SAFARI_IMPORTED'
|
||||
}
|
||||
|
||||
CORE_MASK = 0xff
|
||||
|
||||
def _GetHostname(self, hostname):
|
||||
"""Return a hostname from a full URL."""
|
||||
if hostname.startswith('http') or hostname.startswith('ftp'):
|
||||
_, _, uri = hostname.partition('//')
|
||||
hostname, _, _ = uri.partition('/')
|
||||
|
||||
return hostname
|
||||
|
||||
if hostname.startswith('about') or hostname.startswith('chrome'):
|
||||
site, _, _ = hostname.partition('/')
|
||||
return site
|
||||
|
||||
return hostname
|
||||
|
||||
def _GetUrl(self, url, cache, database):
  """Return an URL from a reference to an entry in the from_visit table."""
  # An empty or zero reference means the visit had no originating URL.
  if not url:
    return u''

  # Populate the cache lazily on first use.
  cached_urls = cache.GetResults('url')
  if not cached_urls:
    cursor = database.cursor
    result_set = cursor.execute(self.URL_CACHE_QUERY)
    cache.CacheQueryResults(
        result_set, 'url', 'id', ('url', 'title'))
    cached_urls = cache.GetResults('url')

  reference_url, reference_title = cached_urls.get(url, [u'', u''])

  if not reference_url:
    return u''

  return u'{0:s} ({1:s})'.format(reference_url, reference_title)
|
||||
|
||||
def _GetVisitSource(self, visit_id, cache, database):
|
||||
"""Return a string denoting the visit source type if possible.
|
||||
|
||||
Args:
|
||||
visit_id: The ID from the visits table for the particular record.
|
||||
cache: A cache object (instance of SQLiteCache).
|
||||
database: A database object (instance of SQLiteDatabase).
|
||||
|
||||
Returns:
|
||||
A string with the visit source, None if not found.
|
||||
"""
|
||||
if not visit_id:
|
||||
return
|
||||
|
||||
sync_cache_results = cache.GetResults('sync')
|
||||
if not sync_cache_results:
|
||||
cursor = database.cursor
|
||||
result_set = cursor.execute(self.SYNC_CACHE_QUERY)
|
||||
cache.CacheQueryResults(
|
||||
result_set, 'sync', 'id', ('source',))
|
||||
sync_cache_results = cache.GetResults('sync')
|
||||
|
||||
results = sync_cache_results.get(visit_id, None)
|
||||
if results is None:
|
||||
return
|
||||
|
||||
return self.VISIT_SOURCE.get(results, None)
|
||||
|
||||
def ParseFileDownloadedRow(
|
||||
self, parser_context, row, file_entry=None, parser_chain=None, query=None,
|
||||
**unused_kwargs):
|
||||
"""Parses a file downloaded row.
|
||||
|
||||
Args:
|
||||
parser_context: A parser context object (instance of ParserContext).
|
||||
row: The row resulting from the query.
|
||||
file_entry: Optional file entry object (instance of dfvfs.FileEntry).
|
||||
The default is None.
|
||||
parser_chain: Optional string containing the parsing chain up to this
|
||||
point. The default is None.
|
||||
query: Optional query string. The default is None.
|
||||
"""
|
||||
timestamp = timelib.Timestamp.FromPosixTime(row['start_time'])
|
||||
event_object = ChromeHistoryFileDownloadedEvent(
|
||||
timestamp, row['id'], row['url'], row['full_path'],
|
||||
row['received_bytes'], row['total_bytes'])
|
||||
parser_context.ProduceEvent(
|
||||
event_object, query=query, parser_chain=parser_chain,
|
||||
file_entry=file_entry)
|
||||
|
||||
def ParseNewFileDownloadedRow(
|
||||
self, parser_context, row, file_entry=None, parser_chain=None, query=None,
|
||||
**unused_kwargs):
|
||||
"""Parses a file downloaded row.
|
||||
|
||||
Args:
|
||||
parser_context: A parser context object (instance of ParserContext).
|
||||
row: The row resulting from the query.
|
||||
file_entry: Optional file entry object (instance of dfvfs.FileEntry).
|
||||
The default is None.
|
||||
parser_chain: Optional string containing the parsing chain up to this
|
||||
point. The default is None.
|
||||
query: Optional query string. The default is None.
|
||||
"""
|
||||
timestamp = timelib.Timestamp.FromWebKitTime(row['start_time'])
|
||||
event_object = ChromeHistoryFileDownloadedEvent(
|
||||
timestamp, row['id'], row['url'], row['target_path'],
|
||||
row['received_bytes'], row['total_bytes'])
|
||||
parser_context.ProduceEvent(
|
||||
event_object, query=query, parser_chain=parser_chain,
|
||||
file_entry=file_entry)
|
||||
|
||||
  def ParseLastVisitedRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      cache=None, database=None,
      **unused_kwargs):
    """Parses a last visited row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
      cache: Optional cache object (instance of SQLiteCache).
             The default is None.
      database: Optional database object (instance of SQLiteDatabase).
                The default is None.
    """
    # Human readable annotations appended to the event description.
    extras = []

    # Mask the raw transition value down to its core transition type before
    # translating it into the short and long descriptions.
    transition_nr = row['transition'] & self.CORE_MASK
    page_transition = self.PAGE_TRANSITION.get(transition_nr, '')
    if page_transition:
      extras.append(u'Type: [{0:s} - {1:s}]'.format(
          page_transition, self.TRANSITION_LONGER.get(transition_nr, '')))

    # NOTE(review): row['hidden'] is compared against the string '1'; if the
    # column holds an integer this branch never triggers — confirm intent.
    if row['hidden'] == '1':
      extras.append(u'(url hidden)')

    # TODO: move to formatter.
    count = row['typed_count']
    if count >= 1:
      # Pluralize "time" when the URL was typed more than once.
      if count > 1:
        multi = u's'
      else:
        multi = u''

      # {1:d} is the count, {0:s} the optional plural suffix.
      extras.append(u'(type count {1:d} time{0:s})'.format(multi, count))
    else:
      extras.append(u'(URL not typed directly - no typed count)')

    visit_source = self._GetVisitSource(row['visit_id'], cache, database)

    # TODO: replace extras by conditional formatting.
    event_object = ChromeHistoryPageVisitedEvent(
        row['visit_time'], row['id'], row['url'], row['title'],
        self._GetHostname(row['url']), row['typed_count'],
        self._GetUrl(row['from_visit'], cache, database), u' '.join(extras),
        visit_source)
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is discovered at runtime.
sqlite.SQLiteParser.RegisterPlugin(ChromeHistoryPlugin)
|
||||
@@ -0,0 +1,166 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parser for the Google Chrome Cookie database."""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import errors
|
||||
from plaso.lib import eventdata
|
||||
# Register the cookie plugins.
|
||||
from plaso.parsers import cookie_plugins # pylint: disable=unused-import
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.cookie_plugins import interface as cookie_interface
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class ChromeCookieEvent(time_events.WebKitTimeEvent):
  """Convenience class for a Chrome Cookie event."""

  DATA_TYPE = 'chrome:cookie:entry'

  def __init__(
      self, timestamp, usage, hostname, cookie_name, value, path, secure,
      httponly, persistent):
    """Initializes the event.

    Args:
      timestamp: The timestamp value in WebKit format.
      usage: Timestamp description string.
      hostname: The hostname of host that set the cookie value.
      cookie_name: The name field of the cookie.
      value: The value of the cookie.
      path: An URI of the page that set the cookie.
      secure: Indication if this cookie should only be transmitted over a
              secure channel.
      httponly: An indication that the cookie cannot be accessed through
                client side script.
      persistent: A flag indicating cookies persistent value.
    """
    super(ChromeCookieEvent, self).__init__(timestamp, usage)
    # Domain cookies are stored with a leading dot; strip it for the host.
    if hostname.startswith('.'):
      hostname = hostname[1:]

    self.host = hostname
    self.cookie_name = cookie_name
    self.data = value
    self.path = path
    # Normalize the raw database flag values to booleans.
    self.secure = bool(secure)
    self.httponly = bool(httponly)
    self.persistent = bool(persistent)

    if self.secure:
      scheme = u'https'
    else:
      scheme = u'http'

    # Reconstruct the URL the cookie applies to.
    self.url = u'{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)
|
||||
|
||||
|
||||
class ChromeCookiePlugin(interface.SQLitePlugin):
  """Parse Chrome Cookies file."""

  NAME = 'chrome_cookies'
  DESCRIPTION = u'Parser for Chrome cookies SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      (('SELECT creation_utc, host_key, name, value, path, expires_utc, '
        'secure, httponly, last_access_utc, has_expires, persistent '
        'FROM cookies'), 'ParseCookieRow')]

  # The tables required for this plugin to fire on a database.
  REQUIRED_TABLES = frozenset(['cookies', 'meta'])

  # Point to few sources for URL information.
  URLS = [
      u'http://src.chromium.org/svn/trunk/src/net/cookies/',
      (u'http://www.dfinews.com/articles/2012/02/'
       u'google-analytics-cookies-and-forensic-implications')]

  # Google Analytics __utmz variable translation.
  # Taken from:
  # http://www.dfinews.com/sites/dfinews.com/files/u739/Tab2Cookies020312.jpg
  GA_UTMZ_TRANSLATION = {
      'utmcsr': 'Last source used to access.',
      'utmccn': 'Ad campaign information.',
      'utmcmd': 'Last type of visit.',
      'utmctr': 'Keywords used to find site.',
      'utmcct': 'Path to the page of referring link.'}

  def __init__(self):
    """Initializes a plugin object."""
    super(ChromeCookiePlugin, self).__init__()
    # Cookie-content plugins (e.g. Google Analytics) applied per row.
    self._cookie_plugins = cookie_interface.GetPlugins()

  def ParseCookieRow(
      self, parser_context, row, file_entry=None, parser_chain=None,
      query=None, **unused_kwargs):
    """Parses a cookie row.

    Produces creation and last-access events for every cookie, an expiration
    event when the cookie has one, and hands the cookie to every registered
    cookie-content plugin.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    event_object = ChromeCookieEvent(
        row['creation_utc'], eventdata.EventTimestamp.CREATION_TIME,
        row['host_key'], row['name'], row['value'], row['path'], row['secure'],
        row['httponly'], row['persistent'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)

    event_object = ChromeCookieEvent(
        row['last_access_utc'], eventdata.EventTimestamp.ACCESS_TIME,
        row['host_key'], row['name'], row['value'], row['path'], row['secure'],
        row['httponly'], row['persistent'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)

    if row['has_expires']:
      event_object = ChromeCookieEvent(
          row['expires_utc'], 'Cookie Expires',
          row['host_key'], row['name'], row['value'], row['path'],
          row['secure'], row['httponly'], row['persistent'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    # Go through all cookie plugins to see if any specific parsing is needed.
    hostname = row['host_key']
    if hostname.startswith('.'):
      hostname = hostname[1:]

    url = u'http{0:s}://{1:s}{2:s}'.format(
        u's' if row['secure'] else u'', hostname, row['path'])

    for cookie_plugin in self._cookie_plugins:
      try:
        cookie_plugin.Process(
            parser_context, cookie_name=row['name'], cookie_data=row['value'],
            url=url, parser_chain=parser_chain, file_entry=file_entry)
      except errors.WrongPlugin:
        # Expected: each plugin only handles its own cookie names.
        pass
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is discovered at runtime.
sqlite.SQLiteParser.RegisterPlugin(ChromeCookiePlugin)
|
||||
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Google Chrome cookie database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import chrome_cookies as chrome_cookies_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import chrome_cookies
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class ChromeCookiesPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Google Chrome cookie database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = chrome_cookies.ChromeCookiePlugin()

  def testProcess(self):
    """Tests the Process function on a Chrome cookie database file."""
    test_file = self._GetTestFilePath(['cookies.db'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)

    event_objects = []
    extra_objects = []

    # Since we've got both events generated by cookie plugins and the Chrome
    # cookie plugin we need to separate them.
    for event_object in self._GetEventObjectsFromQueue(event_queue_consumer):
      if isinstance(event_object, chrome_cookies.ChromeCookieEvent):
        event_objects.append(event_object)
      else:
        extra_objects.append(event_object)

    # The cookie database contains 560 entries:
    #   560 creation timestamps.
    #   560 last access timestamps.
    #   560 expired timestamps.
    # Then there are extra events created by plugins:
    #   75 events created by Google Analytics cookies.
    # In total: 1755 events.
    # Note: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(event_objects), 3 * 560)

    # Double check that we've got at least the 75 Google Analytics sessions.
    self.assertGreaterEqual(len(extra_objects), 75)

    # Check few "random" events to verify.

    # Check one linkedin cookie.
    event_object = event_objects[124]
    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.ACCESS_TIME)
    self.assertEqual(event_object.host, u'www.linkedin.com')
    self.assertEqual(event_object.cookie_name, u'leo_auth_token')
    self.assertFalse(event_object.httponly)
    self.assertEqual(event_object.url, u'http://www.linkedin.com/')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2011-08-25 21:50:27.292367')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_msg = (
        u'http://www.linkedin.com/ (leo_auth_token) Flags: [HTTP only] = False '
        u'[Persistent] = True')
    expected_short = u'www.linkedin.com (leo_auth_token)'
    self._TestGetMessageStrings(event_object, expected_msg, expected_short)

    # Check one of the visits to rubiconproject.com.
    event_object = event_objects[379]
    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.ACCESS_TIME)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2012-04-01 13:54:34.949210')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.url, u'http://rubiconproject.com/')
    self.assertEqual(event_object.path, u'/')
    self.assertFalse(event_object.secure)
    self.assertTrue(event_object.persistent)

    expected_msg = (
        u'http://rubiconproject.com/ (put_2249) Flags: [HTTP only] = False '
        u'[Persistent] = True')
    self._TestGetMessageStrings(
        event_object, expected_msg, u'rubiconproject.com (put_2249)')

    # Examine an event for a visit to a political blog site.
    event_object = event_objects[444]
    self.assertEqual(
        event_object.path,
        u'/2012/03/21/romney-tries-to-clean-up-etch-a-sketch-mess/')
    self.assertEqual(event_object.host, u'politicalticker.blogs.cnn.com')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2012-03-22 01:47:21.012022')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    # Examine a cookie that has an autologin entry.
    event_object = event_objects[1425]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2012-04-01 13:52:56.189444')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.host, u'marvel.com')
    self.assertEqual(event_object.cookie_name, u'autologin[timeout]')
    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
    # This particular cookie value represents a timeout value that corresponds
    # to the expiration date of the cookie.
    self.assertEqual(event_object.data, u'1364824322')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parser for the Google Chrome extension activity database files.
|
||||
|
||||
The Chrome extension activity is stored in SQLite database files named
|
||||
Extension Activity.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class ChromeExtensionActivityEvent(time_events.WebKitTimeEvent):
  """Convenience class for a Chrome Extension Activity event."""
  DATA_TYPE = 'chrome:extension_activity:activity_log'

  def __init__(self, row):
    """Initializes the event object.

    Args:
      row: The row resulting from the query (instance of sqlite3.Row).
    """
    # TODO: change the timestamp usage from unknown to something else.
    super(ChromeExtensionActivityEvent, self).__init__(
        row['time'], eventdata.EventTimestamp.UNKNOWN)

    # Copy the remaining columns of the row one-to-one onto the event.
    for attribute_name in (
        'extension_id', 'action_type', 'api_name', 'args', 'page_url',
        'page_title', 'arg_url', 'other', 'activity_id'):
      setattr(self, attribute_name, row[attribute_name])
|
||||
|
||||
|
||||
class ChromeExtensionActivityPlugin(interface.SQLitePlugin):
  """Plugin to parse Chrome extension activity database files."""

  NAME = 'chrome_extension_activity'
  # Fixed typos in the user-facing description:
  # 'exention activitiy' -> 'extension activity'.
  DESCRIPTION = u'Parser for Chrome extension activity SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      (('SELECT time, extension_id, action_type, api_name, args, page_url, '
        'page_title, arg_url, other, activity_id '
        'FROM activitylog_uncompressed ORDER BY time'),
       'ParseActivityLogUncompressedRow')]

  # NOTE(review): the query reads activitylog_uncompressed while the required
  # tables list activitylog_compressed — presumably activitylog_uncompressed
  # is a view backed by these tables; confirm against the database schema.
  REQUIRED_TABLES = frozenset([
      'activitylog_compressed', 'string_ids', 'url_ids'])

  def ParseActivityLogUncompressedRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses an activity log row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query (instance of sqlite3.Row).
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    event_object = ChromeExtensionActivityEvent(row)
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the plugin with the SQLite parser so it is discovered at runtime.
sqlite.SQLiteParser.RegisterPlugin(ChromeExtensionActivityPlugin)
|
||||
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Google Chrome extension activity database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import chrome_extension_activity as chrome_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import chrome_extension_activity
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class ChromeExtensionActivityPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Google Chrome extension activity database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = chrome_extension_activity.ChromeExtensionActivityPlugin()

  def testProcess(self):
    """Tests the Process function on a Chrome extension activity database."""
    test_file = self._GetTestFilePath(['Extension Activity'])
    cache = sqlite.SQLiteCache()
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file, cache)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # Note: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(event_objects), 56)

    event_object = event_objects[0]

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.UNKNOWN)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2014-11-25 21:08:23.698737')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_extension_id = u'ognampngfcbddbfemdapefohjiobgbdl'
    self.assertEqual(event_object.extension_id, expected_extension_id)

    self.assertEqual(event_object.action_type, 1)
    self.assertEqual(event_object.activity_id, 48)
    self.assertEqual(event_object.api_name, u'browserAction.onClicked')

    expected_msg = (
        u'Chrome extension: ognampngfcbddbfemdapefohjiobgbdl '
        u'Action type: 1 '
        u'Activity identifier: 48 '
        u'API name: browserAction.onClicked')
    expected_short = (
        u'ognampngfcbddbfemdapefohjiobgbdl browserAction.onClicked')

    self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Google Chrome History database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import chrome as chrome_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import chrome
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class ChromeHistoryPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Google Chrome History database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = chrome.ChromeHistoryPlugin()

  def testProcess(self):
    """Tests the Process function on a Chrome History database file."""
    test_file = self._GetTestFilePath(['History'])
    cache = sqlite.SQLiteCache()
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file, cache)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The History file contains 71 events (69 page visits, 1 file downloads).
    # NOTE(review): the breakdown in this comment does not add up to 71 —
    # confirm the actual per-type counts against the test file.
    # Note: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(event_objects), 71)

    # Check the first page visited entry.
    event_object = event_objects[0]

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.PAGE_VISITED)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2011-04-07 12:03:11')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_url = u'http://start.ubuntu.com/10.04/Google/'
    self.assertEqual(event_object.url, expected_url)

    expected_title = u'Ubuntu Start Page'
    self.assertEqual(event_object.title, expected_title)

    expected_msg = (
        u'{0:s} ({1:s}) [count: 0] Host: start.ubuntu.com '
        u'Visit Source: [SOURCE_FIREFOX_IMPORTED] Type: [LINK - User clicked '
        u'a link] (URL not typed directly - no typed count)').format(
            expected_url, expected_title)
    expected_short = u'{0:s} ({1:s})'.format(expected_url, expected_title)

    self._TestGetMessageStrings(event_object, expected_msg, expected_short)

    # Check the first file downloaded entry.
    event_object = event_objects[69]

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.FILE_DOWNLOADED)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2011-05-23 08:35:30')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_url = (
        u'http://fatloss4idiotsx.com/download/funcats/'
        u'funcats_scr.exe')
    self.assertEqual(event_object.url, expected_url)

    expected_full_path = u'/home/john/Downloads/funcats_scr.exe'
    self.assertEqual(event_object.full_path, expected_full_path)

    expected_msg = (
        u'{0:s} ({1:s}). Received: 1132155 bytes out of: '
        u'1132155 bytes.').format(
            expected_url, expected_full_path)
    expected_short = u'{0:s} downloaded (1132155 bytes)'.format(
        expected_full_path)
    self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -0,0 +1,476 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Mozilla Firefox history."""
|
||||
|
||||
import sqlite3
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import event
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
# Check the SQLite version, bail out early if too old.
# NOTE(review): the 3.7.8 minimum presumably matches a feature used by the
# queries in this parser — confirm before relaxing.
if sqlite3.sqlite_version_info < (3, 7, 8):
  raise ImportWarning(
      'FirefoxHistoryParser requires at least SQLite version 3.7.8.')
|
||||
|
||||
|
||||
class FirefoxPlacesBookmarkAnnotation(time_events.TimestampEvent):
  """Convenience class for a Firefox bookmark annotation event."""

  DATA_TYPE = 'firefox:places:bookmark_annotation'

  def __init__(self, timestamp, usage, row_id, title, url, content):
    """Initializes the event object.

    Args:
      timestamp: The timestamp value.
      usage: Timestamp description string.
      row_id: The identifier of the corresponding row.
      title: The title of the bookmark folder.
      url: The bookmarked URL.
      content: The content of the annotation.
    """
    super(FirefoxPlacesBookmarkAnnotation, self).__init__(timestamp, usage)

    # The row identifier doubles as the event offset.
    self.offset = row_id
    self.content = content
    self.title = title
    self.url = url
|
||||
|
||||
|
||||
class FirefoxPlacesBookmarkFolder(time_events.TimestampEvent):
  """Convenience class for a Firefox bookmark folder event."""

  DATA_TYPE = 'firefox:places:bookmark_folder'

  def __init__(self, timestamp, usage, row_id, title):
    """Initializes the event object.

    Args:
      timestamp: The timestamp value.
      usage: Timestamp description string.
      row_id: The identifier of the corresponding row.
      title: The title of the bookmark folder.
    """
    super(FirefoxPlacesBookmarkFolder, self).__init__(timestamp, usage)

    # The row identifier doubles as the event offset.
    self.offset = row_id
    self.title = title
|
||||
|
||||
|
||||
class FirefoxPlacesBookmark(time_events.TimestampEvent):
  """Convenience class for a Firefox bookmark event."""

  DATA_TYPE = 'firefox:places:bookmark'

  # Maps the numeric value of moz_bookmarks.type to a human readable string.
  # TODO: move to formatter.
  _TYPES = {
      1: 'URL',
      2: 'Folder',
      3: 'Separator',
  }

  # pylint: disable=redefined-builtin
  def __init__(self, timestamp, usage, row_id, type, title, url, places_title,
               hostname, visit_count):
    """Initializes the event object.

    Args:
      timestamp: The timestamp value.
      usage: Timestamp description string.
      row_id: The identifier of the corresponding row.
      type: Integer value containing the bookmark type.
      title: The title of the bookmark folder.
      url: The bookmarked URL.
      places_title: The places title.
      hostname: The hostname.
      visit_count: The visit count.
    """
    super(FirefoxPlacesBookmark, self).__init__(timestamp, usage)

    self.offset = row_id
    # Use get() with a default so an unknown (future) bookmark type maps to
    # 'N/A' instead of raising KeyError. The previous code called
    # _TYPES.setdefault('N/A'), which only inserted a useless 'N/A': None
    # entry into the mapping and did not guard this lookup at all.
    self.type = self._TYPES.get(type, 'N/A')
    self.title = title
    self.url = url
    self.places_title = places_title
    self.host = hostname
    self.visit_count = visit_count
|
||||
|
||||
class FirefoxPlacesPageVisitedEvent(event.EventObject):
  """Event object for a Firefox page visited entry."""

  DATA_TYPE = 'firefox:places:page_visited'

  def __init__(self, timestamp, row_id, url, title, hostname, visit_count,
               visit_type, extra):
    """Initializes the event object.

    Args:
      timestamp: The timestamp time value. The timestamp contains the
                 number of microseconds since Jan 1, 1970 00:00:00 UTC.
      row_id: The identifier of the corresponding row.
      url: The URL of the visited page.
      title: The title of the visited page.
      hostname: The visited hostname.
      visit_count: The visit count.
      visit_type: The transition type for the event.
      extra: A list containing extra event data (TODO refactor).
    """
    super(FirefoxPlacesPageVisitedEvent, self).__init__()

    # This event type carries its own timestamp and description.
    self.timestamp = timestamp
    self.timestamp_desc = eventdata.EventTimestamp.PAGE_VISITED

    self.host = hostname
    self.offset = row_id
    self.title = title
    self.url = url
    self.visit_count = visit_count
    self.visit_type = visit_type

    # Only set the attribute when extra data is present, so empty lists do
    # not show up in the serialized event.
    if extra:
      self.extra = extra
|
||||
|
||||
class FirefoxDownload(time_events.TimestampEvent):
  """Event object for a Firefox download entry."""

  DATA_TYPE = 'firefox:downloads:download'

  def __init__(self, timestamp, usage, row_id, name, url, referrer, full_path,
               temporary_location, received_bytes, total_bytes, mime_type):
    """Initializes the event object.

    Args:
      timestamp: The timestamp value.
      usage: Timestamp description string.
      row_id: The identifier of the corresponding row.
      name: The name of the download.
      url: The source URL of the download.
      referrer: The referrer URL of the download.
      full_path: The full path of the target of the download.
      temporary_location: The temporary location of the download.
      received_bytes: The number of bytes received.
      total_bytes: The total number of bytes of the download.
      mime_type: The mime type of the download.
    """
    super(FirefoxDownload, self).__init__(timestamp, usage)

    # The row identifier doubles as the event offset.
    self.offset = row_id

    self.full_path = full_path
    self.mime_type = mime_type
    self.name = name
    self.received_bytes = received_bytes
    self.referrer = referrer
    self.temporary_location = temporary_location
    self.total_bytes = total_bytes
    self.url = url
|
||||
|
||||
class FirefoxHistoryPlugin(interface.SQLitePlugin):
  """Parses a Firefox history file.

  The Firefox history is stored in a SQLite database file named
  places.sqlite.
  """

  NAME = 'firefox_history'
  DESCRIPTION = u'Parser for Firefox history SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      (('SELECT moz_historyvisits.id, moz_places.url, moz_places.title, '
        'moz_places.visit_count, moz_historyvisits.visit_date, '
        'moz_historyvisits.from_visit, moz_places.rev_host, '
        'moz_places.hidden, moz_places.typed, moz_historyvisits.visit_type '
        'FROM moz_places, moz_historyvisits '
        'WHERE moz_places.id = moz_historyvisits.place_id'),
       'ParsePageVisitedRow'),
      (('SELECT moz_bookmarks.type, moz_bookmarks.title AS bookmark_title, '
        'moz_bookmarks.dateAdded, moz_bookmarks.lastModified, '
        'moz_places.url, moz_places.title AS places_title, '
        'moz_places.rev_host, moz_places.visit_count, moz_bookmarks.id '
        'FROM moz_places, moz_bookmarks WHERE moz_bookmarks.fk = moz_places.id '
        'AND moz_bookmarks.type <> 3'),
       'ParseBookmarkRow'),
      (('SELECT moz_items_annos.content, moz_items_annos.dateAdded, '
        'moz_items_annos.lastModified, moz_bookmarks.title, '
        'moz_places.url, moz_places.rev_host, moz_items_annos.id '
        'FROM moz_items_annos, moz_bookmarks, moz_places '
        'WHERE moz_items_annos.item_id = moz_bookmarks.id '
        'AND moz_bookmarks.fk = moz_places.id'),
       'ParseBookmarkAnnotationRow'),
      (('SELECT moz_bookmarks.id, moz_bookmarks.title,'
        'moz_bookmarks.dateAdded, moz_bookmarks.lastModified '
        'FROM moz_bookmarks WHERE moz_bookmarks.type = 2'),
       'ParseBookmarkFolderRow')]

  # The required tables.
  REQUIRED_TABLES = frozenset([
      'moz_places', 'moz_historyvisits', 'moz_bookmarks', 'moz_items_annos'])

  # Cache queries.
  URL_CACHE_QUERY = (
      'SELECT h.id AS id, p.url, p.rev_host FROM moz_places p, '
      'moz_historyvisits h WHERE p.id = h.place_id')

  def ParseBookmarkAnnotationRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a bookmark annotation row.

    Produces an event for the dateAdded and lastModified columns when they
    contain a non-zero value.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    if row['dateAdded']:
      event_object = FirefoxPlacesBookmarkAnnotation(
          row['dateAdded'], eventdata.EventTimestamp.ADDED_TIME,
          row['id'], row['title'], row['url'], row['content'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    if row['lastModified']:
      event_object = FirefoxPlacesBookmarkAnnotation(
          row['lastModified'], eventdata.EventTimestamp.MODIFICATION_TIME,
          row['id'], row['title'], row['url'], row['content'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

  def ParseBookmarkFolderRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a bookmark folder row.

    Produces an event for the dateAdded and lastModified columns when they
    contain a non-zero value.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # Untitled folders are reported as 'N/A'.
    if not row['title']:
      title = 'N/A'
    else:
      title = row['title']

    if row['dateAdded']:
      event_object = FirefoxPlacesBookmarkFolder(
          row['dateAdded'], eventdata.EventTimestamp.ADDED_TIME,
          row['id'], title)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    if row['lastModified']:
      event_object = FirefoxPlacesBookmarkFolder(
          row['lastModified'], eventdata.EventTimestamp.MODIFICATION_TIME,
          row['id'], title)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

  def ParseBookmarkRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a bookmark row.

    Produces an event for the dateAdded and lastModified columns when they
    contain a non-zero value.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # The query explicitly selects moz_places.rev_host so it can be accessed
    # by index. The previous code used getattr(row, 'rev_host', 'N/A'), but
    # sqlite3.Row does not support attribute access, so that always returned
    # the 'N/A' default and the real hostname was dropped.
    if row['dateAdded']:
      event_object = FirefoxPlacesBookmark(
          row['dateAdded'], eventdata.EventTimestamp.ADDED_TIME,
          row['id'], row['type'], row['bookmark_title'], row['url'],
          row['places_title'], row['rev_host'],
          row['visit_count'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    if row['lastModified']:
      event_object = FirefoxPlacesBookmark(
          row['lastModified'], eventdata.EventTimestamp.MODIFICATION_TIME,
          row['id'], row['type'], row['bookmark_title'], row['url'],
          row['places_title'], row['rev_host'],
          row['visit_count'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

  def ParsePageVisitedRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      cache=None, database=None, **unused_kwargs):
    """Parses a page visited row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
      cache: A cache object (instance of SQLiteCache).
      database: A database object (instance of SQLiteDatabase).
    """
    # TODO: make extra conditional formatting.
    extras = []
    if row['from_visit']:
      extras.append(u'visited from: {0}'.format(
          self._GetUrl(row['from_visit'], cache, database)))

    # NOTE(review): these compare against the string '1' although the hidden
    # and typed columns presumably contain integers, in which case both
    # comparisons are always False — TODO confirm against the schema. The
    # existing unit tests codify the current behavior, so it is kept as-is.
    if row['hidden'] == '1':
      extras.append('(url hidden)')

    if row['typed'] == '1':
      extras.append('(directly typed)')
    else:
      extras.append('(URL not typed directly)')

    if row['visit_date']:
      event_object = FirefoxPlacesPageVisitedEvent(
          row['visit_date'], row['id'], row['url'], row['title'],
          self._ReverseHostname(row['rev_host']), row['visit_count'],
          row['visit_type'], extras)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

  def _ReverseHostname(self, hostname):
    """Reverses the hostname and strips the leading dot.

    The hostname entry is reversed:
      moc.elgoog.www.
    Should be:
      www.google.com

    Args:
      hostname: The reversed hostname.

    Returns:
      Reversed string without a leading dot.
    """
    if not hostname:
      return ''

    if len(hostname) > 1:
      if hostname[-1] == '.':
        # Drop the dot, which ends up leading after the reversal.
        return hostname[::-1][1:]
      else:
        return hostname[::-1]
    return hostname

  def _GetUrl(self, url_id, cache, database):
    """Return an URL from a reference to an entry in the from_visit table.

    The first call populates the cache with the result of URL_CACHE_QUERY;
    subsequent calls are plain dictionary lookups.

    Args:
      url_id: The identifier of the visit to look up.
      cache: A cache object (instance of SQLiteCache).
      database: A database object (instance of SQLiteDatabase).

    Returns:
      A Unicode string 'url (hostname)' or an empty string if unknown.
    """
    url_cache_results = cache.GetResults('url')
    if not url_cache_results:
      cursor = database.cursor
      result_set = cursor.execute(self.URL_CACHE_QUERY)
      cache.CacheQueryResults(
          result_set, 'url', 'id', ('url', 'rev_host'))
      url_cache_results = cache.GetResults('url')

    url, reverse_host = url_cache_results.get(url_id, [u'', u''])

    if not url:
      return u''

    hostname = self._ReverseHostname(reverse_host)
    return u'{:s} ({:s})'.format(url, hostname)
|
||||
|
||||
class FirefoxDownloadsPlugin(interface.SQLitePlugin):
  """Parses a Firefox downloads file.

  The Firefox downloads history is stored in a SQLite database file named
  downloads.sqlite.
  """

  NAME = 'firefox_downloads'
  DESCRIPTION = u'Parser for Firefox downloads SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      (('SELECT moz_downloads.id, moz_downloads.name, moz_downloads.source, '
        'moz_downloads.target, moz_downloads.tempPath, '
        'moz_downloads.startTime, moz_downloads.endTime, moz_downloads.state, '
        'moz_downloads.referrer, moz_downloads.currBytes, '
        'moz_downloads.maxBytes, moz_downloads.mimeType '
        'FROM moz_downloads'),
       'ParseDownloadsRow')]

  # The required tables.
  REQUIRED_TABLES = frozenset(['moz_downloads'])

  def ParseDownloadsRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a downloads row.

    Produces a start time and an end time event when the corresponding
    column contains a non-zero value.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    timestamp_pairs = (
        (row['startTime'], eventdata.EventTimestamp.START_TIME),
        (row['endTime'], eventdata.EventTimestamp.END_TIME))

    for timestamp, timestamp_description in timestamp_pairs:
      if not timestamp:
        continue
      event_object = FirefoxDownload(
          timestamp, timestamp_description,
          row['id'], row['name'], row['source'], row['referrer'],
          row['target'], row['tempPath'], row['currBytes'], row['maxBytes'],
          row['mimeType'])
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)
|
||||
|
||||
# Register the history and downloads plugins with the SQLite parser.
sqlite.SQLiteParser.RegisterPlugins(
    [FirefoxHistoryPlugin, FirefoxDownloadsPlugin])
|
||||
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parser for the Firefox Cookie database."""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import errors
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib
|
||||
# Register the cookie plugins.
|
||||
from plaso.parsers import cookie_plugins # pylint: disable=unused-import
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.cookie_plugins import interface as cookie_interface
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class FirefoxCookieEvent(time_events.TimestampEvent):
  """Event object for a single Firefox cookie entry."""

  DATA_TYPE = 'firefox:cookie:entry'

  def __init__(
      self, timestamp, usage, identifier, hostname, cookie_name, value, path,
      secure, httponly):
    """Initializes the event.

    Args:
      timestamp: The timestamp value in WebKit format.
      usage: Timestamp description string.
      identifier: The row identifier.
      hostname: The hostname of host that set the cookie value.
      cookie_name: The name field of the cookie.
      value: The value of the cookie.
      path: An URI of the page that set the cookie.
      secure: Indication if this cookie should only be transmitted over a
              secure channel.
      httponly: An indication that the cookie cannot be accessed through
                client side script.
    """
    super(FirefoxCookieEvent, self).__init__(timestamp, usage)

    # Domain cookies are stored with a leading dot ('.example.com');
    # strip it for the host attribute and the reconstructed URL.
    if hostname.startswith('.'):
      hostname = hostname[1:]

    self.cookie_name = cookie_name
    self.data = value
    self.host = hostname
    self.httponly = bool(httponly)
    self.offset = identifier
    self.path = path
    self.secure = bool(secure)

    scheme = u'https' if self.secure else u'http'
    self.url = u'{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)
|
||||
|
||||
class FirefoxCookiePlugin(interface.SQLitePlugin):
  """Parse Firefox Cookies file."""

  NAME = 'firefox_cookies'
  DESCRIPTION = u'Parser for Firefox cookies SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      (('SELECT id, baseDomain, name, value, host, path, expiry, lastAccessed, '
        'creationTime, isSecure, isHttpOnly FROM moz_cookies'),
       'ParseCookieRow')]

  # The required tables common to Archived History and History.
  REQUIRED_TABLES = frozenset(['moz_cookies'])

  # Point to few sources for URL information.
  URLS = [
      (u'https://hg.mozilla.org/mozilla-central/file/349a2f003529/netwerk/'
       u'cookie/nsCookie.h')]

  def __init__(self):
    """Initializes a plugin object."""
    super(FirefoxCookiePlugin, self).__init__()
    self._cookie_plugins = cookie_interface.GetPlugins()

  def ParseCookieRow(
      self, parser_context, row, file_entry=None, parser_chain=None,
      query=None, **unused_kwargs):
    """Parses a cookie row.

    Produces a creation time, last accessed and expiry event per row, then
    hands the cookie data to the registered cookie plugins.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    # The non-timestamp arguments shared by the three event objects.
    cookie_fields = (
        row['id'], row['host'], row['name'], row['value'], row['path'],
        row['isSecure'], row['isHttpOnly'])

    if row['creationTime']:
      event_object = FirefoxCookieEvent(
          row['creationTime'], eventdata.EventTimestamp.CREATION_TIME,
          *cookie_fields)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    if row['lastAccessed']:
      event_object = FirefoxCookieEvent(
          row['lastAccessed'], eventdata.EventTimestamp.ACCESS_TIME,
          *cookie_fields)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    if row['expiry']:
      # Expiry time (nsCookieService::GetExpiry in
      # netwerk/cookie/nsCookieService.cpp).
      # It's calculated as the difference between the server time and the time
      # the server wants the cookie to expire and adding that difference to the
      # client time. This localizes the client time regardless of whether or
      # not the TZ environment variable was set on the client.
      timestamp = timelib.Timestamp.FromPosixTime(row['expiry'])
      event_object = FirefoxCookieEvent(
          timestamp, u'Cookie Expires', *cookie_fields)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

    # Go through all cookie plugins to see if there are is any specific parsing
    # needed.
    hostname = row['host']
    if hostname.startswith('.'):
      hostname = hostname[1:]
    url = u'http{0:s}://{1:s}{2:s}'.format(
        u's' if row['isSecure'] else u'', hostname, row['path'])

    for cookie_plugin in self._cookie_plugins:
      try:
        cookie_plugin.Process(
            parser_context, cookie_name=row['name'], cookie_data=row['value'],
            url=url, file_entry=file_entry, parser_chain=parser_chain)
      except errors.WrongPlugin:
        # The plugin does not handle this cookie; best-effort, keep going.
        pass
|
||||
|
||||
# Register the cookie plugin with the SQLite parser.
sqlite.SQLiteParser.RegisterPlugin(FirefoxCookiePlugin)
|
||||
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Firefox cookie database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import firefox_cookies as firefox_cookies_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import firefox_cookies
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class FirefoxCookiesPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Firefox cookie database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = firefox_cookies.FirefoxCookiePlugin()

  def testProcess(self):
    """Tests the Process function on a Firefox 29 cookie database file."""
    test_file = self._GetTestFilePath(['firefox_cookies.sqlite'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)

    event_objects = []
    extra_objects = []

    # sqlite> SELECT COUNT(id) FROM moz_cookies;
    # 90
    # Thus the cookie database yields 90 * 3 = 270 cookie events:
    # 90 Creation Time
    # 90 Last Access Time
    # 90 Cookie Expires
    #
    # And then in addition the following entries are added due to cookie
    # plugins (TODO filter these out since adding new cookie plugin will
    # change this number and thus affect this test):
    # 15 Last Visited Time
    # 5 Analytics Previous Time
    # 5 Analytics Creation Time
    #
    # In total: 90 * 3 + 15 + 5 + 5 = 295 events.
    for event_object in self._GetEventObjectsFromQueue(event_queue_consumer):
      if isinstance(event_object, firefox_cookies.FirefoxCookieEvent):
        event_objects.append(event_object)
      else:
        extra_objects.append(event_object)

    # Note: assertEquals is a deprecated alias of assertEqual; use the
    # canonical name throughout.
    self.assertEqual(len(event_objects), 90 * 3)
    self.assertGreaterEqual(len(extra_objects), 25)

    # Check one greenqloud.com event
    event_object = event_objects[32]
    self.assertEqual(
        event_object.timestamp_desc, 'Cookie Expires')
    self.assertEqual(event_object.host, u's.greenqloud.com')
    self.assertEqual(event_object.cookie_name, u'__utma')
    self.assertFalse(event_object.httponly)
    self.assertEqual(event_object.url, u'http://s.greenqloud.com/')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2015-10-30 21:56:03')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_msg = (
        u'http://s.greenqloud.com/ (__utma) Flags: [HTTP only]: False')
    expected_short = u's.greenqloud.com (__utma)'
    self._TestGetMessageStrings(event_object, expected_msg, expected_short)

    # Check one of the visits to pubmatic.com.
    event_object = event_objects[62]
    self.assertEqual(
        event_object.timestamp_desc, u'Cookie Expires')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-11-29 21:56:04')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.url, u'http://pubmatic.com/')
    self.assertEqual(event_object.path, u'/')
    self.assertFalse(event_object.secure)

    expected_msg = (
        u'http://pubmatic.com/ (KRTBCOOKIE_391) Flags: [HTTP only]: False')
    self._TestGetMessageStrings(
        event_object, expected_msg, u'pubmatic.com (KRTBCOOKIE_391)')
|
||||
|
||||
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Mozilla Firefox history database plugin."""
|
||||
|
||||
import collections
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import firefox as firefox_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import firefox
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class FirefoxHistoryPluginTest(test_lib.SQLitePluginTestCase):
|
||||
"""Tests for the Mozilla Firefox history database plugin."""
|
||||
|
||||
def setUp(self):
|
||||
"""Sets up the needed objects used throughout the test."""
|
||||
self._plugin = firefox.FirefoxHistoryPlugin()
|
||||
|
||||
def testProcessPriorTo24(self):
|
||||
"""Tests the Process function on a Firefox History database file."""
|
||||
# This is probably version 23 but potentially an older version.
|
||||
test_file = self._GetTestFilePath(['places.sqlite'])
|
||||
cache = sqlite.SQLiteCache()
|
||||
event_queue_consumer = self._ParseDatabaseFileWithPlugin(
|
||||
self._plugin, test_file, cache)
|
||||
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
|
||||
|
||||
# The places.sqlite file contains 205 events (1 page visit,
|
||||
# 2 x 91 bookmark records, 2 x 3 bookmark annotations,
|
||||
# 2 x 8 bookmark folders).
|
||||
# However there are three events that do not have a timestamp
|
||||
# so the test file will show 202 extracted events.
|
||||
self.assertEquals(len(event_objects), 202)
|
||||
|
||||
# Check the first page visited event.
|
||||
event_object = event_objects[0]
|
||||
|
||||
self.assertEquals(event_object.data_type, 'firefox:places:page_visited')
|
||||
|
||||
self.assertEquals(event_object.timestamp_desc,
|
||||
eventdata.EventTimestamp.PAGE_VISITED)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
'2011-07-01 11:16:21.371935')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
expected_url = u'http://news.google.com/'
|
||||
self.assertEquals(event_object.url, expected_url)
|
||||
|
||||
expected_title = u'Google News'
|
||||
self.assertEquals(event_object.title, expected_title)
|
||||
|
||||
expected_msg = (
|
||||
u'{0:s} ({1:s}) [count: 1] Host: news.google.com '
|
||||
u'(URL not typed directly) Transition: TYPED').format(
|
||||
expected_url, expected_title)
|
||||
expected_short = u'URL: {}'.format(expected_url)
|
||||
|
||||
self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
# Check the first bookmark event.
|
||||
event_object = event_objects[1]
|
||||
|
||||
self.assertEquals(event_object.data_type, 'firefox:places:bookmark')
|
||||
|
||||
self.assertEquals(event_object.timestamp_desc,
|
||||
eventdata.EventTimestamp.ADDED_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-07-01 11:13:59.266344+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
# Check the second bookmark event.
|
||||
event_object = event_objects[2]
|
||||
|
||||
self.assertEquals(event_object.data_type, 'firefox:places:bookmark')
|
||||
|
||||
self.assertEquals(event_object.timestamp_desc,
|
||||
eventdata.EventTimestamp.MODIFICATION_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-07-01 11:13:59.267198+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
expected_url = (
|
||||
u'place:folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder=TOOLBAR&'
|
||||
u'sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation=livemark%2F'
|
||||
u'feedURI&maxResults=10&queryType=1')
|
||||
self.assertEquals(event_object.url, expected_url)
|
||||
|
||||
expected_title = u'Recently Bookmarked'
|
||||
self.assertEquals(event_object.title, expected_title)
|
||||
|
||||
expected_msg = (
|
||||
u'Bookmark URL {0:s} ({1:s}) [folder=BOOKMARKS_MENU&'
|
||||
u'folder=UNFILED_BOOKMARKS&folder=TOOLBAR&sort=12&excludeQueries=1&'
|
||||
u'excludeItemIfParentHasAnnotation=livemark%2FfeedURI&maxResults=10&'
|
||||
u'queryType=1] visit count 0').format(
|
||||
expected_title, expected_url)
|
||||
expected_short = (
|
||||
u'Bookmarked Recently Bookmarked '
|
||||
u'(place:folder=BOOKMARKS_MENU&folder=UNFILED_BO...')
|
||||
|
||||
self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
# Check the first bookmark annotation event.
|
||||
event_object = event_objects[183]
|
||||
|
||||
self.assertEquals(
|
||||
event_object.data_type, 'firefox:places:bookmark_annotation')
|
||||
|
||||
self.assertEquals(
|
||||
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-07-01 11:13:59.267146+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
# Check another bookmark annotation event.
|
||||
event_object = event_objects[184]
|
||||
|
||||
self.assertEquals(
|
||||
event_object.data_type, 'firefox:places:bookmark_annotation')
|
||||
|
||||
self.assertEquals(
|
||||
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-07-01 11:13:59.267605+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
expected_url = u'place:sort=14&type=6&maxResults=10&queryType=1'
|
||||
self.assertEquals(event_object.url, expected_url)
|
||||
|
||||
expected_title = u'Recent Tags'
|
||||
self.assertEquals(event_object.title, expected_title)
|
||||
|
||||
expected_msg = (
|
||||
u'Bookmark Annotation: [RecentTags] to bookmark '
|
||||
u'[{0:s}] ({1:s})').format(
|
||||
expected_title, expected_url)
|
||||
expected_short = u'Bookmark Annotation: Recent Tags'
|
||||
self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
# Check the second last bookmark folder event.
|
||||
event_object = event_objects[200]
|
||||
|
||||
self.assertEquals(event_object.data_type, 'firefox:places:bookmark_folder')
|
||||
|
||||
self.assertEquals(
|
||||
event_object.timestamp_desc, eventdata.EventTimestamp.ADDED_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-03-21 10:05:01.553774+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
# Check the last bookmark folder event.
|
||||
event_object = event_objects[201]
|
||||
|
||||
self.assertEquals(
|
||||
event_object.data_type, 'firefox:places:bookmark_folder')
|
||||
|
||||
self.assertEquals(
|
||||
event_object.timestamp_desc,
|
||||
eventdata.EventTimestamp.MODIFICATION_TIME)
|
||||
|
||||
expected_timestamp = timelib_test.CopyStringToTimestamp(
|
||||
u'2011-07-01 11:14:11.766851+00:00')
|
||||
self.assertEquals(event_object.timestamp, expected_timestamp)
|
||||
|
||||
expected_title = u'Latest Headlines'
|
||||
self.assertEquals(event_object.title, expected_title)
|
||||
|
||||
expected_msg = expected_title
|
||||
expected_short = expected_title
|
||||
self._TestGetMessageStrings(event_object, expected_msg, expected_short)
|
||||
|
||||
def testProcessVersion25(self):
  """Tests the Process function on a Firefox History database file v 25.

  The places_new.sqlite test file uses the schema introduced by
  Firefox 25.
  """
  test_file = self._GetTestFilePath(['places_new.sqlite'])
  cache = sqlite.SQLiteCache()
  event_queue_consumer = self._ParseDatabaseFileWithPlugin(
      self._plugin, test_file, cache)
  event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

  # The places.sqlite file contains 84 events:
  # 34 page visits.
  # 28 bookmarks
  # 14 bookmark folders
  # 8 annotations
  self.assertEqual(len(event_objects), 84)

  # Tally the events per data type to verify the expected distribution.
  counter = collections.Counter()
  for event_object in event_objects:
    counter[event_object.data_type] += 1

  self.assertEqual(counter['firefox:places:bookmark'], 28)
  self.assertEqual(counter['firefox:places:page_visited'], 34)
  self.assertEqual(counter['firefox:places:bookmark_folder'], 14)
  self.assertEqual(counter['firefox:places:bookmark_annotation'], 8)

  random_event = event_objects[10]

  expected_timestamp = timelib_test.CopyStringToTimestamp(
      '2013-10-30 21:57:11.281942')
  self.assertEqual(random_event.timestamp, expected_timestamp)

  expected_short = u'URL: http://code.google.com/p/plaso'
  expected_msg = (
      u'http://code.google.com/p/plaso [count: 1] Host: code.google.com '
      u'(URL not typed directly) Transition: TYPED')

  self._TestGetMessageStrings(random_event, expected_msg, expected_short)
|
||||
|
||||
|
||||
class FirefoxDownloadsPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Mozilla Firefox downloads database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = firefox.FirefoxDownloadsPlugin()

  def testProcessVersion25(self):
    """Tests the Process function on a Firefox Downloads database file."""
    test_file = self._GetTestFilePath(['downloads.sqlite'])
    cache = sqlite.SQLiteCache()
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file, cache)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The downloads.sqlite file contains 2 events (1 download).
    self.assertEqual(len(event_objects), 2)

    # Check the first event, the start of the download.
    event_object = event_objects[0]

    self.assertEqual(event_object.data_type, 'firefox:downloads:download')

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.START_TIME)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        u'2013-07-18 18:59:59.312000+00:00')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_url = (
        u'https://plaso.googlecode.com/files/'
        u'plaso-static-1.0.1-win32-vs2008.zip')
    self.assertEqual(event_object.url, expected_url)

    expected_full_path = u'file:///D:/plaso-static-1.0.1-win32-vs2008.zip'
    self.assertEqual(event_object.full_path, expected_full_path)

    self.assertEqual(event_object.received_bytes, 15974599)
    self.assertEqual(event_object.total_bytes, 15974599)
|
||||
|
||||
|
||||
# Allow running this test module directly, e.g. "python firefox_test.py".
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,268 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Google Drive snaphots.
|
||||
|
||||
The Google Drive snapshots are stored in SQLite database files named
|
||||
snapshot.db.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
__author__ = 'David Nides (david.nides@gmail.com)'
|
||||
|
||||
|
||||
class GoogleDriveSnapshotCloudEntryEvent(time_events.PosixTimeEvent):
  """Event for a single Google Drive snapshot cloud entry."""

  DATA_TYPE = 'gdrive:snapshot:cloud_entry'

  # TODO: this could be moved to the formatter.
  # The following definition for values can be found on Patrick Olson's blog:
  # http://www.sysforensics.org/2012/05/google-drive-forensics-notes.html
  _DOC_TYPES = {
      0: u'FOLDER', 1: u'FILE',
      2: u'PRESENTATION', 3: u'UNKNOWN',
      4: u'SPREADSHEET', 5: u'DRAWING',
      6: u'DOCUMENT', 7: u'TABLE'}

  def __init__(self, posix_time, usage, url, path, size, doc_type, shared):
    """Initializes a cloud entry event.

    Args:
      posix_time: The POSIX time value.
      usage: The description of the usage of the time value.
      url: The URL of the file as in the cloud.
      path: The path of the file.
      size: The size of the file.
      doc_type: Integer value containing the document type.
      shared: A string indicating whether or not this is a shared document.
    """
    super(GoogleDriveSnapshotCloudEntryEvent, self).__init__(
        posix_time, usage)

    # Unknown document type values map to u'UNKNOWN'.
    self.document_type = self._DOC_TYPES.get(doc_type, u'UNKNOWN')
    self.path = path
    self.shared = shared
    self.size = size
    self.url = url
|
||||
|
||||
|
||||
class GoogleDriveSnapshotLocalEntryEvent(time_events.PosixTimeEvent):
  """Event for a single Google Drive snapshot local entry."""

  DATA_TYPE = 'gdrive:snapshot:local_entry'

  def __init__(self, posix_time, local_path, size):
    """Initializes a local entry event.

    Args:
      posix_time: The POSIX time value.
      local_path: The local path of the file.
      size: The size of the file.
    """
    # Local entries only carry a modification time.
    super(GoogleDriveSnapshotLocalEntryEvent, self).__init__(
        posix_time, eventdata.EventTimestamp.MODIFICATION_TIME)

    self.size = size
    self.path = local_path
|
||||
|
||||
|
||||
class GoogleDrivePlugin(interface.SQLitePlugin):
  """SQLite plugin for Google Drive snapshot.db files."""

  NAME = 'google_drive'
  DESCRIPTION = u'Parser for Google Drive SQLite database files.'

  # Define the needed queries.
  QUERIES = [
      ((u'SELECT e.resource_id, e.filename, e.modified, e.created, e.size, '
        u'e.doc_type, e.shared, e.checksum, e.url, r.parent_resource_id FROM '
        u'cloud_entry AS e, cloud_relations AS r WHERE r.child_resource_id = '
        u'e.resource_id AND e.modified IS NOT NULL;'), 'ParseCloudEntryRow'),
      ((u'SELECT inode_number, filename, modified, checksum, size FROM '
        u'local_entry WHERE modified IS NOT NULL;'), 'ParseLocalEntryRow')]

  # The required tables.
  REQUIRED_TABLES = frozenset([
      'cloud_entry', 'cloud_relations', 'local_entry', 'local_relations',
      'mapping', 'overlay_status'])

  # Queries used to build cache.
  LOCAL_PATH_CACHE_QUERY = (
      u'SELECT r.child_inode_number, r.parent_inode_number, e.filename FROM '
      u'local_relations AS r, local_entry AS e WHERE r.child_inode_number = '
      u'e.inode_number')
  CLOUD_PATH_CACHE_QUERY = (
      u'SELECT e.filename, e.resource_id, r.parent_resource_id AS parent '
      u'FROM cloud_entry AS e, cloud_relations AS r WHERE e.doc_type = 0 '
      u'AND e.resource_id = r.child_resource_id')

  def GetLocalPath(self, inode, cache, database):
    """Return local path for a given inode.

    Args:
      inode: The inode number for the file.
      cache: A cache object (instance of SQLiteCache).
      database: A database object (instance of SQLiteDatabase).

    Returns:
      A full path, including the filename of the given inode value.
    """
    local_path = cache.GetResults('local_path')
    if not local_path:
      # Populate the cache on first use: maps child_inode_number to
      # (parent_inode_number, filename).
      cursor = database.cursor
      results = cursor.execute(self.LOCAL_PATH_CACHE_QUERY)
      cache.CacheQueryResults(
          results, 'local_path', 'child_inode_number',
          ('parent_inode_number', 'filename'))
      local_path = cache.GetResults('local_path')

    parent, path = local_path.get(inode, [None, None])

    # TODO: Read the local_sync_root from the sync_config.db and use that
    # for a root value.
    root_value = u'%local_sync_root%/'

    if not path:
      return root_value

    # Walk up the parent chain collecting path segments.
    paths = []
    while path:
      paths.append(path)
      parent, path = local_path.get(parent, [None, None])

    if not paths:
      return root_value

    # Paths are built top level to root so we need to reverse the list to
    # represent them in the traditional order.
    paths.reverse()
    return root_value + u'/'.join(paths)

  def GetCloudPath(self, resource_id, cache, database):
    """Return cloud path given a resource id.

    Args:
      resource_id: The resource_id for the file.
      cache: The local cache object.
      database: A database object (instance of SQLiteDatabase).

    Returns:
      A full path to the resource value.
    """
    cloud_path = cache.GetResults('cloud_path')
    if not cloud_path:
      # Populate the cache on first use: maps resource_id to
      # (filename, parent resource id).
      cursor = database.cursor
      results = cursor.execute(self.CLOUD_PATH_CACHE_QUERY)
      cache.CacheQueryResults(
          results, 'cloud_path', 'resource_id', ('filename', 'parent'))
      cloud_path = cache.GetResults('cloud_path')

    if resource_id == u'folder:root':
      return u'/'

    # Walk up the parent chain collecting folder names. Note: the cache maps
    # resource_id to (filename, parent resource id); earlier local names
    # suggested the first element was a path, which was misleading.
    paths = []
    filename, parent_resource_id = cloud_path.get(resource_id, [u'', u''])
    while filename:
      if filename == u'folder:root':
        break
      paths.append(filename)
      filename, parent_resource_id = cloud_path.get(
          parent_resource_id, [u'', u''])

    if not paths:
      return u'/'

    # Paths are built top level to root so we need to reverse the list to
    # represent them in the traditional order.
    paths.reverse()
    return u'/{0:s}/'.format(u'/'.join(paths))

  def ParseCloudEntryRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      cache=None, database=None, **unused_kwargs):
    """Parses a cloud entry row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
      cache: The local cache object.
      database: The database object.
    """
    cloud_path = self.GetCloudPath(row['parent_resource_id'], cache, database)
    cloud_filename = u'{0:s}{1:s}'.format(cloud_path, row['filename'])

    if row['shared']:
      shared = 'Shared'
    else:
      shared = 'Private'

    # The query guarantees a modification time; a creation time is optional
    # and produces a second event when present.
    event_object = GoogleDriveSnapshotCloudEntryEvent(
        row['modified'], eventdata.EventTimestamp.MODIFICATION_TIME,
        row['url'], cloud_filename, row['size'], row['doc_type'], shared)
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)

    if row['created']:
      event_object = GoogleDriveSnapshotCloudEntryEvent(
          row['created'], eventdata.EventTimestamp.CREATION_TIME,
          row['url'], cloud_filename, row['size'], row['doc_type'], shared)
      parser_context.ProduceEvent(
          event_object, query=query, parser_chain=parser_chain,
          file_entry=file_entry)

  def ParseLocalEntryRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      cache=None, database=None, **unused_kwargs):
    """Parses a local entry row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
      cache: The local cache object (instance of SQLiteCache).
      database: A database object (instance of SQLiteDatabase).
    """
    local_path = self.GetLocalPath(row['inode_number'], cache, database)

    event_object = GoogleDriveSnapshotLocalEntryEvent(
        row['modified'], local_path, row['size'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Make the plugin discoverable by the SQLite parser.
sqlite.SQLiteParser.RegisterPlugin(GoogleDrivePlugin)
|
||||
@@ -0,0 +1,104 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Google Drive database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import gdrive as gdrive_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import gdrive
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Google Drive database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = gdrive.GoogleDrivePlugin()

  def testProcess(self):
    """Tests the Process function on a Google Drive database file."""
    test_file = self._GetTestFilePath(['snapshot.db'])
    cache = sqlite.SQLiteCache()
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file, cache=cache)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    self.assertEqual(len(event_objects), 30)

    # Let's verify that we've got the correct balance of cloud and local
    # entry events.
    # 10 files mounting to:
    # 20 Cloud Entries (two timestamps per file).
    # 10 Local Entries (one timestamp per file).
    local_entries = []
    cloud_entries = []
    for event_object in event_objects:
      if event_object.data_type == 'gdrive:snapshot:local_entry':
        local_entries.append(event_object)
      else:
        cloud_entries.append(event_object)
    self.assertEqual(len(local_entries), 10)
    self.assertEqual(len(cloud_entries), 20)

    # Test one local and one cloud entry.
    event_object = local_entries[5]

    file_path = (
        u'%local_sync_root%/Top Secret/Enn meiri '
        u'leyndarm\xe1l/S\xfdnileiki - \xd6rverpi.gdoc')
    self.assertEqual(event_object.path, file_path)

    expected_msg = u'File Path: {0:s} Size: 184'.format(file_path)

    self._TestGetMessageStrings(event_object, expected_msg, file_path)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2014-01-28 00:11:25')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    event_object = cloud_entries[16]

    self.assertEqual(event_object.document_type, u'DOCUMENT')
    self.assertEqual(
        event_object.timestamp_desc,
        eventdata.EventTimestamp.MODIFICATION_TIME)
    self.assertEqual(event_object.url, (
        u'https://docs.google.com/document/d/'
        u'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api'))

    expected_msg = (
        u'File Path: /Almenningur/Saklausa hli\xf0in [Private] Size: 0 URL: '
        u'https://docs.google.com/document/d/'
        u'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api '
        u'Type: DOCUMENT')
    expected_short = u'/Almenningur/Saklausa hli\xf0in'

    self._TestGetMessageStrings(event_object, expected_msg, expected_short)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2014-01-28 00:12:27')
    self.assertEqual(event_object.timestamp, expected_timestamp)
|
||||
|
||||
|
||||
# Allow running this test module directly, e.g. "python gdrive_test.py".
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a SQLite parser."""
|
||||
|
||||
import logging
|
||||
|
||||
import sqlite3
|
||||
|
||||
from plaso.lib import errors
|
||||
from plaso.parsers import plugins
|
||||
|
||||
|
||||
class SQLitePlugin(plugins.BasePlugin):
  """A SQLite plugin for Plaso."""

  NAME = 'sqlite'
  DESCRIPTION = u'Parser for SQLite database files.'

  # Queries to be executed.
  # Should be a list of tuples with two entries, SQLCommand and callback
  # function name.
  QUERIES = []

  # List of tables that should be present in the database, for verification.
  REQUIRED_TABLES = frozenset([])

  def GetEntries(
      self, parser_context, file_entry=None, parser_chain=None, cache=None,
      database=None, **kwargs):
    """Extracts event objects from a SQLite database.

    Runs every query in QUERIES and dispatches each result row to the
    query's callback method, which is expected to produce the events.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      cache: A SQLiteCache object.
      database: A database object (instance of SQLiteDatabase).
    """
    for query, callback_method in self.QUERIES:
      try:
        callback = getattr(self, callback_method, None)
        if callback is None:
          # A misconfigured plugin should not abort the remaining queries.
          logging.warning(
              u'[{0:s}] missing callback method: {1:s} for query: {2:s}'.format(
                  self.NAME, callback_method, query))
          continue

        cursor = database.cursor
        sql_results = cursor.execute(query)
        row = sql_results.fetchone()

        while row:
          callback(
              parser_context, row, query=query, cache=cache, database=database,
              file_entry=file_entry, parser_chain=parser_chain)

          row = sql_results.fetchone()

      except sqlite3.DatabaseError as exception:
        # A malformed database should not abort processing of the remaining
        # queries, hence log and continue.
        logging.debug(u'SQLite error occurred: {0:s}'.format(exception))

  def Process(
      self, parser_context, file_entry=None, parser_chain=None, cache=None,
      database=None, **kwargs):
    """Determine if this is the right plugin for this database.

    This function takes a SQLiteDatabase object and compares the list
    of required tables against the available tables in the database.
    If all the tables defined in REQUIRED_TABLES are present in the
    database then this plugin is considered to be the correct plugin
    and the events are extracted via GetEntries.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      cache: A SQLiteCache object.
      database: A database object (instance of SQLiteDatabase).

    Raises:
      errors.WrongPlugin: If the database does not contain all the tables
                          defined in the REQUIRED_TABLES set.
      ValueError: If the database attribute is not passed in.
    """
    if database is None:
      raise ValueError(u'Database is not set.')

    # The plugin only matches when every required table is present.
    if not self.REQUIRED_TABLES.issubset(database.tables):
      raise errors.WrongPlugin(
          u'Not the correct database tables for: {0:s}'.format(self.NAME))

    # This will raise if unhandled keyword arguments are passed.
    super(SQLitePlugin, self).Process(parser_context, **kwargs)

    # Add ourselves to the parser chain, which will be used in all subsequent
    # event creation in this parser.
    parser_chain = self._BuildParserChain(parser_chain)

    self.GetEntries(
        parser_context, cache=cache, database=database, file_entry=file_entry,
        parser_chain=parser_chain)
|
||||
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2012 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Plugin for the Mac OS X launch services quarantine events."""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class LsQuarantineEvent(time_events.CocoaTimeEvent):
  """Event for a single Mac OS X launch services quarantine record."""

  DATA_TYPE = 'macosx:lsquarantine'

  # TODO: describe more clearly what the data value contains.
  def __init__(self, cocoa_time, url, user_agent, data):
    """Initializes a quarantine event.

    Args:
      cocoa_time: The Cocoa time value.
      url: The original URL of the file.
      user_agent: The user agent that was used to download the file.
      data: The data.
    """
    # Quarantine records describe downloads, hence FILE_DOWNLOADED.
    super(LsQuarantineEvent, self).__init__(
        cocoa_time, eventdata.EventTimestamp.FILE_DOWNLOADED)

    self.agent = user_agent
    self.data = data
    self.url = url
|
||||
|
||||
|
||||
class LsQuarantinePlugin(interface.SQLitePlugin):
  """Parses the launch services quarantine events database.

  The LS quarantine events are stored in SQLite database files named
  /Users/<username>/Library/Preferences/\
  QuarantineEvents.com.apple.LaunchServices
  """

  NAME = 'ls_quarantine'
  DESCRIPTION = u'Parser for LS quarantine events SQLite database files.'

  # Define the needed queries.
  # NOTE: the adjacent string literals concatenate to
  # "... LSQuarantineAgentName AS Agent ..." - the split in the middle of
  # the identifier is intentional line wrapping, not a typo.
  QUERIES = [
      (('SELECT LSQuarantineTimestamp AS Time, LSQuarantine'
        'AgentName AS Agent, LSQuarantineOriginURLString AS URL, '
        'LSQuarantineDataURLString AS Data FROM LSQuarantineEvent '
        'ORDER BY Time'), 'ParseLSQuarantineRow')]

  # The required tables.
  REQUIRED_TABLES = frozenset(['LSQuarantineEvent'])

  def ParseLSQuarantineRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a launch services quarantine event row.

    Produces one LsQuarantineEvent per result row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      parser_chain: Optional string containing the parsing chain up to this
                    point. The default is None.
      query: Optional query string. The default is None.
    """
    event_object = LsQuarantineEvent(
        row['Time'], row['URL'], row['Agent'], row['Data'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
sqlite.SQLiteParser.RegisterPlugin(LsQuarantinePlugin)
|
||||
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the LS Quarantine database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import ls_quarantine as ls_quarantine_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import ls_quarantine
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class LSQuarantinePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the LS Quarantine database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = ls_quarantine.LsQuarantinePlugin()

  def testProcess(self):
    """Tests the Process function on a LS Quarantine database file."""
    test_file = self._GetTestFilePath(['quarantine.db'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The quarantine database contains 14 event_objects.
    self.assertEqual(len(event_objects), 14)

    # Examine a VLC event.
    event_object = event_objects[3]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-08 21:12:03')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.agent, u'Google Chrome')
    vlc_url = (
        u'http://download.cnet.com/VLC-Media-Player/3001-2139_4-10210434.html'
        u'?spi=40ab24d3c71594a5017d74be3b0c946c')
    self.assertEqual(event_object.url, vlc_url)

    self.assertIn(u'vlc-2.0.7-intel64.dmg', event_object.data)

    # Examine a MacKeeper event.
    event_object = event_objects[9]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-12 19:28:58')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    # Examine a SpeedTest event.
    event_object = event_objects[10]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-12 19:30:16')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    speedtest_message = (
        u'[Google Chrome] Downloaded: http://mackeeperapp.zeobit.com/aff/'
        u'speedtest.net.6/download.php?affid=460245286&trt=5&utm_campaign='
        u'3ES&tid_ext=P107fSKcSfqpMbcP3sI4fhKmeMchEB3dkAGpX4YIsvM;US;L;1 '
        u'<http://download.mackeeper.zeobit.com/package.php?'
        u'key=460245286&trt=5&landpr=Speedtest>')
    speedtest_short = (
        u'http://mackeeperapp.zeobit.com/aff/speedtest.net.6/download.php?'
        u'affid=4602452...')

    self._TestGetMessageStrings(
        event_object, speedtest_message, speedtest_short)
|
||||
|
||||
|
||||
# Allow running this test module directly, e.g. "python ls_quarantine_test.py".
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parser for the Mac OS X Document Versions files."""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
__author__ = 'Joaquin Moreno Garijo (Joaquin.MorenoGarijo.2013@live.rhul.ac.uk)'
|
||||
|
||||
|
||||
class MacDocumentVersionsEvent(time_events.PosixTimeEvent):
  """Convenience class for an entry from the Document Versions database."""

  DATA_TYPE = 'mac:document_versions:file'

  def __init__(self, posix_time, name, path, version_path, last_time, user_sid):
    """Initializes the event object.

    Args:
      posix_time: The POSIX time value (creation time of the version).
      name: name of the original file.
      path: path from the original file.
      version_path: path to the version copy of the original file.
      last_time: the last time the file was seen (the plugin query selects
          file_last_seen into this field).
      user_sid: the system user ID of the user that opened the file.
    """
    super(MacDocumentVersionsEvent, self).__init__(
        posix_time, eventdata.EventTimestamp.CREATION_TIME)

    self.name = name
    self.path = path
    self.version_path = version_path
    # TODO: shouldn't this be a separate event?
    self.last_time = last_time
    # Stored as a Unicode string for consistent downstream formatting.
    self.user_sid = unicode(user_sid)
|
||||
|
||||
|
||||
class MacDocumentVersionsPlugin(interface.SQLitePlugin):
  """Parse the Mac OS X Document Versions SQLite database."""

  NAME = 'mac_document_versions'
  DESCRIPTION = u'Parser for document revisions SQLite database files.'

  # Define the needed queries.
  # name: name from the original file.
  # path: path from the original file (include the file)
  # last_time: last time when the file was replicated.
  # version_path: path where the version is stored.
  # version_time: the timestamp when the version was created.
  QUERIES = [
      (('SELECT f.file_name AS name, f.file_path AS path, '
        'f.file_last_seen AS last_time, g.generation_path AS version_path, '
        'g.generation_add_time AS version_time FROM files f, generations g '
        'WHERE f.file_storage_id = g.generation_storage_id;'),
       'DocumentVersionsRow')]

  # The required tables for the query.
  REQUIRED_TABLES = frozenset(['files', 'generations'])

  # The SQL field path is the relative path from DocumentRevisions.
  # For this reason the Path to the program has to be added at the beginning.
  ROOT_VERSION_PATH = u'/.DocumentRevisions-V100/'

  def DocumentVersionsRow(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a document versions row and produces one event per row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
          The default is None.
      parser_chain: Optional string containing the parsing chain up to this
          point. The default is None.
      query: Optional query string. The default is None.
    """
    # version_path = "PerUser/UserID/xx/client_id/version_file"
    # where PerUser and UserID are real directories.
    paths = row['version_path'].split(u'/')
    # The user ID is expected as the second path segment; any other layout
    # means no user SID can be derived from the version path.
    if len(paths) < 2 or not paths[1].isdigit():
      user_sid = None
    else:
      user_sid = paths[1]
    # Prefix the DocumentRevisions root since the stored path is relative.
    version_path = self.ROOT_VERSION_PATH + row['version_path']
    # Strip the filename to keep only the directory of the original file.
    path, _, _ = row['path'].rpartition(u'/')

    event_object = MacDocumentVersionsEvent(
        row['version_time'], row['name'], path, version_path,
        row['last_time'], user_sid)
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)


sqlite.SQLiteParser.RegisterPlugin(MacDocumentVersionsPlugin)
|
||||
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Mac OS X Document Versions plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import mac_document_versions as mac_doc_rev_formatter
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import mac_document_versions
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class MacDocumentVersionsTest(test_lib.SQLitePluginTestCase):
  """Tests for the Mac OS X Document Versions plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = mac_document_versions.MacDocumentVersionsPlugin()

  def testProcess(self):
    """Tests the Process function on a Mac OS X Document Versions file."""
    test_file = self._GetTestFilePath(['document_versions.sql'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # Note: assertEqual is used instead of the deprecated assertEquals alias.
    self.assertEqual(len(event_objects), 4)

    # Check the first document version entry.
    event_object = event_objects[0]

    self.assertEqual(
        event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2014-01-21 02:03:00')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    self.assertEqual(event_object.name, u'Spain is beautiful.rtf')
    self.assertEqual(event_object.path, u'/Users/moxilo/Documents')
    self.assertEqual(event_object.user_sid, u'501')
    expected_version_path = (
        u'/.DocumentRevisions-V100/PerUID/501/1/'
        u'com.apple.documentVersions/'
        u'08CFEB5A-5CDA-486F-AED5-EA35BF3EE4C2.rtf')
    self.assertEqual(event_object.version_path, expected_version_path)

    expected_msg = (
        u'Version of [{0:s}] ({1:s}) stored in {2:s} by {3:s}'.format(
            event_object.name, event_object.path,
            event_object.version_path, event_object.user_sid))
    expected_short = u'Stored a document version of [{0:s}]'.format(
        event_object.name)
    self._TestGetMessageStrings(event_object, expected_msg, expected_short)


if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,229 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a parser for the Mac OS X MacKeeper cache database."""
|
||||
|
||||
import json
|
||||
|
||||
from plaso.lib import event
|
||||
from plaso.lib import eventdata
|
||||
from plaso.lib import timelib
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
def DictToList(data_dict):
  """Take a dict object and return a list of strings back.

  Args:
    data_dict: A dict object to be converted.

  Returns:
    A list of "key = value" strings. Keys that are stored as dedicated
    event attributes (body, datetime, type, room, rooms, id) are skipped.
  """
  ret_list = []
  # Use items() instead of iteritems(): behavior is identical on Python 2
  # and the function also works on Python 3, where iteritems() was removed.
  for key, value in data_dict.items():
    if key in ('body', 'datetime', 'type', 'room', 'rooms', 'id'):
      continue
    ret_list.append(u'{0:s} = {1!s}'.format(key, value))

  return ret_list
|
||||
|
||||
|
||||
def ExtractJQuery(jquery_raw):
  """Extract and return the data inside a JQuery as a dict object.

  Args:
    jquery_raw: the raw JQuery/JSONP string stored in the cache record.

  Returns:
    A dict with the decoded JSON payload, or an empty dict when no
    payload could be extracted or decoded.
  """
  if not jquery_raw:
    return {}

  json_text = u''
  if '[' in jquery_raw:
    # Payload wrapped in a JSON list: keep what lies between the brackets.
    remainder = jquery_raw.partition('[')[2]
    json_text = remainder.partition(']')[0]
  elif jquery_raw.startswith('//'):
    # Comment-prefixed payload: the JSON object starts at the first brace.
    remainder = jquery_raw.partition('{')[2]
    json_text = u'{{{0:s}'.format(remainder)
  elif '({' in jquery_raw:
    # JSONP-style callback: strip the surrounding function call.
    remainder = jquery_raw.partition('(')[2]
    json_text = remainder.rpartition(')')[0]

  if not json_text:
    return {}

  try:
    return json.loads(json_text)
  except ValueError:
    return {}
|
||||
|
||||
|
||||
def ParseChatData(data):
  """Parse a chat comment data dict and return a parsed one back.

  Args:
    data: A dict object that is parsed from the record.

  Returns:
    A dict object to store the results in (keys: text, id, and
    optionally room, sid or user).
  """
  parsed = {}

  if 'body' in data:
    message = data.get('body', '').replace('\n', ' ')
    # Bodies of the form "//<title> {json}" carry structured data:
    # decode the embedded JSON and flatten it into the message text.
    if message.startswith('//') and '{' in message:
      embedded = ExtractJQuery(message)
      heading = message.partition('{')[0]
      message = u'{0:s} <{1!s}>'.format(heading[2:], DictToList(embedded))
  else:
    message = 'No text.'

  parsed['text'] = message

  # Prefer the plural 'rooms' key, falling back to the singular 'room'.
  chat_room = data.get('rooms', None) or data.get('room', None)
  if chat_room:
    parsed['room'] = chat_room

  parsed['id'] = data.get('id', None)

  sender = data.get('user', None)
  if sender:
    # A numeric user is stored as a SID, anything else as a user name.
    try:
      parsed['sid'] = int(sender)
    except (ValueError, TypeError):
      parsed['user'] = sender

  return parsed
|
||||
|
||||
|
||||
class MacKeeperCacheEvent(event.EventObject):
  """Convenience class for a MacKeeper Cache event."""
  DATA_TYPE = 'mackeeper:cache'

  def __init__(self, timestamp, description, identifier, url, data_dict):
    """Initializes the event object.

    Args:
      timestamp: A timestamp as a number of milliseconds since Epoch
          or as a UTC string.
      description: The description of the cache entry.
      identifier: The row identifier.
      url: The MacKeeper URL value that is stored in every event.
      data_dict: A dict object with the descriptive information.
    """
    super(MacKeeperCacheEvent, self).__init__()

    # Two different types of timestamps stored in log files.
    # isinstance() is used instead of comparing type() so that subclasses
    # of the integer types are also handled as numeric (Java) timestamps.
    if isinstance(timestamp, (int, long)):
      self.timestamp = timelib.Timestamp.FromJavaTime(timestamp)
    else:
      self.timestamp = timelib.Timestamp.FromTimeString(timestamp)

    self.timestamp_desc = eventdata.EventTimestamp.ADDED_TIME
    self.description = description
    self.offset = identifier
    self.text = data_dict.get('text', None)
    self.user_sid = data_dict.get('sid', None)
    self.user_name = data_dict.get('user', None)
    # NOTE(review): the producers in this file populate 'entry_type',
    # not 'event_type', so this attribute is always None — possible key
    # mismatch; verify against the formatter before changing.
    self.event_type = data_dict.get('event_type', None)
    self.room = data_dict.get('room', None)
    self.record_id = data_dict.get('id', None)
    self.url = url
|
||||
|
||||
|
||||
class MacKeeperCachePlugin(interface.SQLitePlugin):
  """Plugin for the MacKeeper Cache database file."""

  NAME = 'mackeeper_cache'
  DESCRIPTION = u'Parser for MacKeeper Cache SQLite database files.'

  # Define the needed queries.
  QUERIES = [((
      'SELECT d.entry_ID AS id, d.receiver_data AS data, r.request_key, '
      'r.time_stamp AS time_string FROM cfurl_cache_receiver_data d, '
      'cfurl_cache_response r WHERE r.entry_ID = '
      'd.entry_ID'), 'ParseReceiverData')]

  # The required tables.
  REQUIRED_TABLES = frozenset([
      'cfurl_cache_blob_data', 'cfurl_cache_receiver_data',
      'cfurl_cache_response'])

  def ParseReceiverData(
      self, parser_context, row, file_entry=None, parser_chain=None, query=None,
      **unused_kwargs):
    """Parses a single row from the receiver and cache response table.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
          The default is None.
      parser_chain: Optional string containing the parsing chain up to this
          point. The default is None.
      query: Optional query string. The default is None.
    """
    data = {}
    key_url = row['request_key']

    data_dict = {}
    description = 'MacKeeper Entry'
    # Check the URL, since that contains vital information about the type of
    # event we are dealing with.
    if key_url.endswith('plist'):
      description = 'Configuration Definition'
      data['text'] = 'Plist content added to cache.'
    elif key_url.startswith('http://event.zeobit.com'):
      description = 'MacKeeper Event'
      # The event payload is the URL query string, with parameter
      # separators replaced by spaces for readability.
      # NOTE(review): the except presumably guards against Python 2
      # implicit byte/Unicode conversion errors — confirm.
      try:
        _, _, part = key_url.partition('?')
        data['text'] = part.replace('&', ' ')
      except UnicodeDecodeError:
        data['text'] = 'N/A'
    elif key_url.startswith('http://account.zeobit.com'):
      description = 'Account Activity'
      # The action name follows the URL fragment marker, when present.
      _, _, activity = key_url.partition('#')
      if activity:
        data['text'] = u'Action started: {0:s}'.format(activity)
      else:
        data['text'] = u'Unknown activity.'
    elif key_url.startswith('http://support.') and 'chat' in key_url:
      description = 'Chat '
      try:
        jquery = unicode(row['data'])
      except UnicodeDecodeError:
        jquery = ''

      # Decode the JSONP payload and turn it into event attributes;
      # note this replaces the data dict built so far.
      data_dict = ExtractJQuery(jquery)
      data = ParseChatData(data_dict)

      data['entry_type'] = data_dict.get('type', '')
      if data['entry_type'] == 'comment':
        description += 'Comment'
      elif data['entry_type'] == 'outgoing':
        description += 'Outgoing Message'
      elif data['entry_type'] == 'incoming':
        description += 'Incoming Message'
      else:
        # Empty or not known entry type, generic status message.
        description += 'Entry'
        data['text'] = u';'.join(DictToList(data_dict))
        if not data['text']:
          data['text'] = 'No additional data.'

    event_object = MacKeeperCacheEvent(
        row['time_string'], description, row['id'], key_url, data)
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)


sqlite.SQLiteParser.RegisterPlugin(MacKeeperCachePlugin)
|
||||
@@ -0,0 +1,68 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the MacKeeper Cache database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import mackeeper_cache as mackeeper_cache_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import mackeeper_cache
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class MacKeeperCachePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the MacKeeper Cache database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = mackeeper_cache.MacKeeperCachePlugin()

  def testProcess(self):
    """Tests the Process function on a MacKeeper Cache database file."""
    test_file = self._GetTestFilePath(['mackeeper_cache.db'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The cache file contains 198 entries.
    # Note: assertEqual is used instead of the deprecated assertEquals alias.
    self.assertEqual(len(event_objects), 198)

    event_object = event_objects[41]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-12 19:30:31')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_msg = (
        u'Chat Outgoing Message : I have received your system scan report and '
        u'I will start analyzing it right now. [ URL: http://support.kromtech.'
        u'net/chat/listen/12828340738351e0593f987450z40787/?client-id=51e0593f'
        u'a1a24468673655&callback=jQuery183013571173651143909_1373657420912&_='
        u'1373657423647 Event ID: 16059074 Room: '
        u'12828340738351e0593f987450z40787 ]')

    expected_short = (
        u'I have received your system scan report and I will start analyzing '
        u'it right now.')

    self._TestGetMessageStrings(event_object, expected_msg, expected_short)


if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,492 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""This file contains a basic Skype SQLite parser."""
|
||||
|
||||
import logging
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
__author__ = 'Joaquin Moreno Garijo (bastionado@gmail.com)'
|
||||
|
||||
|
||||
class SkypeChatEvent(time_events.PosixTimeEvent):
  """Convenience class for a Skype chat message event."""

  DATA_TYPE = 'skype:event:chat'

  def __init__(self, row, to_account):
    """Build a Skype Event from a single row.

    Args:
      row: A row object (instance of sqlite3.Row) that contains the
          extracted data from a single row in the database.
      to_account: A string containing the accounts (excluding the
          author) of the conversation.
    """
    super(SkypeChatEvent, self).__init__(
        row['timestamp'], 'Chat from Skype', self.DATA_TYPE)

    # Render the sender as "Display Name <account>".
    sender = u'{0:s} <{1:s}>'.format(row['from_displayname'], row['author'])
    self.from_account = sender
    self.to_account = to_account
    self.title = row['title']
    self.text = row['body_xml']
|
||||
|
||||
|
||||
class SkypeAccountEvent(time_events.PosixTimeEvent):
  """Convenience class for account information."""

  DATA_TYPE = 'skype:event:account'

  def __init__(
      self, timestamp, usage, identifier, full_name, display_name, email,
      country):
    """Initialize the event.

    Args:
      timestamp: The POSIX timestamp value.
      usage: A string containing the description string of the timestamp.
      identifier: The row identifier.
      full_name: A string containing the full name of the Skype account
          holder.
      display_name: A string containing the chosen display name of the
          account holder.
      email: A string containing the registered email address of the
          account holder.
      country: A string containing the chosen home country of the account
          holder.
    """
    super(SkypeAccountEvent, self).__init__(timestamp, usage)

    # Render the username as "Full Name <display name>".
    self.username = u'{0:s} <{1:s}>'.format(full_name, display_name)
    self.data_type = self.DATA_TYPE
    self.offset = identifier
    self.country = country
    self.display_name = display_name
    self.email = email
|
||||
|
||||
|
||||
class SkypeSMSEvent(time_events.PosixTimeEvent):
  """Convenience EventObject for SMS."""

  DATA_TYPE = 'skype:event:sms'

  def __init__(self, row, dst_number):
    """Read the information related with the SMS.

    Args:
      row: row from the SQL query with the following fields:
          row['time_sms']: timestamp when the SMS was sent.
          row['dstnum_sms']: number which received the SMS.
          row['msg_sms']: text sent in this SMS.
      dst_number: phone number to which the user sent the SMS.
    """
    super(SkypeSMSEvent, self).__init__(
        row['time_sms'], 'SMS from Skype', self.DATA_TYPE)

    self.text = row['msg_sms']
    self.number = dst_number
|
||||
|
||||
|
||||
class SkypeCallEvent(time_events.PosixTimeEvent):
  """Convenience EventObject for the calls."""

  DATA_TYPE = 'skype:event:call'

  def __init__(self, timestamp, call_type, user_start_call,
               source, destination, video_conference):
    """Contains information if the call was cancelled, accepted or finished.

    Args:
      timestamp: the timestamp of the event.
      call_type: WAITING, STARTED, FINISHED.
      user_start_call: boolean, True indicates that the owner
          account started the call.
      source: the account which started the call.
      destination: the account which received the call.
      video_conference: boolean, True if it was a video conference.
    """
    super(SkypeCallEvent, self).__init__(
        timestamp, 'Call from Skype', self.DATA_TYPE)

    self.video_conference = video_conference
    self.call_type = call_type
    self.user_start_call = user_start_call
    self.src_call = source
    self.dst_call = destination
|
||||
|
||||
|
||||
class SkypeTransferFileEvent(time_events.PosixTimeEvent):
  """Evaluate the action of sending a file."""

  DATA_TYPE = 'skype:event:transferfile'

  def __init__(self, row, timestamp, action_type, source, destination):
    """Actions related with sending files.

    Args:
      row: row from the SQL query with the following fields:
          row['id']: the row identifier.
          row['filepath']: path of the file.
          row['filename']: name of the file.
          row['filesize']: size of the file.
      timestamp: when the action happens.
      action_type: GETSOLICITUDE, SENDSOLICITUDE, ACCEPTED, FINISHED.
      source: The account that sent the file.
      destination: The account that received the file.
    """

    super(SkypeTransferFileEvent, self).__init__(
        timestamp, 'File transfer from Skype', self.DATA_TYPE)

    self.offset = row['id']
    self.action_type = action_type
    self.source = source
    self.destination = destination
    self.transferred_filepath = row['filepath']
    self.transferred_filename = row['filename']
    try:
      self.transferred_filesize = int(row['filesize'])
    except (ValueError, TypeError):
      # TypeError is caught as well: a NULL filesize column yields None,
      # which int() rejects with TypeError rather than ValueError.
      logging.debug(u'Unknown filesize {0:s}'.format(
          self.transferred_filename))
      self.transferred_filesize = 0
|
||||
|
||||
|
||||
class SkypePlugin(interface.SQLitePlugin):
  """SQLite plugin for the Skype main.db SQLite database file."""

  NAME = 'skype'
  DESCRIPTION = u'Parser for Skype SQLite database files.'

  # Queries for building cache.
  QUERY_DEST_FROM_TRANSFER = (
      u'SELECT parent_id, partner_handle AS skypeid, '
      u'partner_dispname AS skypename FROM transfers')
  QUERY_SOURCE_FROM_TRANSFER = (
      u'SELECT pk_id, partner_handle AS skypeid, '
      u'partner_dispname AS skypename FROM transfers')

  # Define the needed queries: each pair maps a query to the name of the
  # callback method that parses its resulting rows.
  QUERIES = [
      (('SELECT c.id, c.participants, c.friendlyname AS title, '
        'm.author AS author, m.from_dispname AS from_displayname, '
        'm.body_xml, m.timestamp, c.dialog_partner FROM Chats c, Messages m '
        'WHERE c.name = m.chatname'), 'ParseChat'),
      (('SELECT id, fullname, given_displayname, emails, '
        'country, profile_timestamp, authreq_timestamp, '
        'lastonline_timestamp, mood_timestamp, sent_authrequest_time, '
        'lastused_timestamp FROM Accounts'), 'ParseAccountInformation'),
      (('SELECT id, target_numbers AS dstnum_sms, timestamp AS time_sms, '
        'body AS msg_sms FROM SMSes'), 'ParseSMS'),
      (('SELECT id, partner_handle, partner_dispname, offer_send_list, '
        'starttime, accepttime, finishtime, filepath, filename, filesize, '
        'status, parent_id, pk_id FROM Transfers'), 'ParseFileTransfer'),
      (('SELECT c.id, cm.guid, c.is_incoming, '
        'cm.call_db_id, cm.videostatus, c.begin_timestamp AS try_call, '
        'cm.start_timestamp AS accept_call, cm.call_duration '
        'FROM Calls c, CallMembers cm '
        'WHERE c.id = cm.call_db_id;'), 'ParseCall')]

  # The required tables.
  REQUIRED_TABLES = frozenset([
      'Chats', 'Accounts', 'Conversations', 'Contacts', 'SMSes', 'Transfers',
      'CallMembers', 'Calls'])
|
||||
|
||||
def ParseAccountInformation(
    self, parser_context, row, file_entry=None, parser_chain=None, query=None,
    **unused_kwargs):
  """Parses the Accounts database.

  Produces one SkypeAccountEvent per non-empty timestamp column in the row.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    row: The row resulting from the query.
    file_entry: Optional file entry object (instance of dfvfs.FileEntry).
        The default is None.
    parser_chain: Optional string containing the parsing chain up to this
        point. The default is None.
    query: Optional query string. The default is None.
  """
  # Map each timestamp column to the usage description of the event it
  # produces; this replaces six near-identical copy-pasted blocks.
  timestamp_usage_pairs = [
      ('profile_timestamp', u'Profile Changed'),
      ('authreq_timestamp', u'Authenticate Request'),
      ('lastonline_timestamp', u'Last Online'),
      ('mood_timestamp', u'Mood Event'),
      ('sent_authrequest_time', u'Auth Request Sent'),
      ('lastused_timestamp', u'Last Used')]

  for column_name, usage in timestamp_usage_pairs:
    if not row[column_name]:
      continue
    event_object = SkypeAccountEvent(
        row[column_name], usage, row['id'], row['fullname'],
        row['given_displayname'], row['emails'], row['country'])
    parser_context.ProduceEvent(
        event_object, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
def ParseChat(
    self, parser_context, row, file_entry=None, parser_chain=None, query=None,
    **unused_kwargs):
  """Parses a chat message row.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    row: The row resulting from the query.
    file_entry: Optional file entry object (instance of dfvfs.FileEntry).
        The default is None.
    parser_chain: Optional string containing the parsing chain up to this
        point. The default is None.
    query: Optional query string. The default is None.
  """
  # Everybody in the chat except the author counts as a recipient.
  recipients = [
      participant for participant in row['participants'].split(' ')
      if participant != row['author']]
  to_account = u', '.join(recipients)

  # Fall back to the dialog partner, or a placeholder, when the
  # participants column yields no recipients.
  if not to_account:
    to_account = row['dialog_partner'] or u'Unknown User'

  event_object = SkypeChatEvent(row, to_account)
  parser_context.ProduceEvent(
      event_object, query=query, parser_chain=parser_chain,
      file_entry=file_entry)
|
||||
|
||||
def ParseSMS(
    self, parser_context, row, file_entry=None, parser_chain=None, query=None,
    **unused_kwargs):
  """Parse SMS.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    row: The row resulting from the query.
    file_entry: Optional file entry object (instance of dfvfs.FileEntry).
        The default is None.
    parser_chain: Optional string containing the parsing chain up to this
        point. The default is None.
    query: Optional query string. The default is None.
  """
  # Remove blanks so the destination number is stored without spaces.
  phone_number = row['dstnum_sms'].replace(' ', '')

  sms_event = SkypeSMSEvent(row, phone_number)
  parser_context.ProduceEvent(
      sms_event, query=query, parser_chain=parser_chain,
      file_entry=file_entry)
|
||||
|
||||
def ParseCall(
    self, parser_context, row, file_entry=None, parser_chain=None, query=None,
    **unused_kwargs):
  """Parses a call row, producing up to three call events.

  A single row describes the life cycle of one call: a WAITING event is
  always produced and, when the row carries the corresponding values,
  ACCEPTED and FINISHED events are produced as well.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    row: The row resulting from the query.
    file_entry: Optional file entry object (instance of dfvfs.FileEntry).
      The default is None.
    parser_chain: Optional string containing the parsing chain up to this
      point. The default is None.
    query: Optional query string. The default is None.
  """
  # The GUID is expected to contain at least "<source>-<destination>";
  # a GUID with too few dashes raises IndexError, which is mapped to an
  # "Unknown" label that still exposes the raw GUID value.
  try:
    guid = row['guid']
    if guid:
      guid_parts = guid.split('-')
      src_aux = guid_parts[0]
      dst_aux = guid_parts[1]
    else:
      src_aux = u'Unknown [no GUID]'
      dst_aux = u'Unknown [no GUID]'
  except IndexError:
    src_aux = u'Unknown [{0:s}]'.format(row['guid'])
    dst_aux = u'Unknown [{0:s}]'.format(row['guid'])

  # Note: these columns are compared as strings, not integers, matching
  # how the values are stored in the database.
  user_start_call = row['is_incoming'] == '0'
  video_conference = row['videostatus'] == '3'

  source = src_aux
  if user_start_call and row['ip_address']:
    destination = u'{0:s} <{1:s}>'.format(dst_aux, row['ip_address'])
  else:
    destination = dst_aux

  parser_context.ProduceEvent(
      SkypeCallEvent(
          row['try_call'], 'WAITING', user_start_call, source, destination,
          video_conference),
      query=query, parser_chain=parser_chain, file_entry=file_entry)

  if row['accept_call']:
    parser_context.ProduceEvent(
        SkypeCallEvent(
            row['accept_call'], 'ACCEPTED', user_start_call, source,
            destination, video_conference),
        query=query, parser_chain=parser_chain, file_entry=file_entry)

    if row['call_duration']:
      try:
        # The end of the call is the moment it was accepted plus its
        # duration; both values are stored as strings.
        end_timestamp = int(row['accept_call']) + int(row['call_duration'])
        parser_context.ProduceEvent(
            SkypeCallEvent(
                end_timestamp, 'FINISHED', user_start_call, source,
                destination, video_conference),
            query=query, parser_chain=parser_chain, file_entry=file_entry)

      except ValueError:
        logging.debug((
            u'[{0:s}] Unable to determine when the call {1:s} was '
            u'finished.').format(self.NAME, row['id']))
|
||||
|
||||
def ParseFileTransfer(
    self, parser_context, row, file_entry=None, parser_chain=None, cache=None,
    database=None, query=None, **unused_kwargs):
  """Parses a file transfer row.

  There is no direct relationship between who sends the file and who
  accepts the file, hence two cached lookup tables are built with
  additional queries and consulted by row identifier.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    row: the row with all information related with the file transfers.
    file_entry: Optional file entry object (instance of dfvfs.FileEntry).
      The default is None.
    parser_chain: Optional string containing the parsing chain up to this
      point. The default is None.
    cache: a cache object (instance of SQLiteCache).
    database: A database object (instance of SQLiteDatabase).
    query: Optional query string. The default is None.
  """
  def _GetCachedLookup(result_key, sql_query, key_column):
    """Returns the cached (skypeid, skypename) lookup, priming it once."""
    lookup = cache.GetResults(result_key)
    if not lookup:
      results = database.cursor.execute(sql_query)
      cache.CacheQueryResults(
          results, result_key, key_column, ('skypeid', 'skypename'))
      lookup = cache.GetResults(result_key)
    return lookup

  source_dict = _GetCachedLookup(
      'source', self.QUERY_SOURCE_FROM_TRANSFER, 'pk_id')
  dest_dict = _GetCachedLookup(
      'destination', self.QUERY_DEST_FROM_TRANSFER, 'parent_id')

  source = u'Unknown'
  destination = u'Unknown'

  if row['parent_id']:
    destination = u'{0:s} <{1:s}>'.format(
        row['partner_handle'], row['partner_dispname'])
    skype_id, skype_name = source_dict.get(row['parent_id'], [None, None])
    if skype_name:
      source = u'{0:s} <{1:s}>'.format(skype_id, skype_name)
  else:
    source = u'{0:s} <{1:s}>'.format(
        row['partner_handle'], row['partner_dispname'])

    if row['pk_id']:
      skype_id, skype_name = dest_dict.get(row['pk_id'], [None, None])
      if skype_name:
        destination = u'{0:s} <{1:s}>'.format(skype_id, skype_name)

  # A status 8 row can yield GETSOLICITUDE, ACCEPTED and FINISHED events,
  # one per available timestamp; a status 2 row yields a SENDSOLICITUDE
  # event when a start time is present.
  if row['status'] == 8:
    for time_column, action in (
        ('starttime', 'GETSOLICITUDE'),
        ('accepttime', 'ACCEPTED'),
        ('finishtime', 'FINISHED')):
      if row[time_column]:
        parser_context.ProduceEvent(
            SkypeTransferFileEvent(
                row, row[time_column], action, source, destination),
            query=query, parser_chain=parser_chain, file_entry=file_entry)

  elif row['status'] == 2 and row['starttime']:
    parser_context.ProduceEvent(
        SkypeTransferFileEvent(
            row, row['starttime'], 'SENDSOLICITUDE', source, destination),
        query=query, parser_chain=parser_chain, file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the Skype plugin with the SQLite parser.
sqlite.SQLiteParser.RegisterPlugin(SkypePlugin)
|
||||
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Skype main.db history database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import skype as skype_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import skype
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
|
||||
|
||||
class SkypePluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Skype main.db history database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = skype.SkypePlugin()

  def testProcess(self):
    """Tests the Process function on a Skype History database file.

    The History file contains 24 events:
      4 call events
      4 transfers file events
      1 sms events
      15 chat events

    Events used:
      id = 16 -> SMS
      id = 22 -> Call
      id = 18 -> File
      id = 1 -> Chat
      id = 14 -> ChatRoom
    """
    test_file = self._GetTestFilePath(['skype_main.db'])
    cache = sqlite.SQLiteCache()
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file, cache)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    calls = 0
    files = 0
    sms = 0
    chats = 0
    for event_object in event_objects:
      # An event object has exactly one data type, hence elif.
      if event_object.data_type == 'skype:event:call':
        calls += 1
      elif event_object.data_type == 'skype:event:transferfile':
        files += 1
      elif event_object.data_type == 'skype:event:sms':
        sms += 1
      elif event_object.data_type == 'skype:event:chat':
        chats += 1

    self.assertEqual(len(event_objects), 24)
    self.assertEqual(files, 4)
    self.assertEqual(sms, 1)
    self.assertEqual(chats, 15)
    # NOTE(review): the docstring above claims 4 call events but only 3 are
    # asserted here (3 + 4 + 1 + 15 = 23 != 24) — confirm against the
    # skype_main.db test fixture which value is correct.
    self.assertEqual(calls, 3)

    # TODO: Split this up into separate functions for testing each type of
    # event, eg: testSMS, etc.
    sms_event_object = event_objects[16]
    call_event_object = event_objects[22]
    event_file = event_objects[18]
    chat_event_object = event_objects[1]
    chat_room_event_object = event_objects[14]

    # Test cache processing and format strings.
    expected_msg = (
        u'Source: gen.beringer <Gen Beringer> Destination: '
        u'european.bbq.competitor <European BBQ> File: secret-project.pdf '
        u'[SENDSOLICITUDE]')

    self._TestGetMessageStrings(
        event_objects[17], expected_msg, expected_msg[0:77] + '...')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-01 22:14:22')
    self.assertEqual(sms_event_object.timestamp, expected_timestamp)
    text_sms = (u'If you want I can copy '
                u'some documents for you, '
                u'if you can pay it... ;)')
    self.assertEqual(sms_event_object.text, text_sms)
    number = u'+34123456789'
    self.assertEqual(sms_event_object.number, number)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-10-24 21:49:35')
    self.assertEqual(event_file.timestamp, expected_timestamp)

    action_type = u'GETSOLICITUDE'
    self.assertEqual(event_file.action_type, action_type)
    source = u'gen.beringer <Gen Beringer>'
    self.assertEqual(event_file.source, source)
    destination = u'european.bbq.competitor <European BBQ>'
    self.assertEqual(event_file.destination, destination)
    transferred_filename = u'secret-project.pdf'
    self.assertEqual(event_file.transferred_filename, transferred_filename)
    filepath = u'/Users/gberinger/Desktop/secret-project.pdf'
    self.assertEqual(event_file.transferred_filepath, filepath)
    self.assertEqual(event_file.transferred_filesize, 69986)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-30 21:27:11')
    self.assertEqual(chat_event_object.timestamp, expected_timestamp)

    title = u'European Competitor | need to know if you got it..'
    self.assertEqual(chat_event_object.title, title)
    expected_msg = u'need to know if you got it this time.'
    self.assertEqual(chat_event_object.text, expected_msg)
    from_account = u'Gen Beringer <gen.beringer>'
    self.assertEqual(chat_event_object.from_account, from_account)
    self.assertEqual(chat_event_object.to_account, u'european.bbq.competitor')

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-10-27 15:29:19')
    self.assertEqual(chat_room_event_object.timestamp, expected_timestamp)

    title = u'European Competitor, Echo123'
    self.assertEqual(chat_room_event_object.title, title)
    expected_msg = u'He is our new employee'
    self.assertEqual(chat_room_event_object.text, expected_msg)
    from_account = u'European Competitor <european.bbq.competitor>'
    self.assertEqual(chat_room_event_object.from_account, from_account)
    to_account = u'gen.beringer, echo123'
    self.assertEqual(chat_room_event_object.to_account, to_account)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-01 22:12:17')
    self.assertEqual(call_event_object.timestamp, expected_timestamp)

    self.assertEqual(call_event_object.dst_call, u'european.bbq.competitor')
    self.assertEqual(call_event_object.src_call, u'gen.beringer')
    self.assertEqual(call_event_object.user_start_call, False)
    self.assertEqual(call_event_object.video_conference, False)
||||
|
||||
|
||||
# Run the tests when invoked as a script.
if __name__ == '__main__':
  unittest.main()
|
||||
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License');
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""SQLite database plugin related functions and classes for testing."""
|
||||
|
||||
from dfvfs.lib import definitions
|
||||
from dfvfs.path import factory as path_spec_factory
|
||||
from dfvfs.resolver import resolver as path_spec_resolver
|
||||
|
||||
from plaso.engine import single_process
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers import test_lib
|
||||
|
||||
|
||||
class SQLitePluginTestCase(test_lib.ParserTestCase):
  """The unit test case for SQLite database plugins."""

  def _ParseDatabaseFileWithPlugin(
      self, plugin_object, path, cache=None, knowledge_base_values=None):
    """Parses a file as a SQLite database with a specific plugin.

    Args:
      plugin_object: The plugin object that is used to extract an event
        generator.
      path: The path to the SQLite database file.
      cache: A cache object (instance of SQLiteCache).
      knowledge_base_values: optional dict containing the knowledge base
        values. The default is None.

    Returns:
      An event object queue consumer object (instance of
      TestEventObjectQueueConsumer).
    """
    # Queues receiving the extracted events and any parse errors.
    event_queue = single_process.SingleProcessQueue()
    parse_error_queue = single_process.SingleProcessQueue()
    queue_consumer = test_lib.TestEventObjectQueueConsumer(event_queue)

    context = self._GetParserContext(
        event_queue, parse_error_queue,
        knowledge_base_values=knowledge_base_values)

    # Open the database file via dfVFS on the operating system.
    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=path)
    database_file_entry = path_spec_resolver.Resolver.OpenFileEntry(
        os_path_spec)

    with sqlite.SQLiteDatabase(database_file_entry) as database:
      plugin_object.Process(context, cache=cache, database=database)

    return queue_consumer
|
||||
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Plugin for the Zeitgeist SQLite database.
|
||||
|
||||
Zeitgeist is a service which logs the user activities and events, anywhere
|
||||
from files opened to websites visited and conversations.
|
||||
"""
|
||||
|
||||
from plaso.events import time_events
|
||||
from plaso.lib import eventdata
|
||||
from plaso.parsers import sqlite
|
||||
from plaso.parsers.sqlite_plugins import interface
|
||||
|
||||
|
||||
class ZeitgeistEvent(time_events.JavaTimeEvent):
  """Convenience class for a Zeitgeist activity event."""

  DATA_TYPE = 'zeitgeist:activity'

  def __init__(self, java_time, row_id, subject_uri):
    """Initializes the event object.

    Args:
      java_time: The Java time value.
      row_id: The identifier of the corresponding row.
      subject_uri: The Zeitgeist event.
    """
    # The row gives no timestamp usage, so it is recorded as UNKNOWN.
    super(ZeitgeistEvent, self).__init__(
        java_time, eventdata.EventTimestamp.UNKNOWN)

    self.subject_uri = subject_uri
    self.offset = row_id
|
||||
|
||||
|
||||
class ZeitgeistPlugin(interface.SQLitePlugin):
  """SQLite plugin for Zeitgeist activity database."""

  NAME = 'zeitgeist'
  DESCRIPTION = u'Parser for Zeitgeist activity SQLite database files.'

  # TODO: Explore the database more and make this parser cover new findings.
  QUERIES = [
      ('SELECT id, timestamp, subj_uri FROM event_view',
       'ParseZeitgeistEventRow')]

  REQUIRED_TABLES = frozenset(['event', 'actor'])

  def ParseZeitgeistEventRow(
      self, parser_context, row, file_entry=None, parser_chain=None,
      query=None, **unused_kwargs):
    """Parses a Zeitgeist event row.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      row: The row resulting from the query.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
        The default is None.
      parser_chain: Optional string containing the parsing chain up to this
        point. The default is None.
      query: Optional query string. The default is None.
    """
    zeitgeist_event = ZeitgeistEvent(
        row['timestamp'], row['id'], row['subj_uri'])
    parser_context.ProduceEvent(
        zeitgeist_event, query=query, parser_chain=parser_chain,
        file_entry=file_entry)
|
||||
|
||||
|
||||
# Register the Zeitgeist plugin with the SQLite parser.
sqlite.SQLiteParser.RegisterPlugin(ZeitgeistPlugin)
|
||||
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 The Plaso Project Authors.
|
||||
# Please see the AUTHORS file for details on individual authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Tests for the Zeitgeist activity database plugin."""
|
||||
|
||||
import unittest
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from plaso.formatters import zeitgeist as zeitgeist_formatter
|
||||
from plaso.lib import timelib_test
|
||||
from plaso.parsers.sqlite_plugins import test_lib
|
||||
from plaso.parsers.sqlite_plugins import zeitgeist
|
||||
|
||||
|
||||
class ZeitgeistPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Zeitgeist activity database plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = zeitgeist.ZeitgeistPlugin()

  def testProcess(self):
    """Tests the Process function on a Zeitgeist activity.sqlite file."""
    test_file = self._GetTestFilePath(['activity.sqlite'])
    event_queue_consumer = self._ParseDatabaseFileWithPlugin(
        self._plugin, test_file)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

    # The sqlite database contains 44 events.
    self.assertEqual(len(event_objects), 44)

    # Check the first event.
    event_object = event_objects[0]

    expected_subject_uri = u'application://rhythmbox.desktop'
    self.assertEqual(event_object.subject_uri, expected_subject_uri)

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-10-22 08:53:19.477')
    self.assertEqual(event_object.timestamp, expected_timestamp)

    expected_msg = u'application://rhythmbox.desktop'
    self._TestGetMessageStrings(event_object, expected_msg, expected_msg)
|
||||
|
||||
|
||||
# Run the tests when invoked as a script.
if __name__ == '__main__':
  unittest.main()
|
||||
Reference in New Issue
Block a user