Import from old repository

This commit is contained in:
Stefan
2020-04-06 18:48:34 +02:00
commit 0da6783a45
762 changed files with 103065 additions and 0 deletions
+2789
View File
File diff suppressed because it is too large Load Diff
+387
View File
@@ -0,0 +1,387 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to check for the availability and version of dependencies."""
import re
import urllib2
def DownloadPageContent(download_url):
  """Downloads the content of the page at the given URL.

  Args:
    download_url: the URL where to download the page content.

  Returns:
    The page content if successful, None otherwise.
  """
  if download_url:
    url_object = urllib2.urlopen(download_url)
    if url_object.code == 200:
      return url_object.read()
  return None
def GetLibyalGithubReleasesLatestVersion(library_name):
  """Retrieves the latest version number of a libyal library on GitHub releases.

  Args:
    library_name: the name of the libyal library.

  Returns:
    The latest version for a given libyal library on GitHub releases
    or 0 on error.
  """
  releases_url = u'https://github.com/libyal/{0:s}/releases'.format(
      library_name)
  page_content = DownloadPageContent(releases_url)
  if not page_content:
    return 0

  # Download URLs on the releases page have the form:
  # /libyal/{project name}/releases/download/{git tag}/
  # {project name}{status-}{version}.tar.gz
  # where the optional status is: beta, alpha or experimental.
  version_pattern = (
      u'/libyal/{0:s}/releases/download/[^/]*/{0:s}-[a-z-]*([0-9]+)'
      u'[.]tar[.]gz').format(library_name)
  version_matches = re.findall(version_pattern, page_content)

  if version_matches:
    return int(max(version_matches))
  return 0
# TODO: Remove when Google Drive support is no longer needed.
def GetLibyalGoogleDriveLatestVersion(library_name):
  """Retrieves the latest version number of a libyal library on Google Drive.

  Args:
    library_name: the name of the libyal library.

  Returns:
    The latest version for a given libyal library on Google Drive
    or 0 on error.
  """
  project_url = 'https://code.google.com/p/{0:s}/'.format(library_name)
  page_content = DownloadPageContent(project_url)
  if not page_content:
    return 0

  # The library downloads URL has the form:
  # https://googledrive.com/host/{random string}/
  downloads_matches = re.findall(
      '<a href="(https://googledrive.com/host/[^/]*/)"[^>]*>Downloads</a>',
      page_content)
  # Exactly one Downloads link is expected.
  if len(downloads_matches) != 1:
    return 0

  page_content = DownloadPageContent(downloads_matches[0])
  if not page_content:
    return 0

  # The library download URL has the form:
  # /host/{random string}/{library name}-{status-}{version}.tar.gz
  # where the optional status is: beta, alpha or experimental.
  version_pattern = '/host/[^/]*/{0:s}-[a-z-]*([0-9]+)[.]tar[.]gz'.format(
      library_name)
  version_matches = re.findall(version_pattern, page_content)

  if version_matches:
    return int(max(version_matches))
  return 0
def CheckLibyal(libyal_python_modules):
  """Checks the availability of libyal libraries.

  Args:
    libyal_python_modules: list of libyal python module names. Each entry is
        a tuple of the module name and the minimum required version number,
        or None when any version is acceptable.

  Returns:
    True if the libyal libraries are available, false otherwise.
  """
  connection_error = False
  result = True
  for module_name, module_version in libyal_python_modules:
    try:
      # __import__ via map returns the imported module object.
      module_object = map(__import__, [module_name])[0]
      module_loaded = True
    except ImportError:
      print u'[FAILURE]\tmissing: {0:s}.'.format(module_name)
      module_loaded = False
      result = False
    if module_loaded:
      # Derive the library name from the module name, e.g. pyevt -> libevt.
      libyal_name = u'lib{0:s}'.format(module_name[2:])
      installed_version = int(module_object.get_version())
      try:
        latest_version = GetLibyalGithubReleasesLatestVersion(libyal_name)
      except urllib2.URLError:
        latest_version = 0
      if not latest_version:
        # Fall back to Google Drive when GitHub yields no version.
        try:
          latest_version = GetLibyalGoogleDriveLatestVersion(libyal_name)
        except urllib2.URLError:
          latest_version = 0
      if not latest_version:
        print (
            u'Unable to determine latest version of {0:s} ({1:s}).\n').format(
                libyal_name, module_name)
        latest_version = None
        connection_error = True
      # A version that is too old is a failure; a newer available version is
      # only informational.
      if module_version is not None and installed_version < module_version:
        print (
            u'[FAILURE]\t{0:s} ({1:s}) version: {2:d} is too old, {3:d} or '
            u'later required.').format(
                libyal_name, module_name, installed_version, module_version)
        result = False
      elif latest_version and installed_version != latest_version:
        print (
            u'[INFO]\t\t{0:s} ({1:s}) version: {2:d} installed, '
            u'version: {3:d} available.').format(
                libyal_name, module_name, installed_version, latest_version)
      else:
        print u'[OK]\t\t{0:s} ({1:s}) version: {2:d}'.format(
            libyal_name, module_name, installed_version)
  if connection_error:
    print (
        u'[INFO] to check for the latest versions this script needs Internet '
        u'access.')
  return result
def CheckPythonModule(
    module_name, version_attribute_name, minimum_version,
    maximum_version=None):
  """Checks the availability of a Python module.

  Args:
    module_name: the name of the module.
    version_attribute_name: the name of the attribute that contains the module
                            version. Pass an empty string to skip the version
                            check.
    minimum_version: the minimum required version.
    maximum_version: the maximum required version. This attribute is optional
                     and should only be used if there is a recent API change
                     that prevents the tool from running if a later version
                     is used.

  Returns:
    True if the Python module is available and conforms to the minimum required
    version. False otherwise.
  """
  try:
    # Note: for dotted names such as 'dateutil.parser' __import__ returns the
    # top-level package.
    module_object = map(__import__, [module_name])[0]
  except ImportError:
    print u'[FAILURE]\tmissing: {0:s}.'.format(module_name)
    return False
  if version_attribute_name and minimum_version:
    module_version = getattr(module_object, version_attribute_name, None)
    if not module_version:
      return False
    # Split the version string and convert every digit into an integer.
    # A string compare of both version strings will yield an incorrect result.
    module_version_map = map(int, module_version.split('.'))
    minimum_version_map = map(int, minimum_version.split('.'))
    if module_version_map < minimum_version_map:
      print (
          u'[FAILURE]\t{0:s} version: {1:s} is too old, {2:s} or later '
          u'required.').format(module_name, module_version, minimum_version)
      return False
    if maximum_version:
      maximum_version_map = map(int, maximum_version.split('.'))
      if module_version_map > maximum_version_map:
        print (
            u'[FAILURE]\t{0:s} version: {1:s} is too recent, {2:s} or earlier '
            u'required.').format(module_name, module_version, maximum_version)
        return False
    print u'[OK]\t\t{0:s} version: {1:s}'.format(module_name, module_version)
  else:
    print u'[OK]\t\t{0:s}'.format(module_name)
  return True
def CheckPytsk():
  """Checks the availability of pytsk3.

  Checks both the bundled SleuthKit version (TSK_VERSION_STR) and the pytsk3
  binding version (get_version()).

  Returns:
    True if the pytsk3 Python module is available, false otherwise.
  """
  module_name = 'pytsk3'
  try:
    module_object = map(__import__, [module_name])[0]
  except ImportError:
    print u'[FAILURE]\tmissing: {0:s}.'.format(module_name)
    return False
  minimum_version = '4.1.2'
  module_version = module_object.TSK_VERSION_STR
  # Split the version string and convert every digit into an integer.
  # A string compare of both version strings will yield an incorrect result.
  module_version_map = map(int, module_version.split('.'))
  minimum_version_map = map(int, minimum_version.split('.'))
  if module_version_map < minimum_version_map:
    print (
        u'[FAILURE]\tSleuthKit version: {0:s} is too old, {1:s} or later '
        u'required.').format(module_version, minimum_version)
    return False
  print u'[OK]\t\tSleuthKit version: {0:s}'.format(module_version)
  # The pytsk3 version is a fixed-width date string (YYYYMMDD), so a plain
  # string compare orders correctly here.
  minimum_version = '20140506'
  if not hasattr(module_object, 'get_version'):
    print u'[FAILURE]\t{0:s} is too old, {1:s} or later required.'.format(
        module_name, minimum_version)
    return False
  module_version = module_object.get_version()
  if module_version < minimum_version:
    print (
        u'[FAILURE]\t{0:s} version: {1:s} is too old, {2:s} or later '
        u'required.').format(module_name, module_version, minimum_version)
    return False
  print u'[OK]\t\t{0:s} version: {1:s}'.format(module_name, module_version)
  return True
if __name__ == '__main__':
  check_result = True
  print u'Checking availability and versions of plaso dependencies.'
  # The bencode module does not appear to have version information.
  if not CheckPythonModule('bencode', '', ''):
    check_result = False
  if not CheckPythonModule('binplist', '__version__', '0.1.4'):
    check_result = False
  if not CheckPythonModule('construct', '__version__', '2.5.2'):
    check_result = False
  if not CheckPythonModule('dateutil.parser', '', ''):
    check_result = False
  if not CheckPythonModule('dfvfs', '__version__', '20141220'):
    check_result = False
  if not CheckPythonModule('dpkt', '__version__', '1.8'):
    check_result = False
  # The protobuf module does not appear to have version information.
  if not CheckPythonModule('google.protobuf', '', ''):
    check_result = False
  if not CheckPythonModule('hachoir_core', '__version__', '1.3.3'):
    check_result = False
  if not CheckPythonModule('hachoir_parser', '__version__', '1.3.4'):
    check_result = False
  if not CheckPythonModule('hachoir_metadata', '__version__', '1.3.3'):
    check_result = False
  if not CheckPythonModule('IPython', '__version__', '1.2.1'):
    check_result = False
  if not CheckPythonModule('yaml', '__version__', '3.10'):
    check_result = False
  if not CheckPythonModule('psutil', '__version__', '1.2.1'):
    check_result = False
  if not CheckPythonModule('pyparsing', '__version__', '2.0.2'):
    check_result = False
  # TODO: determine the version of pytz.
  # pytz uses __version__ but has a different version indicator e.g. 2012d
  if not CheckPythonModule('pytz', '', ''):
    check_result = False
  if not CheckPythonModule('six', '__version__', '1.1.0'):
    check_result = False
  if not CheckPythonModule('sqlite3', 'sqlite_version', '3.7.8'):
    check_result = False
  # pytsk3 needs a dedicated check for both SleuthKit and binding versions.
  if not CheckPytsk():
    check_result = False
  # libyal module versions are date-based integers; None means any version.
  libyal_check_result = CheckLibyal([
      ('pybde', 20140531),
      ('pyesedb', 20140301),
      ('pyevt', None),
      ('pyevtx', 20141112),
      ('pyewf', 20131210),
      ('pyfwsi', 20140714),
      ('pylnk', 20141026),
      ('pymsiecf', 20130317),
      ('pyolecf', 20131012),
      ('pyqcow', 20131204),
      ('pyregf', 20130716),
      ('pysmdev', 20140529),
      ('pysmraw', 20140612),
      ('pyvhdi', 20131210),
      ('pyvmdk', 20140421),
      ('pyvshadow', 20131209),
  ])
  if not check_result:
    build_instructions_url = (
        u'https://sites.google.com/a/kiddaland.net/plaso/developer'
        u'/building-the-tool')
    print u'See: {0:s} on how to set up plaso.'.format(
        build_instructions_url)
  if not libyal_check_result:
    libyal_downloads_url = (
        u'https://googledrive.com/host/0B30H7z4S52FleW5vUHBnblJfcjg'
        u'/libyal.html')
    print u'Libyal libraries can be downloaded from here: {0:s}'.format(
        libyal_downloads_url)
  print u''
+84
View File
@@ -0,0 +1,84 @@
#!/bin/bash
# A small script that contains common functions for code review checks.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXIT_FAILURE=1;
EXIT_SUCCESS=0;

# Runs pylint over the Python files that git reports as added, modified or
# renamed. Returns ${EXIT_SUCCESS} when all files pass, ${EXIT_FAILURE} on
# the first file with linter errors.
linter()
{
  # Examples of the output of "git status -s"
  # If a file is added:
  # A utils/common.sh
  # If a file is modified:
  # M utils/common.sh
  # If a file is renamed:
  # R utils/common.sh -> utils/uncommon.sh
  # If a file is modified and renamed:
  # RM utils/common.sh -> utils/uncommon.sh
  AWK_SCRIPT="if (\$1 == \"A\" || \$1 == \"AM\" || \$1 == \"M\" || \$1 == \"MM\") { print \$2; } else if (\$1 == \"R\" || \$1 == \"RM\") { print \$4; }";

  # First find all files that need linter
  FILES=`git status -s | grep -v "^?" | awk "{ ${AWK_SCRIPT} }" | grep "\.py$"`;

  # Select the rcfile matching the installed pylint: sort -V picks the
  # smaller of the installed version and 1.1.0; when the installed version
  # sorts first (<= 1.1.0) the default rcfile is used.
  PYLINT_VERSION=`pylint --version 2> /dev/null | grep 'pylint' | sed 's/^pylint \(.*\),/\1/'`;
  RESULT=`echo -e "${PYLINT_VERSION}\n1.1.0" | sort -V | head -n1`;

  if test "${RESULT}" = "${PYLINT_VERSION}";
  then
    PYLINTRC="utils/pylintrc";
  else
    PYLINTRC="utils/pylintrc-1.1.0";
  fi

  LINTER="pylint --rcfile=${PYLINTRC}";

  echo "Run through pylint.";
  for FILE in ${FILES};
  do
    # Known exceptions that are not expected to be lint clean.
    if test "${FILE}" = "setup.py" || test "${FILE}" = "utils/upload.py" ;
    then
      echo " -- Skipping: ${FILE} --"
      continue
    fi
    # Generated protobuf bindings are not lint clean by construction.
    if test `echo ${FILE} | tail -c8` == "_pb2.py" ;
    then
      echo "Skipping compiled protobufs: ${FILE}"
      continue
    fi

    echo " -- Checking: ${FILE} --"
    $LINTER "${FILE}"

    if test $? -ne 0 ;
    then
      echo "Fix linter errors before proceeding."
      return ${EXIT_FAILURE};
    fi
  done

  if test $? -ne 0 ;
  then
    return ${EXIT_FAILURE};
  fi

  echo "Linter clear.";
  return ${EXIT_SUCCESS};
}
+24
View File
@@ -0,0 +1,24 @@
#!/bin/bash
# A small helper script to compile protobufs.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Compiles a single protobuf definition from plaso/proto into Python bindings
# placed next to the .proto file.
compile()
{
  protoc -I=. --python_out=. plaso/proto/$1
}

compile plaso_storage.proto
+81
View File
@@ -0,0 +1,81 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file simply creates the AUTHOR file based on parser content."""
import os
import fnmatch
def ProcessFile(file_path):
  """Scans a single file for an __author__ tag and returns its value."""
  # TODO: Change to do a "proper" import of modules and
  # check the __author__ attribute of it.
  # Current approach does not work if the author tag is a list
  # instead of a single attribute (current files as of writing do
  # not have that behavior, but that might change in the future).
  author_value = ''
  with open(file_path, 'rb') as file_object:
    for line in file_object:
      if '__author__' not in line:
        continue
      # Take the right-hand side of the assignment, without the newline.
      _, _, author_value = line[:-1].partition(' = ')
  # Strip the surrounding quote characters; empty when no tag was found.
  return author_value[1:-1]
if __name__ == '__main__':
header = """# Names should be added to this file with this pattern:
#
# For individuals:
# Name (email address)
#
# For organizations:
# Organization (fnmatch pattern)
#
# See python fnmatch module documentation for more information.
Google Inc. (*@google.com)
Kristinn Gudjonsson (kiddi@kiddaland.net)
Joachim Metz (joachim.metz@gmail.com)
Eric Mak (ericmak@gmail.com)
Elizabeth Schweinsberg (beth@bethlogic.net)
Keith Wall (kwallster@gmail.com)
"""
authors = []
with open('AUTHORS', 'wb') as out_file:
out_file.write(header)
for path, folders, files in os.walk('.'):
if path in ('utils', 'tools', 'build'):
continue
for filematch in fnmatch.filter(files, '*.py'):
author = ProcessFile(os.path.join(path, filematch))
if not author:
continue
if type(author) in (list, tuple):
for author_name in author:
if author_name not in authors:
authors.append(author)
else:
if author not in authors:
authors.append(author)
out_file.write('\n'.join(authors))
out_file.write('\n')
print 'Added {0:d} authors from files.'.format(len(authors))
+164
View File
@@ -0,0 +1,164 @@
[bencode]
homepage_url: http://bittorrent.com/
download_url: https://pypi.python.org/pypi/bencode
maintainer: Thomas Rampelberg <thomas@bittorrent.com>
description_short: The BitTorrent bencode module as light-weight, standalone
package
description_long: The BitTorrent bencode module as light-weight, standalone
package
[binplist]
minimum_version: 0.1.4
homepage_url: https://code.google.com/p/binplist/
download_url: https://code.google.com/p/binplist/downloads/list
[construct]
minimum_version: 2.5.2
homepage_url: http://construct.readthedocs.org/en/latest/
download_url: https://pypi.python.org/pypi/construct
maintainer: Tomer Filiba <tomerfiliba@gmail.com>
description_short: Construct is a powerful declarative parser (and builder)
for binary data
description_long: Construct is a powerful declarative parser (and builder)
for binary data
[dfvfs]
minimum_version: 20140824
download_url: https://github.com/log2timeline/dfvfs
[dpkt]
minimum_version: 1.8
maintainer: Dug Song <dugsong@monkey.org>
homepage_url: https://code.google.com/p/dpkt/
download_url: https://code.google.com/p/dpkt/downloads/list
description_short: Python packet creation / parsing module
description_long: Python module for fast, simple packet creation / parsing,
with definitions for the basic TCP/IP protocols.
[ipython]
minimum_version: 1.2.1
[libbde]
minimum_version: 20140531
download_url: https://github.com/libyal/libbde
[libesedb]
minimum_version: 20140301
download_url: https://github.com/libyal/libesedb
[libevt]
minimum_version: 20141026
download_url: https://github.com/libyal/libevt
[libevtx]
minimum_version: 20141112
download_url: https://github.com/libyal/libevtx
[libewf]
minimum_version: 20131210
download_url: https://googledrive.com/host/0B3fBvzttpiiSMTdoaVExWWNsRjg/
[libfwsi]
minimum_version: 20140714
download_url: https://github.com/libyal/libfwsi
[liblnk]
minimum_version: 20141026
download_url: https://github.com/libyal/liblnk
[libmsiecf]
minimum_version: 20130317
download_url: https://github.com/libyal/libmsiecf
[libolecf]
minimum_version: 20131012
download_url: https://github.com/libyal/libolecf
[libqcow]
minimum_version: 20131204
download_url: https://github.com/libyal/libqcow
[libregf]
minimum_version: 20130716
download_url: https://github.com/libyal/libregf
[libsmdev]
minimum_version: 20140529
download_url: https://github.com/libyal/libsmdev
[libsmraw]
minimum_version: 20140612
download_url: https://github.com/libyal/libsmraw
[libvhdi]
minimum_version: 20131210
download_url: https://github.com/libyal/libvhdi
[libvmdk]
minimum_version: 20140421
download_url: https://github.com/libyal/libvmdk
[libvshadow]
minimum_version: 20131209
download_url: https://github.com/libyal/libvshadow
[psutil]
minimum_version: 1.2.1
[pyparsing]
minimum_version: 2.0.2
maintainer: Paul McGuire <ptmcg@users.sourceforge.net>
homepage_url: http://pyparsing.wikispaces.com/
download_url: http://sourceforge.net/projects/pyparsing/files/
description_short:
description_long: The parsing module is an alternative approach to creating
and executing simple grammars, vs. the traditional lex/yacc approach,
or the use of regular expressions. The parsing module provides a library
of classes that client code uses to construct the grammar directly
in Python code.
[pysqlite]
minimum_version: 3.7.8
maintainer: Gerhard Häring <gh@ghaering.de>
homepage_url: https://github.com/ghaering/pysqlite
download_url: https://pypi.python.org/pypi/pysqlite/
description_short:
description_long: pysqlite is a DB-API 2.0-compliant database interface
for SQLite.
[pytz]
dpkg_dependencies: tzdata
dpkg_name: tz
maintainer: Stuart Bishop <stuart@stuartbishop.net>
homepage_url: http://pythonhosted.org/pytz/
download_url: http://pypi.python.org/pypi/pytz/
description_short:
description_long: python-tz brings the Olson tz database into Python.
This library allows accurate and cross platform timezone calculations
using Python 2.3 or higher. It also solves the issue of ambiguous times
at the end of daylight savings, which you can read more about in
the Python Library Reference (datetime.tzinfo).
[PyYAML]
minimum_version: 3.10
dpkg_name: yaml
maintainer: Kirill Simonov <xi@resolvent.net>
homepage_url: http://pyyaml.org/
download_url: https://pypi.python.org/pypi/PyYAML
description_short:
description_long: Python-yaml is a complete YAML 1.1 parser and emitter
for Python. It can parse all examples from the specification. The parsing
algorithm is simple enough to be a reference for YAML parser implementors.
A simple extension API is also provided. The package is built using libyaml
for improved speed.
[six]
minimum_version: 1.1.0
maintainer: Benjamin Peterson <benjamin@python.org>
homepage_url: http://pypi.python.org/pypi/six/
download_url: http://pypi.python.org/pypi/six/
description_short: Python 2 and 3 compatibility library (Python 2 interface)
description_long: Six is a Python 2 and 3 compatibility library. It provides
utility functions for smoothing over the differences between the Python
versions with the goal of writing Python code that is compatible on both
Python versions.
+115
View File
@@ -0,0 +1,115 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains a simple utility to fetch content of code reviews."""
import os
import json
import urllib2
import sys
import subprocess
import tempfile
def DownloadPatchSet(cl_number):
  """Downloads the latest patch set of a code review into a temporary file.

  Args:
    cl_number: The CL number for the code review.

  Returns:
    The name of the patch file, or None if unable to download
    the patch.
  """
  # Reject anything that is not a plain decimal CL number.
  try:
    if str(int(cl_number)) != cl_number:
      return
  except ValueError:
    return

  url = 'https://codereview.appspot.com/api/{0}/'.format(cl_number)
  url_object = urllib2.urlopen(url)
  if url_object.code != 200:
    return

  try:
    data_obj = json.loads(url_object.read())
  except ValueError:
    return

  # The last entry in 'patchsets' is the most recent patch set.
  patches = data_obj.get('patchsets', [])
  last_patch = patches.pop()

  patch_url = 'https://codereview.appspot.com/download/issue{}_{}.diff'.format(
      cl_number, last_patch)
  patch_object = urllib2.urlopen(patch_url)
  if patch_object.code != 200:
    return

  patch_data = patch_object.read()
  with tempfile.NamedTemporaryFile(delete=False) as patch_file_object:
    patch_file_object.write(patch_data)
    patch_file_name = patch_file_object.name
  return patch_file_name
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Need to provide a CL number.'
sys.exit(1)
code_review_number = sys.argv[1]
patch_file = DownloadPatchSet(code_review_number)
if not patch_file:
print 'Unable to download a patch set, exiting.'
sys.exit(1)
branch_name = 'review_{}'.format(code_review_number)
branch_exit = os.system('git checkout -b {}'.format(branch_name))
if branch_exit:
print 'Unable to create a new branch, exiting.'
sys.exit(1)
patch_exit = os.system('patch -p1 < {}'.format(patch_file))
if patch_exit:
print 'Unable to patch files.'
sys.exit(1)
git_add = subprocess.Popen(
'git status -s', shell=True, stdout=subprocess.PIPE)
git_to_add = []
for git_line in git_add.stdout:
if git_line.startswith('??'):
git_to_add.append(git_line[3:-1])
os.system('git add -A')
print 'Files added to git branch'
os.system('git commit -a "Committing CL to branch"')
os.remove(patch_file)
print 'Patch downloaded and applied, branch {} created.'.format(
branch_name)
print 'Remember to delete branch when done testing/inspecting.'
print 'git checkout master && git branch -D {}'.format(branch_name)
+1781
View File
File diff suppressed because it is too large Load Diff
+51
View File
@@ -0,0 +1,51 @@
" Basic editor behavior.
set nocompatible
set backspace=2
syntax on
" set both tabstop and shiftwidth to 2 spaces
set sw=2
set ts=2 ai et list
set showmatch "matching brackets
" Jump to the last known cursor position when reopening a file.
if has("autocmd")
  au BufReadPost * if line("'\"") > 1 && line("'\"") <= line("$") | exe "normal! g'\"" | endif
endif
" Indent Python in the Google way.
setlocal indentexpr=GetGooglePythonIndent(v:lnum)
let s:maxoff = 50 " maximum number of lines to look backwards.
function GetGooglePythonIndent(lnum)
  " Indent inside parens.
  " Align with the open paren unless it is at the end of the line.
  " E.g.
  " open_paren_not_at_EOL(100,
  " (200,
  " 300),
  " 400)
  " open_paren_at_EOL(
  " 100, 200, 300, 400)
  call cursor(a:lnum, 1)
  " Search backwards for an unclosed bracket, ignoring matches inside
  " comments and strings.
  let [par_line, par_col] = searchpairpos('(\|{\|\[', '', ')\|}\|\]', 'bW',
        \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :"
        \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')"
        \ . " =~ '\\(Comment\\|String\\)$'")
  if par_line > 0
    call cursor(par_line, 1)
    if par_col != col("$") - 1
      return par_col
    endif
  endif
  " Delegate the rest to the original function.
  return GetPythonIndent(a:lnum)
endfunction
" Double indent continuations that start after an open paren at EOL.
let pyindent_nested_paren="&sw*2"
let pyindent_open_paren="&sw*2"
set textwidth=80
set ruler
Executable
+871
View File
@@ -0,0 +1,871 @@
#!/usr/bin/python
# git-cl -- a git-command for integrating reviews on Rietveld
# Copyright (C) 2008 Evan Martin <martine@danga.com>
import getpass
import optparse
import os
import re
import subprocess
import sys
import tempfile
import textwrap
import upload
import urllib2

# readline is optional: it is not available on every platform (e.g. native
# Windows Python). The previous unconditional import defeated this guard and
# crashed on such platforms.
try:
  import readline
except ImportError:
  pass

# Default Rietveld server used when rietveld.server is not configured.
DEFAULT_SERVER = 'codereview.appspot.com'
# Optional hooks run before dcommit / upload when present.
PREDCOMMIT_HOOK = '.git/hooks/pre-cl-dcommit'
PREUPLOAD_HOOK = '.git/hooks/pre-cl-upload'
def DieWithError(message):
  """Prints the message to stderr and exits with status 1."""
  sys.stderr.write('%s\n' % message)
  sys.exit(1)
def RunCommand(cmd, error_ok=False, error_message=None, exit_code=False,
               redirect_stdout=True):
  """Runs an external command and returns its output.

  Args:
    cmd: command and arguments as a list.
    error_ok: when True a non-zero exit status is not fatal.
    error_message: extra text shown when the command fails.
    exit_code: when True return the process return code instead of output.
    redirect_stdout: when True capture stdout; otherwise inherit it.

  Returns:
    The captured stdout, or the return code when exit_code is True.
    Exits the process via DieWithError on failure unless error_ok is set.
  """
  # Useful for debugging:
  # print >>sys.stderr, ' '.join(cmd)
  if redirect_stdout:
    stdout = subprocess.PIPE
  else:
    stdout = None
  proc = subprocess.Popen(cmd, stdout=stdout)
  output = proc.communicate()[0]
  if exit_code:
    return proc.returncode
  if not error_ok and proc.returncode != 0:
    DieWithError('Command "%s" failed.\n' % (' '.join(cmd)) +
                 (error_message or output))
  return output
def RunGit(args, **kwargs):
  """Runs a git subcommand; args is the argument list after 'git'."""
  git_command = ['git']
  git_command.extend(args)
  return RunCommand(git_command, **kwargs)
class Settings:
  """Lazily loaded per-repository settings, backed by git config."""

  def __init__(self):
    self.server = None
    self.cc = None
    self.root = None
    self.is_git_svn = None
    self.svn_branch = None
    self.tree_status_url = None
    self.viewvc_url = None

  def GetServer(self, error_ok=False):
    """Returns the Rietveld server from rietveld.server."""
    if not self.server:
      if not error_ok:
        error_message = ('You must configure your review setup by running '
                         '"git cl config".')
        self.server = self._GetConfig('rietveld.server',
                                      error_message=error_message)
      else:
        self.server = self._GetConfig('rietveld.server', error_ok=True)
    return self.server

  def GetCCList(self):
    """Returns the default CC list from rietveld.cc ('' when unset)."""
    if self.cc is None:
      self.cc = self._GetConfig('rietveld.cc', error_ok=True)
    return self.cc

  def GetRoot(self):
    """Returns the absolute path of the repository root."""
    if not self.root:
      self.root = os.path.abspath(RunGit(['rev-parse', '--show-cdup']).strip())
    return self.root

  def GetIsGitSvn(self):
    """Return true if this repo looks like it's using git-svn."""
    if self.is_git_svn is None:
      # If you have any "svn-remote.*" config keys, we think you're using svn.
      self.is_git_svn = RunGit(['config', '--get-regexp', r'^svn-remote\.'],
                               exit_code=True) == 0
    return self.is_git_svn

  def GetSVNBranch(self):
    """Returns the remote svn branch this checkout is based on."""
    if self.svn_branch is None:
      if not self.GetIsGitSvn():
        # BUG FIX: raising a string is invalid since Python 2.6 (it raises
        # TypeError instead of the intended message); raise an Exception.
        raise Exception("Repo doesn't appear to be a git-svn repo.")
      # Try to figure out which remote branch we're based on.
      # Strategy:
      # 1) find all git-svn branches and note their svn URLs.
      # 2) iterate through our branch history and match up the URLs.
      # regexp matching the git-svn line that contains the URL.
      git_svn_re = re.compile(r'^\s*git-svn-id: (\S+)@', re.MULTILINE)
      # Get the refname and svn url for all refs/remotes/*.
      remotes = RunGit(['for-each-ref', '--format=%(refname)',
                        'refs/remotes']).splitlines()
      svn_refs = {}
      for ref in remotes:
        match = git_svn_re.search(RunGit(['cat-file', '-p', ref]))
        if match:
          svn_refs[match.group(1)] = ref
      if len(svn_refs) == 1:
        # Only one svn branch exists -- seems like a good candidate.
        self.svn_branch = svn_refs.values()[0]
      elif len(svn_refs) > 1:
        # We have more than one remote branch available. We don't
        # want to go through all of history, so read a line from the
        # pipe at a time.
        # The -100 is an arbitrary limit so we don't search forever.
        cmd = ['git', 'log', '-100', '--pretty=medium']
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        for line in proc.stdout:
          match = git_svn_re.match(line)
          if match:
            url = match.group(1)
            if url in svn_refs:
              self.svn_branch = svn_refs[url]
              proc.stdout.close()  # Cut pipe.
              break
      if not self.svn_branch:
        # BUG FIX: see above -- string exceptions are invalid.
        raise Exception(
            "Can't guess svn branch -- try specifying it on the command line")
    return self.svn_branch

  def GetTreeStatusUrl(self, error_ok=False):
    """Returns the tree status URL from rietveld.tree-status-url."""
    if not self.tree_status_url:
      error_message = ('You must configure your tree status URL by running '
                       '"git cl config".')
      self.tree_status_url = self._GetConfig('rietveld.tree-status-url',
                                             error_ok=error_ok,
                                             error_message=error_message)
    return self.tree_status_url

  def GetViewVCUrl(self):
    """Returns the ViewVC URL from rietveld.viewvc-url ('' when unset)."""
    if not self.viewvc_url:
      self.viewvc_url = self._GetConfig('rietveld.viewvc-url', error_ok=True)
    return self.viewvc_url

  def _GetConfig(self, param, **kwargs):
    # Helper: read one git config value, stripped of trailing newline.
    return RunGit(['config', param], **kwargs).strip()
# Module-level settings singleton, lazily populated from git config.
settings = Settings()
# Guard so CheckForMigration() only does its work once per invocation.
did_migrate_check = False
def CheckForMigration():
  """Migrate from the old issue format, if found.

  We used to store the branch<->issue mapping in a file in .git, but it's
  better to store it in the .git/config, since deleting a branch deletes that
  branch's entry there.
  """
  # Don't run more than once.
  global did_migrate_check
  if did_migrate_check:
    return

  gitdir = RunGit(['rev-parse', '--git-dir']).strip()
  storepath = os.path.join(gitdir, 'cl-mapping')
  if os.path.exists(storepath):
    print "old-style git-cl mapping file (%s) found; migrating." % storepath
    # Each line of the old file is "<branch> <issue>"; move every pair into
    # git config and delete the old file.
    store = open(storepath, 'r')
    for line in store:
      branch, issue = line.strip().split()
      RunGit(['config', 'branch.%s.rietveldissue' % ShortBranchName(branch),
              issue])
    store.close()
    os.remove(storepath)
  did_migrate_check = True
def IssueURL(issue):
  """Get the URL for a particular issue."""
  return 'http://{0}/{1}'.format(settings.GetServer(), issue)
def ShortBranchName(branch):
  """Convert a full ref like 'refs/heads/foo' to the short name 'foo'."""
  heads_prefix = 'refs/heads/'
  return branch.replace(heads_prefix, '')
class Changelist:
  """Per-branch code review state: issue, patchset and description.

  The branch<->issue mapping is persisted in git config under
  branch.<name>.rietveldissue / branch.<name>.rietveldpatchset; most
  attributes are loaded lazily on first access.
  """

  def __init__(self, branchref=None):
    # Poke settings so we get the "configure your server" message if necessary.
    settings.GetServer()
    self.branchref = branchref
    if self.branchref:
      self.branch = ShortBranchName(self.branchref)
    else:
      self.branch = None
    self.upstream_branch = None
    self.has_issue = False
    self.issue = None
    self.has_description = False
    self.description = None
    # Bug fix: initialize the patchset cache here, otherwise GetPatchset()
    # raises AttributeError when called before SetPatchset()/SetIssue().
    self.has_patchset = False
    self.patchset = None

  def GetBranch(self):
    """Returns the short branch name, e.g. 'master'."""
    if not self.branch:
      self.branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
      self.branch = ShortBranchName(self.branchref)
    return self.branch

  def GetBranchRef(self):
    """Returns the full branch name, e.g. 'refs/heads/master'."""
    self.GetBranch()  # Poke the lazy loader.
    return self.branchref

  def GetUpstreamBranch(self):
    """Returns the ref this branch tracks, e.g. 'refs/remotes/origin/master'.

    Falls back to the git-svn upstream; dies with an explanatory message if
    no upstream can be determined.
    """
    if self.upstream_branch is None:
      branch = self.GetBranch()
      upstream_branch = RunGit(['config', 'branch.%s.merge' % branch],
                               error_ok=True).strip()
      if upstream_branch:
        remote = RunGit(['config', 'branch.%s.remote' % branch]).strip()
        # We have remote=origin and branch=refs/heads/foobar; convert to
        # refs/remotes/origin/foobar.
        self.upstream_branch = upstream_branch.replace('heads',
                                                       'remotes/' + remote)
      if not self.upstream_branch:
        # Fall back on trying a git-svn upstream branch.
        if settings.GetIsGitSvn():
          self.upstream_branch = settings.GetSVNBranch()
      if not self.upstream_branch:
        DieWithError("""Unable to determine default branch to diff against.
Either pass complete "git diff"-style arguments, like
  git cl upload origin/master
or verify this branch is set up to track another (via the --track argument to
"git checkout -b ...").""")
    return self.upstream_branch

  def GetIssue(self):
    """Returns the issue number string stored for this branch, or None."""
    if not self.has_issue:
      CheckForMigration()
      issue = RunGit(['config', self._IssueSetting()], error_ok=True).strip()
      if issue:
        self.issue = issue
      else:
        self.issue = None
      self.has_issue = True
    return self.issue

  def GetIssueURL(self):
    """Returns the codereview URL for this branch's issue."""
    return IssueURL(self.GetIssue())

  def GetDescription(self, pretty=False):
    """Returns the issue description, fetched lazily from the server.

    If pretty is True, the text is wrapped and indented for display.
    Note: if the branch has no issue, the description stays None.
    """
    if not self.has_description:
      if self.GetIssue():
        url = self.GetIssueURL() + '/description'
        self.description = urllib2.urlopen(url).read().strip()
      self.has_description = True
    if pretty:
      wrapper = textwrap.TextWrapper()
      wrapper.initial_indent = wrapper.subsequent_indent = ' '
      return wrapper.fill(self.description)
    return self.description

  def GetPatchset(self):
    """Returns the most recent patchset number string, or None."""
    if not self.has_patchset:
      patchset = RunGit(['config', self._PatchsetSetting()],
                        error_ok=True).strip()
      if patchset:
        self.patchset = patchset
      else:
        self.patchset = None
      self.has_patchset = True
    return self.patchset

  def SetPatchset(self, patchset):
    """Set this branch's patchset. If patchset=0, clears the patchset."""
    if patchset:
      RunGit(['config', self._PatchsetSetting(), str(patchset)])
    else:
      RunGit(['config', '--unset', self._PatchsetSetting()])
    # Invalidate the cache so the next GetPatchset() re-reads git config.
    self.has_patchset = False

  def SetIssue(self, issue):
    """Set this branch's issue. If issue=0, clears the issue."""
    if issue:
      RunGit(['config', self._IssueSetting(), str(issue)])
    else:
      RunGit(['config', '--unset', self._IssueSetting()])
      self.SetPatchset(0)
    self.has_issue = False

  def CloseIssue(self):
    """Close the issue on the server, prompting for user credentials."""
    def GetUserCredentials():
      email = raw_input('Email: ').strip()
      password = getpass.getpass('Password for %s: ' % email)
      return email, password

    rpc_server = upload.HttpRpcServer(settings.GetServer(),
                                      GetUserCredentials,
                                      host_override=settings.GetServer(),
                                      save_cookies=True)
    # You cannot close an issue with a GET.
    # We pass an empty string for the data so it is a POST rather than a GET.
    data = [("description", self.description),]
    ctype, body = upload.EncodeMultipartFormData(data, [])
    rpc_server.Send('/' + self.GetIssue() + '/close', body, ctype)

  def _IssueSetting(self):
    """Return the git setting that stores this change's issue."""
    return 'branch.%s.rietveldissue' % self.GetBranch()

  def _PatchsetSetting(self):
    """Return the git setting that stores this change's most recent patchset."""
    return 'branch.%s.rietveldpatchset' % self.GetBranch()
def GetCodereviewSettingsInteractively():
  """Prompt the user for settings.

  Reads current rietveld.* values from git config, prompts for replacements
  on stdin, and writes any changed values back to git config.
  """
  server = settings.GetServer(error_ok=True)
  prompt = 'Rietveld server (host[:port])'
  prompt += ' [%s]' % (server or DEFAULT_SERVER)
  newserver = raw_input(prompt + ': ')
  if not server and not newserver:
    newserver = DEFAULT_SERVER
  if newserver and newserver != server:
    RunGit(['config', 'rietveld.server', newserver])
  def SetProperty(initial, caption, name):
    # Prompt for one rietveld.* value: 'x' clears it, empty input keeps
    # the current value, anything else replaces it.
    prompt = caption
    if initial:
      prompt += ' ("x" to clear) [%s]' % initial
    new_val = raw_input(prompt + ': ')
    if new_val == 'x':
      RunGit(['config', '--unset-all', 'rietveld.' + name], error_ok=True)
    elif new_val and new_val != initial:
      RunGit(['config', 'rietveld.' + name, new_val])
  SetProperty(settings.GetCCList(), 'CC list', 'cc')
  SetProperty(settings.GetTreeStatusUrl(error_ok=True), 'Tree status URL',
              'tree-status-url')
  SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url')
  # TODO: configure a default branch to diff against, rather than this
  # svn-based hackery.
def LoadCodereviewSettingsFromFile(file):
  """Parse a codereview.settings file and store the values in git config.

  Returns a dict of hook-script URLs found in the file (keys 'preupload'
  and/or 'predcommit').
  """
  # NOTE: 'file' is the historical parameter name; callers pass a file-like
  # object positionally.
  keyvals = {}
  for raw_line in file.read().splitlines():
    if not raw_line or raw_line.startswith("#"):
      continue
    key, value = raw_line.split(": ", 1)
    keyvals[key] = value

  def SetProperty(name, setting, unset_error_ok=False):
    # Copy one parsed value into rietveld.<name>, or unset it when absent.
    fullname = 'rietveld.' + name
    if setting in keyvals:
      RunGit(['config', fullname, keyvals[setting]])
    else:
      RunGit(['config', '--unset-all', fullname], error_ok=unset_error_ok)

  SetProperty('server', 'CODE_REVIEW_SERVER')
  # Only server setting is required. Other settings can be absent.
  # In that case, we ignore errors raised during option deletion attempt.
  SetProperty('cc', 'CC_LIST', unset_error_ok=True)
  SetProperty('tree-status-url', 'STATUS', unset_error_ok=True)
  SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)

  hooks = {}
  for hook_name, settings_key in (('preupload', 'GITCL_PREUPLOAD'),
                                  ('predcommit', 'GITCL_PREDCOMMIT')):
    if keyvals.get(settings_key):
      hooks[hook_name] = keyvals.get(settings_key)
  return hooks
def CmdConfig(args):
  """Edit configuration for this tree ('git cl config [url]').

  With no arguments, prompts interactively; with a URL or repo root,
  fetches codereview.settings from it and installs any hook scripts it
  references.
  """
  def DownloadToFile(url, filename):
    # Download url into filename (relative to the repo root) and mark it
    # executable; skips the download if the file already exists.
    filename = os.path.join(settings.GetRoot(), filename)
    if os.path.exists(filename):
      print '%s exists, skipping' % filename
      return False
    contents = urllib2.urlopen(url).read()
    file = open(filename, 'w')
    file.write(contents)
    file.close()
    # rwxr-xr-x: hook scripts must be executable.
    os.chmod(filename, 0755)
    return True
  parser = optparse.OptionParser(
      usage='git cl config [repo root containing codereview.settings]')
  (options, args) = parser.parse_args(args)
  if len(args) == 0:
    GetCodereviewSettingsInteractively()
    return
  url = args[0]
  if not url.endswith('codereview.settings'):
    url = os.path.join(url, 'codereview.settings')
  # Load Codereview settings and download hooks (if available).
  hooks = LoadCodereviewSettingsFromFile(urllib2.urlopen(url))
  for key, filename in (('predcommit', PREDCOMMIT_HOOK),
                        ('preupload', PREUPLOAD_HOOK)):
    if key in hooks:
      DownloadToFile(hooks[key], filename)
def CmdStatus(args):
  """Show status of changelists ('git cl status').

  With --field, prints one field (desc|id|url) of the current branch's
  issue; otherwise prints a per-branch summary plus the current branch's
  issue details.
  """
  parser = optparse.OptionParser(usage='git cl status [options]')
  parser.add_option('--field', help='print only specific field (desc|id|url)')
  (options, args) = parser.parse_args(args)
  # TODO: maybe make show_branches a flag if necessary.
  show_branches = not options.field
  if show_branches:
    branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
    if branches:
      print 'Branches associated with reviews:'
      for branch in sorted(branches.splitlines()):
        cl = Changelist(branchref=branch)
        print " %10s: %s" % (cl.GetBranch(), cl.GetIssue())
  cl = Changelist()
  if options.field:
    # 'desc' is matched by prefix so e.g. 'description' also works.
    if options.field.startswith('desc'):
      print cl.GetDescription()
    elif options.field == 'id':
      print cl.GetIssue()
    elif options.field == 'url':
      print cl.GetIssueURL()
  else:
    print
    print 'Current branch:',
    if not cl.GetIssue():
      print 'no issue assigned.'
      return 0
    print cl.GetBranch()
    print 'Issue number:', cl.GetIssue(), '(%s)' % cl.GetIssueURL()
    print 'Issue description:'
    print cl.GetDescription(pretty=True)
def CmdIssue(args):
  """Show or set the current branch's issue number ('git cl issue')."""
  parser = optparse.OptionParser(usage='git cl issue [issue_number]')
  parser.description = ('Set or display the current code review issue. ' +
                        'Pass issue number 0 to clear the current issue.')
  (options, args) = parser.parse_args(args)
  cl = Changelist()
  if len(args) > 0:
    # Issue number 0 clears the stored issue (see Changelist.SetIssue).
    cl.SetIssue(int(args[0]))
  print 'Issue number:', cl.GetIssue(), '(%s)' % cl.GetIssueURL()
def UserEditedLog(starting_text):
  """Given some starting text, let the user edit it and return the result."""
  editor = os.getenv('EDITOR', 'vi')

  # Write the starting text to a scratch file for the editor.
  (handle, path) = tempfile.mkstemp()
  scratch = os.fdopen(handle, 'w')
  scratch.write(starting_text)
  scratch.close()

  # A non-zero editor exit aborts the edit; the implicit None return
  # signals the caller that no text was produced.
  if subprocess.call(editor + ' ' + path, shell=True) != 0:
    os.remove(path)
    return

  edited = open(path)
  text = edited.read()
  edited.close()
  os.remove(path)

  # Strip out any full-line '#' comments the user left in place.
  comment_pattern = re.compile(r'^#.*$', re.MULTILINE)
  return comment_pattern.sub('', text).strip()
def RunHook(hook, upstream_branch='origin', error_ok=False):
  """Run a given hook if it exists. By default, we fail on errors.

  The hook path is relative to the repository root; a missing hook is
  silently skipped. The hook receives the upstream branch as its only
  argument and any output it produces is echoed to the user.
  """
  hook = '%s/%s' % (settings.GetRoot(), hook)
  if not os.path.exists(hook):
    return
  output = RunCommand([hook, upstream_branch], error_ok).strip()
  if output != '':
    print output
def CmdPresubmit(args):
  """Reports what presubmit checks on the change would report."""
  parser = optparse.OptionParser(
      usage='git cl presubmit [options]')
  (options, args) = parser.parse_args(args)
  # A dirty tree would make the hook results meaningless.
  if RunGit(['diff-index', 'HEAD']):
    print 'Cannot presubmit with a dirty tree. You must commit locally first.'
    return 1
  # Run both hooks in best-effort mode so both reports are produced even
  # if one of them fails.
  print '*** Presubmit checks for UPLOAD would report: ***'
  RunHook(PREUPLOAD_HOOK, error_ok=True)
  print '*** Presubmit checks for DCOMMIT would report: ***'
  RunHook(PREDCOMMIT_HOOK, error_ok=True)
def CmdUpload(args):
  """Upload the current changelist to the codereview server.

  Adds a patchset to the branch's existing issue, or creates a new issue
  after letting the user edit a description derived from the git log.
  Remaining positional args are passed through as "git diff" arguments.
  """
  parser = optparse.OptionParser(
      usage='git cl upload [options] [args to "git diff"]')
  parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
                    help='bypass upload presubmit hook')
  parser.add_option('-m', dest='message', help='message for patch')
  parser.add_option('-r', '--reviewers',
                    help='reviewer email addresses')
  parser.add_option('--send-mail', action='store_true',
                    help='send email to reviewer immediately')
  (options, args) = parser.parse_args(args)
  if RunGit(['diff-index', 'HEAD']):
    print 'Cannot upload with a dirty tree. You must commit locally first.'
    return 1
  cl = Changelist()
  if args:
    base_branch = args[0]
  else:
    # Default to diffing against the "upstream" branch.
    base_branch = cl.GetUpstreamBranch()
    args = [base_branch + "..."]
  if not options.bypass_hooks:
    RunHook(PREUPLOAD_HOOK, upstream_branch=base_branch, error_ok=False)
  # --no-ext-diff is broken in some versions of Git, so try to work around
  # this by overriding the environment (but there is still a problem if the
  # git config key "diff.external" is used).
  env = os.environ.copy()
  if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF']
  # Show the user a diffstat of what is about to be uploaded.
  subprocess.call(['git', 'diff', '--no-ext-diff', '--stat', '-M'] + args,
                  env=env)
  upload_args = ['--assume_yes']  # Don't ask about untracked files.
  upload_args.extend(['--server', settings.GetServer()])
  if options.reviewers:
    upload_args.extend(['--reviewers', options.reviewers])
  upload_args.extend(['--cc', settings.GetCCList()])
  if options.message:
    upload_args.extend(['--message', options.message])
  if options.send_mail:
    if not options.reviewers:
      DieWithError("Must specify reviewers to send email.")
    upload_args.append('--send_mail')
  if cl.GetIssue():
    upload_args.extend(['--issue', cl.GetIssue()])
    print ("This branch is associated with issue %s. "
           "Adding patch to that issue." % cl.GetIssue())
  else:
    # Construct a description for this change from the log.
    # We need to convert diff options to log options.
    log_args = []
    if len(args) == 1 and not args[0].endswith('.'):
      log_args = [args[0] + '..']
    elif len(args) == 2:
      log_args = [args[0] + '..' + args[1]]
    else:
      log_args = args[:]  # Hope for the best!
    desc = RunGit(['log', '--pretty=format:%s\n\n%b'] + log_args)
    initial_text = """# Enter a description of the change.
# This will displayed on the codereview site.
# The first line will also be used as the subject of the review."""
    desc = UserEditedLog(initial_text + '\n' + desc)
    if not desc:
      print "Description empty; aborting."
      return 1
    subject = desc.splitlines()[0]
    upload_args.extend(['--message', subject])
    upload_args.extend(['--description', desc])
  issue, patchset = upload.RealMain(['upload'] + upload_args + args)
  # Remember the issue/patchset in git config for subsequent commands.
  if not cl.GetIssue():
    cl.SetIssue(issue)
  cl.SetPatchset(patchset)
def CmdDCommit(args):
  """Commit the current changelist via git-svn ('git cl dcommit').

  Squashes the branch into a single commit on a temporary merge branch,
  dcommits it through git-svn, and then closes the associated review
  issue (if any).
  """
  parser = optparse.OptionParser(
      usage='git cl dcommit [options] [git-svn branch to apply against]')
  parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
                    help='bypass upload presubmit hook')
  parser.add_option('-m', dest='message',
                    help="override review description")
  parser.add_option('-f', action='store_true', dest='force',
                    help="force yes to questions (don't prompt)")
  parser.add_option('-c', dest='contributor',
                    help="external contributor for patch (appended to " +
                         "description)")
  parser.add_option('--tbr', action='store_true', dest='tbr',
                    help="short for 'to be reviewed', commit branch " +
                         "even without uploading for review")
  (options, args) = parser.parse_args(args)
  cl = Changelist()
  if not args:
    # Default to merging against our best guess of the upstream branch.
    args = [cl.GetUpstreamBranch()]
  base_branch = args[0]
  if RunGit(['diff-index', 'HEAD']):
    print 'Cannot dcommit with a dirty tree. You must commit locally first.'
    return 1
  # This rev-list syntax means "show all commits not in my branch that
  # are in base_branch".
  upstream_commits = RunGit(['rev-list', '^' + cl.GetBranchRef(),
                             base_branch]).splitlines()
  if upstream_commits:
    print ('Base branch "%s" has %d commits '
           'not in this branch.' % (base_branch, len(upstream_commits)))
    print 'Run "git merge %s" before attempting to dcommit.' % base_branch
    return 1
  if not options.force and not options.bypass_hooks:
    RunHook(PREDCOMMIT_HOOK, upstream_branch=base_branch, error_ok=False)
    # Check the tree status if the tree status URL is set.
    status = GetTreeStatus()
    if 'closed' == status:
      print ('The tree is closed. Please wait for it to reopen. Use '
             '"git cl dcommit -f" to commit on a closed tree.')
      return 1
    elif 'unknown' == status:
      print ('Unable to determine tree status. Please verify manually and '
             'use "git cl dcommit -f" to commit on a closed tree.')
  description = options.message
  if not options.tbr:
    # It is important to have these checks early. Not only for user
    # convenience, but also because the cl object then caches the correct values
    # of these fields even as we're juggling branches for setting up the commit.
    if not cl.GetIssue():
      print 'Current issue unknown -- has this branch been uploaded?'
      print 'Use --tbr to commit without review.'
      return 1
    if not description:
      description = cl.GetDescription()
    if not description:
      print 'No description set.'
      print 'Visit %s/edit to set it.' % (cl.GetIssueURL())
      return 1
    description += "\n\nReview URL: %s" % cl.GetIssueURL()
  else:
    # Submitting TBR. Get a description now.
    if not description:
      description = UserEditedLog('TBR: ')
    if not description:
      print "Description empty; aborting."
      return 1
  if options.contributor:
    description += "\nPatch from %s." % options.contributor
  print 'Description:', repr(description)
  branches = [base_branch, cl.GetBranchRef()]
  if not options.force:
    subprocess.call(['git', 'diff', '--stat'] + branches)
    raw_input("About to commit; enter to confirm.")
  # We want to squash all this branch's commits into one commit with the
  # proper description.
  # We do this by doing a "merge --squash" into a new commit branch, then
  # dcommitting that.
  MERGE_BRANCH = 'git-cl-commit'
  # Delete the merge branch if it already exists.
  if RunGit(['show-ref', '--quiet', '--verify', 'refs/heads/' + MERGE_BRANCH],
            exit_code=True) == 0:
    RunGit(['branch', '-D', MERGE_BRANCH])
  # We might be in a directory that's present in this branch but not in the
  # trunk. Move up to the top of the tree so that git commands that expect a
  # valid CWD won't fail after we check out the merge branch.
  rel_base_path = RunGit(['rev-parse', '--show-cdup']).strip()
  if rel_base_path:
    os.chdir(rel_base_path)
  # Stuff our change into the merge branch.
  # We wrap in a try...finally block so if anything goes wrong,
  # we clean up the branches.
  try:
    RunGit(['checkout', '-q', '-b', MERGE_BRANCH, base_branch])
    RunGit(['merge', '--squash', cl.GetBranchRef()])
    RunGit(['commit', '-m', description])
    # dcommit the merge branch.
    output = RunGit(['svn', 'dcommit', '--no-rebase'])
  finally:
    # And then swap back to the original branch and clean up.
    RunGit(['checkout', '-q', cl.GetBranch()])
    RunGit(['branch', '-D', MERGE_BRANCH])
  if cl.has_issue and output.find("Committed r") != -1:
    print "Closing issue (you may be prompted for your codereview password)..."
    viewvc_url = settings.GetViewVCUrl()
    if viewvc_url:
      # Append the committed revision URL to the description before closing.
      revision = re.compile(".*?\nCommitted r(\d+)",
                            re.DOTALL).match(output).group(1)
      cl.description = (cl.description +
                        "\n\nCommitted: " + viewvc_url + revision)
    cl.CloseIssue()
    cl.SetIssue(0)
def CmdPatch(args):
  """Apply a patch from a review issue to the current tree ('git cl patch').

  Accepts either a numeric issue id (the patch URL is then scraped from the
  issue page) or a direct URL to a raw patch.
  """
  parser = optparse.OptionParser(usage=('git cl patch [options] '
                                        '<patch url or issue id>'))
  parser.add_option('-b', dest='newbranch',
                    help='create a new branch off trunk for the patch')
  parser.add_option('-f', action='store_true', dest='force',
                    help='with -b, clobber any existing branch')
  parser.add_option('--reject', action='store_true', dest='reject',
                    help='allow failed patches and spew .rej files')
  parser.add_option('-n', '--no-commit', action='store_true', dest='nocommit',
                    help="don't commit after patch applies")
  (options, args) = parser.parse_args(args)
  if len(args) != 1:
    return parser.print_help()
  # NOTE: 'input' shadows the builtin; kept for historical reasons.
  input = args[0]
  if re.match(r'\d+', input):
    # Input is an issue id. Figure out the URL.
    issue = input
    fetch = "curl --silent http://%s/%s" % (settings.GetServer(), issue)
    grep = "grep -E -o '/download/issue[0-9]+_[0-9]+.diff'"
    pipe = subprocess.Popen("%s | %s" % (fetch, grep), shell=True,
                            stdout=subprocess.PIPE)
    path = pipe.stdout.read().strip()
    url = 'http://%s%s' % (settings.GetServer(), path)
  else:
    # Assume it's a URL to the patch.
    match = re.match(r'http://.*?/issue(\d+)_\d+.diff', input)
    if match:
      issue = match.group(1)
      url = input
    else:
      print "Must pass an issue ID or full URL for 'Download raw patch set'"
      return 1
  if options.newbranch:
    if options.force:
      RunGit(['branch', '-D', options.newbranch], error_ok=True)
    RunGit(['checkout', '-b', options.newbranch])
  # Switch up to the top-level directory, if necessary, in preparation for
  # applying the patch.
  top = RunGit(['rev-parse', '--show-cdup']).strip()
  if top:
    os.chdir(top)
  # Construct a pipeline to feed the patch into "git apply".
  # We use "git apply" to apply the patch instead of "patch" so that we can
  # pick up file adds.
  # 1) Fetch the patch.
  fetch = "curl --silent %s" % url
  # 2) Munge the patch.
  # Git patches have a/ at the beginning of source paths. We strip that out
  # with a sed script rather than the -p flag to patch so we can feed either
  # Git or svn-style patches into the same apply command.
  gitsed = "sed -e 's|^--- a/|--- |; s|^+++ b/|+++ |'"
  # 3) Apply the patch.
  # The --index flag means: also insert into the index (so we catch adds).
  apply = "git apply --index -p0"
  if options.reject:
    apply += " --reject"
  subprocess.check_call(' | '.join([fetch, gitsed, apply]), shell=True)
  # If we had an issue, commit the current state and register the issue.
  if not options.nocommit:
    RunGit(['commit', '-m', 'patch from issue %s' % issue])
    cl = Changelist()
    cl.SetIssue(issue)
    print "Committed patch."
  else:
    print "Patch applied to index."
def CmdRebase(args):
  """Wrapper for "git svn rebase" to help avoid an accidental dcommit."""
  RunGit(['svn', 'rebase'], redirect_stdout=False)
def GetTreeStatus():
  """Fetches the tree status and returns either 'open', 'closed',
  'unknown' or 'unset'."""
  url = settings.GetTreeStatusUrl(error_ok=True)
  if not url:
    # No tree status URL configured for this repository.
    return 'unset'
  status = urllib2.urlopen(url).read().lower()
  if 'closed' in status or status == '0':
    return 'closed'
  if 'open' in status or status == '1':
    return 'open'
  return 'unknown'
def CmdTreeStatus(args):
  """Show the tree status.

  Exit code: 0 if open, 1 if closed/unknown, 2 if no URL is configured.
  """
  status = GetTreeStatus()
  if 'unset' == status:
    print 'You must configure your tree status URL by running "git cl config".'
    return 2
  print "The tree is %s" % status
  if status != 'open':
    return 1
  return 0
def CmdUpstream(args):
  """Print the name of the upstream branch, if any."""
  cl = Changelist()
  print cl.GetUpstreamBranch()
# Table of (name, description, handler); used for dispatch in main() and
# for the help text printed by Usage(). Kept sorted by command name.
COMMANDS = [
  ('config', 'edit configuration for this tree', CmdConfig),
  ('dcommit', 'commit the current changelist via git-svn', CmdDCommit),
  ('issue', 'show/set current branch\'s issue number', CmdIssue),
  ('patch', 'patch in a code review', CmdPatch),
  ('presubmit', 'run presubmit tests on the current changelist', CmdPresubmit),
  ('rebase', 'rebase current branch on top of svn repo', CmdRebase),
  ('status', 'show status of changelists', CmdStatus),
  ('tree', 'show the status of the tree', CmdTreeStatus),
  ('upload', 'upload the current changelist to codereview', CmdUpload),
  ('upstream', 'print the name of the upstream branch, if any', CmdUpstream),
]
def Usage(name):
  """Print the command list and exit with a non-zero status."""
  print 'usage: %s <command>' % name
  print 'commands are:'
  for name, desc, _ in COMMANDS:
    print ' %-10s %s' % (name, desc)
  sys.exit(1)
def main(argv):
  """Dispatch to the COMMANDS handler named by argv[1]."""
  if len(argv) < 2:
    Usage(argv[0])
  command = argv[1]
  for name, _, func in COMMANDS:
    if name == command:
      # The handler's return value becomes the process exit code.
      return func(argv[2:])
  print 'unknown command: %s' % command
  Usage(argv[0])
# Script entry point: exit with the selected command's return code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
+45
View File
@@ -0,0 +1,45 @@
#!/bin/bash
# Script that prepares the codebase for building a binary distribution
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXIT_FAILURE=1;
EXIT_SUCCESS=0;

# Remove support for hachoir which is GPLv2 and cannot be distributed
# in binary form. Leave the formatter because it does not link in the
# hachoir code.
rm -f plaso/parsers/hachoir*

# Drop the hachoir parser imports from the parsers package.
sed -i"~" -e '/import hachoir/d' plaso/parsers/__init__.py

# Sed script that deletes each multi-line "..._slow': [" preset list entry:
# starting at the opening line, accumulate lines until the closing "'],"
# is seen, then delete the whole accumulated block.
SED_SCRIPT="
/_slow': \[/ {
:loop
  /'\],/ !{
    N
    b loop
  }
  d
}";

sed -i"~" -e "${SED_SCRIPT}" plaso/frontend/presets.py

# Remove the hachoir_* dependency checks (each block ends at a blank line).
sed -i"~" '/hachoir_/,/^$/d' utils/check_dependencies.py

exit ${EXIT_SUCCESS};
+289
View File
@@ -0,0 +1,289 @@
# File copied from:
# http://src.chromium.org/chrome/trunk/tools/depot_tools/pylintrc
# Date: 2013-06-29.
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once).
# CHANGED:
# C0103: Invalid name ""
# C0111: Missing docstring
# C0302: Too many lines in module (N)
#
# F0401: Unable to import 'module'
# pylint acting strangely: plaso/lib/event.py: F0401: 26,0: Unable to import 'google.protobuf'
#
# I0010: Unable to consider inline option ''
# I0011: Locally disabling WNNNN
#
# R0201: Method could be a function
# R0801: Similar lines in N files
# R0901: Too many ancestors (8/7)
# R0902: Too many instance attributes (N/7)
# R0903: Too few public methods (N/2)
# R0904: Too many public methods (N/20)
# R0911: Too many return statements (N/6)
# R0912: Too many branches (N/12)
# R0913: Too many arguments (N/5)
# R0914: Too many local variables (N/15)
# R0915: Too many statements (N/50)
# R0921: Abstract class not referenced
# R0922: Abstract class is only referenced 1 times
# R0924: Badly implemented Container, implements __len__ but not __getitem__ (incomplete-protocol) (pylint 0.26 and later)
# W0122: Use of the exec statement
# W0141: Used builtin function ''
# W0142: Used * or ** magic
# W0402: Uses of a deprecated module 'string'
# W0404: 41: Reimport 'XX' (imported line NN)
# W0511: TODO
# W0603: Using the global statement
# W0703: Catch "Exception"
# W1201: Specify string format arguments as logging function parameters
# W0201: Variables defined initially outside the scope of __init__ (reconsider this, added by Kristinn).
disable=C0103,C0111,C0302,F0401,I0010,I0011,R0201,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,R0924,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201,W0201
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=text
# Include message's id in output
include-ids=yes
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
# CHANGED:
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of unused variables.
# By default this is _ and dummy but we prefer _ and unused.
dummy-variables-rgx=_|unused
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject,twisted.internet.reactor,hashlib,google.appengine.api.memcache
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent,multiprocessing.managers.SyncManager
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
# CHANGED:
indent-string=' '
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branchs=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
+288
View File
@@ -0,0 +1,288 @@
# File copied from:
# http://src.chromium.org/chrome/trunk/tools/depot_tools/pylintrc
# Date: 2013-06-29.
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once).
# CHANGED:
# C0103: Invalid name ""
# C0111: Missing docstring
# C0302: Too many lines in module (N)
#
# F0401: Unable to import 'module'
# pylint acting strangely: plaso/lib/event.py: F0401: 26,0: Unable to import 'google.protobuf'
#
# I0010: Unable to consider inline option ''
# I0011: Locally disabling WNNNN
#
# R0201: Method could be a function
# R0801: Similar lines in N files
# R0901: Too many ancestors (8/7)
# R0902: Too many instance attributes (N/7)
# R0903: Too few public methods (N/2)
# R0904: Too many public methods (N/20)
# R0911: Too many return statements (N/6)
# R0912: Too many branches (N/12)
# R0913: Too many arguments (N/5)
# R0914: Too many local variables (N/15)
# R0915: Too many statements (N/50)
# R0921: Abstract class not referenced
# R0922: Abstract class is only referenced 1 times
# W0122: Use of the exec statement
# W0141: Used builtin function ''
# W0142: Used * or ** magic
# W0402: Uses of a deprecated module 'string'
# W0404: 41: Reimport 'XX' (imported line NN)
# W0511: TODO
# W0603: Using the global statement
# W0703: Catch "Exception"
# W1201: Specify string format arguments as logging function parameters
# W0201: Variables defined initially outside the scope of __init__ (reconsider this, added by Kristinn).
disable=C0103,C0111,C0302,F0401,I0010,I0011,R0201,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201,W0201
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=text
# Include message's id in output
include-ids=yes
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
# CHANGED:
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of unused variables.
# By default this is _ and dummy but we prefer _ and unused.
dummy-variables-rgx=_|unused
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject,twisted.internet.reactor,hashlib,google.appengine.api.memcache
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent,multiprocessing.managers.SyncManager
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
# CHANGED:
indent-string=' '
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branchs=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used, for
# instance, to not check methods defined in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
+165
View File
@@ -0,0 +1,165 @@
#!/bin/bash
# A small script that submits a code for code review.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: EXIT_FAILURE was previously never defined (EXIT_SUCCESS was assigned
# twice), which made every failure path exit with the status of the preceding
# echo, i.e. success.
EXIT_FAILURE=1;
EXIT_MISSING_ARGS=2;
EXIT_SUCCESS=0;

SCRIPTNAME=`basename $0`;

BROWSER_PARAM="";
CACHE_PARAM="";
USE_CL_FILE=1;

# Parse the command line arguments; the first non-option argument is taken to
# be the email address of the reviewer.
while test $# -gt 0;
do
  case $1 in
  --nobrowser | --no-browser | --no_browser )
    BROWSER_PARAM="--no_oauth2_webbrowser";
    shift;
    ;;

  --noclfile | --no-clfile | --no_clfile )
    USE_CL_FILE=0;
    shift;
    ;;

  *)
    REVIEWER=$1;
    shift
    ;;
  esac
done

if test -z "${REVIEWER}";
then
  echo "Usage: ./${SCRIPTNAME} [--nobrowser] [--noclfile] REVIEWER";
  echo "";
  echo " REVIEWER: the email address of the reviewer that is registered with:"
  echo " https://codereview.appspot.com";
  echo "";

  exit ${EXIT_MISSING_ARGS};
fi

if ! test -f "utils/common.sh";
then
  echo "Missing common functions, are you in the wrong directory?";

  exit ${EXIT_FAILURE};
fi

. utils/common.sh

# Check for double status codes, upload.py cannot handle these correctly.
STATUS_CODES=`git status -s | cut -b1,2 | grep '\S\S' | grep -v '??' | sort | uniq`;

if ! test -z "${STATUS_CODES}";
then
  echo "Upload aborted - detected double git status codes."
  echo "Run: 'git stash && git stash pop'.";

  exit ${EXIT_FAILURE};
fi

# Check if the linting is correct.
if ! linter;
then
  echo "Upload aborted - fix the issues reported by the linter.";

  exit ${EXIT_FAILURE};
fi

# Check if all the tests pass.
if test -e run_tests.py;
then
  echo "Running tests.";
  python run_tests.py

  if test $? -ne 0;
  then
    echo "Upload aborted - fix the issues reported by the failing test.";

    exit ${EXIT_FAILURE};
  fi
fi

MISSING_TESTS="";
# Determine the changed, non-deleted Python files without a corresponding
# _test.py file. Note that "D" must be quoted inside the awk program,
# otherwise awk interprets it as an (empty) variable and the comparison
# never filters out deleted files.
FILES=`git status -s | grep -v "^?" | awk '{if ($1 != "D") { print $2;}}' | grep "\.py$" | grep -v "_test.py$"`

for CHANGED_FILE in ${FILES};
do
  # Strip only the trailing .py suffix to derive the unit test filename.
  TEST_FILE=`echo ${CHANGED_FILE} | sed -e 's/\.py$//'`
  if ! test -f "${TEST_FILE}_test.py";
  then
    MISSING_TESTS="${MISSING_TESTS} + ${CHANGED_FILE}"
  fi
done

if test -z "${MISSING_TESTS}";
then
  MISSING_TEST_FILES=".";
else
  MISSING_TEST_FILES="These files are missing unit tests:
${MISSING_TESTS}
";
fi

echo -n "Short description of code review request: ";
read DESCRIPTION

TEMP_FILE=`mktemp .tmp_plaso_code_review.XXXXXX`;

# Check if we need to set --cache.
STATUS_CODES=`git status -s | cut -b1,2 | sed 's/\s//g' | sort | uniq`;

for STATUS_CODE in ${STATUS_CODES};
do
  if test "${STATUS_CODE}" = "A";
  then
    CACHE_PARAM="--cache";
  fi
done

if ! test -z "${BROWSER_PARAM}";
then
  echo "You need to visit: https://codereview.appspot.com/get-access-token";
  echo "and copy+paste the access token to the window (no prompt)";
fi

python utils/upload.py \
    --oauth2 ${BROWSER_PARAM} -y ${CACHE_PARAM} \
    -r ${REVIEWER} --cc log2timeline-dev@googlegroups.com \
    -m "${MISSING_TEST_FILES}" -t "${DESCRIPTION}" \
    --send_mail | tee ${TEMP_FILE};

CL=`cat ${TEMP_FILE} | grep codereview.appspot.com | awk -F '/' '/created/ {print $NF}'`;

cat ${TEMP_FILE};
rm -f ${TEMP_FILE};

echo "";

if test -z ${CL};
then
  echo "Unable to upload code change for review.";

  exit ${EXIT_FAILURE};

elif test ${USE_CL_FILE} -ne 0;
then
  echo ${CL} > ._code_review_number;
  echo "Code review number: ${CL} is saved, so no need to include that in future updates/submits.";
fi

exit ${EXIT_SUCCESS};
+39
View File
@@ -0,0 +1,39 @@
#!/bin/bash
# Runs the linter over all files and reports success or failure.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

EXIT_FAILURE=1;
EXIT_SUCCESS=0;

# The common functions are expected relative to the current directory,
# hence the script must be run from the repository root.
if ! test -f "utils/common.sh";
then
  echo "Missing common functions, are you in the wrong directory?";

  exit ${EXIT_FAILURE};
fi

# Source the common library that provides the linter function.
. utils/common.sh

# Run the linter and exit successfully when it passes.
if linter;
then
  exit ${EXIT_SUCCESS};
fi

echo "Aborted - fix the issues reported by the linter.";

exit ${EXIT_FAILURE};
+89
View File
@@ -0,0 +1,89 @@
#!/bin/bash
# A small script that runs all tests
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXIT_FAILURE=1;
EXIT_SUCCESS=0;

# Optional coverage support: when the coverage tool exists the tests are run
# through it and a report is written afterwards.
COVERAGE="/usr/bin/coverage";
COVERAGE_REPORT="tests-coverage.txt";
PYTHON="/usr/bin/python";

if ! test -x "${PYTHON}";
then
  # MSYS-MinGW allows to run the script using the Windows Python version.
  PYTHON="/c/python27/python.exe";
fi

if ! test -x "${PYTHON}";
then
  echo "Unable to locate Python interpreter."
  echo "";

  exit ${EXIT_FAILURE};
fi

# Remove results of any previous coverage run so data does not accumulate
# across invocations of this script.
if test -x "${COVERAGE}";
then
  rm -f .coverage ${COVERAGE_REPORT};
fi

# Run the tests in a specific order.
SUBDIRS="lib serializer winreg filters classifier engine events preprocessors parsers output analysis multi_processing frontend";

for SUBDIR in ${SUBDIRS};
do
  TEST_FILES=`find "plaso/${SUBDIR}" -name "*_test.py" | grep -v "\/build\/"`;
  for TEST_FILE in ${TEST_FILES};
  do
    # The pcap parser test is skipped. NOTE(review): presumably because of an
    # external dependency or known failure - confirm before relying on it.
    if test ${TEST_FILE} = "plaso/parsers/pcap_test.py";
    then
      continue;
    fi

    echo "---+ ${TEST_FILE} +---"

    if test -x "${COVERAGE}";
    then
      # The -a option accumulates coverage data across all test runs.
      PYTHONPATH=. ${COVERAGE} run -a ${TEST_FILE};
    else
      PYTHONPATH=. ${PYTHON} ${TEST_FILE};
    fi

    # Abort on the first failing test file.
    if test $? -ne 0;
    then
      echo "TEST FAILED: ${TEST_FILE}.";
      echo "";
      echo "Stopping further testing.";
      echo "";

      exit ${EXIT_FAILURE};
    fi
    echo "";
  done
done

if test -x "${COVERAGE}";
then
  echo "Writing tests coverage report: ${COVERAGE_REPORT}";

  # Omit the installed site-packages and the test files themselves from
  # the coverage report.
  SITE_PACKAGES="/usr/lib/python2.7/site-packages";
  ${COVERAGE} report -m --omit="${SITE_PACKAGES}/*,*_test.py" > ${COVERAGE_REPORT};
  rm -f .coverage
fi

exit ${EXIT_SUCCESS};
+213
View File
@@ -0,0 +1,213 @@
#!/bin/bash
# A small script that submits a code for code review.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXIT_FAILURE=1;
EXIT_MISSING_ARGS=2;
EXIT_SUCCESS=0;

SCRIPTNAME=`basename $0`;

BROWSER_PARAM="";
CACHE_PARAM="";
CL_NUMBER="";
USE_CL_FILE=0;

while test $# -gt 0;
do
  case $1 in
  --nobrowser | --no-browser | --no_browser )
    BROWSER_PARAM="--no_oauth2_webbrowser";
    shift;
    ;;

  *)
    CL_NUMBER=$1;
    shift
    ;;
  esac
done

# When no CL number was passed on the command line fall back to the number
# stored by the upload script.
if test -z "${CL_NUMBER}";
then
  if test -f ._code_review_number;
  then
    CL_NUMBER=`cat ._code_review_number`
    RESULT=`echo ${CL_NUMBER} | sed -e 's/[0-9]//g'`;

    if ! test -z "${RESULT}";
    then
      echo "File ._code_review_number exists but contains an incorrect CL number.";

      exit ${EXIT_FAILURE};
    fi
    USE_CL_FILE=1;
  fi
fi

if test -z "${CL_NUMBER}";
then
  echo "Usage: ./${SCRIPTNAME} [--nobrowser] CL_NUMBER";
  echo "";
  echo " CL_NUMBER: optional change list (CL) number that is to be submitted.";
  echo " If no CL number is provided the value is read from:";
  echo " ._code_review_number";
  echo "";

  exit ${EXIT_MISSING_ARGS};
fi

if ! test -f "utils/common.sh";
then
  echo "Unable to find common functions, are you in the wrong directory?";

  exit ${EXIT_FAILURE};
fi

# Source the common library.
. utils/common.sh

# Check if we're on the master branch.
BRANCH=`git branch | grep -e "^[*]" | sed "s/^[*] //"`;

if test "${BRANCH}" != "master";
then
  echo "Submit aborted - current branch is not master.";

  exit ${EXIT_FAILURE};
fi

# Check for double status codes, upload.py cannot handle these correctly.
STATUS_CODES=`git status -s | cut -b1,2 | grep '\S\S' | grep -v '??' | sort | uniq`;

if ! test -z "${STATUS_CODES}";
then
  echo "Submit aborted - detected double git status codes."
  echo "Run: 'git stash && git stash pop'.";

  exit ${EXIT_FAILURE};
fi

# Check if the local repo is in sync with the origin.
git fetch

if test $? -ne 0;
then
  echo "Submit aborted - unable to fetch updates from origin repo";

  exit ${EXIT_FAILURE};
fi

NUMBER_OF_CHANGES=`git log HEAD..origin/master --oneline | wc -l`;

if test $? -ne 0;
then
  echo "Submit aborted - unable to determine if local repo is in sync with origin";

  exit ${EXIT_FAILURE};
fi

if test ${NUMBER_OF_CHANGES} -ne 0;
then
  echo "Submit aborted - local repo out of sync with origin."
  echo "Run: 'git stash && git pull && git stash pop'.";

  exit ${EXIT_FAILURE};
fi

# Check if the linting is correct.
if ! linter;
then
  echo "Submit aborted - fix the issues reported by the linter.";

  exit ${EXIT_FAILURE};
fi

# Check if all the tests pass.
if test -e run_tests.py;
then
  echo "Running tests.";
  python run_tests.py

  if test $? -ne 0;
  then
    echo "Submit aborted - fix the issues reported by the failing test.";

    exit ${EXIT_FAILURE};
  fi
fi

URL_CODEREVIEW="https://codereview.appspot.com";

# Get the description of the change list.
RESULT=`which json_xs`;

# TODO: check if curl exists.
if ! test -z "${RESULT}";
then
  DESCRIPTION=`curl -s ${URL_CODEREVIEW}/api/${CL_NUMBER} | json_xs | grep '"subject"' | awk -F '"' '{print $(NF-1)}'`;
else
  DESCRIPTION=`curl ${URL_CODEREVIEW}/${CL_NUMBER}/ -s | grep "Issue ${CL_NUMBER}" | awk -F ':' '{print $2}' | tail -1`;
fi

if test -z "${DESCRIPTION}";
then
  echo "Submit aborted - unable to find change list with number: ${CL_NUMBER}.";

  exit ${EXIT_FAILURE};
fi

# Update the version information.
echo "Updating version information to match today's date."
DATE_NOW=`date +"%Y%m%d"`
sed -i -e "s/^VERSION_DATE.*$/VERSION_DATE = '${DATE_NOW}'/g" plaso/__init__.py

COMMIT_DESCRIPTION="Code review: ${CL_NUMBER}: ${DESCRIPTION}";
echo "Submitting ${COMMIT_DESCRIPTION}";

# Check if we need to set --cache.
STATUS_CODES=`git status -s | cut -b1,2 | sed 's/\s//g' | sort | uniq`;

for STATUS_CODE in ${STATUS_CODES};
do
  if test "${STATUS_CODE}" = "A";
  then
    CACHE_PARAM="--cache";
  fi
done

python utils/upload.py \
    --oauth2 ${BROWSER_PARAM} -y -i ${CL_NUMBER} ${CACHE_PARAM} \
    -t "Submitted." -m "Code Submitted." --send_mail

git commit -a -m "${COMMIT_DESCRIPTION}";
git push

# Try to close the code review automatically. Note that a tilde is not
# expanded inside double quotes, hence ${HOME} is used here, and that the
# cookies file has a leading dot (the previous check tested a file without
# the dot and unquoted-tilde expansion, so it could never succeed).
if test -f "${HOME}/.codereview_upload_cookies";
then
  curl -b "${HOME}/.codereview_upload_cookies" ${URL_CODEREVIEW}/${CL_NUMBER}/close -d ''
else
  echo "Could not find an authenticated session to codereview. You need to"
  echo "manually close the ticket on the code review site."
fi

# Only remove the stored CL number when it was actually used; USE_CL_FILE is
# always "0" or "1" so a non-empty string test would always be true.
if test ${USE_CL_FILE} -ne 0 && test -f "._code_review_number";
then
  rm -f ._code_review_number
fi

exit ${EXIT_SUCCESS};
+129
View File
@@ -0,0 +1,129 @@
#!/bin/bash
# A small script that updates a change list for code review.
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXIT_FAILURE=1;
EXIT_MISSING_ARGS=2;
EXIT_SUCCESS=0;

SCRIPTNAME=`basename $0`;

BROWSER_PARAM="";
CACHE_PARAM="";
CL_NUMBER="";

while test $# -gt 0;
do
  case $1 in
  --nobrowser | --no-browser | --no_browser )
    BROWSER_PARAM="--no_oauth2_webbrowser";
    shift;
    ;;

  *)
    CL_NUMBER=$1;
    shift
    ;;
  esac
done

# When no CL number was passed on the command line fall back to the number
# stored by the upload script.
if test -z "${CL_NUMBER}";
then
  if test -f ._code_review_number;
  then
    CL_NUMBER=`cat ._code_review_number`
    RESULT=`echo ${CL_NUMBER} | sed -e 's/[0-9]//g'`;

    if ! test -z "${RESULT}";
    then
      echo "File ._code_review_number exists but contains an incorrect CL number.";

      exit ${EXIT_FAILURE};
    fi
  fi
fi

if test -z "${CL_NUMBER}";
then
  echo "Usage: ./${SCRIPTNAME} [--nobrowser] [CL_NUMBER]";
  echo "";
  echo " CL_NUMBER: optional change list (CL) number that is to be updated.";
  echo " If no CL number is provided the value is read from:";
  echo " ._code_review_number";
  echo "";

  exit ${EXIT_MISSING_ARGS};
fi

# Use "test -f" for consistency with the sibling review scripts.
if ! test -f "utils/common.sh";
then
  echo "Missing common functions, are you in the wrong directory?";

  exit ${EXIT_FAILURE};
fi

. utils/common.sh

# Check for double status codes, upload.py cannot handle these correctly.
STATUS_CODES=`git status -s | cut -b1,2 | grep '\S\S' | grep -v '??' | sort | uniq`;

if ! test -z "${STATUS_CODES}";
then
  echo "Update aborted - detected double git status codes."
  echo "Run: 'git stash && git stash pop'.";

  exit ${EXIT_FAILURE};
fi

# Check if the linting is correct.
if ! linter;
then
  echo "Update aborted - fix the issues reported by the linter.";

  exit ${EXIT_FAILURE};
fi

# Check if all the tests pass.
if test -e run_tests.py;
then
  echo "Running tests.";
  python run_tests.py

  if test $? -ne 0;
  then
    echo "Update aborted - fix the issues reported by the failing test.";

    exit ${EXIT_FAILURE};
  fi
fi

# Check if we need to set --cache.
STATUS_CODES=`git status -s | cut -b1,2 | sed 's/\s//g' | sort | uniq`;

for STATUS_CODE in ${STATUS_CODES};
do
  if test "${STATUS_CODE}" = "A";
  then
    CACHE_PARAM="--cache";
  fi
done

python utils/upload.py \
    --oauth2 ${BROWSER_PARAM} -y -i ${CL_NUMBER} ${CACHE_PARAM} \
    -t "Uploading changes made to code." -m "Code updated.";

exit ${EXIT_SUCCESS};
+656
View File
@@ -0,0 +1,656 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to update prebuilt versions of the dependencies."""
import argparse
import glob
import logging
import os
import platform
import re
import subprocess
import sys
import urllib2
if platform.system() == 'Windows':
import wmi
class DownloadHelper(object):
  """Class that helps in downloading a project."""

  def __init__(self):
    """Initializes the download helper."""
    super(DownloadHelper, self).__init__()
    # Cache the most recently retrieved page content so repeated requests
    # for the same URL do not trigger additional HTTP round trips.
    self._cached_url = u''
    self._cached_page_content = u''

  def DownloadPageContent(self, download_url):
    """Downloads the page content from the URL and caches it.

    Args:
      download_url: the URL where to download the page content.

    Returns:
      The page content if successful, None otherwise.
    """
    if not download_url:
      return

    if self._cached_url != download_url:
      url_object = urllib2.urlopen(download_url)

      if url_object.code != 200:
        return

      self._cached_page_content = url_object.read()
      self._cached_url = download_url

    return self._cached_page_content

  def DownloadFile(self, download_url):
    """Downloads a file from the URL and returns the filename.

    The filename is extracted from the last part of the URL.

    Args:
      download_url: the URL where to download the file.

    Returns:
      The filename if successful also if the file was already downloaded
      or None on error.
    """
    _, _, filename = download_url.rpartition(u'/')

    if not os.path.exists(filename):
      logging.info(u'Downloading: {0:s}'.format(download_url))

      url_object = urllib2.urlopen(download_url)
      if url_object.code != 200:
        return

      # Use a context manager so the file object is also closed when the
      # write raises, instead of leaking the file handle.
      with open(filename, 'wb') as file_object:
        file_object.write(url_object.read())

    return filename
class GoogleCodeDownloadHelper(DownloadHelper):
  """Class that helps in downloading a Google Code project."""

  def GetGoogleCodeDownloadsUrl(self, project_name):
    """Retrieves the Download URL from the Google Code project page.

    Args:
      project_name: the name of the project.

    Returns:
      The downloads URL or None on error.
    """
    download_url = u'https://code.google.com/p/{0:s}/'.format(project_name)

    page_content = self.DownloadPageContent(download_url)
    if not page_content:
      return

    # The format of the project downloads URL is:
    # https://googledrive.com/host/{random string}/
    expression_string = (
        u'<a href="(https://googledrive.com/host/[^/]*/)"[^>]*>Downloads</a>')
    matches = re.findall(expression_string, page_content)

    # Expect exactly one Downloads link; anything else means the page layout
    # changed and the match cannot be trusted. Note that len(matches) != 1
    # also covers the empty-match case.
    if len(matches) != 1:
      return

    return matches[0]

  def GetPackageDownloadUrls(self, google_drive_url):
    """Retrieves the package downloads URL for a given URL.

    Args:
      google_drive_url: the Google Drive URL.

    Returns:
      A list of package download URLs or None on error.
    """
    page_content = self.DownloadPageContent(google_drive_url)
    if not page_content:
      return

    # The format of the project download URL is:
    # /host/{random string}/3rd%20party/{sub directory}/(unknown)
    expression_string = u'/host/[^/]+/3rd%20party/[^/">]+/[^">]+'
    matches = re.findall(expression_string, page_content)

    # Turn the relative paths into absolute download URLs.
    return [
        u'https://googledrive.com{0:s}'.format(match) for match in matches]

  def Download(self, download_url):
    """Downloads the project for a given project name and version.

    Args:
      download_url: the download URL.

    Returns:
      The filename if successful also if the file was already downloaded
      or None on error.
    """
    return self.DownloadFile(download_url)
def CompareVersions(first_version_list, second_version_list):
  """Compares two lists containing version parts.

  Note that the version parts can contain alpha numeric characters.

  Args:
    first_version_list: the first list of version parts.
    second_version_list: the second list of version parts.

  Returns:
    1 if the first is larger than the second, -1 if the first is smaller than
    the second, or 0 if the first and second are equal.
  """
  # Compare the shared prefix element by element; the first difference
  # decides the ordering.
  for first_part, second_part in zip(first_version_list, second_version_list):
    if first_part > second_part:
      return 1
    if first_part < second_part:
      return -1

  # The shared prefix is equal: the longer list is considered the larger
  # version.
  if len(first_version_list) > len(second_version_list):
    return 1
  if len(first_version_list) < len(second_version_list):
    return -1

  return 0
def Main():
  """Installs or updates the plaso dependencies for the current platform.

  Determines the operating system and CPU architecture, downloads the
  matching 3rd party packages from the plaso Google Drive download area,
  removes outdated installed versions and installs the newer packages.

  Returns:
    A boolean value indicating if the installation was successful.
  """
  args_parser = argparse.ArgumentParser(description=(
      u'Installs the latest versions of plaso dependencies.'))

  args_parser.add_argument(
      '-f', '--force', dest='force_install', action='store_true',
      default=False, help=(
          u'Force installation. This option removes existing versions '
          u'of installed dependencies. The default behavior is to only'
          u'install a dependency if not or an older version is installed.'))

  options = args_parser.parse_args()

  operating_system = platform.system()
  cpu_architecture = platform.machine().lower()
  linux_name = None
  sub_directory = None
  noarch_sub_directory = None

  # Determine the download sub directory for the platform. Note that an
  # unsupported CPU architecture is logged but, except for an unsupported
  # operating system, does not abort the run.
  if operating_system == u'Darwin':
    # TODO: determine OSX version
    if cpu_architecture != u'x86_64':
      logging.error(u'CPU architecture: {0:s} not supported.'.format(
          cpu_architecture))

    # Note that the sub directory should be URL encoded.
    sub_directory = u'macosx%2010.10'

  elif operating_system == u'Linux':
    linux_name, linux_version, _ = platform.linux_distribution()
    if linux_name == u'Fedora' and linux_version == u'20':
      if cpu_architecture != u'x86_64':
        logging.error(u'CPU architecture: {0:s} not supported.'.format(
            cpu_architecture))

      sub_directory = u'fedora20-x86_64'
      noarch_sub_directory = u'fedora20-noarch'

    elif linux_name == u'Ubuntu' and linux_version == u'12.04':
      if cpu_architecture == u'i686':
        sub_directory = u'ubuntu12.04-i386'
        noarch_sub_directory = u'ubuntu12.04-all'

      elif cpu_architecture == u'x86_64':
        sub_directory = u'ubuntu12.04-amd64'
        noarch_sub_directory = u'ubuntu12.04-all'

      else:
        logging.error(u'CPU architecture: {0:s} not supported.'.format(
            cpu_architecture))

    else:
      logging.error(u'Linux variant: {0:s} {1:s} not supported.'.format(
          linux_name, linux_version))

  elif operating_system == u'Windows':
    if cpu_architecture == u'x86':
      sub_directory = u'win32-vs2008'

    elif cpu_architecture == u'amd64':
      sub_directory = u'win-amd64-vs2010'

    else:
      logging.error(u'CPU architecture: {0:s} not supported.'.format(
          cpu_architecture))

  else:
    logging.error(u'Operating system: {0:s} not supported.'.format(
        operating_system))
    return False

  download_helper = GoogleCodeDownloadHelper()
  google_drive_url = download_helper.GetGoogleCodeDownloadsUrl(u'plaso')

  package_urls = download_helper.GetPackageDownloadUrls(
      u'{0:s}/3rd%20party/{1:s}'.format(google_drive_url, sub_directory))

  if noarch_sub_directory:
    noarch_package_urls = download_helper.GetPackageDownloadUrls(
        u'{0:s}/3rd%20party/{1:s}'.format(
            google_drive_url, noarch_sub_directory))

    package_urls.extend(noarch_package_urls)

  dependencies_directory = u'dependencies'
  if not os.path.exists(dependencies_directory):
    os.mkdir(dependencies_directory)

  os.chdir(dependencies_directory)

  # Determine the latest version of every package on offer and download
  # the package files that are not present yet, removing stale files of
  # older versions as we go.
  package_filenames = {}
  package_versions = {}
  for package_url in package_urls:
    _, _, package_filename = package_url.rpartition(u'/')
    if package_filename.endswith(u'.deb'):
      name, _, version = package_filename.partition(u'_')
      # Ignore devel and tools DEB packages.
      if name.endswith(u'-dev') or name.endswith(u'-tools'):
        continue

      if name.endswith(u'-python'):
        package_prefix = name
        name, _, _ = name.partition(u'-')
      else:
        package_prefix = u'{0:s}_'.format(name)
      version, _, _ = version.partition(u'-')

    elif package_filename.endswith(u'.dmg'):
      name, _, version = package_filename.partition(u'-')
      version, _, _ = version.partition(u'.dmg')
      package_prefix = name

    elif package_filename.endswith(u'.msi'):
      name, _, version = package_filename.partition(u'-')
      version, _, _ = version.partition(u'.win')
      package_prefix = name

    elif package_filename.endswith(u'.rpm'):
      name, _, version = package_filename.partition(u'-')

      # Ignore debuginfo, devel and tools RPM packages.
      if (version.startswith(u'debuginfo') or version.startswith(u'devel') or
          version.startswith(u'tools')):
        continue

      # Ignore the sleuthkit tools RPM package.
      if name == u'sleuthkit' and not version.startswith(u'libs'):
        continue

      package_prefix, _, version = version.partition(u'-')
      version, _, _ = version.partition(u'-')
      package_prefix = u'{0:s}-{1:s}'.format(name, package_prefix)

    else:
      # Ignore all other file exensions.
      continue

    version = version.split(u'.')
    if name == u'pytsk':
      # pytsk version numbers carry a dash-separated suffix in the last
      # dotted part; split it into separate version parts.
      last_part = version.pop()
      version.extend(last_part.split(u'-'))

    if name not in package_versions:
      compare_result = 1
    else:
      compare_result = CompareVersions(version, package_versions[name])

    if compare_result > 0:
      package_filenames[name] = package_filename
      package_versions[name] = version

    if not os.path.exists(package_filename):
      # Remove files of older versions matching the package prefix.
      filenames = glob.glob(u'{0:s}*'.format(package_prefix))
      for filename in filenames:
        print(u'Removing: {0:s}'.format(filename))
        os.remove(filename)

      print(u'Downloading: {0:s}'.format(package_filename))
      _ = download_helper.Download(package_url)

  os.chdir(u'..')

  # Remove installed versions that are outdated (or all known packages
  # when --force was specified).
  if operating_system == u'Darwin':
    result = True

    command = u'/usr/sbin/pkgutil --packages'
    print('Running: "{0:s}"'.format(command))
    process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    # returncode stays None until the process has terminated.
    if process.returncode is None:
      packages, _ = process.communicate()
    else:
      packages = ''

    if process.returncode != 0:
      logging.error(u'Running: "{0:s}" failed.'.format(command))
      return False

    for package_name in packages.split('\n'):
      if not package_name:
        continue

      if (package_name.startswith(u'com.github.libyal.') or
          package_name.startswith(u'com.github.log2timeline.') or
          package_name.startswith(u'com.github.sleuthkit.') or
          package_name.startswith(u'com.google.code.p.') or
          package_name.startswith(u'org.samba.') or
          package_name.startswith(u'org.python.pypi.') or
          package_name.startswith(u'net.sourceforge.projects.')):

        # Strip the reverse-DNS prefix to obtain the package name.
        if package_name.startswith(u'com.github.libyal.'):
          name = package_name[18:]

        elif package_name.startswith(u'com.github.log2timeline.'):
          name = package_name[24:]

        elif package_name.startswith(u'com.github.sleuthkit.'):
          name = package_name[21:]

        elif package_name.startswith(u'com.google.code.p.'):
          name = package_name[18:]

        elif package_name.startswith(u'org.samba.'):
          name = package_name[10:]

        elif package_name.startswith(u'org.python.pypi.'):
          name = package_name[16:]

        elif package_name.startswith(u'net.sourceforge.projects.'):
          name = package_name[25:]

        # Detect the PackageMaker naming convention.
        if name.endswith(u'.pkg'):
          _, _, sub_name = name[:-4].rpartition(u'.')
          is_package_maker_pkg = True
        else:
          is_package_maker_pkg = False
        name, _, _ = name.partition(u'.')

        if name in package_versions:
          # Determine the package version.
          command = u'/usr/sbin/pkgutil --pkg-info {0:s}'.format(package_name)
          print('Running: "{0:s}"'.format(command))
          process = subprocess.Popen(
              command, stdout=subprocess.PIPE, shell=True)
          if process.returncode is None:
            package_info, _ = process.communicate()
          else:
            package_info = ''

          if process.returncode != 0:
            logging.error(u'Running: "{0:s}" failed.'.format(command))
            result = False
            continue

          location = None
          version = None
          volume = None
          for attribute in package_info.split('\n'):
            if attribute.startswith(u'location: '):
              _, _, location = attribute.rpartition(u'location: ')

            elif attribute.startswith(u'version: '):
              _, _, version = attribute.rpartition(u'version: ')

            elif attribute.startswith(u'volume: '):
              _, _, volume = attribute.rpartition(u'volume: ')

          version = version.split(u'.')
          if options.force_install:
            compare_result = -1
          elif name not in package_versions:
            # NOTE(review): unreachable — this branch is guarded by the
            # enclosing "if name in package_versions". Kept for symmetry
            # with the Windows code path.
            compare_result = 1
          # TODO: handle pytsk.
          else:
            compare_result = CompareVersions(version, package_versions[name])

          if compare_result >= 0:
            # The latest or newer version is already installed.
            del package_versions[name]

          if compare_result < 0:
            # Determine the files in the package.
            command = u'/usr/sbin/pkgutil --files {0:s}'.format(package_name)
            print('Running: "{0:s}"'.format(command))
            process = subprocess.Popen(
                command, stdout=subprocess.PIPE, shell=True)
            if process.returncode is None:
              package_files, _ = process.communicate()
            else:
              package_files = ''

            if process.returncode != 0:
              logging.error(u'Running: "{0:s}" failed.'.format(command))
              result = False
              continue

            directories = []
            files = []
            for filename in package_files.split('\n'):
              if is_package_maker_pkg:
                filename = u'{0:s}{1:s}/{2:s}/{3:s}'.format(
                    volume, location, sub_name, filename)
              else:
                filename = u'{0:s}{1:s}'.format(location, filename)

              if os.path.isdir(filename):
                directories.append(filename)
              else:
                files.append(filename)

            print('Removing: {0:s} {1:s}'.format(name, version))
            for filename in files:
              if os.path.exists(filename):
                os.remove(filename)

            for filename in directories:
              if os.path.exists(filename):
                try:
                  os.rmdir(filename)
                except OSError:
                  # Ignore directories that are not empty.
                  pass

            command = u'/usr/sbin/pkgutil --forget {0:s}'.format(
                package_name)
            exit_code = subprocess.call(command, shell=True)
            if exit_code != 0:
              logging.error(u'Running: "{0:s}" failed.'.format(command))
              result = False

    if not result:
      return False

  elif operating_system == u'Windows':
    connection = wmi.WMI()

    query = u'SELECT Name FROM Win32_Product'
    for product in connection.query(query):
      name = getattr(product, 'Name', u'')
      # Windows package names start with 'Python' or 'Python 2.7 '.
      if name.startswith('Python '):
        _, _, name = name.rpartition(u' ')

        if name.startswith('2.7 '):
          _, _, name = name.rpartition(u' ')

        name, _, version = name.partition(u'-')
        version = version.split(u'.')

        if options.force_install:
          compare_result = -1
        elif name not in package_versions:
          compare_result = 1
        elif name == u'pytsk':
          # We cannot really tell by the version number that pytsk needs to
          # be update. Just update it any way.
          compare_result = -1
        else:
          compare_result = CompareVersions(version, package_versions[name])

        if compare_result >= 0:
          # The latest or newer version is already installed.
          del package_versions[name]

        if compare_result < 0:
          print('Removing: {0:s} {1:s}'.format(name, u'.'.join(version)))
          product.Uninstall()

  result = True

  # Install the downloaded packages that are new or newer than the
  # installed versions.
  if operating_system == u'Darwin':
    for name, version in package_versions.iteritems():
      package_filename = package_filenames[name]

      command = u'sudo /usr/bin/hdiutil attach {0:s}'.format(
          os.path.join(dependencies_directory, package_filename))
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False
        continue

      volume_path = u'/Volumes/{0:s}.pkg'.format(package_filename[:-4])
      if not os.path.exists(volume_path):
        logging.error(u'Missing volume: {0:s}.'.format(volume_path))
        result = False
        continue

      pkg_file = u'{0:s}/{1:s}.pkg'.format(volume_path, package_filename[:-4])
      if not os.path.exists(pkg_file):
        logging.error(u'Missing pkg file: {0:s}.'.format(pkg_file))
        result = False
        continue

      command = u'sudo /usr/sbin/installer -target / -pkg {0:s}'.format(
          pkg_file)
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

      command = u'sudo /usr/bin/hdiutil detach {0:s}'.format(volume_path)
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

  elif operating_system == u'Linux':
    if linux_name == u'Fedora':
      # TODO: move these to a separate file?
      dependencies = [
          u'ipython',
          # BUG FIX: a trailing comma was missing after u'libyaml', so
          # implicit string concatenation merged it with the next literal
          # into u'libyamlpython-dateutil' — yum was asked for a
          # nonexistent package and python-dateutil was never installed.
          u'libyaml',
          u'python-dateutil',
          u'pyparsing',
          u'pytz',
          u'PyYAML',
          u'protobuf-python']

      command = u'sudo yum install {0:s}'.format(u' '.join(dependencies))
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

      command = u'sudo rpm -Fvh {0:s}/*'.format(dependencies_directory)
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

    elif linux_name == u'Ubuntu':
      # TODO: add -dbg package support.
      # TODO: move these to a separate file?
      dependencies = [
          u'ipython',
          u'libprotobuf7',
          u'libyaml-0-2',
          u'python-bencode',
          u'python-dateutil',
          u'python-dpkt',
          u'python-hachoir-core',
          u'python-hachoir-metadata',
          u'python-hachoir-parser',
          u'python-protobuf',
          u'python-six',
          u'python-tz',
          u'python-yaml']

      command = u'sudo apt-get install {0:s}'.format(u' '.join(dependencies))
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

      command = u'sudo dpkg -i {0:s}/*.deb'.format(dependencies_directory)
      print('Running: "{0:s}"'.format(command))
      exit_code = subprocess.call(command, shell=True)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

  elif operating_system == u'Windows':
    for name, version in package_versions.iteritems():
      # TODO: add RunAs ?
      package_filename = package_filenames[name]
      command = u'msiexec.exe /i {0:s} /q'.format(os.path.join(
          dependencies_directory, package_filename))
      print('Installing: {0:s} {1:s}'.format(name, u'.'.join(version)))
      exit_code = subprocess.call(command, shell=False)
      if exit_code != 0:
        logging.error(u'Running: "{0:s}" failed.'.format(command))
        result = False

  return result
if __name__ == '__main__':
  # Exit with status 0 on success, 1 on failure.
  sys.exit(0 if Main() else 1)
+2645
View File
File diff suppressed because it is too large Load Diff