提交 536554fd 编写于 作者: M Marbin Tan 提交者: C.J. Jameson

Remove gpcoverage

Signed-off-by: NLarry Hamel <lhamel@pivotal.io>
上级 e0c3a8da
......@@ -38,7 +38,6 @@ try:
import getopt, math, StringIO, stat, subprocess, signal
from gppylib.gpparseopts import OptParser
from gppylib.util import ssh_utils
from gppylib.gpcoverage import GpCoverage
except ImportError, e:
sys.exit('Error: unable to import module: ' + str(e))
......@@ -922,9 +921,6 @@ def printResult(title, result):
################
coverage = GpCoverage()
coverage.start()
try:
parseCommandLine()
runSetup()
......@@ -972,6 +968,3 @@ try:
except KeyboardInterrupt:
print '[Abort] Keyboard Interrupt ...'
finally:
coverage.stop()
coverage.generate_report()
......@@ -16,7 +16,6 @@ try:
from gppylib.gpparseopts import OptParser, OptChecker
from gppylib.gparray import *
from gppylib.gplog import *
from gppylib.gpcoverage import GpCoverage
from gppylib.commands import unix, gp, base
from gppylib.db import dbconn
from gppylib import gparray, pgconf
......@@ -263,9 +262,6 @@ def delete_cluster(options):
g_warnings_generated = False
g_errors_generated = False
coverage = GpCoverage()
coverage.start()
# setup logging
logger = get_default_logger()
setup_tool_logging(EXECNAME, unix.getLocalHostname(), unix.getUserName())
......@@ -308,8 +304,6 @@ except Exception, ex:
if options.verbose:
logger.exception(ex)
finally:
coverage.stop()
coverage.generate_report()
if g_errors_generated:
logger.error('Delete system failed')
sys.exit(2)
......
......@@ -31,7 +31,6 @@ try:
from gppylib.system.environment import GpMasterEnvironment
from pygresql.pgdb import DatabaseError
from pygresql import pg
from gppylib.gpcoverage import GpCoverage
from gppylib.gpcatalog import MASTER_ONLY_TABLES
from gppylib.operations.package import SyncPackages
from gppylib.operations.utils import ParallelOperation
......@@ -2987,9 +2986,6 @@ if __name__ == '__main__':
remove_pid = True
table_expand_error = False
coverage = GpCoverage()
coverage.start()
try:
# setup signal handlers so we can clean up correctly
......@@ -3162,6 +3158,3 @@ if __name__ == '__main__':
if gp_expand is not None:
gp_expand.halt_work()
coverage.stop()
coverage.generate_report()
......@@ -58,7 +58,6 @@ try:
from pygresql import pg # Database interaction
from gppylib.gparray import * # Greenplum Array datastructures
from gppylib.gplog import * # Greenplum logging facility
from gppylib.gpcoverage import GpCoverage
from getpass import getpass
from gppylib.parseutils import line_reader, parse_fspacename, parse_gpfilespace_line, \
canonicalize_address
......@@ -1149,9 +1148,6 @@ To add this filespace to the database please run the command:
logger = None
if __name__ == '__main__':
coverage = GpCoverage()
coverage.start()
opt = ParseOptions()
if opt.verbose:
......@@ -1247,6 +1243,3 @@ Once a filespace is created, it can be used by one or more tablespaces.
except BaseException, e:
sys.stderr.write("error %s\n" % str(e))
sys.exit(1)
finally:
coverage.stop()
coverage.generate_report()
......@@ -25,7 +25,6 @@ try:
from gppylib.db import catalog
from pygresql import pg # Database interaction
from gppylib import gplog # Greenplum logging facility
from gppylib.gpcoverage import GpCoverage
from getpass import getpass
except ImportError, e:
......
......@@ -22,7 +22,6 @@ try:
from gppylib.gpparseopts import *
from gppylib.datetimeutils import str_to_datetime, str_to_duration, DatetimeValueError
from gppylib.logfilter import *
from gppylib.gpcoverage import GpCoverage
except ImportError, e:
sys.exit('ERROR: Cannot import modules. Please check that you have sourced greenplum_path.sh. Detail: ' + str(e))
......@@ -326,9 +325,6 @@ def openOutputFile(ifn, zname, options):
# ------------------------------- Mainline --------------------------------
coverage = GpCoverage()
coverage.start()
# Use default locale specified by LANG environment variable
try:
locale.setlocale(locale.LC_ALL, '')
......@@ -489,6 +485,3 @@ except IOError, msg:
execname = os.path.basename(sys.argv[0])
print >> sys.stderr, '%s: (IOError) "%s"' % (execname, msg)
sys.exit(2)
finally:
coverage.stop()
coverage.generate_report()
......@@ -26,7 +26,6 @@ try:
from gppylib.operations.startSegments import *
from pygresql.pgdb import DatabaseError
from pygresql import pg
from gppylib.gpcoverage import GpCoverage
from gppylib import gparray, gplog, pgconf, userinput, utils
from gppylib.parseutils import line_reader, parse_filespace_order, parse_gpmovemirrors_line, \
canonicalize_address
......@@ -330,9 +329,6 @@ pidfilepid = None # pid of the process which has the lock
locktorelease = None
sml = None # sml (simple main lock)
coverage = GpCoverage()
coverage.start()
try:
# setup signal handlers so we can clean up correctly
......@@ -476,6 +472,3 @@ except KeyboardInterrupt:
finally:
if sml is not None:
sml.release()
coverage.stop()
coverage.generate_report()
......@@ -374,7 +374,7 @@ class ExecutionContext():
propagate_env_map = {}
"""
Dict. mapping environment variables to their values. See gpcoverage.py for example usage.
Dict. mapping environment variables to their values.
"""
def __init__(self):
......
"""
This file is a wrapper around figleaf and will start/stop coverage as
needed. It also includes a method for generating the HTML reports.
"""
import os
import random
import figleaf
import pickle
from glob import glob
from gppylib import gplog
from gppylib.commands.base import Command, LOCAL, REMOTE, ExecutionContext, RemoteExecutionContext, WorkerPool
from gppylib.commands.unix import RemoveFiles, Scp
from gppylib.operations import Operation
from gppylib.operations.unix import ListFiles, ListRemoteFiles, MakeDir
logger = gplog.get_default_logger()
COVERAGE_FILENAME = 'cover.out'
# ------------------------------------------------------------------------------
class GpWriteFigleafCoverageHtml(Command):
    """Shell command that renders collected figleaf coverage data to HTML
    reports on disk, by invoking the figleaf2html tool bundled under GPHOME."""

    def __init__(self, name, filename, directory, ctxt=LOCAL, remoteHost=None):
        gphome = os.getenv("GPHOME", None)
        if not gphome:
            raise Exception('GPHOME environment variable not set.')
        # figleaf2html ships inside the GPHOME installation tree.
        figleaf2html = os.path.normpath(gphome + '/lib/python/figleaf/figleaf2html')
        cmdStr = "%s -d %s %s" % (figleaf2html, directory, filename)
        Command.__init__(self, name, cmdStr, ctxt, remoteHost)

    @staticmethod
    def local(name, coverfile, directory):
        """Run figleaf2html on the local host and validate its exit status."""
        html_cmd = GpWriteFigleafCoverageHtml(name, coverfile, directory)
        html_cmd.run(validateAfter=True)
# ------------------------------------------------------------------------------
# TODO: We should not allow this class to be instantiated. It offers static
# functionality, and its exposed methods should reflect that.
class GpFigleafCoverage:
    """
    Distributed code coverage, built atop figleaf.

    Figleaf code coverage is a two-phase process: recording and reporting. Recording simply involves
    starting and stopping instrumentation. This results in a pickled data file in a designated location
    on disk. (The distributed adaptation here of figleaf relies on this point.) Lastly, we invoke
    figleaf2html via the Command above to produce html from the recorded data.

    Like figleaf, GpFigleafCoverage is a similar two-phase process: enable recording and enable reporting.
    To enable recording, gppylib must be *reactive* to coverage requests; in other words, the entry points to gppylib
    must invoke GpFigleafCoverage. Currently, there are two such entry points: gppylib.mainUtils.simple_main and
    sbin/gpoperation.py. Moreover, gppylib must be *proactive* to propagate requests to subprocesses or remote processes.
    This is accomplished below by hooking gppylib.commands.base.ExecutionContext, and its inherited classes, in order
    to propagate a couple of key environment variables needed below: USE_FIGLEAF, FIGLEAF_DIR, and FIGLEAF_PID.

    To enable reporting, we must aggregate the data that the various python interpreters across subprocesses
    and remote processes had generated. This Operation will rely on the knowledge of how figleaf resultant data is stored
    on disk. For more detail, see FinalizeCoverage below.

    It will help to explain how recording and reporting come together. GpFigleafCoverage recording is expected to produce,
    and its reporting is dependent upon, the following directory structure:
        <base>/*.out,*.html           - Global coverage data, aggregated across multiple runs
        <base>/<pid>/*.out,*.html     - Coverage data pertaining to <pid>, where <pid> is the
                                        process id of the originating python program, on the master
        <base>/<pid>/<comp>/*.out,*html - Coverage data pertaining to some subprocess or remote process
                                          that is invoked as a subcomponent of the overall program given by <pid>
    For clarity, the rest of the code will adopt the following coding convention:
        base_dir := <base>
        pid_dir  := <base>/<pid>
        comp_dir := <base>/<pid>/<comp>
    """
    # TODO: change directory structure to something more human-readable
    # How about <base>/<program_name><pid>/<program_name><rand>/*.out,*.html ?

    def __init__(self):
        """Compute the target coverage file path; never raises (errors are logged)."""
        try:
            self.directory = os.getenv('FIGLEAF_DIR', None)
            if self.directory is None:
                self.directory = os.path.normpath(os.path.expanduser("~") + '/.figleaf')
            self.my_pid = str(os.getpid())
            # FIGLEAF_PID is set when this process is a subcomponent of another
            # coverage-enabled program; otherwise we are the originating process.
            self.main_pid = os.getenv('FIGLEAF_PID', self.my_pid)
            randstring = ''.join(random.choice('0123456789') for x in range(20))
            self.filename = os.path.join(self.directory, self.main_pid, randstring, COVERAGE_FILENAME)
            self.running = False
        except Exception:
            # NOTE(review): if this ever fires, self.filename is never set, so a
            # later start() fails with AttributeError, caught by its own handler.
            logger.exception('Error initializing code coverage')

    def start(self):
        """Starts coverage collection if the environment variable USE_FIGLEAF is set."""
        try:
            if os.getenv('USE_FIGLEAF', None):
                logger.info('Code coverage will be generated')
                MakeDir(os.path.dirname(self.filename)).run()
                self.running = True
                # Propagate these env vars so subprocesses/remote processes
                # also record coverage under the same <base>/<pid> tree.
                ExecutionContext.propagate_env_map.update({'FIGLEAF_DIR': os.getenv('FIGLEAF_DIR', self.directory),
                                                           'USE_FIGLEAF': 1,
                                                           'FIGLEAF_PID': self.main_pid})
                figleaf.start()
        except Exception as e:
            logger.error('Error starting code coverage: %s' % e)

    def stop(self):
        """Stops code coverage."""
        try:
            if self.running:
                logger.info('Stopping code coverage')
                figleaf.stop()
                figleaf.write_coverage(self.filename)
                self.running = False
                # Stop propagating coverage to any further child processes.
                for k in ['FIGLEAF_DIR', 'USE_FIGLEAF', 'FIGLEAF_PID']:
                    del ExecutionContext.propagate_env_map[k]
        except Exception as e:
            logger.error('Error stopping code coverage: %s' % e)

    def generate_report(self):
        """Generates the html reports and puts them in the directory specified."""
        if os.getenv('USE_FIGLEAF', None):
            try:
                directory = os.path.dirname(self.filename)
                logger.info('Generating code coverage HTML reports to %s' % directory)
                GpWriteFigleafCoverageHtml.local('Generate HTML', self.filename, directory)
                # Only the originating process aggregates cluster-wide data.
                if self.main_pid == self.my_pid:
                    FinalizeCoverage(trail=RemoteExecutionContext.trail,
                                     pid=self.main_pid,
                                     base_dir=self.directory).run()
            except Exception:
                logger.exception('Error generating HTML code cover reports.')

    def delete_files(self):
        """Deletes code coverage files."""
        if os.getenv('USE_FIGLEAF', None):
            logger.info('Deleting coverage files...')
            try:
                RemoveFiles.local('Remove coverage file', self.filename)
                directory = os.path.dirname(self.filename)
                RemoveFiles.local('Remove html files', directory + '/*.html')
            except Exception:
                # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate
                logger.error('Failed to clean up coverage files')
# The coverage tool to use.  GpCoverage (below) inherits from whichever class
# is bound here, so switching coverage backends is a one-line change.
# if os.getenv('USE_FIGLEAF', None):
GP_COVERAGE_CLASS = GpFigleafCoverage
# else:
#     GP_COVERAGE_CLASS=<some other coverage class>
# ------------------------------------------------------------------------------
class GpCoverage(GP_COVERAGE_CLASS):
    """Public entry point for code coverage control.

    This is a thin indirection layer: it currently inherits everything from
    GpFigleafCoverage (via GP_COVERAGE_CLASS), so a future switch to a
    different coverage backend requires no changes in the management
    utilities or modules that instantiate GpCoverage."""
    pass
# ------------------------------------------------------------------------------
class FinalizeCoverage(Operation):
    """
    This aggregates coverage data from across the cluster for this current process (which is soon to complete.)
    Then, we update the global coverage data that persists from run to run at <base_dir>/*.out,*.html.
    """
    def __init__(self, trail, pid, base_dir):
        # trail: hosts that participated in this run (remote execution trail)
        # pid: process id of the originating python program, as a string
        # base_dir: root of the coverage directory tree (<base>)
        self.trail = trail
        self.pid = pid
        self.base_dir = base_dir

    def execute(self):
        pid_dir = os.path.join(self.base_dir, self.pid)
        # update the pid-level coverage statistics, which reside within pid_dir
        # this requires: collect coverage data, merge data, save, and generate html
        CollectCoverage(trail=self.trail, pid_dir=pid_dir).run()
        partial_coverages = LoadPartialCoverages(pid_dir=pid_dir).run()
        cumulative_coverage = {}
        for partial_coverage in partial_coverages:
            MergeCoverage(input=partial_coverage, output=cumulative_coverage).run()
        SaveCoverage(obj=cumulative_coverage,
                     path=os.path.join(pid_dir, COVERAGE_FILENAME)).run()
        GpWriteFigleafCoverageHtml.local('Generate HTML', os.path.join(pid_dir, COVERAGE_FILENAME), pid_dir)
        # update the global coverage statistics, which reside within self.base_dir
        # (LoadCoverage returns {} when no prior global data exists)
        overall_coverage = LoadCoverage(os.path.join(self.base_dir, COVERAGE_FILENAME)).run()
        MergeCoverage(input=cumulative_coverage, output=overall_coverage).run()
        SaveCoverage(obj=overall_coverage,
                     path=os.path.join(self.base_dir, COVERAGE_FILENAME)).run()
        GpWriteFigleafCoverageHtml.local('Generate HTML', os.path.join(self.base_dir, COVERAGE_FILENAME), self.base_dir)
# ------------------------------------------------------------------------------
class CollectCoverage(Operation):
    """
    Copy <base>/<pid>/<comp> directories from each remote host back to the
    master.  This may be an unnecessary step IF <base> is an NFS mount.
    """
    def __init__(self, trail, pid_dir):
        self.trail = trail
        self.pid_dir = pid_dir

    def execute(self):
        pool = WorkerPool()
        # Component dirs already present locally; don't re-copy those.
        local_dirs = set(ListFiles(self.pid_dir).run())
        try:
            for host in self.trail:
                remote_dirs = ListRemoteFiles(self.pid_dir, host).run()
                for comp_name in remote_dirs:
                    if comp_name in local_dirs:
                        continue
                    comp_dir = os.path.join(self.pid_dir, comp_name)
                    # Same path on both ends: remote comp dir lands in the
                    # local pid_dir tree.
                    pool.addCommand(Scp('collect coverage',
                                        srcFile=comp_dir,
                                        srcHost=host,
                                        dstFile=comp_dir,
                                        recursive=True))
            pool.join()
        finally:
            pool.haltWork()
# ------------------------------------------------------------------------------
class LoadCoverage(Operation):
    """ Unpickles and returns an object residing at a given path.

    Returns {} if the file is missing or unreadable, so callers can treat
    "no prior coverage data" and "coverage data present" uniformly.
    """
    def __init__(self, path):
        self.path = path

    def execute(self):
        try:
            # 'rb': pickle data must be read in binary mode; text mode can
            # corrupt the stream on platforms with newline translation.
            with open(self.path, 'rb') as f:
                obj = pickle.load(f)
            return obj
        except (IOError, OSError):
            logger.exception('Failed to un-pickle coverage off disk.')
            return {}
# ------------------------------------------------------------------------------
class SaveCoverage(Operation):
    """ Pickles a given object to disk at a designated path.

    Raises IOError/OSError on failure; callers do not expect silent loss of
    coverage data.
    """
    def __init__(self, path, obj):
        self.path = path
        self.obj = obj

    def execute(self):
        # 'wb': pickle data should be written in binary mode to avoid newline
        # translation mangling the stream on some platforms.
        with open(self.path, 'wb') as f:
            pickle.dump(self.obj, f)
# ------------------------------------------------------------------------------
class LoadPartialCoverages(Operation):
    """ Returns an array of unpickled coverage objects from <base>/<pid>/*/<COVERAGE_FILENAME> """
    def __init__(self, pid_dir):
        self.pid_dir = pid_dir

    def execute(self):
        # One coverage file per component directory beneath pid_dir.
        pattern = os.path.join(self.pid_dir, '*', COVERAGE_FILENAME)
        return [LoadCoverage(coverage_path).run() for coverage_path in glob(pattern)]
# ------------------------------------------------------------------------------
# TODO: Support a parallel merge? Or would there be no point with the Python GIL?
class MergeCoverage(Operation):
    """
    Figleaf coverage data is pickled on disk as a dict of filenames to sets of numbers,
    where each number denotes a covered line number.
    e.g. { "gparray.py" : set(0, 1, 2, ...),
           "operations/dump.py" : set(175, 13, 208, ...),
           ... }
    Here, we merge such an input dict into an output dict. As such, we'll be able to pickle
    the result back to disk and invoke figleaf2html to get consolidated html reports.
    """
    def __init__(self, input, output):
        self.input, self.output = input, output

    def execute(self):
        for filename in self.input:
            if filename not in self.output:
                # Copy rather than alias: the previous code stored the input's
                # set object directly, so a later in-place |= merge would
                # silently mutate the input dict's sets as well.
                self.output[filename] = set(self.input[filename])
            else:
                self.output[filename] |= self.input[filename]  # set union
......@@ -8,9 +8,6 @@ mainUtils.py
This file provides a rudimentary framework to support top-level option
parsing, initialization and cleanup logic common to multiple programs.
It also implements workarounds to make other modules we use like
GpCoverage() work properly.
The primary interface function is 'simple_main'. For an example of
how it is expected to be used, see gprecoverseg.
......@@ -33,7 +30,6 @@ from gppylib.commands.base import ExecutionError
from gppylib.system import configurationInterface, configurationImplGpdb, fileSystemInterface, \
fileSystemImplOs, osInterface, osImplNative, faultProberInterface, faultProberImplGpdb
from optparse import OptionGroup, OptionParser, SUPPRESS_HELP
from gppylib.gpcoverage import GpCoverage
from lockfile.pidlockfile import PIDLockFile, LockTimeout
......@@ -188,13 +184,7 @@ def simple_main(createOptionParserFn, createCommandFn, mainOptions=None):
parentpidvar (string)
"""
coverage = GpCoverage()
coverage.start()
try:
simple_main_internal(createOptionParserFn, createCommandFn, mainOptions)
finally:
coverage.stop()
coverage.generate_report()
simple_main_internal(createOptionParserFn, createCommandFn, mainOptions)
def simple_main_internal(createOptionParserFn, createCommandFn, mainOptions):
......
......@@ -5,7 +5,7 @@ import tempfile
import platform
import getpass
import tarfile
from gppylib.db import dbconn
from gppylib.gparray import GpArray
from gppylib.gpversion import MAIN_VERSION
......@@ -29,21 +29,21 @@ def get_os():
os_string += 'suse'
os_string += major_release
return os_string
OS = get_os()
ARCH = platform.machine()
# AK: use dereference_symlink when mucking with RPM database for the same reason
# AK: use dereference_symlink when mucking with RPM database for the same reason
# it's used in the gppylib.operations.package. For more info, see the function definition.
GPHOME = dereference_symlink(gp.get_gphome())
ARCHIVE_PATH = os.path.join(GPHOME, 'share/packages/archive')
RPM_DATABASE = os.path.join(GPHOME, 'share/packages/database')
RPM_DATABASE = os.path.join(GPHOME, 'share/packages/database')
GPPKG_EXTENSION = ".gppkg"
SCRATCH_SPACE = os.path.join(tempfile.gettempdir(), getpass.getuser())
GPDB_VERSION = '.'.join([str(n) for n in MAIN_VERSION[:2]])
GPDB_VERSION = '.'.join([str(n) for n in MAIN_VERSION[:2]])
MASTER_PORT = os.getenv("PGPORT")
def skipIfNoStandby():
......@@ -53,8 +53,8 @@ def skipIfNoStandby():
"""
standby = get_host_list()[0]
if standby is None:
return unittest.skip('requires standby')
return lambda o: o
return unittest.skip('requires standby')
return lambda o: o
def skipIfSingleNode():
"""
......@@ -88,7 +88,7 @@ def get_host_list():
elif seg.isSegmentMaster(current_role=True):
master = seg.getSegmentHostName()
#Deduplicate the hosts so that we
#Deduplicate the hosts so that we
#dont install multiple times on the same host
segment_host_list = list(set(segment_host_list))
if master in segment_host_list:
......@@ -99,14 +99,14 @@ def get_host_list():
def run_command(cmd_str):
"""
Runs a command on the localhost
@param cmd_str: The command string to be executed
@return: stdout/stderr output as a string
@param cmd_str: The command string to be executed
@return: stdout/stderr output as a string
"""
cmd = Command("Local Command", cmd_str)
cmd.run(validateAfter = True)
results = cmd.get_results()
if results.rc != 0:
return results.stderr.strip()
else:
......@@ -115,7 +115,7 @@ def run_command(cmd_str):
def run_remote_command(cmd_str, host):
"""
Runs a command on a remote host
@param cmd_str: The command string to be executed
@return: stdout/stderr output as a string
"""
......@@ -124,9 +124,9 @@ def run_remote_command(cmd_str, host):
ctxt = REMOTE,
remoteHost = host)
cmd.run(validateAfter = True)
results = cmd.get_results()
if results.rc != 0:
return results.stderr.strip()
else:
......@@ -138,7 +138,7 @@ class GppkgSpec:
"""
All the parameters require arguments of type string.
"""
self.name = name
self.name = name
self.version = version
self.gpdbversion = gpdbversion
self.os = os
......@@ -146,13 +146,13 @@ class GppkgSpec:
def get_package_name(self):
"""Returns the package name of the form <name>-<version>"""
return self.name + '-' + self.version
return self.name + '-' + self.version
def get_filename(self):
"""Returns the complete filename of the gppkg"""
return self.get_package_name() + '-' + self.os + '-' + self.arch + GPPKG_EXTENSION
def __str__(self):
def __str__(self):
"""Returns the GppkgSpec in the form of a string"""
gppkg_spec_file = '''
PkgName: ''' + self.name + '''
......@@ -160,7 +160,7 @@ Version: ''' + self.version + '''
GPDBVersion: ''' + self.gpdbversion + '''
Description: Temporary Test Package
OS: ''' + self.os + '''
Architecture: ''' + self.arch
Architecture: ''' + self.arch
return gppkg_spec_file
......@@ -182,15 +182,15 @@ class BuildGppkg(Operation):
if not os.path.exists(SCRATCH_SPACE):
os.mkdir(SCRATCH_SPACE)
gppkg_spec_file = str(self.gppkg_spec)
#create gppkg_dir
gppkg_dir = os.path.join(SCRATCH_SPACE, "package")
if not os.path.exists(gppkg_dir):
os.mkdir(gppkg_dir)
dependent_rpms = [rpm.get_package_name() for rpm in self.dependent_rpm_specs]
rpm_file = BuildRPM(self.main_rpm_spec).run()
dependent_rpms = [rpm.get_package_name() for rpm in self.dependent_rpm_specs]
rpm_file = BuildRPM(self.main_rpm_spec).run()
shutil.move(rpm_file, gppkg_dir)
try:
......@@ -201,7 +201,7 @@ class BuildGppkg(Operation):
for spec in self.dependent_rpm_specs:
rpm_file = BuildRPM(spec).run()
shutil.move(rpm_file, deps_dir)
#create gppkg
with open(os.path.join(gppkg_dir, "gppkg_spec.yml"), "w") as f:
f.write(gppkg_spec_file)
......@@ -216,7 +216,7 @@ class BuildGppkg(Operation):
class RPMSpec:
"""Represents an RPM spec file used for creating an RPM"""
def __init__(self, name, version, release, depends = []):
"""
"""
@param depends: List of dependecies for the rpm
@type depends: List of strings
"""
......@@ -227,10 +227,10 @@ class RPMSpec:
def get_package_name(self):
"""Returns the package name of the form <name>-<version>-<release>"""
return self.name + '-' + self.version + '-' + self.release
return self.name + '-' + self.version + '-' + self.release
def get_filename(self):
"""Returns the complete filename of the rpm"""
"""Returns the complete filename of the rpm"""
return self.get_package_name() + '.' + ARCH + ".rpm"
def __str__(self):
......@@ -241,22 +241,22 @@ class RPMSpec:
%define __os_install_post %{nil}
Summary: Temporary test package
License: GPLv2
Name: ''' + self.name + '''
License: GPLv2
Name: ''' + self.name + '''
Version: ''' + self.version + '''
Release: ''' + self.release + '''
Release: ''' + self.release + '''
Group: Development/Tools
Prefix: /temp
AutoReq: no
AutoProv: no
BuildArch: ''' + ARCH + '''
BuildArch: ''' + ARCH + '''
Provides: ''' + self.name + ''' = '''+ self.version +''', /bin/sh
BuildRoot: %{_topdir}/BUILD '''
if self.depends != []:
if self.depends != []:
rpm_spec_file += '''
Requires: ''' + ','.join(self.depends)
rpm_spec_file += '''
%description
Temporary test package for gppkg.
......@@ -284,7 +284,7 @@ class BuildRPM(Operation):
def execute(self):
rpm_spec_file = str(self.spec)
build_dir = os.path.join(SCRATCH_SPACE, "BUILD")
build_dir = os.path.join(SCRATCH_SPACE, "BUILD")
os.mkdir(build_dir)
rpms_dir = os.path.join(SCRATCH_SPACE, "RPMS")
os.mkdir(rpms_dir)
......@@ -293,7 +293,7 @@ class BuildRPM(Operation):
with tempfile.NamedTemporaryFile() as f:
f.write(rpm_spec_file)
f.flush()
os.system("cd " + SCRATCH_SPACE + "; rpmbuild --quiet -bb " + f.name)
shutil.copy(os.path.join(SCRATCH_SPACE, "RPMS", ARCH, self.spec.get_filename()), os.getcwd())
......@@ -301,7 +301,7 @@ class BuildRPM(Operation):
shutil.rmtree(build_dir)
shutil.rmtree(rpms_dir)
return self.spec.get_filename()
return self.spec.get_filename()
class GppkgTestCase(unittest.TestCase):
"""
......@@ -309,9 +309,9 @@ class GppkgTestCase(unittest.TestCase):
Provides default RPMSpec and GppkgSpecs for the inherited classes
to work with.
Default RPMSpec will have no dependencies, have a version and release 1
Default RPMSpec will have no dependencies, have a version and release 1
and will be called A
Default GppkgSpec will have no dependencies, have version 1.0 and will
Default GppkgSpec will have no dependencies, have version 1.0 and will
be called alpha
"""
def setUp(self):
......@@ -319,14 +319,14 @@ class GppkgTestCase(unittest.TestCase):
self.A_spec = RPMSpec("A", "1", "1")
self.alpha_spec = GppkgSpec("alpha", "1.0")
def tearDown(self):
def tearDown(self):
self.cleanup()
@classmethod
@classmethod
def setUpClass(self):
self.extra_clean = set()
self.start_output, self.end_output = (1, None) if os.getenv('USE_FIGLEAF', None) is None else (2, -2)
self.start_output, self.end_output = (1, None)
@classmethod
def tearDownClass(self):
for gppkg in self.extra_clean:
......@@ -335,7 +335,7 @@ class GppkgTestCase(unittest.TestCase):
def cleanup(self):
"""Cleans up gppkgs that are installed"""
results = run_command("gppkg -q --all")
gppkgs = results.split('\n')[self.start_output:self.end_output] #The first line is 'Starting gppkg with args', which we want to ignore.
gppkgs = results.split('\n')[self.start_output:self.end_output] #The first line is 'Starting gppkg with args', which we want to ignore.
for gppkg in gppkgs:
run_command("gppkg --remove " + gppkg)
......@@ -344,7 +344,7 @@ class GppkgTestCase(unittest.TestCase):
"""
Builds a gppkg and checks if the build was successful.
if the build was successful, returns the gppkg filename
@param gppkg_spec: The spec file required to build a gppkg
@type gppkg_spec: GppkgSpec
@param rpm_spec: The spec file required to build the main rpm
......@@ -358,20 +358,20 @@ class GppkgTestCase(unittest.TestCase):
self.assertTrue(self._check_build(gppkg_file, gppkg_spec))
self.extra_clean.add(gppkg_file)
return gppkg_file
def _check_build(self, gppkg_file, gppkg_spec):
"""
Check if the gppkg build was successful
@return: True if build was successful
False otherwise
@rtype: bool
"""
return gppkg_file == gppkg_spec.get_filename()
def install(self, gppkg_filename):
"""
Install a given gppkg and checks if the installation was
Install a given gppkg and checks if the installation was
successful.
@param gppkg_filename: The name of the gppkg file
......@@ -379,11 +379,11 @@ class GppkgTestCase(unittest.TestCase):
"""
run_command("gppkg --install %s" % gppkg_filename)
self.assertTrue(self.check_install(gppkg_filename))
def check_install(self, gppkg_filename):
"""
Checks if a gppkg was installed successfully.
@param gppkg_filename: The name of the gppkg file
@type gppkg_filename: str
@return: True if the gppkg was installed successfully
......@@ -392,8 +392,8 @@ class GppkgTestCase(unittest.TestCase):
"""
cmd = "gppkg -q %s" % gppkg_filename
results = run_command(cmd)
test_str = ''.join(gppkg_filename.split('-')[:1]) + " is installed"
is_installed = test_str in results
test_str = ''.join(gppkg_filename.split('-')[:1]) + " is installed"
is_installed = test_str in results
return is_installed and CheckFile(os.path.join(ARCHIVE_PATH, gppkg_filename)).run()
def remove(self, gppkg_filename):
......@@ -410,7 +410,7 @@ class GppkgTestCase(unittest.TestCase):
def update(self, gppkg_filename):
"""
Updates a gppkg and checks if the update was
Updates a gppkg and checks if the update was
successful.
@param gppkg_filename: The name of the gppkg to be updated
......@@ -427,7 +427,7 @@ class GppkgTestCase(unittest.TestCase):
[timestamp] gppkg:host:user[INFO]:- Some message
We want to strip out the timestamp and other unwanted
text such as [INFO], gppkg, host, user etc.
@param result: The output from stdout/stderr
@type result: str
@return: The output with the timestamps and unwanted text removed
......@@ -435,19 +435,19 @@ class GppkgTestCase(unittest.TestCase):
"""
result_without_timestamp = []
results = result.split('\n')[self.start_output:self.end_output]
for res in results:
res = res.split(':-')[1].strip()
res = ' '.join(res.split())
result_without_timestamp.append(res)
return result_without_timestamp
return result_without_timestamp
def check_rpm_install(self, rpm_package_name):
"""
Checks if an rpm has been installed or not.
@param rpm_package_name: Name of the rpm package of the form <name>-<version>-<release>
@param rpm_package_name: Name of the rpm package of the form <name>-<version>-<release>
@type rpm_package_name: str
"""
results = run_command("rpm -q %s --dbpath %s" % (rpm_package_name, RPM_DATABASE))
......@@ -469,7 +469,7 @@ class GppkgTestCase(unittest.TestCase):
@param rpm_package_name: Name of rpm package of the form <name>-<version>-<release>
@type rpm_package_name: str
@param host: Remote host
@param host: Remote host
@type host: str
"""
results = run_remote_command("rpm -q %s --dbpath %s" % (rpm_package_name, RPM_DATABASE), host)
......@@ -481,25 +481,25 @@ class GppkgTestCase(unittest.TestCase):
@param rpm_package_name: Name of rpm package of the form <name>-<version>-<release>
@type rpm_package_name: str
@param host: Remote host
@param host: Remote host
@type host: str
"""
with self.assertRaisesRegexp(ExecutionError, "%s is not installed" % rpm_package_name):
results = run_remote_command("rpm -q %s --dbpath %s" % (rpm_package_name, RPM_DATABASE), host)
def install_rpm(self, rpm_filename, rpm_database = RPM_DATABASE, installation_prefix = GPHOME):
"""
Installs a given rpm and checks if the installation was successful.
@param rpm_filename: Name of the rpm file to be installed
@type rpm_filename: str
@param rpm_database: The rpm database against which rpms will be installed
@param rpm_database: The rpm database against which rpms will be installed
@type rpm_database: str
@param installation_prefix: The installation path for the rpm
@param installation_prefix: The installation path for the rpm
@param type installation_prefix: str
"""
run_command("rpm -i %s --dbpath %s --prefix=%s" % (rpm_filename, rpm_database, installation_prefix))
rpm_package_name = rpm_filename[:rpm_filename.index('.')]
rpm_package_name = rpm_filename[:rpm_filename.index('.')]
self.check_rpm_install(rpm_package_name)
def uninstall_rpm(self, rpm_filename, rpm_database = RPM_DATABASE):
......@@ -508,12 +508,12 @@ class GppkgTestCase(unittest.TestCase):
@param rpm_filename: Name of the rpm file to be uninstalled
@type rpm_filename: str
@param rpm_database: The rpm database against which rpms will be uninstalled
@param rpm_database: The rpm database against which rpms will be uninstalled
@type rpm_database: str
"""
rpm_package_name = rpm_filename[:rpm_filename.index('.')]
run_command("rpm -e %s --dbpath %s" % (rpm_package_name, rpm_database))
self.check_rpm_uninstall(rpm_package_name)
self.check_rpm_uninstall(rpm_package_name)
def install_rpm_remotely(self, rpm_filename, host, rpm_database = RPM_DATABASE, installation_prefix = GPHOME):
"""
......
......@@ -26,7 +26,6 @@ import getopt
import popen2
from gppylib.util import ssh_utils
from gppylib.gpparseopts import OptParser
from gppylib.gpcoverage import GpCoverage
from gppylib.parseutils import canonicalize_address
progname = os.path.split(sys.argv[0])[-1]
......@@ -115,9 +114,6 @@ def run(cmd, peer):
#############
coverage = GpCoverage()
coverage.start()
try:
parseCommandLine()
hostlist = ssh_utils.HostList()
......@@ -160,6 +156,3 @@ try:
except KeyboardInterrupt:
sys.exit('\nInterrupted...')
finally:
coverage.stop()
coverage.generate_report()
......@@ -52,7 +52,6 @@ try:
from gppylib.commands import unix
from gppylib.util import ssh_utils
from gppylib.gpparseopts import OptParser
from gppylib.gpcoverage import GpCoverage
except ImportError, e:
sys.exit('Error: unable to import module: ' + str(e))
......@@ -516,9 +515,6 @@ def addHost(hostname, hostlist, localhost=False):
tempDir = None
coverage = GpCoverage()
coverage.start()
try:
nullFile = logging.FileHandler('/dev/null')
logging.getLogger('paramiko.transport').addHandler(nullFile)
......@@ -966,6 +962,3 @@ finally:
for name in dirs:
os.rmdir(os.path.join(root, name))
os.rmdir(tempDir)
coverage.stop()
coverage.generate_report()
......@@ -32,7 +32,6 @@ try:
from gppylib import pgconf
from gppylib.commands.pg import PgControlData
from gppylib.operations.startSegments import *
from gppylib.gpcoverage import GpCoverage
from gppylib.utils import TableLogger
from gppylib.gp_dbid import GpDbidFile
from gppylib.gp_era import GpEraFile
......
......@@ -28,7 +28,6 @@ try:
from gppylib.commands import base
from gppylib.commands import pg
from gppylib.commands import dca
from gppylib.gpcoverage import GpCoverage
from gppylib.utils import TableLogger
from gppylib.gp_era import GpEraFile
from gppylib.operations.unix import CleanSharedMem
......
......@@ -15,7 +15,6 @@ try:
from gppylib.gparray import *
from gppylib.commands.gp import *
from gppylib.db import dbconn
from gppylib.gpcoverage import GpCoverage
except ImportError, e:
sys.exit('Cannot import modules. Please check that you have sourced greenplum_path.sh. Detail: ' + str(e))
......@@ -52,44 +51,37 @@ def parseargs():
# ------------------------------- Mainline --------------------------------
coverage = GpCoverage()
coverage.start()
options = parseargs()
try:
options = parseargs()
files = list()
files = list()
# get the files to edit from STDIN
line = sys.stdin.readline()
while line:
# get the files to edit from STDIN
line = sys.stdin.readline()
while line:
directory = line.rstrip()
directory = line.rstrip()
filename = directory + "/postgresql.conf"
if not os.path.exists(filename):
raise Exception("path does not exist" + filename)
filename = directory + "/postgresql.conf"
if not os.path.exists(filename):
raise Exception("path does not exist" + filename)
files.append(filename)
files.append(filename)
line = sys.stdin.readline()
line = sys.stdin.readline()
fromString = "(^\s*" + options.entry + "\s*=.*$)"
toString = "#$1"
name = "mycmd"
fromString = "(^\s*" + options.entry + "\s*=.*$)"
toString = "#$1"
name = "mycmd"
# update all the files
for f in files:
# update all the files
for f in files:
# comment out any existing entries for this setting
cmd = InlinePerlReplace(name, fromString, toString, f)
cmd.run(validateAfter=True)
# comment out any existing entries for this setting
cmd = InlinePerlReplace(name, fromString, toString, f)
cmd.run(validateAfter=True)
if options.removeonly:
continue
if options.removeonly:
continue
cmd = GpAppendGucToFile(name, f, options.entry, options.value)
cmd.run(validateAfter=True)
finally:
coverage.stop()
coverage.generate_report()
cmd = GpAppendGucToFile(name, f, options.entry, options.value)
cmd.run(validateAfter=True)
......@@ -26,11 +26,6 @@ logger = gplog.get_default_logger()
operation = pickle.load(sys.stdin)
from gppylib.gpcoverage import GpCoverage
coverage = GpCoverage()
coverage.start()
try:
ret = operation.run()
except Exception, e:
......@@ -57,9 +52,6 @@ except Exception, e:
sys.exit(2) # signal that gpoperation.py has hit unexpected error
else:
pickled_ret = pickle.dumps(ret) # Pickle return data for stdout transmission
finally:
coverage.stop()
coverage.generate_report()
sys.stdout = old_stdout
print pickled_ret
......
......@@ -25,7 +25,6 @@ from gppylib.commands.gp import SEGMENT_STOP_TIMEOUT_DEFAULT
from gppylib.commands import pg
from gppylib.db import dbconn
from gppylib import pgconf
from gppylib.gpcoverage import GpCoverage
from gppylib.commands.gp import is_pid_postmaster
description = ("""
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册