author     Mark Wong    2017-07-20 16:38:15 +0000
committer  Mark Wong    2017-08-04 04:51:01 +0000
commit     60284eb4b0755c2a5b6e6559774690dddd369ee3 (patch)
tree       ff17121af18a83e95c66c6efb7cb3a414e8440f2 /client/utils
parent     d2e86de59149c0078a8d1709c837f13ce5313775 (diff)
pep8 coding style
Diffstat (limited to 'client/utils')
-rw-r--r--  client/utils/cluster.py   126
-rw-r--r--  client/utils/git.py       106
-rw-r--r--  client/utils/locking.py    24
-rw-r--r--  client/utils/logging.py    13
-rw-r--r--  client/utils/misc.py       78
5 files changed, 176 insertions, 171 deletions
diff --git a/client/utils/cluster.py b/client/utils/cluster.py
index 6e48970..b81eaef 100644
--- a/client/utils/cluster.py
+++ b/client/utils/cluster.py
@@ -9,64 +9,68 @@ from utils.logging import log
class PgCluster(object):
- 'basic manipulation of postgres cluster (init, start, stop, destroy)'
-
- def __init__(self, bin_path, data_path):
- self._bin = bin_path
- self._data = data_path
-
-
- def _initdb(self):
- 'initialize the data directory'
-
- with TemporaryFile() as strout:
- log("initializing cluster into '%s'" % (self._data,))
- call(['pg_ctl', '-D', self._data, 'init'], env={'PATH' : self._bin}, stdout=strout, stderr=STDOUT)
-
-
- def _configure(self, config):
- 'update configuration of a cluster (using postgresql.auto.conf)'
-
- log("configuring cluster in '%s'" % (self._data,))
- with open('%s/postgresql.auto.conf' % (self._data,), 'a+') as f:
- for k in config:
- f.write("%(name)s = '%(value)s'\n" % {'name' : k, 'value' : config[k]})
-
-
- def _destroy(self):
- 'forced cleanup of possibly existing cluster processes and data directory'
-
- with TemporaryFile() as strout:
- log("killing all existing postgres processes")
- call(['killall', 'postgres'], stdout=strout, stderr=STDOUT)
-
- # remove the data directory
- if os.path.exists(self._data):
- shutil.rmtree(self._data)
-
-
- def start(self, config, destroy=True):
- 'init, configure and start the cluster'
-
- # cleanup any previous cluster running, remove data dir if it exists
- if destroy:
- self._destroy()
-
- self._initdb()
- self._configure(config)
-
- with TemporaryFile() as strout:
- log("starting cluster in '%s' using '%s' binaries" % (self._data, self._bin))
- call(['pg_ctl', '-D', self._data, '-l', 'pg.log', '-w', 'start'], env={'PATH' : self._bin}, stdout=strout, stderr=STDOUT)
-
-
- def stop(self, destroy=True):
- 'stop the cluster'
-
- with TemporaryFile() as strout:
- log("stopping cluster in '%s' using '%s' binaries" % (self._data, self._bin))
- call(['pg_ctl', '-D', self._data, '-w', '-t', '60', 'stop'], env={'PATH' : self._bin}, stdout=strout, stderr=STDOUT)
-
- # kill any remaining processes, remove the data dir
- if destroy:
- self._destroy()
+ 'basic manipulation of postgres cluster (init, start, stop, destroy)'
+
+ def __init__(self, bin_path, data_path):
+ self._bin = bin_path
+ self._data = data_path
+
+ def _initdb(self):
+ 'initialize the data directory'
+
+ with TemporaryFile() as strout:
+ log("initializing cluster into '%s'" % (self._data,))
+ call(['pg_ctl', '-D', self._data, 'init'], env={'PATH': self._bin},
+ stdout=strout, stderr=STDOUT)
+
+ def _configure(self, config):
+ 'update configuration of a cluster (using postgresql.auto.conf)'
+
+ log("configuring cluster in '%s'" % (self._data,))
+ with open('%s/postgresql.auto.conf' % (self._data,), 'a+') as f:
+ for k in config:
+ f.write("%(name)s = '%(value)s'\n" %
+ {'name': k, 'value': config[k]})
+
+ def _destroy(self):
+ """
+ forced cleanup of possibly existing cluster processes and data
+ directory
+ """
+
+ with TemporaryFile() as strout:
+ log("killing all existing postgres processes")
+ call(['killall', 'postgres'], stdout=strout, stderr=STDOUT)
+
+ # remove the data directory
+ if os.path.exists(self._data):
+ shutil.rmtree(self._data)
+
+ def start(self, config, destroy=True):
+ 'init, configure and start the cluster'
+
+ # cleanup any previous cluster running, remove data dir if it exists
+ if destroy:
+ self._destroy()
+
+ self._initdb()
+ self._configure(config)
+
+ with TemporaryFile() as strout:
+ log("starting cluster in '%s' using '%s' binaries" %
+ (self._data, self._bin))
+ call(['pg_ctl', '-D', self._data, '-l', 'pg.log', '-w', 'start'],
+ env={'PATH': self._bin}, stdout=strout, stderr=STDOUT)
+
+ def stop(self, destroy=True):
+ 'stop the cluster'
+
+ with TemporaryFile() as strout:
+ log("stopping cluster in '%s' using '%s' binaries" %
+ (self._data, self._bin))
+ call(['pg_ctl', '-D', self._data, '-w', '-t', '60', 'stop'],
+ env={'PATH': self._bin}, stdout=strout, stderr=STDOUT)
+
+ # kill any remaining processes, remove the data dir
+ if destroy:
+ self._destroy()
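For reference, a minimal sketch of how PgCluster is meant to be driven, based only on the methods shown in this diff; the install/data paths and settings below are hypothetical, not values from the repository:

    # usage sketch, assuming hypothetical paths and settings
    from utils.cluster import PgCluster

    cluster = PgCluster(bin_path='/home/user/pg-install/bin',
                        data_path='/home/user/pg-data')

    # destroys any leftover cluster, runs initdb, appends the settings to
    # postgresql.auto.conf and starts the server via pg_ctl
    cluster.start(config={'shared_buffers': '1GB', 'max_connections': '100'})

    # ... run benchmarks here ...

    # stops the server and (by default) removes the data directory again
    cluster.stop()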
diff --git a/client/utils/git.py b/client/utils/git.py
index 11b00c1..587e5a4 100644
--- a/client/utils/git.py
+++ b/client/utils/git.py
@@ -8,75 +8,75 @@ from utils.logging import log
class GitRepository(object):
- 'a simple management of a git repository / source building'
+ 'a simple management of a git repository / source building'
- def __init__(self, url, path):
- 'url - repository URL, path - local directory for the clone'
+ def __init__(self, url, path):
+ 'url - repository URL, path - local directory for the clone'
- self._url = url
- self._path = path
+ self._url = url
+ self._path = path
+ def _exists(self):
+ 'check that a local repository clone exists'
- def _exists(self):
- 'check that a local repository clone exists'
+ # TODO verify that the repository uses the proper upstream url
+ return os.path.exists(self._path)
- # TODO verify that the repository uses the proper upstream url
- return os.path.exists(self._path)
+ def _clone(self):
+ ''
+ log("cloning repository '%s' to '%s'" % (self._url, self._path))
+ with TemporaryFile() as strout:
+ call(['git', 'clone', self._url, self._path], stdout=strout,
+ stderr=STDOUT)
- def _clone(self):
- ''
- log("cloning repository '%s' to '%s'" % (self._url, self._path))
+ def _update(self):
+ 'update an existing repository clone'
- with TemporaryFile() as strout:
- call(['git', 'clone', self._url, self._path], stdout=strout, stderr=STDOUT)
+ log("updating repository '%s' from '%s'" % (self._path, self._url))
+ # simply call git-pull and redirect stdout/stderr
+ # FIXME should verify that the repository uses the proper upstream url
+ with TemporaryFile() as strout:
+ call(['git', 'pull'], cwd=self._path, stdout=strout, stderr=STDOUT)
- def _update(self):
- 'update an existing repository clone'
+ def current_commit(self):
+ 'returns current commit hash'
- log("updating repository '%s' from '%s'" % (self._path, self._url))
+ with TemporaryFile() as strout:
+ call(['git', 'rev-parse', 'HEAD'], cwd=self._path, stdout=strout,
+ stderr=STDOUT)
+ strout.seek(0)
+ return strout.read().strip()
- # simply call git-pull and redirect stdout/stderr
- # FIXME should verify that the repository uses the proper upstream url
- with TemporaryFile() as strout:
- call(['git', 'pull'], cwd=self._path, stdout=strout, stderr=STDOUT)
+ def clone_or_update(self):
+ 'refreshes the repository (either clone from scratch or refresh)'
+ if self._exists():
+ self._update()
+ else:
+ self._clone()
- def current_commit(self):
- 'returns current commit hash'
+ log("current commit '%s'" % (self.current_commit(),))
- with TemporaryFile() as strout:
- call(['git', 'rev-parse', 'HEAD'], cwd=self._path, stdout=strout, stderr=STDOUT)
- strout.seek(0)
- return strout.read().strip()
+ def build_and_install(self, path, remove=True):
+ 'builds and installs the sources'
+ # TODO collect output of configure and make commands
+ if os.path.exists(path):
+ shutil.rmtree(path)
- def clone_or_update(self):
- 'refreshes the repository (either clone from scratch or refresh)'
+ with TemporaryFile() as strout:
+ log("configuring sources in '%s' with prefix '%s'" %
+ (self._path, path))
+ call(['./configure', '--prefix', path], cwd=self._path,
+ stdout=strout, stderr=STDOUT)
- if self._exists():
- self._update()
- else:
- self._clone()
+ with TemporaryFile() as strout:
+ log("building sources and installing into '%s'" % (path,))
- log("current commit '%s'" % (self.current_commit(),))
-
-
- def build_and_install(self, path, remove=True):
- 'builds and installs the sources'
-
- # TODO collect output of configure and make commands
- if os.path.exists(path):
- shutil.rmtree(path)
-
- with TemporaryFile() as strout:
- log("configuring sources in '%s' with prefix '%s'" % (self._path, path))
- call(['./configure', '--prefix', path], cwd=self._path, stdout=strout, stderr=STDOUT)
-
- with TemporaryFile() as strout:
- log("building sources and installing into '%s'" % (path,))
-
- # cleanup and build using multiple cpus
- call(['make', '-s', 'clean'], cwd=self._path, stdout=strout, stderr=STDOUT)
- call(['make', '-s', '-j', str(cpu_count()), 'install'], cwd=self._path, stdout=strout, stderr=STDOUT)
+ # cleanup and build using multiple cpus
+ call(['make', '-s', 'clean'], cwd=self._path, stdout=strout,
+ stderr=STDOUT)
+ call(['make', '-s', '-j', str(cpu_count()), 'install'],
+ cwd=self._path, stdout=strout, stderr=STDOUT)
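A short usage sketch of GitRepository as it stands after this change; the URL and directories are placeholders, not values taken from the client configuration:

    # usage sketch with placeholder URL and paths (Python 2, as in this repo)
    from utils.git import GitRepository

    repo = GitRepository(url='https://git.postgresql.org/git/postgresql.git',
                         path='/home/user/postgres-src')

    repo.clone_or_update()         # git clone on first run, git pull afterwards
    print repo.current_commit()    # hash reported by 'git rev-parse HEAD'

    # ./configure --prefix <path>, make clean, make -j<ncpus> install
    repo.build_and_install('/home/user/pg-install')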
diff --git a/client/utils/locking.py b/client/utils/locking.py
index dfc8f63..d3cbd64 100644
--- a/client/utils/locking.py
+++ b/client/utils/locking.py
@@ -3,19 +3,19 @@ import os
class FileLock():
- 'a simple wrapper around file lock'
+ 'a simple wrapper around file lock'
- def __init__(self, filename):
- self._file = open(filename, 'w')
+ def __init__(self, filename):
+ self._file = open(filename, 'w')
- def __enter__(self):
- 'locks the file and writes the PID of the current process into it'
- fcntl.flock(self._file, fcntl.LOCK_EX)
- self._file.write(str(os.getpid()))
- self._file.flush()
+ def __enter__(self):
+ 'locks the file and writes the PID of the current process into it'
+ fcntl.flock(self._file, fcntl.LOCK_EX)
+ self._file.write(str(os.getpid()))
+ self._file.flush()
- return self._file
+ return self._file
- def __exit__(self, type, value, traceback):
- 'unlock the file'
- fcntl.flock(self._file, fcntl.LOCK_UN)
+ def __exit__(self, type, value, traceback):
+ 'unlock the file'
+ fcntl.flock(self._file, fcntl.LOCK_UN)
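FileLock is a context manager around fcntl.flock; a minimal sketch of its intended use (the lock file name and the work done under the lock are illustrative):

    from utils.locking import FileLock

    # only one client instance should run at a time; the PID of the
    # holder is written into the lock file while the lock is held
    with FileLock('.lock') as lock_file:
        run_benchmarks()   # hypothetical work performed under the lock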
diff --git a/client/utils/logging.py b/client/utils/logging.py
index 964480f..1e55fac 100644
--- a/client/utils/logging.py
+++ b/client/utils/logging.py
@@ -1,12 +1,13 @@
import sys
import time
+
def log(message):
- ''
+ ''
- print '%(epoch)s %(date)s %(message)s' % {
- 'epoch' : time.time(),
- 'date' : time.strftime('%Y-%m-%d %H:%M:%S'),
- 'message' : message}
+ print '%(epoch)s %(date)s %(message)s' % {
+ 'epoch': time.time(),
+ 'date': time.strftime('%Y-%m-%d %H:%M:%S'),
+ 'message': message}
- sys.stdout.flush()
+ sys.stdout.flush()
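log() simply prefixes the message with the raw epoch and a formatted timestamp, then flushes stdout so the output stays ordered when redirected to a file; for example:

    from utils.logging import log

    log("starting benchmark run")
    # prints something like:
    # 1501822261.17 2017-08-04 04:51:01 starting benchmark run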
diff --git a/client/utils/misc.py b/client/utils/misc.py
index 6f73998..fa4e54c 100644
--- a/client/utils/misc.py
+++ b/client/utils/misc.py
@@ -8,62 +8,62 @@ from tempfile import TemporaryFile
def available_ram():
- 'determine amount of RAM in the system (in megabytes)'
+ 'determine amount of RAM in the system (in megabytes)'
- return int(os.popen("free -m").readlines()[1].split()[1])
+ return int(os.popen("free -m").readlines()[1].split()[1])
def run_cmd(args, env=None, cwd=None):
- 'run command (a subprocess.call wrapper)'
+ 'run command (a subprocess.call wrapper)'
- with TemporaryFile() as strout:
+ with TemporaryFile() as strout:
- start = time.time()
- retcode = call(args, env=env, cwd=cwd, stdout=strout, stderr=STDOUT)
+ start = time.time()
+ retcode = call(args, env=env, cwd=cwd, stdout=strout, stderr=STDOUT)
- strout.seek(0)
- return (retcode, strout.read(), (time.time() - start))
+ strout.seek(0)
+ return (retcode, strout.read(), (time.time() - start))
-def connect(dbname, conn, cursor, nretries = 60, delay = 1.0):
- '''Try opening a connection and a cursor. If it does not succeed (e.g.
- when the database is performing recovery after a crash, retry multiple
- times (as specified by nretries and delay in seconds).
- '''
+def connect(dbname, conn, cursor, nretries=60, delay=1.0):
+ '''Try opening a connection and a cursor. If it does not succeed (e.g.
+ when the database is performing recovery after a crash, retry multiple
+ times (as specified by nretries and delay in seconds).
+ '''
- # if we already have connection and a cursor, return it
- if conn and cursor:
- return (conn, cursor)
+ # if we already have connection and a cursor, return it
+ if conn and cursor:
+ return (conn, cursor)
- # we'll try repeatedly, with delays between the attempts
- i = 0
- while i < nretries:
+ # we'll try repeatedly, with delays between the attempts
+ i = 0
+ while i < nretries:
- i += 1
+ i += 1
- try:
- conn = psycopg2.connect('host=localhost dbname=%s' % (dbname,))
- # TODO do we actually need autocommit?
- conn.autocommit = True
- cursor = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
+ try:
+ conn = psycopg2.connect('host=localhost dbname=%s' % (dbname,))
+ # TODO do we actually need autocommit?
+ conn.autocommit = True
+ cursor = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
- return (conn, cursor)
- except:
- # connection failure - sleep for a while, then try again
- time.sleep(delay)
+ return (conn, cursor)
+ except Exception as e:
+ # connection failure - sleep for a while, then try again
+ time.sleep(delay)
- return (None, None)
+ return (None, None)
def disconnect(conn, cursor):
- '''Make sure we're disconnected (but prevent exceptions)'''
+ '''Make sure we're disconnected (but prevent exceptions)'''
- try:
- cursor.close()
- except:
- pass
+ try:
+ cursor.close()
+ except Exception as e:
+ pass
- try:
- conn.close()
- except:
- pass
+ try:
+ conn.close()
+ except Exception as e:
+ pass
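The helpers in misc.py are what the benchmark drivers are expected to call directly; a hedged sketch of run_cmd() and the retry-based connect()/disconnect() pair in use (the database name and command line are illustrative only):

    from utils.misc import available_ram, run_cmd, connect, disconnect

    print available_ram()    # total RAM in MB, parsed from 'free -m'

    # run a command, capturing return code, combined stdout/stderr and duration
    retcode, output, seconds = run_cmd(['pgbench', '-i', '-s', '10', 'pgbench'])

    # keeps retrying (60 attempts, 1 second apart by default) while the
    # server is still starting up or recovering after a crash
    conn, cursor = connect('pgbench', None, None)
    cursor.execute('SELECT count(*) FROM pgbench_accounts')
    disconnect(conn, cursor)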