From 6f8f00817ce1f581bb587edb12d50bcd80387925 Mon Sep 17 00:00:00 2001 From: Hamatoma Date: Mon, 19 Feb 2024 22:03:37 +0100 Subject: [PATCH 1/1] Initial state --- .gitignore | 10 + All.sh | 14 + Env.sh | 6 + InstallAll | 25 + MakeCloud.py | 135 + MkTar.sh | 6 + appl/.gitignore | 1 + appl/BackupBase.py | 306 +++ appl/BackupTool.py | 979 ++++++++ appl/BenchTool.py | 243 ++ appl/DbTool.py | 941 +++++++ appl/DirTool.py | 1206 +++++++++ appl/FtpTool.py | 176 ++ appl/Monitor.py | 1184 +++++++++ appl/RestoreTool.py | 2174 +++++++++++++++++ appl/SvgTool.py | 741 ++++++ appl/TextTool.py | 1313 ++++++++++ appl/UrlChecker.py | 357 +++ appl/WebDashClient.py | 426 ++++ appl/WebDashServer.py | 336 +++ appl/Webserver.py | 107 + appl/WikiTool.py | 205 ++ appl/ZipTool.py | 181 ++ appl/__init__.py | 0 base/BaseLogger.py | 98 + base/BaseTool.py | 1454 +++++++++++ base/CryptoEngine.py | 400 +++ base/FileHelper.py | 678 +++++ base/JavaConfig.py | 92 + base/LinuxUtils.py | 234 ++ base/Logger.py | 49 + base/MemoryLogger.py | 64 + base/ProcessHelper.py | 224 ++ base/PythonToTypeScript.py | 652 +++++ base/StringUtils.py | 436 ++++ base/TextProcessor.py | 1776 ++++++++++++++ base/ThreadLogger.py | 38 + base/Zipper.py | 468 ++++ base/__init__.py | 0 doc/mysql.txt | 1 + examples/safe/php.ini | 562 +++++ net/EMail.py | 130 + net/FileTcpTaskHandler.py | 242 ++ net/FtpEngine.py | 295 +++ net/HttpClient.py | 154 ++ net/LinuxTcpTaskHandler.py | 176 ++ net/TcpClient.py | 82 + net/TcpServer.py | 151 ++ net/TcpTaskHandler.py | 66 + net/__init__.py | 5 + unittest/PackageTest.py | 25 + unittest/UnitTestCase.py | 402 +++ unittest/UnitTestSuite.py | 116 + unittest/__init__.py | 0 unittest/appl/BackupToolTest.py | 508 ++++ unittest/appl/BaseToolTest.py | 191 ++ unittest/appl/BenchToolTest.py | 48 + unittest/appl/DbToolTest.py | 390 +++ unittest/appl/DirToolTest.py | 418 ++++ unittest/appl/FtpToolTest.py | 154 ++ unittest/appl/MonitorTest.py | 276 +++ unittest/appl/RestoreToolTest.py | 694 ++++++ 
unittest/appl/SvgToolTest.py | 176 ++ unittest/appl/TextToolTest.py | 812 ++++++ unittest/appl/WikiToolTest.py | 101 + unittest/appl/__init__.py | 0 unittest/base/BaseTester.py | 36 + unittest/base/CryptoEngineTest.py | 174 ++ unittest/base/FileHelperTest.py | 371 +++ unittest/base/JavaConfigTest.py | 68 + unittest/base/LinuxUtilsTest.py | 96 + unittest/base/LoggerTest.py | 92 + unittest/base/MemoryLoggerTest.py | 27 + unittest/base/ProcessHelperTest.py | 70 + unittest/base/PythonToTypeScriptTest.py | 230 ++ unittest/base/StringUtilsTest.py | 301 +++ unittest/base/TextProcessorTest.py | 481 ++++ unittest/base/ThreadLoggerTest.py | 30 + unittest/base/ZipperTest.py | 274 +++ unittest/base/__init__.py | 0 unittest/base/sample.env | 6 + unittest/configuration/default.conf | 33 + unittest/configuration/error/default.conf | 32 + unittest/configuration/localhost.conf | 23 + unittest/configuration/observed/default.conf | 23 + .../configuration/observed/localhost.conf | 29 + unittest/configuration/sites/otherport | 5 + unittest/configuration/sites/wiki.hamatoma.de | 20 + unittest/data/etc.tgz | Bin 0 -> 25579 bytes unittest/data/etc.work.tgz | Bin 0 -> 785 bytes unittest/data/example.html | 1 + unittest/data/example.sh | 2 + unittest/data/example.tar | Bin 0 -> 20480 bytes unittest/data/example.tbz | Bin 0 -> 1025 bytes unittest/data/example.tgz | Bin 0 -> 1044 bytes unittest/data/example.txt | 35 + unittest/data/example.zip | Bin 0 -> 3680 bytes unittest/data/examples2.tgz | Bin 0 -> 1004 bytes unittest/data/monitor/localhost.dump.txt | 68 + unittest/img/landscape.jpg | Bin 0 -> 798 bytes unittest/img/portrait.jpg | Bin 0 -> 798 bytes unittest/net/FileTcpTaskHandlerTest.py | 165 ++ unittest/net/HttpClientTest.py | 30 + unittest/net/LinuxTcpTaskHandlerTest.py | 50 + unittest/net/NetTester.py | 20 + unittest/net/__init__.py | 0 106 files changed, 26702 insertions(+) create mode 100644 .gitignore create mode 100755 All.sh create mode 100755 Env.sh create mode 100755 
InstallAll create mode 100644 MakeCloud.py create mode 100755 MkTar.sh create mode 100644 appl/.gitignore create mode 100755 appl/BackupBase.py create mode 100755 appl/BackupTool.py create mode 100755 appl/BenchTool.py create mode 100755 appl/DbTool.py create mode 100755 appl/DirTool.py create mode 100755 appl/FtpTool.py create mode 100755 appl/Monitor.py create mode 100755 appl/RestoreTool.py create mode 100755 appl/SvgTool.py create mode 100755 appl/TextTool.py create mode 100755 appl/UrlChecker.py create mode 100755 appl/WebDashClient.py create mode 100755 appl/WebDashServer.py create mode 100755 appl/Webserver.py create mode 100755 appl/WikiTool.py create mode 100755 appl/ZipTool.py create mode 100644 appl/__init__.py create mode 100644 base/BaseLogger.py create mode 100644 base/BaseTool.py create mode 100644 base/CryptoEngine.py create mode 100644 base/FileHelper.py create mode 100644 base/JavaConfig.py create mode 100644 base/LinuxUtils.py create mode 100644 base/Logger.py create mode 100644 base/MemoryLogger.py create mode 100644 base/ProcessHelper.py create mode 100644 base/PythonToTypeScript.py create mode 100644 base/StringUtils.py create mode 100644 base/TextProcessor.py create mode 100644 base/ThreadLogger.py create mode 100644 base/Zipper.py create mode 100644 base/__init__.py create mode 100644 doc/mysql.txt create mode 100644 examples/safe/php.ini create mode 100644 net/EMail.py create mode 100644 net/FileTcpTaskHandler.py create mode 100644 net/FtpEngine.py create mode 100644 net/HttpClient.py create mode 100644 net/LinuxTcpTaskHandler.py create mode 100644 net/TcpClient.py create mode 100644 net/TcpServer.py create mode 100644 net/TcpTaskHandler.py create mode 100644 net/__init__.py create mode 100644 unittest/PackageTest.py create mode 100644 unittest/UnitTestCase.py create mode 100644 unittest/UnitTestSuite.py create mode 100644 unittest/__init__.py create mode 100644 unittest/appl/BackupToolTest.py create mode 100644 
unittest/appl/BaseToolTest.py create mode 100644 unittest/appl/BenchToolTest.py create mode 100644 unittest/appl/DbToolTest.py create mode 100644 unittest/appl/DirToolTest.py create mode 100644 unittest/appl/FtpToolTest.py create mode 100644 unittest/appl/MonitorTest.py create mode 100644 unittest/appl/RestoreToolTest.py create mode 100644 unittest/appl/SvgToolTest.py create mode 100644 unittest/appl/TextToolTest.py create mode 100644 unittest/appl/WikiToolTest.py create mode 100644 unittest/appl/__init__.py create mode 100644 unittest/base/BaseTester.py create mode 100644 unittest/base/CryptoEngineTest.py create mode 100644 unittest/base/FileHelperTest.py create mode 100644 unittest/base/JavaConfigTest.py create mode 100644 unittest/base/LinuxUtilsTest.py create mode 100644 unittest/base/LoggerTest.py create mode 100644 unittest/base/MemoryLoggerTest.py create mode 100644 unittest/base/ProcessHelperTest.py create mode 100644 unittest/base/PythonToTypeScriptTest.py create mode 100644 unittest/base/StringUtilsTest.py create mode 100644 unittest/base/TextProcessorTest.py create mode 100644 unittest/base/ThreadLoggerTest.py create mode 100644 unittest/base/ZipperTest.py create mode 100644 unittest/base/__init__.py create mode 100644 unittest/base/sample.env create mode 100644 unittest/configuration/default.conf create mode 100644 unittest/configuration/error/default.conf create mode 100644 unittest/configuration/localhost.conf create mode 100644 unittest/configuration/observed/default.conf create mode 100644 unittest/configuration/observed/localhost.conf create mode 100644 unittest/configuration/sites/otherport create mode 100644 unittest/configuration/sites/wiki.hamatoma.de create mode 100644 unittest/data/etc.tgz create mode 100644 unittest/data/etc.work.tgz create mode 100644 unittest/data/example.html create mode 100644 unittest/data/example.sh create mode 100644 unittest/data/example.tar create mode 100644 unittest/data/example.tbz create mode 100644 
unittest/data/example.tgz create mode 100644 unittest/data/example.txt create mode 100644 unittest/data/example.zip create mode 100644 unittest/data/examples2.tgz create mode 100644 unittest/data/monitor/localhost.dump.txt create mode 100644 unittest/img/landscape.jpg create mode 100644 unittest/img/portrait.jpg create mode 100644 unittest/net/FileTcpTaskHandlerTest.py create mode 100644 unittest/net/HttpClientTest.py create mode 100644 unittest/net/LinuxTcpTaskHandlerTest.py create mode 100644 unittest/net/NetTester.py create mode 100644 unittest/net/__init__.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..8407395 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +.project +.pydevproject +.settings +*.pyc +2*.sh +/unpackShell.sh +examples/data/ +examples/config +/upd-monitor.sh +/2dragon diff --git a/All.sh b/All.sh new file mode 100755 index 0000000..fa3ef01 --- /dev/null +++ b/All.sh @@ -0,0 +1,14 @@ +#! /bin/bash +TAR=/tmp/rsh.tgz +FN=unpackShell.sh +./MkTar.sh +All2Tmp all $TAR +rsync -av $TAR hm@dromedar:/home/www/public +cat <$FN +#! /bin/bash +cd /usr/share/pyrshell +./upd.sh +EOS +chmod +x $FN +Cmd2All all $FN + diff --git a/Env.sh b/Env.sh new file mode 100755 index 0000000..ce97eb1 --- /dev/null +++ b/Env.sh @@ -0,0 +1,6 @@ +#! /bin/bash +export PYTHONPATH=/home/ws/py/pyrshell:/home/ws/py/pyrshell/base:/usr/lib/python35.zip:/usr/lib/python3.5:/usr/lib/python3.5/plat-x86_64-linux-gnu:/usr/lib/python3.5/lib-dynload:/usr/local/lib/python3.5/dist-packages:/usr/lib/python3/dist-packages:/home/ws/py/pyrshell +export MONITOR_HOST=hawk +export MONITOR_CONFIG=/etc/pymonitor/config.d +export MONITOR_APPL=pymonitor +export MONITOR_LOGFILE=/var/log/local/pymonitor.log diff --git a/InstallAll b/InstallAll new file mode 100755 index 0000000..a355a76 --- /dev/null +++ b/InstallAll @@ -0,0 +1,25 @@ +#! 
/bin/bash +MODE=$1 +if [ "$MODE" != 'min' -a "$MODE" != 'all' -a "$MODE" != 'std' ]; then + echo "Usage: InstallAll [MODE]" + echo "Installs all applications" + echo "MODE: min std all" +else + appl/DbTool.py -v3 install + appl/DirTool.py -v3 install + appl/RestoreTool.py -v3 install + appl/SvgTool.py -v3 install + appl/TextTool.py -v3 install + appl/ZipTool.py -v3 install + if [ "$MODE" = 'std' -o "$MODE" = 'all' ]; then + appl/BackupTool.py -v3 install + appl/Monitor.py -v3 install + fi + if [ "$MODE" = 'all' ]; then + appl/UrlChecker.py -v3 install + appl/WebDashClient.py -v3 install + appl/WebDashServer.py -v3 install + appl/WikiTool.py -v3 install + appl/FtpTool.py -v3 install + fi +fi diff --git a/MakeCloud.py b/MakeCloud.py new file mode 100644 index 0000000..f3fc182 --- /dev/null +++ b/MakeCloud.py @@ -0,0 +1,135 @@ +#!/usr/local/bin/python2.7 +# encoding: utf-8 +''' +MakeCloud -- shortdesc + +MakeCloud is a description + +It defines classes_and_methods + +@author: user_name + +@copyright: 2018 organization_name. All rights reserved. 
+ +@license: license + +@contact: user_email +@deffield updated: Updated +''' + +import sys +import os + +from argparse import ArgumentParser +from argparse import RawDescriptionHelpFormatter + +__all__ = [] +__version__ = 0.1 +__date__ = '2018-03-13' +__updated__ = '2018-03-13' + +DEBUG = 1 +TESTRUN = 0 +PROFILE = 0 + +class CLIError(Exception): + '''Generic exception to raise and log different fatal errors.''' + def __init__(self, msg): + super(CLIError).__init__(type(self)) + self.msg = "E: %s" % msg + def __str__(self): + return self.msg + def __unicode__(self): + return self.msg + +def main(argv=None): # IGNORE:C0111 + '''Command line options.''' + + if argv is None: + argv = sys.argv + else: + sys.argv.extend(argv) + + program_name = os.path.basename(sys.argv[0]) + program_version = "v%s" % __version__ + program_build_date = str(__updated__) + program_version_message = '%%(prog)s %s (%s)' % (program_version, program_build_date) + program_shortdesc = __import__('__main__').__doc__.split("\n")[1] + program_license = '''%s + + Created by user_name on %s. + Copyright 2018 organization_name. All rights reserved. + + Licensed under the Apache License 2.0 + http://www.apache.org/licenses/LICENSE-2.0 + + Distributed on an "AS IS" basis without warranties + or conditions of any kind, either express or implied. + +USAGE +''' % (program_shortdesc, str(__date__)) + + try: + # Setup argument parser + parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter) + parser.add_argument("-r", "--recursive", dest="recurse", action="store_true", help="recurse into subfolders [default: %(default)s]") + parser.add_argument("-v", "--verbose", dest="verbose", action="count", help="set verbosity level [default: %(default)s]") + parser.add_argument("-i", "--include", dest="include", help="only include paths matching this regex pattern. Note: exclude is given preference over include. 
[default: %(default)s]", metavar="RE" ) + parser.add_argument("-e", "--exclude", dest="exclude", help="exclude paths matching this regex pattern. [default: %(default)s]", metavar="RE" ) + parser.add_argument('-V', '--version', action='version', version=program_version_message) + parser.add_argument(dest="paths", help="paths to folder(s) with source file(s) [default: %(default)s]", metavar="path", nargs='+') + + # Process arguments + args = parser.parse_args() + + paths = args.paths + verbose = args.verbose + recurse = args.recurse + inpat = args.include + expat = args.exclude + + if verbose > 0: + print("Verbose mode on") + if recurse: + print("Recursive mode on") + else: + print("Recursive mode off") + + if inpat and expat and inpat == expat: + raise CLIError("include and exclude pattern are equal! Nothing will be processed.") + + for inpath in paths: + ### do something with inpath ### + print(inpath) + return 0 + except KeyboardInterrupt: + ### handle keyboard interrupt ### + return 0 + except Exception, e: + if DEBUG or TESTRUN: + raise(e) + indent = len(program_name) * " " + sys.stderr.write(program_name + ": " + repr(e) + "\n") + sys.stderr.write(indent + " for help use --help") + return 2 + +if __name__ == "__main__": + if DEBUG: + sys.argv.append("-h") + sys.argv.append("-v") + sys.argv.append("-r") + if TESTRUN: + import doctest + doctest.testmod() + if PROFILE: + import cProfile + import pstats + profile_filename = 'MakeCloud_profile.txt' + cProfile.run('main()', profile_filename) + statsfile = open("profile_stats.txt", "wb") + p = pstats.Stats(profile_filename, stream=statsfile) + stats = p.strip_dirs().sort_stats('cumulative') + stats.print_stats() + statsfile.close() + sys.exit(0) + sys.exit(main()) \ No newline at end of file diff --git a/MkTar.sh b/MkTar.sh new file mode 100755 index 0000000..1740d7c --- /dev/null +++ b/MkTar.sh @@ -0,0 +1,6 @@ +#! 
/bin/bash +TAR=/tmp/rsh.tgz +test -f $TAR && rm -f $TAR +tar czf $TAR appl base net unittest doc examples InstallAll +ls -ld $TAR + diff --git a/appl/.gitignore b/appl/.gitignore new file mode 100644 index 0000000..ce97416 --- /dev/null +++ b/appl/.gitignore @@ -0,0 +1 @@ +/DoIt.py diff --git a/appl/BackupBase.py b/appl/BackupBase.py new file mode 100755 index 0000000..9a00eae --- /dev/null +++ b/appl/BackupBase.py @@ -0,0 +1,306 @@ +#! /usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' + +import time +import datetime +import sys +import os.path +import stat +import subprocess +import tempfile +import gzip +import fnmatch +import re +import traceback + +from platform import node +import shutil +sys.path.insert(0, '/usr/share/pyrshell') +import base.Logger +import base.MemoryLogger +import base.StringUtils +import base.JavaConfig +import base.BaseTool +import net.EMail + +class BackupBase (base.BaseTool.BaseTool): + + def __init__(self, globalOptions): + '''Constructor. + @param logger: the logger + ''' + base.BaseTool.BaseTool.__init__(self, globalOptions, 'backup.conf') + self._lastTargetDir = None + + def allDatabases(self, metadataToo=False): + '''Returns all mysql databases. 
+ @param metadataToo: True: mysql is returned too + @param user: a user having the rights to read the database mysql + @param password: the user's password + @return: array with databases + ''' + if '_mysql' in sys.modules: + mode = 'intern' + import _mysql + else: + mode = self._configuration.getString('mysql.mode', 'file') + + rc = [] + user = self._configuration.getString('mysql.user') + password = self._configuration.getString('mysql.code') + if user == None or password == None: + self._logger.error('missing mysql.user and/or mysql.code in configuration (backup.conf)') + elif mode == 'file': + path = '/var/lib/mysql' + nodes = os.listdir(path) + for node in nodes: + full = path + os.sep + node + if node != 'sys' and os.path.isdir(full): + if not metadataToo and (node == 'mysql' or node == 'information_schema' or node == 'performance_schema'): + continue + rc.append(node) + elif mode == 'extern': + argv = ['/usr/bin/mysql', '-u', user, '-p' + password, 'mysql'] + errorFile = tempfile.gettempdir() + os.sep + 'backuptool.err.txt' + with open(errorFile, 'w') as fpError: + proc = subprocess.Popen(argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=fpError.fileno()) + proc.communicate(b'show databases;\nquit;\n') + lineNo = 0 + while True: + lineNo += 1 + line = proc.stdout.readline().decode() + if line == '': + break + if lineNo < 4: + continue + if line.startswith('|'): + db = line.strip('| \n') + if not metadataToo and (db == 'mysql' or db == 'information_schema' or db == 'performance_schema'): + continue + rc.append(db) + self.errorFileToLog(errorFile, 'database mysql') + os.unlink(errorFile) + else: + db =_mysql.connect('localhost', user, password, 'mysql') + db.query('show databases;') + dbs = db.store_result() + results = dbs.fetch_row(0) + for item in results: + value = item[0] + if not metadataToo and (value == b'mysql' or value == b'information_schema' or value == b'performance_schema'): + continue + rc.append(value.decode()) + return rc + + def 
findConfig(self, variable, value, subdir): + '''Finds a configuration file given by a "variable" and a value. + @param variable: a configuration variable + @param value: the value to search + @param subdir: the subdirectory inside the backup configuration, e.g. 'webapps.d' + @return: None: not found otherwise: the JavaConfig instance of the found configuration + ''' + rc = None + configDir = self._configDir + os.sep + subdir + if not os.path.isdir(configDir): + self._logger.error('missing {:s} in {:s}'.format(subdir, self._configDir)) + else: + nodes = os.listdir(configDir) + for node in nodes: + if node.endswith('.conf'): + full = configDir + os.sep + node + config = base.JavaConfig.JavaConfig(full, self._logger) + if config.getString(variable) == value: + rc = config + break + return rc + + def findDbInfo(self, path, defaultUser, defaultPassword): + '''Searches the info about the database in a given directory. + @param path: the directory to search + @return: a tuple (db, user, password) + ''' + configWordPress = path + os.sep + 'wp-config.php' + configShop = path + os.sep + 'config.php' + configTimetracking = path + '/config/sys/main.php' + configNextCloud = path + '/config/config.php' + configMediaWiki = path + '/LocalSettings.php' + configWinfothek = path + '/resources/winfothek.conf' + if os.path.exists(configWinfothek): + # .dburl=mysql:host=localhost;dbname=winfothek + # .dbuser=winfothek + db = base.BaseTool.BasicStatics.grep('.dburl=mysql:', configWinfothek)[0].split('dbname=')[1].strip() + user = base.BaseTool.BasicStatics.grep('.dbuser=', configWinfothek)[0].split('.dbuser=')[1].strip() + password = base.BaseTool.BasicStatics.grep('.dbcode=', configWinfothek)[0].split('.dbcode=')[1].strip() + elif os.path.exists(configWordPress): + # define('DB_NAME', 'testdb'); // Der Name der Datenbank, die du benutzt. 
+ db = base.BaseTool.BasicStatics.grep('DB_NAME', configWordPress)[0].split(')')[0].split(',')[1].strip().strip(",');").strip('"') + user = base.BaseTool.BasicStatics.grep('DB_USER', configWordPress)[0].split(')')[0].split(',')[1].strip().strip(",');").strip('"') + password = base.BaseTool.BasicStatics.grep('DB_PASSWORD', configWordPress)[0].split(');')[0].split("DB_PASSWORD',")[1].strip().strip(",');").strip('"') + elif os.path.exists(configNextCloud): + db = base.BaseTool.BasicStatics.grep('dbname', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"') + user = base.BaseTool.BasicStatics.grep('dbuser', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"') + password = base.BaseTool.BasicStatics.grep('dbpassword', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"') + elif os.path.exists(configShop): + db = base.BaseTool.BasicStatics.grep('dbname', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"') + user = base.BaseTool.BasicStatics.grep('username', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"') + password = base.BaseTool.BasicStatics.grep('password', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"') + elif os.path.exists(configTimetracking): + db = base.BaseTool.BasicStatics.grep("'dsn'", configTimetracking)[0].split('dbname=')[1].strip().strip(',').strip("'").strip('"').strip(',') + user = base.BaseTool.BasicStatics.grep("'user'", configTimetracking)[0].split('=>')[1].strip().strip(',').strip("'").strip('"') + password = base.BaseTool.BasicStatics.grep("'pwd'", configTimetracking)[0].split('=>')[1].strip().strip(',').strip("'").strip('"') + elif os.path.exists(configMediaWiki): + # $wgDBuser = "forum"; + db = base.BaseTool.BasicStatics.grep("wgDBname", configMediaWiki)[0].split('wgDBname =')[1].strip().strip(';').strip('"') + user = base.BaseTool.BasicStatics.grep("wgDBuser", configMediaWiki)[0].split('wgDBuser =')[1].strip().strip(';').strip('"') + 
password = base.BaseTool.BasicStatics.grep("wgDBpassword", configMediaWiki)[0].split('wgDBpassword =')[1].strip().strip(';').strip('"') + else: + db = 'unknowndb' + user = defaultUser + password = defaultPassword + return (db, user, password) + + def relativeBackupPath(self, mode, timestamp=None): + '''Returns the (date based) relative backup path, e.g. '/dayly/Sun' + ''' + relPath = os.sep + mode + os.sep + if mode == 'dayly': + relPath += ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][time.localtime(timestamp).tm_wday] + elif mode == 'weekly': + aTuple = time.localtime(timestamp) + # %W: week number with Mon as first day of the week + weekNo = int(time.strftime('%W', aTuple)) + no = int(weekNo) % 4 + relPath += str(no) + elif mode == 'monthly': + relPath += str(time.localtime(timestamp).tm_mon) + else: + relPath = relPath[0:-1] + return relPath + + def replaceMacros(self, text): + '''Replaces macros (written as '%%') with its values + @param text: a text with macros, e.g. '%etc%/dirs.d/*.conf' + @return: the text with replaced macros, e.g. '/etc/pyrshell/dirs.d/*.conf' + ''' + rc = text.replace('%etc%', self._configDir) + if text.find('%backup') >= 0: + rc = rc.replace('%backup.base%', self._baseTargetDir) + rc = rc.replace('%backup.current%', self._targetDir) + if rc.find('%backup.last%') >= 0: + if self._lastTargetDir == None: + self._logger.error('%backup.last% is undefined. we replace it with %backup.current%: ' + self._targetDir) + rc = rc.replace('%backup.last%', self._targetDir) + else: + rc = rc.replace('%backup.last%', self._lastTargetDir) + node = os.path.basename(self._targetDir) + if node == 'often': + minipath = node + else: + minipath = os.path.basename(os.path.dirname(self._targetDir)) + os.sep + node + rc = rc.replace('%backup.current.minipath%', minipath) + rc = rc.replace('%backup.current.node%', node) + return rc + + def synchronize(self, source, target, deleteToo): + '''Synchronizes a directory from another. 
+ Copies the newer or not existing files from the source to the target, recursive + @param source: the source directory + @param target: the target directory + @param deleteToo: files in target but not existing in source will be deleted + ''' + def logError(arg): + self._logger.error('error while removing directory {:s}: {:s}'.format(fullTarget, arg)) + sourceNodes = os.listdir(source) + targetNodes = os.listdir(target) + dirs = [] + for node in sourceNodes: + fullSource = source + os.sep + node + fullTarget = target + os.sep + node + doCopy = False + infoSource = os.lstat(fullSource) + if stat.S_ISDIR(infoSource.st_mode): + dirs.append(node) + else: + try: + infoTarget = os.lstat(fullTarget) + doCopy = infoTarget.st_mtime > infoSource.st_mtime + except OSError: + doCopy = True + if doCopy: + self._logger.log('copying {:s}'.format(fullSource), 3) + shutil.copy(fullSource, fullTarget) + if deleteToo: + if node in targetNodes: + targetNodes.remove(node) + if deleteToo: + for node in targetNodes: + fullTarget = target + os.sep + node + self._logger.log('deleting {:s}'.format(fullTarget), 3) + if os.path.isdir(fullTarget): + shutil.rmtree(fullTarget) + else: + os.unlink(fullTarget) + for node in dirs: + trgDir = target + os.sep + node + if os.path.exists(trgDir): + if not os.path.isdir(trgDir): + os.unlink(trgDir) + os.mkdir(trgDir) + else: + os.mkdir(trgDir) + self.synchronize(source + os.sep + node, trgDir, deleteToo) + +class ConfigurationSet(): + '''Administrates a set of configuration files stored in a given directory. + ''' + def __init__(self, pattern, logger): + '''Constructor. + @param pattern: a file pattern to identify the configuration files. 
+ @param logger: the logger + ''' + self._logger = logger + parts = pattern.split(':') + self._pattern = parts[0] + self._variables = parts[1:] + self._configurations = [] + self._path = os.path.dirname(self._pattern) + self._namePattern = os.path.basename(self._pattern) + self._lastHandledFile = None + if not os.path.isdir(self._path): + self._logger.error('not a directory: ' + self._path) + else: + files = os.listdir(self._path) + for node in files: + if fnmatch.fnmatch(node, self._namePattern): + self._configurations.append(node) + + def nextFile(self): + '''Generator method returning the next configuration file of the set. + @yields: the next file + ''' + for node in self._configurations: + yield self._path + os.sep + node + + def nextParameters(self): + '''Generator method returning the parameters stored in the next configuration file of the set. + @yields: the parameters of the next file + ''' + for name in self.nextFile(): + self._lastHandledFile = name + config = base.JavaConfig.JavaConfig(name, self._logger) + rc = [] + if len(self._variables) == 0: + self._logger.error('no variables defined in ' + self._pattern) + for variable in self._variables: + value = config.getString(variable) + if value == None: + self._logger.error('variable {:s} not defined in {:s}'.format(variable, name)) + rc.append(config.getString(variable)) + yield rc + +if __name__ == '__main__': + pass diff --git a/appl/BackupTool.py b/appl/BackupTool.py new file mode 100755 index 0000000..94da96d --- /dev/null +++ b/appl/BackupTool.py @@ -0,0 +1,979 @@ +#! 
/usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' + +import time +import datetime +import sys +import os.path +import gzip +import re +import traceback +import subprocess +import shutil +from base.ProcessHelper import ProcessHelper +from base.MemoryLogger import MemoryLogger + +sys.path.insert(0, '/usr/share/pyrshell') +import base.Logger +import base.StringUtils +import base.BaseTool +import net.EMail +import appl.BackupBase +import appl.DbTool + +class BackupTool (appl.BackupBase.BackupBase): + + def __init__(self, globalOptions): + '''Constructor. + @param logger: the logger + ''' + appl.BackupBase.BackupBase.__init__(self, globalOptions) + self._logger.setErrorFilter(re.compile(r'/bin/tar: .*(file changed as we read it|tar: Removing leading|mysqldump:.*Warning.*Using a password)'), True) + + def btrfsInfo(self): + rc = '' + lines = self._processHelper.executeInputOutput(['/usr/sbin/blkid']) + devs = [] + for line in lines: + if line.find('TYPE="btrfs"') >= 0: + # remove leading ':' + devs.append(line.split(' ')[0][0:-1]) + mountDir = '/media/tmp' + self.ensureDirectory(mountDir) + files = os.listdir(mountDir); + if len(files) > 0: + self._processHelper.execute(['/bin/umount', mountDir], False) + for dev in devs: + self._processHelper.execute(['mount', '-o', 'ro', dev, mountDir], False) + lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'subvol', 'list', mountDir]) + if rc == '': + rc = '\n=== btrfs-Info\n' + rc += '= btrfs: ' + dev + '\n' + '\n'.join(lines) + lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'device', 'usage', mountDir]) + rc += '= device usage: ' + dev + '\n' + '\n'.join(lines) + lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'device', 'stats', mountDir]) + rc += '= device stats: ' + dev + '\n' + '\n'.join(lines) + lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'filesystem', 'df', mountDir]) + rc += '= filesytem df: ' + dev + '\n' + '\n'.join(lines) + 
self._processHelper.execute(['/bin/umount', mountDir], False) + self._logger.log('devs: {}'.format(len(devs)), 2) + return rc + + def createConfigExample(self): + self.ensureDirectory(self._configDir) + dirWebApps = self._configDir + os.sep + 'webapps.d' + self.ensureDirectory(dirWebApps) + dirDirs = self._configDir + os.sep + 'dirs.d' + self.ensureDirectory(dirDirs) + filename = self._configDir + os.sep + 'backup.conf' + if not base.StringUtils.hasContent(filename, '#'): + filename = self._configDir + os.sep + 'backup.example' + base.StringUtils.toFile(filename, '''# backup example configuration +target.path=/media/backup +log.file=/var/log/local/backup.log +log.mirror=/media/backup/log/backup.log +zip.volume.size=10g +# jobs to do: Fix is job. (job.dayly...) +job.often=job.often.dirs job.chown job.rsync +job.often.dirs=&saveDirByZipLatest 1 @%etc%/dirs.d/minimal.*.conf:directory +job.dayly=job.sysinfo job.save.webapps job.sys job.minimal.1 job.chown job.once +job.weekly=job.sysinfo job.save.webapps job.sys job.full job.db.all job.chown +job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded +job.full=&saveDirByTar @%etc%/dirs.d/std.*.conf:directory:excluded +job.minimal.1=&saveDirByZipLatest 7 @%etc%/dirs.d/std.*.conf:directory:excluded +job.minimal.2=&saveDirByZipLatest 7 @%etc%/webapps.d/*.conf:directory +job.db=&saveMysql @%etc%/webapps.d/*.conf:db:user:password:sql.file +job.db.all=&saveAllDatabases +job.sysinfo=&systemInfo +job.rsync=&rsync setDromedarDayly %backup.current% +job.once=job.once.zip job.once.rsync job.chown +job.once.rsync=&rsync setOnce %backup.last%/ +job.once.zip=&saveDirByZip @%etc%/webapps.d/*.conf:directory:excluded +job.save.webapps=&saveWebapps @%etc%/webapps.d/*.conf:db:user:password:directory +# job.gitlab.backup=&gitlabBackup +job.chown=&chown bupsrv.bupsrv %backup.current% +# Reading metadata from mysql: +mysql.user=backup +mysql.code=Secret.Phrase +mysql.mode=file +# rsync set for dayly synchronisation 
+setDromedarDayly.user=extbup +setDromedarDayly.target=bupcaribou@dromedar:/opt/extbup/caribou/often/%factor% +setDromedarDayly.factor=4 + +setOnce.user=root +setOnce.target=%backup.base%/once +setOnce.factor=1 + +# Email data for error report: +# Receiver of the error messages: may be a blank separated list of email addresses +send.always=False +location={:s} +admin.email=hm.neutral@gmx.de +smtp.host=smtp.gmx.de +smtp.port=587 +smtp.sender=hm.neutral@gmx.de +smtp.user=hm.neutral@gmx.de +smtp.code=sEcReT +smtp.tls=True +'''.format(self.hostname(True))) + self._logger.log('created: ' + filename) + filenameWebApp = dirWebApps + os.sep + 'testdb.example' + base.StringUtils.toFile(filenameWebApp, '''# Example created by backuptool +db=testdb +user=testuser +password=NotVerySecret +sql.file=testdb +directory=/opt/webapp/testapp +''') + self._logger.log('created: ' + filenameWebApp) + BackupTool.createDirsD(dirDirs, self._logger, True) + + @staticmethod + def createDirsD(dirDirs, logger, varLogToo): + '''Creates the standard files in dirs.d. 
+ @param dirDirs: the full path of the directory dirs.d + @param logger: None or the logger + @param varLogToo: True: the configuration file to store /var/log will be created + Note: the files can be read only by root + ''' + def createConfig(name, content): + if not os.path.exists(name): + base.StringUtils.toFile(name, content) + if logger != None: + logger.log('created: ' + name) + createConfig(dirDirs + os.sep + 'sys.etc.conf', '''# Example created by backuptool +directory=/etc +excluded= +''') + createConfig(dirDirs + os.sep + 'sys.home.bin.conf', '''# Example created by backuptool +directory=/home/bin +excluded= +''') + createConfig(dirDirs + os.sep + 'sys.usr.local.conf', '''# Example created by backuptool +directory=/usr/local +excluded= +''') + if varLogToo: + createConfig(dirDirs + os.sep + 'std.var.log.conf', '''# Example created by backuptool +directory=/var/log +excluded= +''') + createConfig(dirDirs + os.sep + 'std.srv.conf', '''# Example created by backuptool +directory=/srv +excluded= +''') + + def createLockAndReadyFile(self, mode): + '''Creates the names of the lock file and the ready file. + @param mode: 'dayly', 'weekly', 'monthly' + @return: an array [lockFile, readyFile] + ''' + lockFile = "{}{}.{}.lck".format(self._baseTargetDir, os.sep, mode) + readyFile = "{}{}.{}.finished".format(self._baseTargetDir, os.sep, mode) + return [lockFile, readyFile] + + def createWebapp(self, password, patterns): + '''Creates the webapps.d files given by the nginx configurations. + @param patterns: a list of patterns to select the nginx configurations, e.g. 
["*.de", "*.com"] + ''' + nodes = [] + sourceDir = '/etc/nginx/sites-enabled' + trgDir = self._configDir + os.sep + 'webapps.d/' + for pattern in patterns: + nodes += base.BaseTool.BasicStatics.findFiles(sourceDir, pattern, False) + for node in nodes: + if self._verboseLevel >= 3: + self._logger.log('working on ' + node) + root = base.BaseTool.BasicStatics.grep('root', sourceDir + os.sep + node)[0].strip() + root = root.split()[1].replace('/;', '').replace(';', '') + self._logger.log('root: ' + root) + (db, user, password) = self.findDbInfo(root, 'backup', password) + if db == 'unknowndb': + dbFile = root + os.sep + '.dbname' + if os.path.exists(dbFile): + db = base.StringUtils.fromFile(dbFile).strip() + self._logger.log('db: {:s} user: {:s} pw: {:s}'.format(db, user, password)) + base.StringUtils.toFile(trgDir + node + '.conf', '''db={:s} +user={:s} +password={:s} +sql.file={:s} +directory={:s} +'''.format(db, user, password, node + '_' + db, root)) + + def doJob(self, config, statement): + '''Interpretes one statement. 
+ @param config: the configuration file + @param statement: the statement to execute + ''' + statement = statement.strip() + tokens = statement.split(' ') + if statement.startswith('&'): + if tokens[0] == '&saveDirByTar': + self.jobSaveDirByTar(tokens[1:]) + elif tokens[0] == '&saveDirByZip': + self.jobSaveDirByZip(tokens[1:]) + elif tokens[0] == '&saveDirByZipLatest': + self.jobSaveDirByZipLatest(tokens[1:]) + elif tokens[0] == '&deleteFile': + self.jobDeleteFile(tokens[1:]) + elif tokens[0] == '&saveMysql': + self.jobSaveMysql(tokens[1:]) + elif tokens[0] == '&saveAllDatabases': + self.jobSaveAllDatabases() + elif tokens[0] == '&systemInfo': + self.systemInfo(self._targetDir) + elif tokens[0] == '&synchronize': + self.jobSynchronize(tokens[1:]) + elif tokens[0] == '&gitlabBackup': + self.jobGitlabBackup() + elif tokens[0] == '&saveWebApps': + self.jobSaveWebApps(tokens[1:]) + elif tokens[0] == '&chown': + self.jobChown(tokens[1:]) + elif tokens[0] == '&rsync': + self.jobRSynchronize(tokens[1:]) + else: + self._logger.error('unknown command: ' + tokens[0]) + else: + for token in tokens: + savePath = None + if token == 'job.once': + savePath = self._lastTargetDir = self._targetDir + self._targetDir = self._baseTargetDir + os.sep + 'once' + self.ensureDirectory(self._targetDir) + statement2 = config.getString(token) + if statement2 == None: + self._logger.error('unknown variable {:s} in {:s} while working on "{:s}"'.format( + token, config._filename, statement)) + else: + self.doJob(config, statement2) + if savePath != None: + self._targetDir = savePath + + def doBackup(self, mode): + '''Does the backup process controlled by configuration files. + @param mode: 'dayly', 'weekly' ... 
def doBackup(self, mode):
    '''Does the backup process controlled by configuration files.
    @param mode: 'dayly', 'weekly' ...
    '''
    # switch to the configured log file if it is not already the current one:
    logFile = self._configuration.getString('log.file')
    if logFile is not None and (not hasattr(self._logger, '_logfile') or os.path.abspath(logFile) != os.path.abspath(self._logger._logfile)):
        logger2 = base.Logger.Logger(logFile, self._logger._verbose)
        logger2.transferErrors(self._logger)
        self._logger = logger2
    logFile2 = self._configuration.getString('log.mirror')
    if logFile2 is not None:
        # FIX: the mirror logger was opened with logFile (the main log) instead of logFile2
        logger3 = base.Logger.Logger(logFile2, False)
        self._logger.setMirror(logger3)
    self._mode = mode
    self._baseTargetDir = self._configuration.getString('target.path')
    self._targetDir = self._baseTargetDir
    try:
        if self._configuration.getString('job.' + mode) is None:
            self._logger.error('missing job.{:s} in {:s} Wrong mode?'.format(mode, self._configuration._filename))
        elif self._targetDir is None:
            # FIX: the original formatted None with '{:s}' (TypeError); name the missing key instead
            self._logger.error('missing "target.path" in {:s}'.format(self._configuration._filename))
        elif not os.path.isdir(self._targetDir):
            # FIX: the original message lacked the placeholder for the configuration filename
            self._logger.error('{:s} is not a directory. See "target.path" in {:s}'.format(self._targetDir, self._configuration._filename))
        else:
            relPath = self.relativeBackupPath(mode)
            self._targetDir += relPath
            if self.ensureDirectory(self._targetDir) is not None:
                if mode in ('dayly', 'weekly', 'monthly'):
                    # let the symlink "current" point to the newest backup directory:
                    linkTarget = os.path.dirname(self._targetDir) + os.sep + 'current'
                    if os.path.exists(linkTarget):
                        os.unlink(linkTarget)
                    os.symlink(os.path.basename(self._targetDir), linkTarget)
                statement = self._configuration.getString('job.' + mode)
                if statement is None:
                    self._logger.error('missing job.' + mode + ' in ' + self._configuration._filename)
                else:
                    [lockFile, readyFile] = self.createLockAndReadyFile(mode)
                    if os.path.exists(lockFile):
                        # NOTE(review): only reported — the backup still runs; confirm whether it should abort
                        self._logger.error('backup [{}] is already running: {} exists'.format(mode, lockFile))
                    base.StringUtils.toFile(lockFile, '')
                    base.BaseTool.BasicStatics.ensureFileDoesNotExist(readyFile, None, self._logger)
                    self.doJob(self._configuration, statement)
                    # the ready file marks a successful run; the lock file is removed again:
                    base.StringUtils.toFile(readyFile, '')
                    base.BaseTool.BasicStatics.ensureFileDoesNotExist(lockFile, None, self._logger)
    except Exception as exc:
        self._logger.error('backup aborted with exception: ' + str(exc))
        traceback.print_exc()
    # error report / status email:
    subject = None
    if self._logger._errors > 0:
        subject = 'Backup {:s} failed on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
    elif self._configuration.getString('send.always', 'False').startswith('T'):
        subject = 'Backup {:s} finished on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
    if subject is not None:
        now = datetime.datetime.now()
        body = now.strftime('%Y.%m.%d %H:%M:%S') + ' Backup {:s} finished with {:d} error(s)\n\n'.format(mode, self._logger._errors)
        body += '\n'.join(self._logger._firstErrors)
        self.sendStatusEmail(subject, body)
def example(self):
    '''Creates an example configuration file.
    '''
    example = '''# backup example configuration
target.path=/media/backup
log.file=/var/log/local/backup.log
log.mirror=/media/backup/log/backup.log
zip.volume.size=10g
# jobs to do: Fix is job. (job.dayly...)
job.often=job.often.dirs job.chown job.rsync
job.often.dirs=&saveDirByZipLatest 1 @%etc%/dirs.d/minimal.*.conf:directory
job.dayly=job.sysinfo job.sys job.minimal.1 job.chown
job.weekly=job.sysinfo job.sys job.full job.db.all job.chown
job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded
job.full=&saveDirByTar @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.1=&saveDirByZipLatest 7 @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.2=&saveDirByZipLatest 7 @%etc%/webapps.d/*.conf:directory
job.db=&saveMysql @%etc%/webapps.d/*.conf:db:user:password:sql.file
job.db.all=&saveAllDatabases
job.sysinfo=&systemInfo
job.rsync=&rsync setDromedarDayly %backup.current%

# job.gitlab.backup=&gitlabBackup
job.chown=&chown bupsrv.bupsrv %backup.current%
# Reading metadata from mysql:
mysql.user=backup
mysql.code=Secret.Phrase
mysql.mode=file
# rsync set for dayly synchronisation
setDromedarDayly.user=extbup
setDromedarDayly.target=bupcaribou@dromedar:/opt/extbup/caribou/often/%factor%
setDromedarDayly.factor=4
# Email data for error report:
# Receiver of the error messages: may be a blank separated list of email addresses
send.always=False
location={:s}
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.neutral@gmx.de
smtp.user=hm.neutral@gmx.de
smtp.code=sEcReT
smtp.tls=True
'''.format(base.BaseTool.BasicStatics.hostname(True))
    self.storeExample(example)

def jobGitlabBackup(self):
    '''Creates a gitlab backup via gitlab-rake and moves it into the target directory.
    '''
    # gitlab_rails['backup_path'] = "/var/opt/gitlab/backups"
    start = time.time()
    backupPath = '/var/opt/gitlab/backups'
    regExpr = re.compile(r'gitlab_rails\[\'backup_path\'\]\s*=\s*"(.*?)"')
    lines = base.StringUtils.grepInFile('/etc/gitlab/gitlab.rb', regExpr, 1)
    if len(lines) != 1:
        self._logger.error('cannot find backup path in /etc/gitlab/gitlab.rb')
        return
    matcher = regExpr.search(lines[0])
    if matcher:
        backupPath = matcher.group(1)
    self.clearDirectory(backupPath)
    executable = '/usr/bin/gitlab-rake'
    if not os.path.exists(executable):
        self._logger.error('missing {}: could not save gitlab'.format(executable))
        return
    self._processHelper.execute([executable, 'gitlab:backup:create'], True)
    backupFile = base.BaseTool.BasicStatics.firstFile(backupPath, '*.tar')
    if backupFile is None:
        self._logger.error('missing backup file in ' + backupPath)
        return
    self.logFile(backupFile, 'gitlab backup file: %f %s %t (%r)', start)
    target = self._targetDir + os.sep + 'gitlab_backup.tar'
    nameFile = self._targetDir + os.sep + 'gitlab_name.txt'
    # FIX: the arguments were swapped/concatenated — the explaining comment went into the
    # *filename* and the content was only the original name. Write name + comment as content:
    base.StringUtils.toFile(nameFile, backupFile + '\n#above: the original name of the backup file\n# current name: gitlab_backup.tar')
    self._logger.log('name saved into {}'.format(nameFile), 2)
    try:
        shutil.copy(backupFile, target)
        os.unlink(backupFile)
        self._logger.log('{} moved to {}'.format(backupFile, target), 3)
    except OSError as exc:
        self._logger.error('cannot move archive {:s} to {:s}: {:s}'.format(backupFile, target, str(exc)))
+ @param argv: contains the owner[.group] and the directories + ''' + if len(argv) < 1: + self._logger.error('jobChown: missing owner') + elif re.match(r'\w+([.:]\w+)?$', argv[0]) == None: + self._logger.error('jobChown: wrong user[.group]: ' + argv[0]) + elif len(argv) < 2: + self._logger.error('jobChown: missing directory') + elif self._beingRoot: + owner = argv[0] + self._logger.log('jobChown ' + ' '.join(argv), 2) + for name in argv[1:]: + path = self.replaceMacros(name) + if not os.path.isdir(path): + self._logger.error('jobChown: not a directory: ' + path) + else: + self._logger.log('chown {:s} {:s}'.format(owner, path), 3) + argv = ['/bin/chown', '-R', owner, path] + self._processHelper.execute(argv, True) + self._logger.log('chmod -R g+w {:s}'.format(path), 3) + argv = ['/bin/chmod', '-R', 'g+rw', path] + self._processHelper.execute(argv, True) + + def jobDeleteFile(self, argv): + '''Saves some directories in some tar archivs. + @param argv: contains the source directories + ''' + for name in argv: + if os.path.exists(name): + self._logger.log('deleting ' + name, 3) + os.unlink(name) + if os.path.exists(name): + self._logger.error('cannot delete ' + name) + else: + self._logger.log('deleted: ' + name) + + def jobRSynchronize(self, argv): + '''Executes the program rsync to copy files to another host. 
+ @param argv: contains the name of a parameter set and a list of source files/dirs + e.g.['setDromedarDayly', '%backup.current%'] + ''' + self._logger.log('jobRSynchronize ' + ' '.join(argv), 2) + ix = 0 + setName = argv[0] + user = self._configuration.getString(setName + '.user', '') + target = self._configuration.getString(setName + '.target', '') + factor = self._configuration.getInt(setName + '.factor', 0) + if user == '' or target == '' or factor == '': + self._logger.error('rsync set {} incomplete: user: {} target: {} factor: {}'.format(setName, user, target, '' if factor == 0 else str(factor) )) + else: + time = datetime.datetime.now().time() + hour = int(round(time.hour*60+time.minute / 60.0)) % factor + target = self.replaceMacros(target.replace('%factor%', str(hour))) + cmd = ['/usr/bin/sudo', '-u'] + cmd.append(user) + cmd.append('/usr/bin/rsync') + cmd.append('-a') + if self._verboseLevel >= 2: + cmd.append('-v') + cmd.append('--progress') + argv = argv[1:] + while ix < len(argv): + source = self.replaceMacros(argv[ix]) + cmd.append(source) + ix += 1 + cmd.append(target) + self._processHelper.execute(cmd, self._verboseLevel > 0) + + def jobSaveAllDatabases(self): + '''Saves all databases of a mysql system. + ''' + self._logger.log('saving all databases', 2) + dbTool = appl.DbTool.DbTool(self._globalOptions) + admin = dbTool._configuration.getString('admin.user') + code = dbTool._configuration.getString('admin.code') + dbTool.exportAllDatabases(admin, code, self._targetDir) + + def jobSaveAllDatabases2(self): + '''Saves all databases of a mysql system. 
+ ''' + self._logger.log('saving all databases', 2) + dbs = self.allDatabases(True) + user = self._configuration.getString('mysql.user') + password = self._configuration.getString('mysql.code') + dbTool = appl.DbTool.DbTool(self._globalOptions) + if dbs != None: + for db in dbs: + name = db + currentUser = user + currentPassword = password + config = self.findConfig('db', db, 'webapps.d') + if config != None: + path = config.getString('directory') + if path != None: + name = os.path.basename(path) + '_' + db + currentUser = config.getString('user') + currentPassword = config.getString('password') + if currentPassword == None: + currentUser = user + currentPassword = password + if self._verboseLevel >= 3: + self._logger.log('saving db ' + db) + start = time.time() + outputFile = self._targetDir + os.sep + name + '.sql.gz' + dbTool.exportDatabase(db, currentUser, currentPassword, outputFile) + self.logFile(outputFile, '%f: %s %t created in %r', start) + + def jobSaveDirByTar(self, argv): + '''Saves some directories in some tar archivs. 
+ @param argv: contains the source directories + ''' + self._logger.log('jobSaveDirByTar' + ' '.join(argv), 2) + + for source in argv: + if not source.startswith('@'): + self._logger.log('saving directory {:s} into tar'.format(source), 3) + name = self.fullPathToName(source) + self.saveDirectoryByTar(name, source, self._targetDir, None) + else: + pattern = self.replaceMacros(source[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + opts = None + if len(arguments) < 1: + self._logger.error('jobSaveDirByTar: configuration pattern does not contain at least 1 variable (directory[, excluded]): {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + path = arguments[0] + if len(arguments) > 1: + excluded = arguments[1] + if excluded != None and len(excluded) > 2: + listExcluded = excluded[1:].split(excluded[0]) + for entry in listExcluded: + if entry != '': + if opts == None: + opts = ['--exclude=' + entry] + else: + opts.append('--exclude=' + entry) + name = self.fullPathToName(path) + self._logger.log('saving directory {:s} into tar'.format(source), 3) + self.saveDirectoryByTar(name, path, self._targetDir, opts) + + def jobSaveDirByZip(self, argv): + '''Saves some directories in some tar archives. 
+ Note: target is taken from self._targetDir + @param argv: contains the source directories + ''' + if len(argv) < 1: + self._logger.error('SaveDirByZip: missing source') + else: + self._logger.log('jobSaveDirByZip' + ' '.join(argv), 2) + for source in argv: + if not source.startswith('@'): + name = self.fullPathToName(source) + self._logger.log('saving {:s} into {:s}'.format(source, name), 3) + self.saveDirectoryByZip(name, source, self._targetDir, None) + else: + pattern = self.replaceMacros(source[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + opts = None + if len(arguments) < 1: + self._logger.error('jobSaveDirByZip: configuration pattern does not contain at least 1 variable (directory[, excluded]): : {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + path = arguments[0] + if len(arguments) > 1: + excluded = arguments[1] + if excluded != None and len(excluded) > 2: + listExcluded = excluded[:1].split(excluded[0]) + for entry in listExcluded: + if entry == '': + continue + if opts == None: + opts = ['-x', entry] + else: + opts.append(entry) + name = self.fullPathToName(path) + self._logger.log('saving {:s} into {:s} {:s}'.format(path, name, + '' if opts == None else ' '.join(opts)), 3) + self.saveDirectoryByZip(name, path, self._targetDir, opts) + + def jobSaveDirByZipLatest(self, argv): + '''Saves some directories in some tar archivs but only the "latest" files. 
+ @param argv: contains the source directories + ''' + doIt = False + if len(argv) < 2: + self._logger.error('missing source for SaveDirByZipLatest') + else: + try: + interval = int(argv[0]) + doIt = True + except ValueError: + self._logger.error('SaveDirByZipLatest: argument 1 is not an integer: ' + argv[0]) + if doIt: + self._logger.log('jobSaveDirByZipLatest' + ' '.join(argv), 2) + for source in argv[1:]: + if not source.startswith('@'): + name = self.fullPathToName(source) + self._logger.log('saving {:s} into {:s}'.format(source, name), 3) + self.saveDirectoryByZip(name, source, self._targetDir, None, interval) + else: + pattern = self.replaceMacros(source[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + if len(arguments) < 1: + self._logger.error('jobSaveDirByZipLatest: configuration pattern does not contain 1 variable (directory): : {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + if len(arguments) > 1: + self._logger.log("{} contains ignored parameter(s) below {}".format(configSet._lastHandledFile, arguments[0])) + path = arguments[0] + name = self.fullPathToName(path) + self._logger.log('saving {:s} into {:s}'.format(path, name), 3) + self.saveDirectoryByZip(name, path, self._targetDir, None, interval) + + def jobSaveMysql(self, argv): + '''Dumps a mysql database into a gzipped sql file. 
+ @param argv: contains a list of configuration patterns, + e.g.['@%etc%/webapps.d/*.conf:db:user:password:sql.file'] + ''' + self._logger.log('jobSaveMysql ' + ' '.join(argv), 2) + dbTool = appl.DbTool.DbTool(self._globalOptions) + for pattern in argv: + if pattern.startswith('@'): + pattern = self.replaceMacros(pattern[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + if len(arguments) != 4: + self._logger.error('jobSaveMysql: configuration pattern does not contain 4 variables (db,user,pw,sqlfile): : {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + domain = os.path.basename(configSet._lastHandledFile)[0:-5] + self._logger.log('saving db {:s}'.format(arguments[0]), 3) + # self.saveMysql(arguments[3], arguments[0], arguments[1], arguments[2], self._targetDir) + dbTool.exportDatabase(arguments[0], arguments[1], arguments[2], self._targetDir + os.sep + domain + '.sql.gz') + + def jobSynchronize(self, argv): + '''Synchronizes a directory from another. 
+ @param argv: contains a list of configuration patterns, + e.g.['@%etc%/dirs.d/rsync_*.conf:source:target:deletetoo'] + ''' + self._logger.log('jobSynchronize ' + ' '.join(argv), 2) + ix = 0 + deleteToo = False + while ix < len(argv): + pattern = argv[ix] + ix += 1 + if pattern.startswith('--delete'): + if ix >= len(argv): + break + deleteToo = True + pattern = pattern[8:].lower() + if pattern.startswith('=f'): + deleteToo = False + pattern = argv[ix] + ix += 1 + if pattern.startswith('@'): + pattern = self.replaceMacros(pattern[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + if len(arguments) < 2: + self._logger.error('jobSynchronize: configuration pattern does not contain 2 variables (source,target[,deleteToo]): : {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + deleteToo2 = deleteToo + if (len(arguments) > 2): + deleteToo2 = arguments[2].startswith('T') or arguments[2].startswith('t') + source = self.replaceMacros(arguments[0]) + target = self.replaceMacros(arguments[1]) + self.ensureDirectory(target) + self.synchronize(source, target, deleteToo2) + else: + source = self.replaceMacros(pattern) + if ix >= len(argv): + self._logger.error('jobSynchronize: missing target to source ' + pattern) + else: + target = self.replaceMacros(argv[ix]) + ix += 1 + self.ensureDirectory(target) + self.synchronize(source, target, deleteToo) + + def jobSaveWebApps(self, argv): + '''for all webapps: dump the database into the directory /db + @param argv: contains a list of configuration patterns, + e.g.['@%etc%/webapps.d/*.conf:db:user:password:sql.file:directory'] + ''' + self._logger.log('jobSaveWebapps ' + ' '.join(argv), 2) + for pattern in argv: + if pattern.startswith('@'): + pattern = self.replaceMacros(pattern[1:]) + configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger) + for arguments in configSet.nextParameters(): + if len(arguments) != 4: + 
self._logger.error('jobSaveMysql: configuration pattern does not contain 4 variables (db,user,pw,directory): : {} file: {}' + .format(pattern, configSet._lastHandledFile)) + break + else: + # config file without '.conf': + domain = os.path.basename(configSet._lastHandledFile)[0:-5] + db = arguments[0] + user = arguments[1] + code = arguments[2] + target = arguments[3] + if not os.path.isdir(target): + self._logger.error('missing directory {} for {}'.format(target, domain)) + else: + self._logger.log('saving db {:s}'.format(arguments[0]), 3) + tool = appl.DbTool.DbTool(self._globalOptions) + target += os.sep + 'db' + self.ensureDirectory(target) + tool.exportDatabase(db, user, code, target + os.sep + domain + '.sql.gz') + + def sendStatusEmail(self, subject, body): + '''Sends an email. + @param subject the email's subject + @param body the emails's content + ''' + recipient = self._configuration.getString('admin.email') + sender = self._configuration.getString('smtp.sender'), + host = self._configuration.getString('smtp.host') + port = self._configuration.getString('smtp.port') + user = self._configuration.getString('smtp.user') + code = self._configuration.getString('smtp.code') + value = self._configuration.getString('smtp.tls') + withTls = value != None and value.lower().startswith('t') + if not (recipient and sender and host and port and user and code): + self._logger.error('email configuration is wrong') + else: + net.EMail.sendStatusEmail(recipient, subject, body, sender, host, port, user, code, withTls, self._logger) + + def snapshot(self, argv): + '''Makes a snapshot. + @param argv: program arguments, e.g. 
['dayly', '/media/cloud/mycloud/data', '--create-if-missing'] + ''' + (argv, opts) = self.splitArgsAndOpts(argv) + if len(argv) < 2: + self.usage('missing arguments') + else: + mode = argv[0] + dataDir = argv[1] + baseDir = os.path.dirname(dataDir) + snapshotBase = baseDir + os.sep + mode + argv = argv[2:] + createIfMissing = False + if len(opts) > 0: + for opt in opts: + if opt == '--create-if-missing': + createIfMissing = True + else: + self.usage('unknown option: ' + opt) + if mode not in ['dayly', 'weekly', 'monthly']: + self.usage('invalid mode {}: expected: dayly or weekly or monthly'.format(mode)) + elif not os.path.isdir(dataDir): + self.usage('not a data directory: ' + dataDir) + elif baseDir == '': + self.usage('missing the parent directory: ' + dataDir) + elif not base.FileHelper.distinctPaths(dataDir, snapshotBase): + self.usage('nested directories: {} [] / {} [{}]'.format(dataDir, os.path.realpath(dataDir), + snapshotBase, os.path.realpath(snapshotBase))) + else: + if not os.path.isdir(snapshotBase): + if not createIfMissing: + self.usage('not a snapshot directory: ' + snapshotBase) + else: + self.ensureDirectory(snapshotBase) + tool = appl.DirTool.DirTool(self._globalOptions) + snapshotDir = baseDir + self.relativeBackupPath(mode) + if os.path.exists(snapshotDir): + shutil.rmtree(snapshotDir, False) + if os.path.exists(snapshotDir): + os.rename(snapshotDir, snapshotDir + '.{:.0}'.format(time.time())) + tool.snapshot([dataDir, snapshotDir]) + + def smartInfo(self): + '''Assembles the SMART infos about some disk devices. + @return a list of lines describing the SMART info + ''' + devices = self._configuration.getString('smart.devices').split(',') + output = ['= SMART info'] + for device in devices: + output.append('== ' + device) + lines = self._processHelper.executeInputOutput(['/usr/sbin/smartctl', '-a', device]) + output += lines + return output + + def systemInfo(self, target): + '''Assembles interesting informations about the current linux system. 
def systemInfo(self, target):
    '''Assembles interesting informations about the current linux system.
    Manually installed packets + disk devices + mounts
    @param target: the target directory
    '''
    start = time.time()
    # zcat /var/log/apt/history.log.*.gz | cat - /var/log/apt/history.log | grep -Po '^Commandline: apt-get install (?!.*--reinstall)\K.*'
    path = '/var/log/apt'
    nodes = os.listdir(path)
    packets = []
    for node in nodes:
        if node.startswith('history.log') and node.endswith('.gz'):
            full = path + os.sep + node
            with gzip.open(full, 'r') as fp:
                for line in fp:
                    if line.startswith(b'Commandline: apt-get install'):
                        # collapse multiple blanks, then split into single packet names
                        packets += line[29:].decode().strip().replace('  ', ' ').replace('  ', ' ').split(' ')
    with open('/var/log/apt/history.log', 'r') as fp:
        for line in fp:
            if line.startswith('Commandline: apt-get install'):
                packets += line[29:].strip().replace('  ', ' ').replace('  ', ' ').split(' ')
    # only "real" devices (path contains '/'), no pseudo filesystems:
    mounts = []
    with open('/proc/mounts', 'r') as fp:
        for line in fp:
            parts = line.split(' ')
            if parts[0].find('/') >= 0:
                mounts.append(line)
    lsblkInfo = subprocess.check_output('/bin/lsblk')
    fdiskInfo = subprocess.check_output(['/sbin/fdisk', '-l'])
    networkInfo = subprocess.check_output(['/sbin/ip', 'addr'])
    routeInfo = subprocess.check_output(['/sbin/ip', 'route', 'show'])
    with open('/proc/diskstats', 'r') as fp:
        diskStats = fp.read()
    lvmInstalled = self._beingRoot and os.path.exists('/sbin/vgdisplay')
    if lvmInstalled:
        pvInfoShort = subprocess.check_output('/sbin/pvs')
        pvInfo = subprocess.check_output('/sbin/pvdisplay')
        vgInfoShort = subprocess.check_output('/sbin/vgs')
        vgInfo = subprocess.check_output('/sbin/vgdisplay')
        lvInfoShort = subprocess.check_output('/sbin/lvs')
        lvInfo = subprocess.check_output('/sbin/lvdisplay')
    fnOutput = target + os.sep + 'system.info.gz'
    # FIX: btrfsInfo was referenced below even when not root (NameError); give defaults
    btrfsInfo = ''
    smartInfo = None  # TODO(review): collected but never written to the output file
    if os.geteuid() == 0:
        btrfsInfo = self.btrfsInfo()
        smartInfo = self.smartInfo()
    with gzip.open(fnOutput, 'wb') as fp:
        fp.write(b'=== manually installed packets:\n')
        for packet in packets:
            if packet != '':
                fp.write('apt-get install -y {:s}\n'.format(packet).encode())
        fp.write(b'\n=== lsblk:\n' + lsblkInfo)
        fp.write(b'\n=== fdisk:\n' + fdiskInfo)
        fp.write(b'\n=== mounts:\n' + ''.join(mounts).encode())
        fp.write(b'\n=== diskstats:\n' + diskStats.encode())
        if os.path.exists('/proc/mdstat'):
            with open('/proc/mdstat', 'r') as fp2:
                mdstat = fp2.read()
            fp.write(b'\n=== mdstat:\n' + mdstat.encode())
        if lvmInstalled:
            fp.write(b'\n=== pvs:\n' + pvInfoShort)
            fp.write(b'\n=== vgs:\n' + vgInfoShort)
            fp.write(b'\n=== lvs:\n' + lvInfoShort)
            fp.write(b'\n=== pvdisplay:\n' + pvInfo)
            fp.write(b'\n=== vgdisplay:\n' + vgInfo)
            fp.write(b'\n=== lvdisplay:\n' + lvInfo)
        if btrfsInfo != '':
            fp.write(btrfsInfo.encode('utf-8'))
        fp.write(b'\n=== ip addr:\n' + networkInfo)
        fp.write(b'\n=== ip route:\n' + routeInfo)
    self.logFile(fnOutput, '%f: %s %t created in %r', start)

def testCompleteness(self, target=None):
    '''Tests whether the backup is complete.
    @param target: unused — kept for interface compatibility; the method always
        uses self._targetDir (and main() may call it without an argument now)
    '''
    target = self._targetDir
    dbs = self.allDatabases()
    for db in dbs:
        name = db
        config = self.findConfig('db', db, 'webapps.d')
        if config is not None:
            path = config.getString('directory')
            if path is not None:
                name = os.path.basename(path) + '_' + db
        outputFile = target + os.sep + name + '.sql.gz'
        if not os.path.exists(outputFile):
            self._logger.error('missing SQL save: ' + outputFile)
        else:
            self._logger.log('SQL file found: ' + outputFile, 3)
+ @param mode: 'dayly', 'weekly', 'monthly' + ''' + self._logger.log('Testing whether the last backup mode [{}] has been finished normally'.format(mode), 1) + self._baseTargetDir = self._configuration.getString('target.path') + if self.ensureDirectory(self._baseTargetDir) != None: + [lockFile, readyFile] = self.createLockAndReadyFile(mode) + if os.path.exists(readyFile): + self._logger.log('ready file found: {}'.format(readyFile), 1) + else: + if not os.path.exists(lockFile): + error = 'backup [{}] not ready but no lockfile exists'.format(mode) + else: + date = datetime.datetime.fromtimestamp(os.stat(lockFile).st_mtime) + error = 'backup [{}] not ready. Start time: {}'.format(mode, date.strftime('%Y.%m.%d %H:%M:%S')) + self._logger.error(error) + subject = 'Backup {:s} not ready on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True))) + self.sendStatusEmail(subject, error) + +def usage(): + '''Returns an info about usage. + ''' + return """usage: backuptool [] + Dayly/weekly/monthly backup including system info and mysql backup +GLOBAL_OPTS +GLOBAL_MODES +: + completeness + tests whether the last backup task was successfully completed + dayly + dayly actions + monthly + monthly actions + snapshot mode data-dir [--create-if-missing] + makes a snapshot for the given mode and directory + mode: dayly or weekly or monthly + data-dir: the directory laying in the base directory containing 'dayly' ... + create-if-missing:the mode specific base (data-dir../dayly ...) will be created if needed + test-email + Sends an email to test email configuration + test-ready + Tests whether the last backup with has been finished successfully + webapps ... + Creates a configuration from nginx configuration + weekly + weekly actions +Examples: +backuptool -q backup dayly +backuptool -r -v3 backup weekly + """ + +def main(argv): + '''The main routine. + @param argv: the program arguments, e.g. 
['/usr/local/bin/backuptool', '-v3', 'backup', 'dayly'] + ''' + appInfo = base.BaseTool.ApplicationInfo('backup', 'appl/BackupTool.py', usage) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = BackupTool(options) + rc = None + (cmd, argv) = tool.handleStandardCommands(argv) + if cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd == 'backup': + if len(argv) == 0: + mode = 'dayly' + else: + mode = argv[0] + rc = tool.doBackup(mode) + elif cmd == 'webapps': + if len(argv) < 2: + tool.usage('missing password and/or pattern(s)') + else: + rc = tool.createWebapp(argv[0], argv[1:]) + elif cmd == 'test-email': + tool.sendStatusEmail('Test email sending', 'Seems to work: email sending'); + elif cmd == 'test-ready': + if len(argv) < 1: + tool.usage('missing for command test-ready') + else: + mode = argv[0] + if mode != 'dayly' and mode != 'weekly' and mode != 'monthly': + tool.usage('unknown mode for command test-ready: ' + mode) + else: + tool.testReady(mode) + elif cmd == 'completeness': + tool.testCompleteness() + elif cmd == 'snapshot': + tool.snapshot(argv) + else: + tool.usage('unknown command: ' + cmd) + if rc != None: + tool.usage(rc) + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/BenchTool.py b/appl/BenchTool.py new file mode 100755 index 0000000..1d5feb2 --- /dev/null +++ b/appl/BenchTool.py @@ -0,0 +1,243 @@ +#! /usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' +import time +import sys +import os.path +import math + +sys.path.insert(0, '/usr/share/pyrshell') +import base.BaseTool +import base.CryptoEngine + +class Statistics: + def __init__(self): + self._readOnly = False + self._readCount = 0 + self._bytesRead = 0 + self._writeCount = 0 + self._bytesWritten = 0 + self._timeRead = 0 + self._timeWrite = 0 + +class BenchTool (base.BaseTool.BaseTool): + + def __init__(self, globalOptions): + '''Constructor. 
+ @param logger: the logger + ''' + self._globalOptions = globalOptions + base.BaseTool.BaseTool.__init__(self, globalOptions, 'benchtool.conf') + self._dirs = [] + self._files = [] + self._written = [] + self._factorPrint = 10000 + self._random = base.CryptoEngine.CryptoEngine(self._logger) + self._random.setSeedRandomly() + self._ratioRead = 0.80 + self._maxWritten = 1000 + self._statistics = Statistics() + self._bufferLength = 8192 + self._buffer = self._random.nextString(self._bufferLength, 'ascii95') + # in Blocks + self._maxFileLength = 4092 + self._ratioSmallFiles = 0.8 + self._fileNo = 0 + self._maxLoops = 0x7fffffffffff + self._maxWritten = 1000 + self._readOnly = False + + def benchFiles(self): + loopNo = 0 + while loopNo < self._maxLoops: + loopNo += 1 + if self._readOnly: + self.benchRead() + else: + current = self._random.nextInt(100000) + if current < 100000*self._ratioRead: + self.benchRead() + else: + self.benchWrite() + if loopNo % self._factorPrint == 0: + self.fileStatistics() + self.fileStatistics() + + def benchRead(self): + '''Reads a randomly choosen file from the tree. + ''' + self._statistics._readCount += 1 + ix = self._random.nextInt(len(self._files)) + start = time.time() + with open(self._files[ix], 'rb') as fp: + again = True + while again: + buffer = fp.read(self._bufferLength) + if len(buffer) == 0: + again = False + else: + self._statistics._bytesRead += len(buffer) + self._statistics._timeRead += time.time() - start + + def benchWrite(self): + '''Reads a randomly choosen file from the tree. 
+ ''' + self._statistics._writeCount += 1 + if len(self._written) >= self._maxWritten: + ix = self._random.nextInt(len(self._written)) + os.unlink(self._written[ix]) + del self._written[ix] + ix = self._random.nextInt(len(self._files)) + start = time.time() + with open(self._files[ix], 'w') as fp: + if self._random.nextInt(100000) < 100000 * self._ratioSmallFiles: + # small files + uBound = self._random.nextInt(self._bufferLength, 16) + fp.write(self._buffer[0:uBound]) + self._statistics._bytesWritten += uBound + else: + for ix in range(self._random.nextInt(self._maxFileLength, 4)): + fp.write(self._buffer) + self._statistics._bytesWritten += self._bufferLength + self._statistics._timeWrite += time.time() - start + + def buildName(self): + '''Builds a unique filename inside the given file tree. + ''' + self._fileNo += 1 + path = self._dirs[self._random.nextInt(len(self._dirs))] + name = '{}/x!%{}.bench'.format(path, self._fileNo) + return name + + def calculate(self, argv): + '''Calulate benchmark: reads and write files. + @param argv: the program arguments, e.g. 
['/home', '--read-only'] + ''' + def div(a,b): + try: + if abs(b) < 1E-20: + b = 1E-20 + rc = a / b + except ZeroDivisionError as exc: + print(str(exc)) + return rc + + args, options = self.splitArgsAndOpts(argv) + for opt in options: + if opt.startswith('--max-loops='): + self._maxLoops = self.integerOption(opt) + elif opt.startswith('--max-written='): + self._maxWritten = self.integerOption(opt) + elif opt.startswith('--factor-print='): + self._factorPrint = self.integerOption(opt) + else: + self.usage('unknown option: ' + opt) + loops = 0 + x = 1.03 + while loops < self._maxLoops: + loops += 1 + for ix in range(10000): + x = x * ix + math.sqrt(abs(x)*3) + x -= abs(math.pow(0.1 + abs(x), 0.33)) + x *= math.sin(x+div(loops, div(loops, loops * 5.3333))) + math.log(1+abs(x*x)) + a = 0.03 + abs(min(20 - div(x, (abs(x) + 10)), abs(div(x, (x + math.cos(div(x + 99, x + 3))))))) + x -= math.exp(a) + x = max(min(x, 1E10), -1E10) + div(ix + 77, -ix -37) + self._random.nextInt(100000)/100000 + if loops % self._factorPrint == 0: + self._logger.log('Calculate: {} loops: {:.3f} k'.format(x, loops / 1000.0)) + time.sleep(0.001) + self._result = x + + def files(self, argv): + '''File benchmark: reads and write files. + @param argv: the program arguments, e.g. 
['/home', '--read-only'] + ''' + args, options = self.splitArgsAndOpts(argv) + for opt in options: + if opt.startswith('--max-loops='): + self._maxLoops = self.integerOption(opt) + elif opt.startswith('--max-written='): + self._maxWritten = self.integerOption(opt) + elif opt.startswith('--factor-print='): + self._factorPrint = self.integerOption(opt) + else: + self.usage('unknown option: ' + opt) + if len(argv) < 1: + self.usage('missing directory') + else: + baseDir = argv[0] + if not os.path.isdir(baseDir): + self.usage('not a directory: ' + baseDir) + else: + self.findFiles(baseDir) + self.benchFiles() + + def fileStatistics(self): + readMb = self._statistics._bytesRead / 1E6 + writeMb = self._statistics._bytesWritten / 1E6 + self._logger.log('read : files: {:4d} MBytes: {:10.3f} rate: {:10.3f} MByte/s'.format( + self._statistics._readCount, readMb, readMb / (self._statistics._timeRead + 0.000001))) + self._logger.log('write: files: {:4d} MBytes: {:10.3f} rate: {:10.3f} MByte/s'.format( + self._statistics._writeCount, writeMb, writeMb / (self._statistics._timeWrite + 0.000001))) + + def findFiles(self, baseDir): + '''Builds the list of the files in the given directory tree + @param baseDir: the base directory of the file tree + ''' + for node in os.listdir(baseDir): + full = baseDir + os.sep + node + if os.path.isdir(full): + self._dirs.append(full) + self.findFiles(full) + else: + self._files.append(full) + if len(self._files) % self._factorPrint == 0: + self._logger.log('files/dirs found: {}/{}'.format(len(self._files), len(self._dirs)), 2) + + def example(self): + example = '''# benchtool example configuration +''' + self.storeExample(example) + +def usage(): + return """usage: benchtool [] + Benchmarks and/or burn in. +GLOBAL_OPTS +GLOBAL_MODES +: + files [--read-only] [--max-loops=] [--max-written=] [--factor-print=] + Read and write randomly files. 
+ calculate + Calculates some things + +Example: +benchtool -v0 files /media/pool +benchtool files /home --read-only +""" + +def main(argv): + '''The main routine. + @param argv: the program arguments, e.g. ['/usr/local/bin/dirtool', 'run'] + ''' + appInfo = base.BaseTool.ApplicationInfo('benchtool', 'appl/BenchTool.py', usage) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = BenchTool(options) + (cmd, argv) = tool.handleStandardCommands(argv) + if cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd == 'files': + tool.files(argv) + elif cmd == 'calculate': + tool.calculate(argv) + else: + tool.usage("unknown command: " + cmd) + base.BaseTool.setLatestTool(tool) + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/DbTool.py b/appl/DbTool.py new file mode 100755 index 0000000..670e5c5 --- /dev/null +++ b/appl/DbTool.py @@ -0,0 +1,941 @@ +#! /usr/bin/python3 +''' +DbTool: Scalable Vector Graphics tool + +@author: hm +''' +import os.path +import sys +import re +import time +import tempfile + +sys.path.insert(0, '/usr/share/pyrshell') +import base.BaseTool +import base.JavaConfig +import base.ProcessHelper + +DbToolResult = None + +def setResult(data): + global DbToolResult + DbToolResult = data + +def result(): + global DbToolResult + return DbToolResult + +def removeFromArrayIfExists(anArray, item): + for ix in reversed(range(len(anArray) )): + if item == anArray[ix]: + del(anArray[ix]) + +class DbTool (base.BaseTool.BaseTool): + '''Translates from one wiki syntax into another. + ''' + def __init__(self, globalOptions): + '''Constructor. 
+ @param globalOptions: the basic configurations + ''' + base.BaseTool.BaseTool.__init__(self, globalOptions, 'dbtool.conf') + self._logger.setErrorFilter(re.compile(r'mysqldump:.*Warning.*Using a password'), True) + self._processTool = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger) + # True: delete user/db without confirmation: use only in unittests! + self._forceDeleting = False + + def archiveWebapp(self, argv): + '''Deletes a web application: + Saving of the database, creation of a tar archive with directory and sql script. + Removing database and directory. + @param argv: program arguments, e.g. ['huber.de', '/media/backup/archive'] + ''' + if len(argv) < 2: + self.usage('too few arguments') + else: + argv, options = self.splitArgsAndOpts(argv) + if len(options) != 0: + usage('unknown option(s): ' ' + '.join(options)) + (admin, adminCode) = defaultAdmin(argv, 2, self) + domain = argv[0] + dirArchive = argv[1] + fnConfig = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf') + fnArchive = dirArchive + os.sep + domain + '.tgz' + if not os.path.isdir(dirArchive): + self.usage('missing archive dir: ' + dirArchive) + elif os.path.exists(fnArchive): + self.usage('archive already exists: ' + fnArchive) + elif not os.path.exists(fnConfig): + self.usage('missing ' + fnConfig) + else: + config = base.JavaConfig.JavaConfig(fnConfig, self._logger) + db = config.getString('db') + user = config.getString('user') + dirWebapp = config.getString('directory') + if not os.path.isdir(dirWebapp): + self.usage('missing directory of {}: {}'.format(domain, dirWebapp)) + else: + dirDb = dirWebapp + os.sep + 'db' + self.ensureDirectory(dirDb) + fnSql = dirDb + os.sep + domain + '.sql.gz' + self.exportDatabase(db, admin, adminCode, fnSql) + size = os.path.getsize(fnSql) + fnNginx = self.getTarget('/etc/nginx/sites-available', domain) + base.FileHelper.copyIfExists(fnNginx, dirDb + os.sep + domain + '.nginx', None, self._logger) + 
base.FileHelper.copyIfExists(fnConfig, dirDb + os.sep + domain + '.conf', None, self._logger) + self._logger.log('{} exported DB {}'.format(base.StringUtils.formatSize(size), fnSql)) + nodes = os.listdir(dirDb) + self._logger.log('content of {}:\n{}'.format(dirDb, ' '.join(nodes))) + self._processHelper.execute(['/usr/bin/du', '-sh', dirWebapp], True) + self.saveDirectoryByTar(domain, dirWebapp, dirArchive, None) + size = os.path.getsize(fnArchive) + self._logger.log('{}: archive {}'.format(base.StringUtils.formatSize(size), fnArchive)) + fnScript = base.FileHelper.tempFile('Remove.' + domain) + base.StringUtils.toFile(fnScript, '''#! /bin/bash +dbtool -v3 delete-db-and-user {} {} --no-saving $1 +rm -Rf {} {} {} +rm -f /etc/ssl/certs/{}.pem /etc/ssl/private/{}.pem /etc/nginx/sites-enabled/{} +'''.format(db, user, dirWebapp, fnNginx, fnConfig, domain, domain, domain)) + self._logger.log('ToDo: bash {}'.format(fnScript)) + + def buildArgvMysql(self, db, user, passwd): + '''Creates the argument vector to start the command mysql + @param db: the db to handle + @param user: the user for login + @param passwd: the password of user: None or '-': no password given + @return: the program arguments + ''' + argv = ['/usr/bin/mysql', '-u', user] + if passwd != None and len(passwd) > 0 and passwd != '-': + argv.append('-p' + passwd) + argv.append(db) + return argv; + + def createAdmin(self, admin, adminPassword, readOnly, superuser='root', superuserPassword=None): + '''Creates an user able to process all databases + @param admin: new user + @param adminPassword: the password of user + @param readOnly: True: the new user may not change any database + @param superuser: a user which can write to mysql + @param superuserPassword: the password of user + ''' + argv = self.buildArgvMysql('mysql', superuser, superuserPassword) + sql = '''GRANT {} ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}'{}; +flush privileges;'''.format('SELECT, SHOW VIEW' if readOnly else 'ALL', admin, 
adminPassword, '' if readOnly else ' WITH GRANT OPTION') + self._logger.log('creating admin {}...'.format(admin), 1) + self._logger.log(sql, 4) + self._processTool.executeInput(argv, self._verboseLevel >= 2, sql) + + # create-and-import-all-webapps + def createAndImportAllWebApps(self, dirBackup, adminUser, adminPasswd): + '''Creates for all webapps the DB and the user stored in the configuration and imports the SQL file. + @param backupDir: the directory containing the SQL files for import + @param adminUser: a user which can read mysql + @param adminPasswd: the password of adminUser + ''' + path = os.path.dirname(self.getTarget(self._configDir + '/webapps.d', 'dummy')) + files = os.listdir(path) + count = 0 + for filename in files: + if filename.endswith('.conf'): + count += 1 + config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger) + db = config.getString('db') + user = config.getString('user') + passwd = config.getString('password') + domain = filename[0:-5] + sqlFileNode = config.getString('sql.file', 'sql.file_is_not_defined') + sqlFile = dirBackup + os.sep + sqlFileNode + '.sql.gz' + if db == None or user == None or passwd == None: + self._logger.error('missing auth data in ' + filename) + else: + self.createDbAndUser(db, user, passwd, adminUser, adminPasswd) + self.importWebApp(domain, sqlFile) + self._logger.log("= {} file(s) scanned".format(count), 2) + + def createAndImportWebApp(self, domain, backupDir, adminUser, adminPasswd): + '''Creates for all webapps the DB and the user stored in the configuration. 
+ @param adminUser: a user which can read mysql + @param adminPasswd: the password of adminUser + ''' + path = os.path.dirname(self.getTarget(self._configDir + '/webapps.d', 'dummy')) + filename = path + os.sep + domain + '.conf' + config = base.JavaConfig.JavaConfig(filename, self._logger) + db = config.getString('db') + user = config.getString('user') + passwd = config.getString('password') + sqlFileNode = config.getString('sql.file', 'undefined_sql.file') + '.sql' + sqlFile = backupDir + os.sep + sqlFileNode + '.gz' + if not os.path.isdir(backupDir): + self.usage('not a directory: ' + backupDir) + elif not os.path.exists(sqlFile): + self.usage('missing SQL file: ' + sqlFile) + elif db == None or user == None or passwd == None: + self._logger.error('missing auth data in ' + filename) + else: + self.createDbAndUser(db, user, passwd, adminUser, adminPasswd) + self.importWebApp(domain, sqlFile) + + def createBackupAdmin(self, admin, adminPasswd): + '''Creates the backup admin stored in backup.conf. 
+ @param admin: a user which can write to mysql + @param adminPasswd: the password of user + ''' + config = base.JavaConfig.JavaConfig('/etc/pyrshell/backup.conf', self._logger) + user = config.getString('mysql.user') + passw = config.getString('mysql.code') + if user != None and passw != None: + self.createBackupUser(user, passw, admin, adminPasswd) + + # create-db-and-user + def createDbAndUser(self, db, user, passwd, adminUser, adminPasswd): + '''Creates an user able to read all databases (not write) + @param db: name of the new database + @param user: new user + @param passwd: the password of user + @param adminUser: a user which can write to mysql + @param adminPasswd: the password of adminUser + ''' + argv = self.buildArgvMysql('mysql', adminUser, adminPasswd) + sql = '''GRANT ALL ON {}.* TO '{}'@'localhost' IDENTIFIED BY '{}' WITH GRANT OPTION; +flush privileges; +create database if not exists {};'''.format(db, user, passwd, db) + self._logger.log(' '.join(argv) + '\n' + sql) + self._logger.log('creating db {} and user {}...'.format(db, user), 1) + self._processTool .executeInput(argv, self._verboseLevel >= 2, sql) + + def createBackupUser(self, backupUser, backupPassword, user='root', userPassword=None): + '''Creates an user able to read all databases (not write) + @param backupUser: new user + @param backupPassword: the password of user + @param user: a user which can write to mysql + @param userPassword: the password of user + ''' + argv = self.buildArgvMysql('mysql', user, userPassword) + sql = '''GRANT USAGE ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}'; +GRANT SELECT, LOCK TABLES ON mysql.* TO '{}'@'localhost'; +GRANT SELECT, LOCK TABLES, SHOW VIEW, EVENT, EXECUTE, TRIGGER ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}'; +flush privileges;'''.format(backupUser, backupPassword, backupUser, backupUser, backupPassword) + self._logger.log('creating readonly user {}...'.format(backupUser), 1) + self._processTool.executeInput(argv, self._verboseLevel >= 2, 
sql) + + def createWebAppConfiguration(self, argv): + '''Creates a configuration file for a web application. + @param argv: the program arguments, e.g. ['example.com', '/home/example.com', 'dbexample', 'uexample', 'TopSecret'] + ''' + if len(argv) < 5: + self.usage('too few arguments') + else: + domain = argv[0] + directory = argv[1] + db = argv[2] + user = argv[3] + password = argv[4] + if re.match(r'^[\w.-]+$', domain) == None: + self.usage('illegal characters in domain: ' + domain) + if re.match(r'^[\w.-]+$', db) == None: + self.usage('illegal characters in db: ' + db) + if not os.path.isdir(directory): + self.usage('directory does not exists: ' + directory) + fn = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf') + base.StringUtils.toFile(fn, '''db={} +user={} +password={} +sql.file={} +directory={} +excluded= +'''.format(db, user, password, domain + '_' + db, directory)) + + def deleteDb(self, db, admin, passwd, doConfirm = True, doSave = True): + '''Deletes the database. + @param db: the db to delete + @param admin: a user with enough rights to delete the user + @param passwd: the password of the admin + @param doConfirm: True: the deletion must be confirmed (by typing the database name) + @param doSave: True: the database is saved into the tempororary directory + ''' + argv = self.buildArgvMysql('mysql', admin, passwd) + answer = db + if doConfirm and not self._forceDeleting and not base.BaseTool.unitTestIsActive(): + print('Do you want to delete db {}? Please enter the db name again: '.format(db)) + answer = input() + if answer != db: + self._logger.error('confirmation failed') + else: + self._logger.log('deleting database {}...'.format(db), 1) + sql = 'drop database `{}`;'.format(db) + self._processTool .executeInput(argv, self._verboseLevel >= 2, sql) + + def deleteDbAndUser(self, argv): + '''Deletes the database. + @param argv: program arguments, e.g. 
['wordpress', 'jonny', '--no-save', '--no-confirmation'] + ''' + if len(argv) < 1: + self.usage('too few arguments') + else: + argv, options = self.splitArgsAndOpts(argv) + doConfirmation = True + doSaving = True + for opt in options: + if opt == '--no-confirmation': + doConfirmation = False + elif opt == '--no-saving': + doSaving = False + else: + self.usage('unknown option: ' + opt) + (admin, code) = defaultAdmin(argv, 2, self) + db = argv[0] + user = argv[1] if len(argv) > 1 else '-' + if doSaving: + sqlFile = base.FileHelper.tempFile(db + '.sql.' + str(int(time.time())) + '.gz') + self._logger.log('saving {} ...'.format(sqlFile), 1) + self.exportDatabase(db, admin, code, sqlFile) + self.deleteDb(db, admin, code, doConfirmation, doSaving) + if user != '-': + self.deleteUser(user, admin, code, doConfirmation) + + def deleteUser(self, user, admin, passwd, doConfirm = True): + '''Deletes the database user. + @param user: the user to delete + @param admin: a user with enough rights to delete the user + @param passwd: the password of the admin + @param doConfirm: True: the deletion must be confirmed (by typing the user name) + ''' + argv = self.buildArgvMysql('mysql', admin, passwd) + answer = user + if doConfirm and not self._forceDeleting and not (user == 'testadmin' and base.BaseTool.unitTestIsActive()): + print('Do you want to delete db user {}? Please enter the username again: '.format(user)) + answer = input() + if answer != user: + self._logger.error('confirmation failed') + else: + sql = '''delete from user where user='{}'; +flush privileges;'''.format(user) + self._logger.log('deleting database user {}...'.format(user), 1) + self._processTool .executeInput(argv, self._verboseLevel >= 2, sql) + + def example(self): + '''Creates a example configuration. 
+ ''' + example = '''# dbtool example configuration +log.file=/var/log/local/dbtool.log +admin.user=admin +admin.code=TopSecret + ''' + self.storeExample(example) + + def exportAllDatabases(self, user, code, target): + '''Exports a database. + @param db: the DB's name + @param user: the DB user + @param code: '' or the password + @param target: the target file + ''' + # def findAllDbs(self, adminUser, adminPasswd, internalTo = False): + if user == None: + user = self._configuration.getString('admin.user') + code = self._configuration.getString('admin.code') + if user == None: + # configuration of backuptool: + user = self._configuration.getString('mysql.user') + code = self._configuration.getString('mysql.code') + if user == None: + self._logger.error('exportAllDatabases(): missing admin user') + else: + dbs = self.findAllDbs(user, code) + for db in dbs: + trg = target + os.sep + db + '.sql.gz' + self.exportDatabase(db, user, code, trg) + + def exportDatabase(self, db, user, code, target): + '''Exports a database. + @param db: the DB's name + @param user: the DB user + @param code: '' or the password + @param target: the target file + ''' + argv = ['/usr/bin/mysqldump', '--default-character-set=utf8mb4', '--single-transaction', '-u', user] + if code != '': + argv.append('-p' + code) + argv.append(db) + if target.endswith('.gz'): + self._processHelper.executeScript('''#! /bin/bash +/usr/bin/mysqldump --default-character-set=utf8mb4 --single-transaction -u{} '-p{}' '{}' | gzip -c > {} +'''.format(user, code, db, target)) + else: + self._processHelper.executeScript('''#! /bin/bash +/usr/bin/mysqldump --default-character-set=utf8mb4 --single-transaction -u{} '-p{}' '{}' > {} +'''.format(user, code, db, target)) + + def exportWebApp(self, argv): + '''Exports a web application's database. + @param argv: program arguments, e.g. 
['x01.interfacemaker.com', '/tmp/x01.sql.gz'] + ''' + domain = argv[0] + argv = argv[1:] + if len(argv) == 0: + target = tempfile.gettempdir() + os.sep + domain + '.sql' + else: + target = argv[0] + argv = argv[1:] + fnConfig = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf') + if not os.path.exists(fnConfig): + self.usage('missing ' + fnConfig) + else: + config = base.JavaConfig.JavaConfig(fnConfig, self._logger) + db = config.getString('db') + user = config.getString('user') + code = config.getString('password') + if db == None or user == None or code == None: + self.usage('incomplete data in ' + fnConfig) + else: + self.exportDatabase(db, user, code, target) + + def filterRecords(self, argv): + '''Filters some records from a given table in a given SQL file into a given output SQL file. + @param argv: the program arguments, e.g. ['mysql.sql', 'user', 'users.sql', '--exclude=,root,'] + ''' + argv, options = self.splitArgsAndOpts(argv) + filterFunc = None + regExclude = None + for opt in options: + if opt.startswith('--exclude='): + regExclude = self.regExprCompile(opt[10:].encode('utf-8'), 'filterRecord', False) + else: + self.usage('unknown option: ' + opt) + if regExclude != None: + filterFunc = lambda record, excludeExpr: excludeExpr.search(record) == None + fnIn = argv[0] + table = argv[1] + fnOut = argv[2] + if not os.path.exists(fnIn): + self.usage('missing input file: ' +fnIn) + elif re.search(r'\W', table) != None: + self.usage('illegal char in table name: ' + table) + else: + self.filterRecordsInSqlFile(table, fnIn, fnOut, filterFunc, regExclude) + + def filterRecordsInSqlFile(self, table, sqlInput, sqlOutput, filterFunc, paramFilter): + '''Parses a SQL file and searches for a given table. 
These tables will be written to another file: + @param table: name of the table to extract + @param sqlInput: name of the input file (created from mysqldump) + @param sqlOutput: name of the output file + @param filterFunc: none or a method to filter the record + if objectFilter != None filter() is a method of objectFilter + otherwise filter() is .filter is a function + signature: filter(record): bool. If it returns True the record is written + @param paramFilter: 2nd parameter of filterFunc() + ''' + if not os.path.exists(sqlInput): + self.usage('missing input SQL file: ' + sqlInput) + else: + with open(sqlInput, 'rb') as fpInput, open(sqlOutput, 'wb') as fpOutput: + state = None + table1 = b' ' + table.encode('utf-8') + b' '; + table2 = b'`' + table.encode('utf-8') + b'`'; + countRecords = 0 + for line in fpInput: + ixStart = 0 + if state == None: + if line.startswith(b'DROP TABLE IF EXISTS'): + state = 'searchTable' + else: + fpOutput.write(line) + elif state == 'searchTable' and (line.startswith(b'CREATE') or line.startswith(b'create')): + if line.find(table1) > 0 or line.find(table2) > 0: + state = 'foundTable' + firstInsert = True + else: + state = 'searchTable' + elif state == 'foundTable': + if line.startswith(b'INSERT') or line.startswith(b'insert'): + countValues = 0 + if firstInsert: + fpOutput.write(b'LOCK TABLES `!` WRITE;\n'.replace(b'!', table.encode('utf-8'))) + firstInsert = False + ixStart = line.find(b'VALUES') + if ixStart < 0: + ixStart = line.find(b'values') + if ixStart < 0: + self._logger.error('missing "VALUES":' + line.encode('utf-8')[0:80]) + ixStart = 0 + else: + ixStart += 6 + if countValues == 0: + fpOutput.write(b'\n') + else: + fpOutput.write(b',\n') + fpOutput.write(line[0:ixStart]) + state = 'inInserts' + elif line.startswith(b'UNLOCK') or line.startswith(b'unlock'): + state = 'end' + fpOutput.write(b'UNLOCK TABLES;\n') + if state == 'inInserts': + if line.startswith(b'UNLOCK') or line.startswith(b'unlock') or 
line.startswith(b'DROP') or line.startswith(b'drop') or line.startswith(b'CREATE') or line.startswith(b'create'): + state = 'end' + fpOutput.write(b';\nUNLOCK TABLES;\n') + else: + if ixStart == 0: + fpOutput.write(b',') + while True: + record = None + ix = line.find(b'),(', ixStart) + if ix >= 0: + record = line[ixStart:ix+1] + ixStart = ix + 2 + else: + record = line[ixStart:].strip(b'\n\r;,') + ok = filterFunc(record, paramFilter) + if ok: + if countValues == 0: + fpOutput.write(b'\n') + else: + fpOutput.write(b',\n') + fpOutput.write(record) + countValues += 1 + countRecords += 1 + if ix < 0: + break + self._logger.log('found records: {}'.format(countRecords), 2) + + # all-dbs [] + def findAllDbs(self, adminUser, adminPasswd, internalTo = False): + '''Logs the database names (without internal dbs like mysql) + @param adminUser: a user which can read mysql + @param adminPasswd: the password of adminUser + @param internalTo: False: internal databases (mysql, ...) are ignored + @return: a list of all database names + ''' + argv = self.buildArgvMysql('mysql', adminUser, adminPasswd) + sql = '''show databases;''' + rc = self._processTool.executeInputOutput(argv, sql) + if len(rc) > 0 and rc[0] == 'Database': + rc = rc[1:] + removeFromArrayIfExists(rc, '') + if not internalTo: + removeFromArrayIfExists(rc, 'mysql') + removeFromArrayIfExists(rc, 'information_schema') + removeFromArrayIfExists(rc, 'performance_schema') + return rc + + # all-dbs [] + def findAllUsers(self, adminUser, adminPasswd): + '''Logs the database names (without internal dbs like mysql) + @param adminUser: a user which can read mysql + @param adminPasswd: the password of adminUser + @param internalTo: False: internal databases (mysql, ...) 
are ignored + @return: a list of all database names + ''' + argv = self.buildArgvMysql('mysql', adminUser, adminPasswd) + sql = '''select user from user;''' + self._logger.log(' '.join(argv) + '\n' + sql, 2) + rc = self._processTool.executeInputOutput(argv, sql) + setResult(rc) + return rc + + # create-webapp-dbs + def createWebAppDbsAndUser(self, adminUser, adminPasswd): + '''Creates for all webapps the DB and the user stored in the configuration. + @param adminUser: a user which can read mysql + @param adminPasswd: the password of adminUser + ''' + self.createBackupAdmin(adminUser, adminPasswd) + path = self._configDir + '/webapps.d' + files = os.listdir(path) + count = 0 + for filename in files: + if filename.endswith('.conf'): + count += 1 + config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger) + db = config.getString('db') + user = config.getString('user') + passwd = config.getString('password') + if db == None or user == None or passwd == None: + self._logger.error('missing auth data in ' + filename) + else: + self.createDbAndUser(db, user, passwd, adminUser, adminPasswd) + self._logger.log("= {} file(s) scanned".format(count), 2) + + # import-all-webapps + def importAllWebappDbs(self, sqlDirectory): + '''Imports all webapp dbs from files in a given directory. + Most of the parameters are read from the configuration files. 
+ @param sqlDirectory: the directory containing the SQL files to import + ''' + path = self._configDir + '/webapps.d' + files = os.listdir(path) + count = 0 + for filename in files: + if filename.endswith('.conf'): + count += 1 + config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger) + db = config.getString('db') + user = config.getString('user') + passwd = config.getString('password') + sqlFilePrefix = config.getString('sql.file') + if db == None or user == None or passwd == None or sqlFilePrefix == None: + self._logger.error('missing needed data in ' + filename) + else: + fnSql = sqlDirectory + '/' + sqlFilePrefix + '.sql' + if not os.path.exists(fnSql): + fnSql2 = fnSql + '.gz' + if os.path.exists(fnSql2): + fnSql = fnSql2 + else: + self._logger.error('sql not found: ' + fnSql) + fnSql = None + if fnSql != None: + cmd = '/bin/zcat' if fnSql.endswith('.gz') else '/bin/cat' + self._logger.log('importing {} into {}...'.format(fnSql, db), 1) + self._processTool.executeInChain([cmd, fnSql], None, ['/usr/bin/mysql', '-u', user, '-p' + passwd, db]) + + # import-webapp + def importWebApp(self, domain, sqlFile): + '''Imports a SQL file into the db of a webapp. + @param domain: the domain of the webapp, e.g. 
'wolke.infeos.eu' + @param sqlFile: the file to import + ''' + filename = self.getTarget(self._configDir + '/webapps.d', domain + '.conf') + if not os.path.exists(sqlFile): + self._logger.error('sql file does not exist: ' + sqlFile) + elif not os.path.exists(filename): + self._logger.error('not found: ' + filename) + else: + configuration = base.JavaConfig.JavaConfig(filename, self._logger) + db = configuration.getString('db') + user = configuration.getString('user') + passwd = configuration.getString('password') + if db == None or user == None or passwd == None: + self._logger.error('missing needed data in ' + filename) + else: + cmd = '/bin/zcat' if sqlFile.endswith('.gz') else '/bin/cat' + argv1 = [cmd, sqlFile] + argv2 = ['/usr/bin/mysql', '-u', user, '-p' + passwd, db] + self._logger.log('importing {} into {}...'.format(sqlFile, db), 1) + self._processTool.executeInChain(argv1, None, argv2) + + def saveWebApp(self, domain, archive): + '''Saves the db into a subdirectory db and stores the home of the webapp into a tar achive. + @param domain: the domain of the webapp, e.g. 
'cloud.infeos.de' + @param archive: the name of the tar achive (will be created) + ''' + fn = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf') + if not os.path.exists(fn): + self.usage('unknown web application ' + domain) + config = base.JavaConfig.JavaConfig(fn, self._logger) + homeWebApp = config.getString('directory') + opts = config.getString('excluded') + options = [] + if opts != None and opts != '': + sep = opts[0] + for opt in sep.split(opts[1:]): + options.append('--exclude=' + opt) + if not os.path.isdir(homeWebApp): + self.usage('home of {} not found: {}'.format(domain, homeWebApp)) + dbDir = self.ensureDirectory(homeWebApp + os.sep + 'db') + if dbDir == None: + self.error('save-webapp aborted: cannot create subdir db') + else: + fnDb = dbDir + os.sep + domain + '.sql.gz' + self.exportWebApp([domain, fnDb]) + self._logger.log('db saved to {}: {}'.format(fnDb, base.StringUtils.formatSize(os.path.getsize(fnDb))), 2) + name = os.path.basename(archive) + if name.endswith('.tgz'): + name = name[0:-4] + self.saveDirectoryByTar(name, homeWebApp, os.path.dirname(archive), None if len(options) == 0 else options) + + # show-tables [] + def showTables(self, db, user, passwd, countRecords): + '''Displays th tables of the given database. 
+ @param db: the name of the db + @param user: a db user with access to db + @param passwd: the password of user + @param countRecords: True: the number of records is displayed too + ''' + argv = self.buildArgvMysql(db, user, passwd) + sql = 'show tables;'; + lines = self._processTool.executeInputOutput(argv, sql) + tables = lines[1:] + removeFromArrayIfExists(tables, '') + self._logger.log('= database {}:'.format(db)) + if not countRecords: + self._logger.log('\n'.join(tables)) + else: + DbToolResult = tables + for table in tables: + sql = 'select count(*) from {};'.format(table) + result = self._processTool.executeInputOutput(argv, sql) + if len(result) > 1: + count = result[1] + self._logger.log("{}: {}".format(table, count)) + + # show-webapps + def showWebApps(self): + '''Displays th tables of the given database. + @param domain: the domain of the webapp, e.g. 'wolke.infeos.eu' + ''' + path = self._configDir + '/webapps.d' + files = os.listdir(path) + DbToolResult = [] + for item in files: + if item.endswith('.conf'): + DbToolResult.append(item[:-5]) + self._logger.log(item[:-5]) + + # show-webapp-config + def showWebAppConfiguration(self, domain): + '''Displays th tables of the given database. + @param domain: the domain of the webapp, e.g. 'wolke.infeos.eu' + ''' + filename = self._configDir + '/webapps.d/' + domain + '.conf' + if not os.path.exists(filename): + self._logger.error('webapp configuration not found: ' + filename) + else: + content = base.StringUtils.fromFile(filename) + DbToolResult = content.split('\n') + self._logger.log(filename + ':\n' + content) + +def usage(): + '''Returns an info about usage. + ''' + return """dbtool [] + Info and manipulation of (mysql) databases. +GLOBAL_OPTS +GLOBAL_MODES +: + all-dbs [ [ []]] + list all databases. + : 'true': internal databases like mysql are listed to + all-users [ []] + list all databases. 
+ archive-webapp [ []] + archives a webapp (directory, db, configuration) into a tar archive + create-admin [--read-only | -r] [ []] + creates an user which can access to all databases + create-and-import-webapp [ []] + creates the DB and user, an imports the *.sql lying in the + create-and-import-all-webapps + creates the DBs and users for all web applications and imports the *.sql lying in the + create-backup-user [ []] + creates an user which can access to all databases but readonly only + create-db-and-user [ []] + creates a database (if not exists) and a user who can access to this db only + create-webapp-dbs [ []] + creates for all webapps db and a user (stored in the webapp configuration) + create-webapp-configuration [ []] + creates a configuration file in */webapps.d for a web application + delete-db-and-user [ [ []]] [--no-saving] [--no-confirmation] + deletes the database and (if given) a database user connected as user + --no-saving: the database is not saved before + --no-confirmation: no confirmation (retyping db/user name) is done + delete-user [ []] + deletes the database user connected as user + export-all-dbs [ []] + exports all databases into a directory + export-db + exports a db into a sql file + export-webapp [] + exports the database of the . user and password are taken from configuration + : the output file. Default: /tmp/.sql + filter-records [--exclude=] + reads an SQL file, searches for insert statements of the table
def defaultAdmin(argv, indexAdmin, tool):
    '''Determines the database administrator name and password.
    The command line arguments win; a missing argument or the placeholder '-'
    falls back to the configuration; if the configuration knows nothing,
    ('root', '') is returned.
    @param argv: the argument vector
    @param indexAdmin: the index of the admin in argv, the password has index+1
    @param tool: the DbTool instance (for configuration data)
    @returns: a tuple (admin, password)
    '''
    if len(argv) > indexAdmin and argv[indexAdmin] != '-':
        admin = argv[indexAdmin]
    else:
        admin = tool._configuration.getString('admin.user')
    if len(argv) > indexAdmin + 1 and argv[indexAdmin + 1] != '-':
        passwd = argv[indexAdmin + 1]
    else:
        passwd = tool._configuration.getString('admin.code')
    if admin is None:
        admin = 'root'
    if passwd is None:
        passwd = ''
    return (admin, passwd)
def main(argv):
    '''The main routine: dispatches the subcommand to the DbTool methods.
    @param argv: the program arguments, e.g. ['/usr/local/bin/dbtool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('dbtool', 'appl/DbTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = DbTool(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd == None:
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'archive-webapp':
        tool.archiveWebapp(argv)
    elif cmd == 'all-dbs':
        # all-dbs [<user> [<passwd> [<internal-too>]]]
        (admin, code) = defaultAdmin(argv, 0, tool)
        # NOTE(review): argv[4] looks odd for the <internal-too> flag (argv[2] expected) -- kept as is
        lines = tool.findAllDbs(admin, code, len(argv) > 4 and argv[4].upper().startswith('T'))
        setResult(lines)
        if tool._verboseLevel > 0:
            print('\n'.join(lines))
    elif cmd == 'all-users':
        # NOTE(review): index 3 for the admin is inconsistent with 'all-dbs' (index 0) -- kept as is
        (admin, code) = defaultAdmin(argv, 3, tool)
        lines = tool.findAllUsers(admin, code)
        setResult(lines)
        if tool._verboseLevel > 0:
            print('\n'.join(lines))
    elif cmd == 'create-db-and-user':
        # create-db-and-user <db> <user> <passwd> [<admin> [<code>]]
        if len(argv) < 3:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 3, tool)
            tool.createDbAndUser(argv[0], argv[1], argv[2], admin, code)
    elif cmd == 'create-admin':
        # create-admin [--read-only | -r] <user> <passwd> [<admin> [<code>]]
        argv, opts = tool.splitArgsAndOpts(argv)
        readOnly = False
        for opt in opts:
            if opt == '-r' or opt == '--read-only':
                readOnly = True
            else:
                # fix: module level usage() takes no argument (TypeError); tool.usage() shows the message
                tool.usage('unknown option: ' + opt)
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createAdmin(argv[0], argv[1], readOnly, admin, code)
    elif cmd == 'create-backup-user':
        # create-backup-user <user> <passwd> [<admin> [<code>]]
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createBackupUser(argv[0], argv[1], admin, code)
    elif cmd == 'create-and-import-webapp':
        # create-and-import-webapp <domain> <directory> [<admin> [<code>]]
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createAndImportWebApp(argv[0], argv[1], admin, code)
    elif cmd == 'create-and-import-all-webapps':
        # create-and-import-all-webapps <directory> [<admin> [<code>]]
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.createAndImportAllWebApps(argv[0], admin, code)
    elif cmd == 'create-webapp-dbs':
        # create-webapp-dbs <directory> [<admin>]
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.createWebAppDbsAndUser(argv[0], '' if len(argv) < 2 else argv[1])
    elif cmd == 'create-webapp-configuration':
        tool.createWebAppConfiguration(argv)
    elif cmd == 'delete-db-and-user':
        tool.deleteDbAndUser(argv)
    elif cmd == 'delete-user':
        # delete-user <user> [<admin> [<code>]]
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.deleteUser(argv[0], admin, code)
    elif cmd == 'export-all-dbs':
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.exportAllDatabases(admin, code, argv[0])
    elif cmd == 'export-db':
        if len(argv) < 4:
            tool.usage('too few arguments')
        else:
            tool.exportDatabase(argv[0], argv[1], '' if len(argv) <= 2 else argv[2], argv[3])
    elif cmd == 'export-webapp':
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.exportWebApp(argv)
    elif cmd == 'filter-records':
        if len(argv) < 3:
            tool.usage('too few arguments')
        else:
            tool.filterRecords(argv)
    elif cmd == 'import-webapp':
        # import-webapp <domain> <sql-file>
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            tool.importWebApp(argv[0], argv[1])
    elif cmd == 'import-all-webapps':
        # import-all-webapps <directory>
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.importAllWebappDbs(argv[0])
    elif cmd == 'save-webapp':
        # save-webapp <domain> <archive>
        if len(argv) < 2:
            tool.usage('missing arguments')
        else:
            # fix: without the else branch argv[0] raised an IndexError after the usage message
            lines = tool.saveWebApp(argv[0], argv[1])
            setResult(lines)
    elif cmd == 'show-tables':
        # show-tables <db> [<user> <passwd> [<count-records>]]
        # NOTE(review): index 0 makes <user> collide with <db> (argv[0]) -- kept as is, verify
        (user, code) = defaultAdmin(argv, 0, tool)
        lines = tool.showTables(argv[0], user, code, len(argv) > 3 and argv[3].upper().startswith('T'))
        setResult(lines)
    elif cmd == 'show-all-db-tables':
        # show-all-db-tables [<count-records>]
        (user, code) = defaultAdmin(argv, 0, tool)
        # fix: the guard tested len(argv) > 2 but accessed argv[3] (IndexError for 3 arguments)
        lines = tool.showAllDbsAndTables(argv[0], user, len(argv) > 2 and argv[2].upper().startswith('T'))
        setResult(lines)
    elif cmd == 'show-webapp-config':
        # show-webapp-config <domain>
        if len(argv) < 1:
            # fix: module level usage() takes no argument (TypeError); tool.usage() shows the message
            tool.usage('too few arguments')
        else:
            lines = tool.showWebAppConfiguration(argv[0])
            setResult(lines)
    elif cmd == 'show-webapps':
        tool.showWebApps()
    else:
        tool.usage("unknown command: " + cmd)
    if rc != None:
        tool.usage(rc)
    tool._finish()
if __name__ == '__main__':
    main(sys.argv)

# ======================= appl/DirTool.py =======================
#! /usr/bin/python3
'''
Created on 08.06.2018

@author: hm
'''

import time
import sys
import os.path
import stat
import shutil
import datetime
import tarfile
import re

sys.path.insert(0, '/usr/share/pyrshell')
import base.StringUtils
import base.BaseTool
import base.LinuxUtils
import base.FileHelper
import base.ProcessHelper

class ProgramOptions:
    '''Stores the common program options.
    '''
    def __init__(self, parent):
        '''Constructor.
        @param parent: the DirTool instance
        '''
        self._parent = parent
        self._verboseLevel = parent._verboseLevel
        self._logger = parent._logger
        self._maxDepth = 999
        self._fileOnly = False
        self._dirOnly = False
        self._excluded = None
        self._included = None
        self._ignoreCase = False
        self._regExprCaseMode = 0
        self._dirInfo = base.FileHelper.DirInfo()
        self._blocksize = 1024*1024*16
        self._testDate = True
        # fix: parseOptions() ('-t') sets this attribute: it must always exist
        self._testContent = False
        self._maxDifferenceCount = None
        self._silent = False
        self._short = False
        self._infoCount = 5
        self._minSize = None
        self._maxOldest = self._infoCount
        self._maxYoungest = self._infoCount
        self._maxSmallest = self._infoCount
        self._maxLargest = self._infoCount
        self._filesOnly = False
        self._dirsOnly = False
        self._humanReadable = True
        self._orderDateSize = False

    def parseOptions(self, options):
        '''Stores the given options.
        @param options: the list of program options, e.g. ['--max-depth=3']
        '''
        for opt in options:
            if opt.startswith('--pattern='):
                # fix: store into _included (read by the containers), not the unknown _includes
                self._included = self._parent.regExprCompile(opt[10:], self._ignoreCase)
            elif opt.startswith('--excluded='):
                # fix: store into _excluded, not _includes
                self._excluded = self._parent.regExprCompile(opt[11:], self._ignoreCase)
            elif opt.startswith('--max-depth='):
                self._maxDepth = self._parent.integerOption(opt)
            elif opt == '--file-only' or opt == '-f':
                # fix: the containers read _fileOnly, dirInfo() reads _filesOnly: set both
                self._fileOnly = True
                self._filesOnly = True
            elif opt == '--dir-only' or opt == '-d':
                self._dirOnly = True
                self._dirsOnly = True
            elif opt == '-i' or opt == '--ignore-case':
                self._ignoreCase = True
            elif opt.startswith('--blocksize'):
                # fix: integerOption() is a method of the parent tool, not of ProgramOptions
                self._blocksize = self._parent.integerOption(opt, 1024*1024*16)
            elif opt == '--ignore-time':
                self._testDate = False
            elif opt.startswith('--max-differences'):
                self._maxDifferenceCount = self._parent.integerOption(opt, 20)
            elif opt == '-q' or opt == '--quiet':
                self._silent = True
            elif opt == '-s' or opt == '--short':
                self._short = True
            elif opt == '-t' or opt == '--test-content':
                self._testContent = True
            elif opt.startswith('-n') or opt.startswith('--count'):
                self._infoCount = self._parent.integerOption(opt)
            elif opt.startswith('--min-size='):
                self._minSize = self._parent.integerOption(opt)
            elif opt.startswith('--max-oldest='):
                self._maxOldest = self._parent.integerOption(opt)
            elif opt.startswith('--max-youngest='):
                self._maxYoungest = self._parent.integerOption(opt)
            elif opt.startswith('--max-smallest='):
                self._maxSmallest = self._parent.integerOption(opt)
            elif opt.startswith('--max-largest='):
                self._maxLargest = self._parent.integerOption(opt)
            elif opt == '--byte-size' or opt == '--mbyte-size':
                self._humanReadable = False
            elif opt == '--order-date-size':
                self._orderDateSize = True
            else:
                self._parent.usage('unknown option ' + opt)

class FileInfo:
    '''Holds the metadata of one file or directory.
    '''
    # fix: the original named this method init(): it was never called automatically
    def __init__(self):
        '''Constructor.
        '''
        self._name = None
        self._size = None
        self._modified = None

    def set(self, name, size, modified):
        '''Sets the attributes.
        @param name: the name without path
        @param size: the size in bytes, -1 for directories
        @param modified: the modify datetime
        '''
        self._name = name
        self._size = size
        self._modified = modified

class FileContainer:
    '''Base class of file containers (directory trees, tar archives).
    '''
    def __init__(self, options):
        '''Constructor.
        @param options: the program options (a ProgramOptions instance)
        '''
        self._kind = None
        self._options = options
        self._currentFile = FileInfo()
        self._action = 'list'
        self._relPath = ''
        self._base = '.'
        self._dirInfo = base.FileHelper.DirInfo()
        # fix: actionList() appends to _lines in silent mode: it must exist
        self._lines = []

    def actionList(self):
        '''Lists the metadata of the current file/directory.
        Expects that matches() has set _node, _isDir and _currentFile before.
        '''
        if self._currentFile._size < 0:
            # directories get no size column
            size = ''
        elif self._options._humanReadable:
            size = "{:>8s}".format(base.StringUtils.formatSize(self._currentFile._size))
        else:
            size = '{:13.6f} MB'.format(self._currentFile._size / 1000000)
        fdate = datetime.datetime.fromtimestamp(self._currentFile._modified)
        dateString = fdate.strftime("%Y.%m.%d %H:%M:%S")
        if self._options._orderDateSize:
            line = '{:s} {:>12s} {:s}{}{}{}'.format(dateString, size, self._base, os.sep, self._relPath, self._node)
        else:
            line = '{:>12s} {:s} {:s}{}{}{}'.format(size, dateString, self._base, os.sep, self._relPath, self._node)
        if self._options._silent:
            self._lines.append(line)
        else:
            print(line)

    def onSelected(self):
        '''Handles a file/directory which has passed the filters.
        '''
        if self._currentFile._size >= 0:
            self._dirInfo._fileCount += 1
            self._dirInfo._fileSizes += self._currentFile._size
        if self._action == 'list':
            self.actionList()

    def traverse(self, relPath, depth):
        '''Traverses the container tree and does the wanted job.
        @param relPath: the relative path
        @param depth: the nesting level: 0: the first level
        '''
        while self.nextNode():
            if not self.matches():
                # fix: size >= 0 marks a file (FileInfo.set(): -1 for directories):
                # the original incremented the counters the wrong way round
                if self._currentFile._size >= 0:
                    self._dirInfo._ignoredFiles += 1
                else:
                    self._dirInfo._ignoredDirs += 1
            else:
                self.onSelected()
                if self._isDir:
                    if depth >= self._options._maxDepth:
                        self._dirInfo._ignoredDirs += 1
                    else:
                        self._dirInfo._dirCount += 1
                        self._nextDir = relPath + os.sep + self._node
                        self.changeDir(self._nextDir)
                        self.traverse(self._nextDir, depth + 1)

class TarContainer (FileContainer):
    '''A file container backed by a (possibly compressed) tar archive.
    '''
    def __init__(self, archive, options):
        '''Constructor.
        @param archive: the filename of the tar archive
        @param options: the program options
        '''
        FileContainer.__init__(self, options)
        self._tar = tarfile.open(archive, 'r:*')
        self._members = self._tar.getmembers()
        self._currentMember = -1

    def _finish(self):
        '''Deconstructor: releases the archive handle.
        '''
        self._tar.close()

    def changeDir(self, relPath):
        '''Starts handling of the next directory.
        The member list is flat: nothing to do.
        @param relPath: the relative path from the base
        '''
        pass

    def matches(self):
        '''Tests whether the current file is not excluded by the option controlled filtering.
        @return: True: the current file is not excluded
        '''
        member = self._members[self._currentMember]
        self._node = member.name
        self._isDir = member.isdir()
        # fix: _isDir is an attribute of the container, not of the options
        rc = self._options._fileOnly and not self._isDir
        rc = rc or self._options._dirOnly and self._isDir
        if not rc:
            # NOTE(review): passing _regExprCaseMode as 2nd match() argument (the start
            # position) looks suspicious, but is the convention of the sibling class -- verify
            rc = self._options._included == None or self._options._included.match(self._node, self._options._regExprCaseMode)
            rc = rc or self._options._excluded == None or not self._options._excluded.match(self._node, self._options._regExprCaseMode)
        self._currentFile.set(self._node, -1 if self._isDir else member.size, member.mtime)
        return rc

    def nextNode(self):
        '''Setups the next file in the container.
        @return: False: no more file is available.
        '''
        self._currentMember += 1
        return self._currentMember < len(self._members)
class DirectoryContainer (FileContainer):
    '''A file container backed by a directory tree of the file system.
    '''
    def __init__(self, path, options):
        '''Constructor.
        @param path: the base directory
        @param options: the program options
        '''
        FileContainer.__init__(self, options)
        self._base = path
        self._nodes = os.listdir(path)
        self._currentNode = -1

    def changeDir(self, relPath):
        '''Starts handling of the next directory.
        @param relPath: the relative path from the base (starts with os.sep, see traverse())
        '''
        self._currentDir = self._base + relPath
        self._nodes = os.listdir(self._currentDir)
        # fix: the cursor must restart for the new directory listing
        self._currentNode = -1

    def matches(self):
        '''Tests whether the current file is not excluded by the option controlled filtering.
        @return: True: the current file is not excluded
        '''
        self._node = self._nodes[self._currentNode]
        # NOTE(review): uses _base, not _currentDir -- nested entries may get a wrong path; verify
        self._full = self._base + os.sep + self._node
        self._isDir = os.path.isdir(self._full)
        rc = self._options._fileOnly and not self._isDir
        rc = rc or self._options._dirOnly and self._isDir
        if not rc:
            # fix: _regExprCaseMode lives in the options object
            rc = self._options._included == None or self._options._included.match(self._node, self._options._regExprCaseMode)
            rc = rc or self._options._excluded == None or not self._options._excluded.match(self._node, self._options._regExprCaseMode)
        if rc:
            if not self._isDir:
                # NOTE(review): onSelected() counts files too -- possible double counting; verify
                self._dirInfo._fileCount += 1
                # fix: the DirInfo attribute is named _fileSizes (see compare())
                self._dirInfo._fileSizes += os.path.getsize(self._full)
        else:
            # fix: the counters belong to _dirInfo (see traverse())
            if self._isDir:
                self._dirInfo._ignoredDirs += 1
            else:
                self._dirInfo._ignoredFiles += 1
        self._currentFile.set(self._node, -1 if self._isDir else os.path.getsize(self._full), os.path.getmtime(self._full))
        return rc

    def nextNode(self):
        '''Setups the next file in the container.
        @return: False: no more file is available.
        '''
        self._currentNode += 1
        return self._currentNode < len(self._nodes)

class DirTool (base.BaseTool.BaseTool):
    '''A command line tool for directory tree tasks: compare, info, check...
    '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global program options
        '''
        self._globalOptions = globalOptions
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'dirtool.conf')
        self._useRename = True
        self._infos = []
        self._filenames = []
        self._maxDepth = 2048
        self._withDirs = True
        self._withFiles = True
        self._count = int(self._configuration.getString('count.default', '20'))
        self._dateSize = True
        self._humanReadableSize = True
        self._euid = os.geteuid()
        self._egid = os.getegid()
        self._testDate = True
        # two files are "equal in time" if the difference is below this amount of seconds
        self._timePrecision = 2.0
        self._blocksize = 1024*1024*16
        self._differenceCount = 0
        self._maxDifferenceCount = 100
        self._differences = None
        self._short = False
        self._silent = False
        self._testContent = False
        self._dirInfo = None

    def buildExamples(self):
        '''Prepares the file system so that the examples of the usage message can be executed.
        Creates two similar directory trees below /tmp/%examples with well defined differences.
        '''
        def setTime(fn, modified):
            # sets the modification time from a string like '2018-01-03 07:03:53'
            modified2 = datetime.datetime.strptime(modified, '%Y-%m-%d %H:%M:%S')
            base.FileHelper.setModified(fn, None, modified2)

        def build(fn, content = 'line1', modified = '2018-01-03 07:03:53'):
            # creates a file with the given content and modification time
            self._logger.log('creating ' + fn, 1)
            base.StringUtils.toFile(fn, content)
            setTime(fn, modified)

        baseDir = '/tmp/%examples'
        # self.ensureDirectory(baseDir)
        baseDir1 = baseDir + os.sep + 'dir1'
        self.ensureDirectory(baseDir1)
        for no in range(5):
            build(baseDir1 + os.sep + 'file{}.txt'.format(no + 1),
                'content of the file file{}.txt'.format(no) + '\nbla bla' * (no + 1), '2018-{:02d}-03 07:03:5{}'.format(no+1, no))
        no = 47
        for dirNo in range(1, 4):
            no += 3
            subDir = baseDir1 + os.sep + 'subdir{}'.format(dirNo)
            self.ensureDirectory(subDir)
            for fileNo in range(4, 7):
                no *= 7
                build(subDir + os.sep + 'data_{}.txt'.format(fileNo + 1),
                    'content of the file file{}.txt'.format(fileNo) + '\nbla bla' * (no % 5 + 1),
                    '2017-{:02d}-{} 07:03:5{}'.format(fileNo+1, dirNo, dirNo))
            setTime(subDir, '2018-{:02d}-{:02d} 07:03:53'.format(no%12+1, dirNo + 7))

        setTime(baseDir1, '2017-01-02 04:17:22')
        baseDir2 = baseDir + os.sep + 'dir2'
        shutil.rmtree(baseDir2, True)
        shutil.copytree(baseDir1, baseDir2)

        # differences: missing files in dir2...
        os.unlink(baseDir + '/dir2/file2.txt')
        os.unlink(baseDir + '/dir2/subdir1/data_6.txt')

        # ... a file instead of a directory ...
        shutil.rmtree(baseDir + '/dir2/subdir2')
        build(baseDir + '/dir2/subdir2', 'is a file instead of the name')

        # ... files only existing in dir2 ...
        build(baseDir + '/dir2/new.txt', 'line1\nline2\nline3')
        build(baseDir + '/dir2/subdir1/new2.txt', 'line1\nline2\nline3')

        # ... different modification times ...
        base.FileHelper.setModified(baseDir + '/dir2/file1.txt', None, datetime.datetime.strptime('2018-01-03 09:03:53', '%Y-%m-%d %H:%M:%S'))
        base.FileHelper.setModified(baseDir + '/dir2/subdir1/data_5.txt', None, datetime.datetime.strptime('2018-01-03 09:03:53', '%Y-%m-%d %H:%M:%S'))

        # ... different content and time ...
        build(baseDir + '/dir2/file4.txt', 'newer content in file4.txt', '2018-10-03 09:03:53')
        build(baseDir + '/dir2/subdir3/data5.txt', 'line1\nline2', '2018-10-03 09:07:53')

        # ... different content with same size and time:
        fn = baseDir + '/dir2/file5.txt'
        content = base.StringUtils.fromFile(fn)
        aTime = os.path.getmtime(fn)
        build(fn, content.replace('e', 'E'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(aTime)))

        fn = baseDir + '/dir2/subdir3/data_6.txt'
        content = base.StringUtils.fromFile(fn)
        aTime = os.path.getmtime(fn)
        build(fn, content.replace('e', 'E'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(aTime)))

        for dirNo in range(1, 4):
            if dirNo != 2:
                subDir = baseDir2 + os.sep + 'subdir{}'.format(dirNo)
                setTime(subDir, '2018-{}-03 09:03:53'.format(1 + (dirNo + 7) % 12))
        setTime(baseDir2, '2018-04-03 09:03:53')

    def compare(self, argv):
        '''Compares two directory trees.
        @param argv: the program arguments: <dir1> <dir2> and comparison options
        '''
        (argv, opts) = self.splitArgsAndOpts(argv)
        if len(argv) < 2:
            self.usage('missing arguments')
        else:
            dir1 = argv[0]
            dir2 = argv[1]
            argv = argv[2:]
            if not os.path.isdir(dir1):
                self.usage('dir1 is not a directory: ' + dir1)
            elif not os.path.isdir(dir2):
                # fix: the original tested dir1 a second time
                self.usage('dir2 is not a directory: ' + dir2)
            else:
                self._blocksize = 1024*1024*16
                self._differenceCount = 0
                self._maxDifferenceCount = 100
                self._differences = []
                self._silent = False
                self._short = False
                self._testContent = False
                self._testDate = True
                caseSensitive = True
                exclude = None
                pattern = None
                for opt in opts:
                    if opt.startswith('--blocksize'):
                        self._blocksize = self.integerOption(opt, 1024*1024*16)
                    elif opt.startswith('--pattern='):
                        pattern = opt[10:]
                    elif opt.startswith('--exclude='):
                        exclude = opt[10:]
                    elif opt == '-i' or opt == '--ignore-case':
                        caseSensitive = False
                    elif opt == '--ignore-time':
                        self._testDate = False
                    elif opt.startswith('--max-differences'):
                        self._maxDifferenceCount = self.integerOption(opt, 20)
                    elif opt == '-q' or opt == '--quiet':
                        self._silent = True
                    elif opt == '-s' or opt == '--short':
                        self._short = True
                    elif opt == '-t' or opt == '--test-content':
                        self._testContent = True
                self._exclude = None if exclude == None else self.regExprCompile(exclude, 'compare-exclude', caseSensitive)
                self._pattern = None if pattern == None else self.regExprCompile(pattern, 'compare-pattern', caseSensitive)
                self._dirInfo = base.FileHelper.DirInfo()
                self.compareDir(dir1, dir2)
                base.BaseTool.setResult(self._differences)
                if self._verboseLevel > 0:
                    info = self._dirInfo
                    tail = '' if info._fileSizes == 0 else ' with {}'.format(base.StringUtils.formatSize(info._fileSizes))
                    self._logger.log('inspected {} dir(s) and {} file(s){}'.format(
                        info._dirCount, info._fileCount, tail))
self._silent = True + elif opt == '-s' or opt == '--short': + self._short = True + elif opt == '-t' or opt == '--test-content': + self._testContent = True + self._exclude = None if exclude == None else self.regExprCompile(exclude, 'compare-exclude', caseSensitive) + self._pattern = None if pattern == None else self.regExprCompile(pattern, 'compare-pattern', caseSensitive) + self._dirInfo = base.FileHelper.DirInfo() + self.compareDir(dir1, dir2) + base.BaseTool.setResult(self._differences) + if self._verboseLevel > 0: + info = self._dirInfo + tail = '' if info._fileSizes == 0 else ' with {}'.format( base.StringUtils.formatSize(info._fileSizes)) + self._logger.log('inspected {} dir(s) and {} file(s){}'.format( + info._dirCount, info._fileCount, tail)) + + def compareDir(self, dir1, dir2): + '''Compares all files two directories. + @param dir1: the first directory to compare + @param dir2: the 2nd directory to compare + @return: True: success False: stop processing + ''' + rc = False + self._dirInfo._dirCount += 1 + nodes1 = os.listdir(dir1) + nodes2 = os.listdir(dir2) + for node in nodes1: + full1 = dir1 + os.sep + node + isDir1 = os.path.isdir(full1) + if not isDir1 and self._pattern != None and self._pattern.match(node) == None: + self._logger.log('ignored (pattern): ' + node, 4) + self._dirInfo._ignoredFiles += 1 + continue + if self._exclude != None and self._exclude.match(node): + if isDir1: + self._dirInfo._ignoredDirs += 1 + else: + self._dirInfo._ignoredFiles += 1 + self._logger.log('ignored (exclude): ' + node, 4) + continue + if node not in nodes2: + self.compareLog('{} {}'.format('+' if self._short else 'additional:' , dir1 + os.sep + node)) + else: + full2 = dir2 + os.sep + node + type1 = base.FileHelper.fileType(full1) + type2 = base.FileHelper.fileType(full2) + if type1 != type2: + self.compareLog('{} {} / {} {}'.format('!' 
if self._short else 'different types:', type1, type2, full1)) + elif type1 == 'dir': + self.compareDir(full1, full2) + else: + self.compareFile(full1, full2) + rc = self._differenceCount < self._maxDifferenceCount + if not rc: + break + if rc: + for node in nodes2: + if self._pattern != None and self._pattern.match(node): + self._logger.log('ignored (pattern): ' + node, 4) + continue + if self._exclude != None and self._exclude.match(node): + self._logger.log('ignored (exclude): ' + node, 4) + continue + if node not in nodes1: + self.compareLog('{} {}'.format('-' if self._short else 'missing counterpart:' , dir1 + os.sep + node)) + rc = self._differenceCount < self._maxDifferenceCount + if not rc: + break + return rc + + def compareFile(self, file1, file2): + '''Compares two files. + @param file1: the first file to compare + @param file2: the 2ndfile to compare + @return: True: both files are equal + ''' + self._dirInfo._fileCount += 1 + info1 = os.lstat(file1) + info2 = os.lstat(file2) + rc = info1.st_size == info2.st_size + if not rc: + self.compareLog('{} {} / {} {}'.format('S' if self._short else 'size:', info1.st_size, info2.st_size, file1)) + elif self._testDate: + rc = abs(info1.st_mtime - info2.st_mtime) < self._timePrecision + if not rc: + self.compareLog('{} {} / {} {}'.format('T' if self._short else 'date:', + time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(info1.st_mtime)), + time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(info2.st_mtime)), + file1)) + if rc and self._testContent: + with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2: + sumSize = 0 + while rc: + block1 = fp1.read(self._blocksize) + block2 = fp2.read(self._blocksize) + rc = len(block1) == len(block2) + if not rc: + self.compareLog('{} {} / {} {}'.format('L' if self._short else 'length:', sumSize + len(block1), sumSize + len(block2), file1)) + else: + rc = block1 == block2 + if not rc: + for ix in range(len(block1)): + if block1[ix] != block2[ix]: + desc = 'C [{}]' if self._short 
else 'different at position [{}]: ' + self.compareLog('{} {}'.format(desc.format(1 + sumSize + ix), file1)) + break + sumSize += len(block1) + if len(block1) == 0: + break + self._dirInfo._fileSizes += sumSize + return rc + + def compareLog(self, message): + '''Logs a difference of file tree comparism. + @param message: the difference as string + ''' + self._differenceCount += 1 + if self._differenceCount < self._maxDifferenceCount: + if self._differences != None: + self._differences.append(message) + if not self._silent: + self._logger.log(message) + + def traverseContainer(self, relPath, depth): + ''' + ''' + + def check(self, argv): + '''Searches for "wrong" files: + @param argv: command arguments, e.g. ['/home', '/etc' ] + ''' + def checkFile(name): + try: + with open(name, 'rb') as fp: + while fp.read(): + pass + except Exception as exc: + self._logger.error(name + ': ' + str(exc)) + def checkDir(full): + try: + for node in os.listdir(full): + if not os.path.isdir(full): + checkFile(full + os.sep + node) + except Exception as exc: + self._logger.error(full + ': ' + str(exc)) + # === def check + for arg in argv: + if not os.path.isdir(arg): + checkFile(arg) + else: + checkDir(arg) + + def dirInfo(self, argv): + '''Lists summary info and "extreme" files (e.g. the youngest files) of a directory tree + @param argv: the program arguments, e.g. 
['/home', '--max-depth=7'] + ''' + argv, options = self.splitArgsAndOpts(argv) + filePattern = dirPattern = maxDepth = None + minSize = 1 + maxYoungest = maxLargest = maxOldest = count = maxSmallest = None + dirsOnly = filesOnly = False + trace = 0 + while len(options) > 0 and options[0].startswith('-'): + opt = options[0] + options = options[1:] + if opt.startswith('--pattern='): + dirPattern = opt[10:] + elif opt.startswith('--max-depth='): + maxDepth = self.integerOption(opt) + elif opt.startswith('-n') or opt.startswith('--count'): + count = self.integerOption(opt) + elif opt.startswith('--min-size='): + minSize = self.integerOption(opt) + elif opt.startswith('--max-oldest='): + maxOldest = self.integerOption(opt) + elif opt.startswith('--max-youngest='): + maxYoungest = self.integerOption(opt) + elif opt.startswith('--max-smallest='): + maxSmallest = self.integerOption(opt) + elif opt.startswith('--max-largest='): + maxLargest = self.integerOption(opt) + elif opt.startswith('--trace'): + trace = self.integerOption(opt) + elif opt == '--file-only' or opt == '-f': + filesOnly = True + elif opt == '--dir-only' or opt == '-d': + dirsOnly = True + if count != None: + if maxYoungest == None: + maxYoungest = count + if maxLargest == None: + maxLargest = count + if maxOldest == None: + maxOldest = count + if maxSmallest == None: + maxSmallest = count + if maxYoungest == None: + maxYoungest = 5 + if maxOldest == None: + maxOldest = 5 + if maxLargest == None: + maxLargest = 5 + if maxSmallest == None: + maxSmallest = 5 + info = None + if len(argv) == 0: + argv = ['.'] + for source in argv: + info = base.FileHelper.directoryInfo(source, filePattern, dirPattern, maxDepth, + info, maxYoungest, maxLargest, maxOldest, maxSmallest, minSize, dirsOnly, filesOnly, trace) + result = ['Directories: {} Files: {} / {}'.format(info._dirCount, info._fileCount, base.StringUtils.formatSize(info._fileSizes))] + result.append('Ignored: {} file(s) / {} dir(s)'.format(info._ignoredFiles, 
info._ignoredDirs)) + if info._maxSmallest != None and info._maxSmallest > 0: + result.append('The smallest files:') + for item in info._smallest: + full = item.split(':')[1] + statInfo = os.lstat(full) + name = full + if stat.S_ISLNK(statInfo.st_mode): + name += ' -> ' + os.readlink(full) + result.append(base.FileHelper.listFile(statInfo, name, True, True)) + if info._maxOldest != None and info._maxOldest > 0: + result.append('The oldest files:') + for item in info._oldest: + full = item.split(':')[1] + statInfo = os.lstat(full) + name = full + if stat.S_ISLNK(statInfo.st_mode): + name += ' -> ' + os.readlink(full) + result.append(base.FileHelper.listFile(statInfo, name, True, True)) + if info._maxLargest != None and info._maxLargest > 0: + result.append('The largest files:') + for item in info._largest: + full = item.split(':')[1] + statInfo = os.lstat(full) + if stat.S_ISLNK(statInfo.st_mode): + full = '-> ' + os.readlink(full) + result.append(base.FileHelper.listFile(statInfo, full, True, True)) + if info._maxYoungest != None and info._maxYoungest > 0: + result.append('The youngest files:') + for item in info._youngest: + full = item.split(':')[1] + statInfo = os.lstat(full) + name = full + if stat.S_ISLNK(statInfo.st_mode): + name += ' -> ' + os.readlink(full) + result.append(base.FileHelper.listFile(statInfo, name, True, True)) + base.BaseTool.setResult(result) + self._rawOutput = '\n'.join(result) + if self._verboseLevel > 0: + print(self._rawOutput) + return result + + def example(self): + example = '''# dirtool example configuration + log.file=/var/log/local/dirtool.log + ''' + self.storeExample(example) + + def findOptions(self, argv, isDate): + '''Evaluates the options relevant for date/size searchings. + @param argv: the program arguments to inspect + @param isDate: True: mode is oldest or youngest + @return: string: error message + array: the argument vector without the options. 
+ ''' + rc = None + if len(argv) == 0: + return [os.sep] + else: + while len(argv) > 0: + arg = argv[0] + if not arg.startswith('-'): + break + if isDate and (arg == '-f' or arg == '--files-only'): + self._withDirs = False + self._withFiles = True + argv = argv[1:] + elif isDate and (arg == '-d' or arg == '--dirs-only'): + self._withDirs = True + self._withFiles = False + argv = argv[1:] + elif arg == '-b' or arg == '--byte-size': + self._humanReadableSize = False + argv = argv[1:] + else: + rc = 'unknown option: ' + arg + break + if rc == None: + rc = argv + return rc + + def handleOneFile(self, filename, statInfo, moreInteresting): + '''Compares a given files with the previous found files (self._files) and inserts if meaningful. + @param statInfo: the info of the current file + @param moreInteresting: a lambda function f(statFile1, statFile2, mayBeEqual): bool returning whether file1 replaces file2 + ''' + if len(self._filenames) == 0: + self._filenames.append(filename) + self._infos.append(statInfo) + else: + if len(self._filenames) < self._count: + # insert always: + last = self._infos[-1] + if moreInteresting(last, statInfo, True): + self._filenames.append(filename) + self._infos.append(statInfo) + else: + for ix in range(len(self._infos)): + if moreInteresting(statInfo, self._infos[ix], True): + self._filenames.insert(ix, filename) + self._infos.insert(ix, statInfo) + break + else: + # replace an entry + # more interesting than the last? + if moreInteresting(statInfo, self._infos[-1], False): + # more interesting than the first? 
+ if moreInteresting(statInfo, self._infos[0], True): + self._filenames.insert(0, filename) + self._infos.insert(0, statInfo) + else: + for ix in range(len(self._infos)): + if moreInteresting(statInfo, self._infos[ix], False): + self._filenames.insert(ix, filename) + self._infos.insert(ix, statInfo) + break + # Remove the last entry: + del self._filenames[self._count] + del self._infos[self._count] + self.testOrder(moreInteresting) + + def handleTree(self, directory, depth, moreInteresting): + '''Finds recursively the n most interesting files in a directory tree. + @param directory: the directory to inspect + @param depth: the depth of the directory in the directory tree + @param moreInteresting: a lambda function f(statFile1, statFile2, mayBeEqual): bool returning whether file1 replaces file2 + ''' + self._logger.log(directory + os.sep + ' ...', 3) + if depth == 0: + try: + statInfo = os.lstat(directory) + accepted = (base.LinuxUtils.isExecutable(statInfo, self._euid, self._egid) + and base.LinuxUtils.isReadable(statInfo, self._euid, self._egid)) + if accepted: + self.handleOneFile(directory, statInfo, moreInteresting) + else: + self._logger.log('no permission: ' + directory, 2) + except FileNotFoundError: + self._logger.log('no permission: ' + directory, 2) + directory = None + dirs = [] + nodes = None + if directory != None: + try: + nodes = os.listdir(directory) + except PermissionError: + self._logger.log('no permission: ' + directory, 2) + directory = None + if directory != None: + for node in nodes: + if node != '.' 
and node != '..': + full = directory + os.sep + node + try: + statInfo = os.lstat(full) + except FileNotFoundError: + self._logger.log('no permission: ' + directory + os.sep + node, 2) + continue + isDir = stat.S_ISDIR(statInfo.st_mode) + if self._withDirs and isDir or self._withFiles and not isDir: + accepted = (base.LinuxUtils.isExecutable(statInfo, self._euid, self._egid) + and base.LinuxUtils.isReadable(statInfo, self._euid, self._egid)) + if accepted: + self.handleOneFile(full, statInfo, moreInteresting) + self._logger.log('no permission: ' + directory + os.sep + node, 2) + if isDir: + dirs.append(node) + if depth <= self._maxDepth: + for node in dirs: + self.handleTree(directory + os.sep + node, depth + 1, moreInteresting) + + def imageResize(self, argv): + '''resizes images. + @param argv: program arguments, e.g. ['src', 'trg', '--max-width=1024', '--max-height=768'] + ''' + argv, opts = self.splitArgsAndOpts(argv) + maxWidth = None + maxHeight = None + for opt in opts: + if opt.startswith('--max-width='): + maxWidth = self.integerOption(opt) + elif opt.startswith('--max-height='): + maxHeight = self.integerOption(opt) + else: + self.usage('unknown option: ' + opt) + if not os.path.exists('/usr/bin/identify') or not os.path.exists('/usr/bin/convert'): + self.usage('missing commands identify or convert: please install imagemagick') + elif len(argv) < 2: + self.usage('missing arguments') + else: + source = argv[0] + target = argv[1] + if not os.path.isdir(source): + self.usage(' is not a directory: ' + source) + elif not os.path.isdir(target): + self.usage(' is not a directory: ' + target) + else: + nodes = os.listdir(source) + regExtend = re.compile(r'\.(jpg|png|gif)$', re.I) + regDimension = re.compile(' (\d+)x(\d+) ') + for node in nodes: + src = source + os.sep + node + if regExtend.search(src) != None: + info = self._processHelper.executeInputOutput(['identify', src], None, False) + matcher = regDimension.search(info[0]) + if matcher != None: + width = 
int(matcher.group(1)) + height = int(matcher.group(2)) + newWidth = None + newHeight = None + if width > height: + # landscape + if maxWidth != None and width > maxWidth: + newWidth = maxWidth + newHeight = int(newWidth * height / width) + else: + # portrait + if maxHeight != None and height > maxHeight: + newHeight = maxHeight + newWidth = int(newHeight * width / height) + if newWidth != None: + trg = target + os.sep + node + start = time.time() + oldSize = os.path.getsize(src) + self._processHelper.execute(['convert', src, '-resize', '{}x{}'.format(newWidth, newHeight), trg], None, False) + base.FileHelper.setModified(trg, os.path.getmtime(src)) + newSize = os.path.getsize(trg) + duration = time.time() - start + self._logger.log('{} [{}x{}]: {:.1f}% ({:.3f} sec)'.format(node, width, height, newSize * 100 / oldSize, duration), 1) + + def jobLargest(self, argv): + '''Find the n youngest files in one or more directories. + @param argv: options and directories + @return: None or an error message + ''' + argv.append('--max-youngest=0') + argv.append('--max-smallest=0') + argv.append('--max-oldest=0') + rc = self.dirInfo(argv) + return rc + + def jobOldest(self, argv): + '''Find the n oldest files in one or more directories. + @param argv: options and directories + @return: None or an error message + ''' + argv.append('--max-largest=0') + argv.append('--max-smallest=0') + argv.append('--max-youngest=0') + rc = self.dirInfo(argv) + return rc + + def jobSmallest(self, argv): + '''Find the n smallest files in one or more directories. + @param argv: options and directories + @return: None or an error message + ''' + argv.append('--max-largest=0') + argv.append('--max-youngest=0') + argv.append('--max-oldest=0') + rc = self.dirInfo(argv) + return rc + + def jobYoungest(self, argv): + '''Find the n youngest files in one or more directories. 
+ @param argv: options and directories + @return: None or an error message + ''' + argv.append('--max-largest=0') + argv.append('--max-smallest=0') + argv.append('--max-oldest=0') + rc = self.dirInfo(argv) + return rc + + def list(self, argv): + '''Lists a directory or a container (tar archive...) + @param argv: the program arguments, e.g. ['/home', '--max-depth=7'] + ''' + argv, options = self.splitArgsAndOpts(argv) + opts = ProgramOptions(self) + opts.parseOptions(options) + info = None + if len(argv) == 0: + argv = ['.'] + dirInfo = base.FileHelper.DirInfo() + for source in argv: + container = None + aClass, subClass = base.FileHelper.fileClass(source) + if aClass == 'container': + if subClass == 'tar' or subClass == 'tgz' or subClass == 'tbz': + container = TarContainer(source, opts) + elif subClass == 'dir': + container = DirectoryContainer(source, opts) + else: + self._logger.error('unknown archive type: {} / {}', subClass, source) + else: + self._logger.error('not a directory or an archive: ' + source) + if container != None: + container._dirInfo = dirInfo + container.traverse('', 0) + result = ['Directories: {} Files: {} / {}'.format(dirInfo._dirCount, dirInfo._fileCount, base.StringUtils.formatSize(dirInfo._fileSizes))] + result.append('Ignored: {} file(s) / {} dir(s)'.format(dirInfo._ignoredFiles, dirInfo._ignoredDirs)) + base.BaseTool.setResult(result) + self._rawOutput = '\n'.join(result) + if self._verboseLevel > 0: + print(self._rawOutput) + return result + + def listFiles(self): + '''Print the file list of the found files. + ''' + for ix in range(len(self._filenames)): + # statInfo, full, orderDateSize = True, humanReadable = True + base.FileHelper.listFile(self._infos[ix], self._filenames[ix], self._dateSize, self._humanReadableSize) + + def snapshot(self, argv): + '''Duplicates a source directory tree into a target with hard links: needs space only for directory info (meta data). 
+ @precondition: source and target must lay in a common filesystem to allow making hardlinks. + @param argv: the program arguments, e.g. ['/media/server/data', '/media/server/backup/Monday'] + ''' + if len(argv) < 2: + self.usage('too few arguments') + else: + source = argv[0] + target = argv[1] + if not os.path.isdir(source): + self.usage('source is not a directory: ' + source) + elif not os.path.isdir(os.path.dirname(target)): + self.usage('parent of target is not a directory: ' + target) + elif os.path.isdir(target): + self.usage('target already exists: ' + target) + else: + process = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger) + process.execute(['/bin/cp', '-al', source, target], True, False) + + def synchronize(self, argv): + '''Copies all changed/new files from a base directory to a "mirror". + But no file of the mirror will be deleted: they will be moved to a "safe". + @param argv: the program arguments, e.g. ['/media/data', '/media/backup', '/media/safe'] + ''' + if len(argv) < 3: + self.usage('too few arguments') + elif (not os.path.isdir(argv[0])): + self.usage('base is not a directory: ' + argv[0]) + elif (not os.path.isdir(argv[1])): + self.usage('mirror is not a directory: ' + argv[1]) + elif (not os.path.isdir(argv[2])): + self.usage('base is not a directory: ' + argv[2]) + else: + try: + self.synchronizeOneDir(argv[0], argv[1], argv[2], '') + except Exception as exc: + self._logger.error('the mirroring has failed: ' + str(exc)) + raise exc + + def synchronizeOneDir(self, baseDir, mirrorDir, safeDir, relPath): + '''Copies all changed/new files from a baseDir directory to a "mirrorDir". + But no file of the mirrorDir will be deleted: they will be moved to a "safeDir". 
+ @param baseDir: the source directory + @param mirrorDir: the target directory + @param safeDir: the directory for "deleted" files from the target + @param relPath: defines the effective paths to process: will be added to baseDir, mirrorDir and safeDir + example: source/py + ''' + baseFull = baseDir + mirrorFull = mirrorDir + safeFull = safeDir + if relPath != '': + baseFull += os.sep + relPath + mirrorFull += os.sep + relPath + safeFull += os.sep + relPath + self._logger.log('processing ' + baseFull, 2) + baseNodes = os.listdir(baseFull) + mirrorNodes = os.listdir(mirrorFull) + # move deleted files from mirrorDir to safeDir: + for node in mirrorNodes: + if node not in baseNodes: + self.synchronizeMove(mirrorDir, safeDir, relPath, node) + # copy new/changed files from baseDir to mirrorDir + for node in baseNodes: + source = baseFull + os.sep + node + target = mirrorFull + os.sep + node + if not os.path.exists(target): + if os.path.isdir(source): + if not base.FileHelper.distinctPaths(source, target): + self._logger.error('nested directories: {} / {} [{} / {}]'.format( + source, target, os.path.realpath(source), os.path.realpath(target))) + else: + shutil.copytree(source, target) + else: + self._logger.log('{} -> {}'.format(source, target), 4) + shutil.copy2(source, target) + else: + if base.FileHelper.fileType(source) != base.FileHelper.fileType(target): + self.synchronizeMove(mirrorDir, safeDir, relPath, node) + elif not os.path.isdir(source): + infoSource = os.lstat(source) + infoTarget = os.lstat(target) + if abs(infoSource.st_mtime - infoTarget.st_mtime) > 2: + self.synchronizeMove(mirrorDir, safeDir, relPath, node) + self._logger.log('{} => {}'.format(source, target), 4) + shutil.copy2(source, target) + infoTarget = os.lstat(target) + if infoSource.st_mtime != infoTarget.st_mtime or infoSource.st_size != infoTarget.st_size: + self._logger.error('copy failed: {}/{}: {}/{} {}/{}'.format( + relPath, node, infoSource.st_mtime, infoTarget.st_mtime, + 
infoSource.st_size, infoTarget.st_size)) + else: + prefix = relPath + os.sep if relPath != '' else '' + self.synchronizeOneDir(baseDir, mirrorDir, safeDir, prefix + node) + + def synchronizeMove(self, mirror, safe, relPath, node): + '''Moves a file/directory from mirror to safe: + @param mirror: the source directory + @param safe: the target directory + @param relPath: defines the effective paths (in mirror and safe) + @param node: the node (filename without path) of the source and target file + ''' + relPath2 = os.sep + relPath if relPath != '' else '' + source = mirror + relPath2 + os.sep + node + dirTarget = safe + relPath2 + if not os.path.exists(dirTarget): + os.makedirs(dirTarget, 0o777) + if not os.path.exists(dirTarget): + self._logger.error('cannot create: ' + dirTarget) + else: + target = dirTarget + os.sep + node + self._logger.log('moving {} => {}'.format(source, target), 3) + if os.path.exists(target): + self._logger.error('target exists in safe: {} => {}'.format(source, target)) + os.rename(target, target + '.' + str(time.time())) + if self._useRename: + os.rename(source, target) + elif os.path.isdir(source): + shutil.copytree(source, target) + else: + shutil.copy2(source, target) + + def testOrder(self, moreInteresting): + for ix in range(len(self._infos) - 1): + if not moreInteresting(self._infos[ix], self._infos[ix+1], True): + print(self._filenames[ix] + '\n' + self._filenames[ix + 1] + '\n') + +def usage(): + return """usage: dirtool [] + Offers some services in directory trees. + Note: a container is a directory or an archive (tar, zip). +GLOBAL_OPTS +GLOBAL_MODES +: + build-examples + prepares the system for executing the examples below + check [ + compares two directory trees + : + --max-differences= + the search is stopped if differences has been found + --blocksize= + files are read in this chunk size. 
Default: 16 MiByte + --exclude= + files and dirs matching this pattern are ignored (not compared) + -i or --ignore-case + used for --pattern and/or --exclude + --ignore-time + only different size or content will be counted as difference + --pattern= + only files and dirs matching this pattern are compared + -q or --quiet + only the summary is displayed + -s or opt == '--short' + the prefix of a difference notice is reduced to one char + -t or --test-content + the file content is inspected for differences + image-resize [--max-width=] [--max-height=] + resizes *.jpg or *.png images + info [ ... ] + displays a summary info about the given directories + largest [ ... ] + list the n youngest files in the directory tree + list [ ...] + show the metadata of the files of the container + : + --order-date-size + the displayed data: instead of + --byte-size or --mbyte-sizes + the size is displayed in MBytes (instead of "human readable" with different units + oldest [ ... ] + list the n youngest files in the directory tree(s) + smallest [ ... ] [--min-size=] + list the n smallest files (but >= min-size) in the directory tree + sync + copy modified/new files from to mirror. + no file from mirror will be deleted/replaced: instead it will be moved to safe + youngest [] [ ... ] + list the n youngest files in the directory tree(s) + : + -n= or --count= + number of displayed entries (replaces max-largest or max-youngest or max-oldest or max-smallest) + -d or --dir-only + the counters respects directory only + -f or --file-only + only files land in the youngest array + --max-depth= + the maximal nesting depth of directories. 
0: only the called directory is inspected + --max-largest= + the maximal length of the array for the largest files + --max-oldest= + the maximal length of the array for the oldest files + --max-smallest= + the maximal length of the array for the smallest files + --max-youngest= + the maximal length of the array for the youngest files + --pattern= + filename pattern, e.g. "*.png" +Example: +dirtool check /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 +dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 +dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 --ignore-time --short --test-content +dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 -s --exlude=sub.*2 "--pattern=[^u]*[1-3]" --ignore-case +dirtool info /usr/share/pyrshell/examples/dir1 --max-largest=2 --max-youngest=3 --file-only +dirtool info self._dir1 --count=0 --max-youngest=3 --max-oldest=4 --dir-only +dirtool -v4 largest /usr/share/pyrshell/examples/dir1 '--pattern=.*1.*' +dirtool -v4 youngest /usr/share/pyrshell/examples/dir1 --max-depth=0 +dirtool -v4 oldest /usr/share/pyrshell/examples/dir1 --file-only +dirtool latest /home/jonny -n5 +dirtool -v3 list /usr/share/pyrshell/unittest/data/example.tgz --dir-only +dirtool -v3 list /usr/share/pyrshell/unittest/data --order-date-size --file-only -mbytes-size +dirtool -v3 image-convert /pic /out --max-width=1920 --max-height=1080 +""" + +def main(argv): + '''The main routine. + @param argv: the program arguments, e.g. 
['/usr/local/bin/dirtool', 'run'] + ''' + appInfo = base.BaseTool.ApplicationInfo('dirtool', 'appl/DirTool.py', usage) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = DirTool(options) + (cmd, argv) = tool.handleStandardCommands(argv) + if cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd == 'build-examples': + tool.buildExamples() + elif cmd == 'check': + tool.check(argv) + elif cmd == 'compare': + tool.compare(argv) + elif cmd == 'info': + tool.dirInfo(argv) + elif cmd == 'largest': + tool.jobLargest(argv) + elif cmd == 'list': + tool.list(argv) + elif cmd == 'oldest': + tool.jobOldest(argv) + elif cmd == 'smallest': + tool.jobSmallest(argv) + elif cmd == 'snapshot': + tool.snapshot(argv) + elif cmd == 'sync': + tool.synchronize(argv) + elif cmd == 'youngest': + tool.jobYoungest(argv) + elif cmd == 'image-resize': + tool.imageResize(argv) + else: + tool.usage("unknown command: " + cmd) + base.BaseTool.setLatestTool(tool) + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/FtpTool.py b/appl/FtpTool.py new file mode 100755 index 0000000..e4233bd --- /dev/null +++ b/appl/FtpTool.py @@ -0,0 +1,176 @@ +#! /usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' + +import time +import sys +import os.path +sys.path.insert(0, '/usr/share/pyrshell') +import base.BaseTool +import base.JavaConfig +import net.FtpEngine + +class FtpTool (base.BaseTool.BaseTool): + '''Implements a command interpreter for FTP. + ''' + def __init__(self, globalOptions): + '''Constructor. + @param configurationBase: directory containing the configuration file ftptool.conf + @param logger: None: use the logger given by the configuration file + ''' + base.BaseTool.BaseTool.__init__(self, globalOptions, 'ftptool.conf') + self._client = None + + def example(self): + '''Builds an example configuration. 
+ @param options: an instance of GlobalOptions + ''' + exampleConfig = self._configDir + os.sep + 'ftptool.conf.example' + base.StringUtils.toFile(exampleConfig, 'log.file=/tmp/ftptool.log') + print('created: ' + exampleConfig) + subdir = self._configDir + os.sep + 'ftp.d' + base.BaseTool.BasicStatics.ensureDirectory(subdir) + exampleConfig = subdir + os.sep + 'example.conf' + base.StringUtils.toFile(exampleConfig, 'host=hamatoma.de\nport=21\nuser=jonny\ncode=Secret\n') + print('created: ' + exampleConfig) + + def setServer(self, name): + full = self._configDir + os.sep + 'ftp.d' + os.sep + name + '.conf' + self._client = None + if not os.path.exists(full): + self._logger.error('server {:s} is not configured: Please create {}'.format(name, full)) + else: + self._server = base.JavaConfig.JavaConfig(full, self._logger) + host = self._server.getString('host') + port = self._server.getString('port') + user = self._server.getString('user') + pw = self._server.getString('code') + if host == None or port == None or user == None or pw == None: + self._logger.error('missing FTP authority data for server {:s}. See {:s}'.format(name, full)) + else: + try: + port2 = int(port) + self._client = net.FtpEngine.FtpEngine(host, port2, user, pw, self._logger, self._verboseLevel) + except ValueError: + self._logger.error('port must be an integer: ' + port) + except Exception as exc: + self._logger.error('connection failed: {:s}:{:d} [{:s}]'.format(host, port2, str(exc))) + + def close(self): + '''Frees the resources. + ''' + if self._client != None: + self._client.close() + +def usage(): + '''Returns an info about usage + ''' + return """usage: ftptool [ [...]]" +GLOBAL_OPTS +GLOBAL_MODES +: + the name of a server which is defined in %etc%/ftp.d/.conf +: + du (or diskusage) [ []] + calculate file size (over all files and directories) + : start directory, default: '/' + : only directories lower this depth will be displayed. 
default: 0 + info + print welcome message and features + compare + compare a local file tree with an ftp file tree + : ftp start directory + : local start directory + lstree [...] + lists the directory infos of + rmtree [ [...]] + : deletes one or more directories recursive + : directories to remove +example: +ftptool du jonny / 3 +ftptool info contabo +ftptool rmtree contabo /storage /jonny/trash +""" + +def buildClient(configurationBase): + config = configurationBase + os.sep + 'ftptool.conf' + +def addServer(options, argv): + '''Adds a server configuration to the configuration directory. + @param options: an instance of GlobalOptions + @param argv: the arguments, [, , , [, ]] + ''' + if len(argv) < 4: + usage('missing argument(s): expected: []') + else: + configFile = options._configDir + os.sep + 'ftp.d' + os.sep + argv[0] + try: + port = 21 if len(argv) < 4 else argv[4] + base.StringUtils.toFile(configFile, 'host={:s}\nport={}\nuser={}\ncode={}\n'.format(argv[1], port, argv[2], argv[3])) + print('created: ' + configFile) + except ValueError: + print('+++ port must be an integer, e.g. 21, not ' + argv[4]) + +def main(argv): + appInfo = base.BaseTool.ApplicationInfo('ftptool', 'appl/FtpTool.py', usage) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = FtpTool(options) + if len(argv) > 1: + tool.setServer(argv[1]) + (cmd, argv) = tool.handleStandardCommands(argv) + if tool._client == None: + pass + elif cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd in ['du', 'diskusage']: + startDir = '/' if len(argv) <= 0 else argv[0] + tool._client._printDepth = -1 if len(argv) <= 1 else int(argv[1]) + (size, files, dirs) = tool._client.diskUsage(startDir, 0) + unit = 'm' if size < 1000000000 else 'g' + size2 = size / 1000000000.0 if unit == 'g' else size / 1000000.0 + digits = '9' if unit == 'g' else '6' + tool._logger.log(startDir + (": {:d} bytes [{:." 
+ digits + "f} {:s}b] files: {:d} dirs: {:d}").format(size, size2, unit, files, dirs)) + elif cmd in ['info']: + tool._client.serverInfo() + elif cmd in ['compare']: + if len(argv) < 2: + tool.usage('to few arguments') + elif not os.path.isdir(argv[1]): + tool.usage('not a directory: ' + argv[1]) + else: + (size, files, dirs) = tool._client._client.compare(argv[0], argv[1], 0) + unit = 'm' if size < 1000000000 else 'g' + size2 = size / 1000000000.0 if unit == 'g' else size / 1000000.0 + digits = '9' if unit == 'g' else '6' + print(server + (": {:d} bytes [{:." + digits + "f} {:s}b] files: {:d} dirs: {:d}").format(size, size2, unit, files, dirs)) + elif cmd in ['rmtree']: + if len(argv) < 1: + tool.usage('too few arguments: missing directory') + else: + for directory in argv: + tool.tool._client.removeTree(directory) + elif cmd in ['lstree']: + if len(argv) < 1: + tool.usage('too few arguments: missing directory') + else: + if len(argv) < 2: + depth = 9999 + else: + try: + depth = int(argv[1]) + except ValueError: + tool._logger.error('depth must be an integer: ' + argv[1]) + depth = -1 + if depth >= 0: + tool._client._maxDepth = depth + tool._client.listTree(argv[0], 0) + tool.close() + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/Monitor.py b/appl/Monitor.py new file mode 100755 index 0000000..892ed51 --- /dev/null +++ b/appl/Monitor.py @@ -0,0 +1,1184 @@ +#! /usr/bin/python3 +''' +Created on 27.04.2018 + +@author: hm +''' +import sys +import os.path +import re +import time +import datetime +import traceback +import pwd +sys.path.insert(0, '/usr/share/pyrshell') +import base.BaseTool +import base.LinuxUtils +import net.EMail +import net.HttpClient +import urllib + +IGNORE_CASE = re.I +class Property: + + def __init__(self, name, value=None): + '''Constructor. 
+ @param name: the property name + @param value: the value + ''' + self._id = name + self._value = value + + def copy(self, source): + '''Copies the properties from a template + @param source: the source, type: Property + ''' + if source._value != None: + self._value = source._value + + def dump(self, indent): + '''Dumps a property. + @param indent: indent level + @return: a string describing the property + ''' + if self._id == None: + self._id = None + if self._value == None: + value = '' + elif type(self._value) == float: + value = '{:.3f'.format(self._value) + elif type(self._value) == int: + value = str(self._value) + else: + value = self._value + return ' ' * indent + self._id + ': ' + value + "\n" + +class ObservedProperty: + + def __init__(self, name, warnLimit, errorLimit, receivers): + '''Constructor. + @param name: the name + @param warnLimit: a higher value raises a warning + @param errorLimit: a higher value raises an error + ''' + self._id = name + self._warnLimit = warnLimit + self._errorLimit = errorLimit + self._receivers = receivers + + def copy(self, source): + '''Copies the properties from a template + @param source: the source, type: Property + ''' + if source._warnLimit != None: + self._warnLimit = source._warnLimit + if source._errorLimit != None: + self._errorLimit = source._errorLimit + if source._receivers != None: + self._receivers = source._receivers + + def dump(self, indent): + '''Dumps a observed property. + @param indent: indent level + @return: a string describing the property + ''' + rc = ' ' * indent + self._id + ': ' + str(self._warnLimit) + ' ' + str(self._errorLimit) + ' "' + self._receivers._name + '"\n' + return rc + +class Container: + + def __init__(self, aType, name, parent, keyword = None): + '''Constructor. + @param aType: the container type: 'host', 'disk' + @param name: the name of the container. 
'': the default container + @param parent: type Monitor or another container + ''' + self._type = aType + self._name = name + self._properties = dict() + self._observed = dict() + self._parent = parent + self._keyword = keyword if keyword != None else aType.lower() + + def copy(self, source): + '''Copies the properties from a source + @param source: the source, type: Property + ''' + for prop in self._observed: + if prop in source._observed: + self._observed[prop].copy(source._observed[prop]) + + def dump(self, indent,): + '''Dumps an container. + @param indent: indent level + @return: a string describing the container + ''' + rc = ' ' * indent + self._type + ' "' + self._name + '":\n' + for key in sorted(self._properties): + rc += self._properties[key].dump(indent + 3) + for key in sorted(self._observed): + rc += self._observed[key].dump(indent + 3) + return rc + +class WebSite(Container): + def __init__(self, name, monitor): + '''Constructor. + @param name: name of the group + @param monitor: the parent with type Monitor + ''' + Container.__init__(self, 'WebSite', name, monitor, 'site') + self._properties = { + 'url': Property('url', ''), + } + +class ReceiverGroup(Container): + + def __init__(self, name, monitor): + '''Constructor. + @param name: name of the group + @param monitor: the parent with type Monitor + ''' + Container.__init__(self, 'ReceiverGroup', name, monitor) + self._properties = { + 'warning': Property('warning', ''), + 'error': Property('error', '') + } + +class Disk(Container): + + def __init__(self, name, host): + '''Constructor. 
+ @param name: name of the host, if empty default host is assumed + @param host: the parent with type Host + ''' + Container.__init__(self, 'Disk', name, host) + if '' not in host._parent._hosts: + defaultDisk = self + receiverGroup = ReceiverGroup('', host._parent) + else: + if name in host._parent._hosts['']._disks: + defaultDisk = host._parent._hosts['']._disks[name] + else: + defaultDisk = host._parent._hosts['']._disks[''] + receiverGroup = defaultDisk._properties['receivers'] + self._observed = { + 'used': ObservedProperty('used', '80%', '90%', receiverGroup), + } + self._properties = { + 'receivers': receiverGroup + } + self.copy(defaultDisk) + +class RaidDevice(Container): + def __init__(self, name, host): + '''Constructor. + @param name: the device name, e.g. 'md0' + @param host: the parent, type: Host + ''' + Container.__init__(self, 'RaidDevice', name, host, 'raid') + receiverGroup = host._properties['receivers'] + self._properties = { + 'receivers': receiverGroup, + 'raidtype': Property('raidtype'), + 'members': Property('members'), + 'blocks': Property('blocks') + } +class Host(Container): + + def __init__(self, name, monitor): + '''Constructor. 
+ @param name: name of the host, if empty default host is assumed + @param monitor: the parent with type Monitor + ''' + Container.__init__(self, 'Host', name, monitor) + receiverGroup = monitor._receiverGroups[''] + if name == '': + disk = Disk('', self) + else: + disk = monitor._hosts['']._disks[''] + self._disks = { + '': disk + } + self._raids = dict() + self._properties = { + 'address': Property('address'), + 'receivers' : receiverGroup, + 'interval' : Property('interval', 60) + } + self._observed = { + 'load1': ObservedProperty('load1', 10, 20, receiverGroup), + 'load5': ObservedProperty('load5', 10, 20, receiverGroup), + 'load10': ObservedProperty('load10', 10, 20, receiverGroup), + 'processes': ObservedProperty('processes', 500, 1000, receiverGroup), + 'swap': ObservedProperty('swap', 100.01, 100.01, receiverGroup), + } + if name != '' and '' in monitor._hosts: + self.copy(monitor._hosts['']) + + def copy(self, source): + '''Copies the properties from a source + @param source: the source, type: Disk + ''' + Container.copy(self, source) + for name in self._disks: + if name in source._disks: + self._disks[name].copy(source._disks[name]) + + def dump(self, indent): + '''Dumps a host. + @param indent: indent level + @return: a string describing the receiver group + ''' + rc = "===\n" + Container.dump(self, 0) + for disk in sorted(self._disks): + rc += self._disks[disk].dump(indent + 3) + return rc + +class SmtpHost (Container): + + def __init__(self, name, monitor): + '''Constructor. + @param name: the name of the smtp host + @param monitor: the parent, type: Monitor + ''' + Container.__init__(self, 'SmtpHost', name, monitor) + self._properties = { + 'host': Property('host'), + 'port' : Property('port', '587'), + 'sender' : Property('sender'), + 'user' : Property('user'), + 'code' : Property('code'), + 'tls' : Property('tls', 'True') + } + +class Monitor (base.BaseTool.BaseTool): + '''Implements a monitor for hosts to detect service faults. 
+ ''' + + def __init__(self, globalOptions, additionalConfigDir = None): + '''Constructor. + @param globalOptions: a instance of GlobalOptions + ''' + config = '/etc/pyrshell/monitor.d' if additionalConfigDir == None else additionalConfigDir + base.BaseTool.BasicStatics.ensureDirectory(config) + base.BaseTool.BaseTool.__init__(self, globalOptions, 'pymonitor.conf', False, config) + self._start = time.time() + self._loops = 0 + self._openProblems = dict() + self._openProblems = [] + # self._configDir = dirConfiguration + self._reloadRequestFile = '/tmp/{}.reload.request'.format(globalOptions._appInfo._applicationName) + self._readAllConfigs() + self._lastStatus = dict() + + def _readAllConfigs(self): + self._sites = dict() + self._hosts = dict() + self._smtpHosts = dict() + self._receiverGroups = dict() + self._receiverGroups[''] = ReceiverGroup('', self) + defaultConf = self._additionalConfigDir + os.sep + 'default.conf' + if os.path.exists(defaultConf): + self.readConfigurationFile(defaultConf) + else: + self._hosts[''] = Host('', self) + nodes = os.listdir(self._additionalConfigDir) + for node in nodes: + if node.endswith('.conf') and node != 'default.conf' and node != 'pymonitor.conf': + self.readConfigurationFile(self._additionalConfigDir + os.sep + node) + + def checkReload(self): + '''Tests whether a reload request exists. + If yes the configuration will be read again. + ''' + fn = self._configDir + '/reload.request' + if os.path.exists(self._reloadRequestFile): + self.reload() + os.unlink (self._reloadRequestFile) + if os.path.exists(self._reloadRequestFile): + self._logger.error('cannot delete ' + fn) + + def createSiteServer(self, argv): + '''Creates the configuration for an observed site on the server side. + @param argv: the program arguments, e.g. 
['--nginx'] + ''' + webserver = 'nginx' + argv, options = self.splitArgsAndOpts(argv) + ip = None + domain = None + overwrite = False + for opt in options: + if opt == '--nginx': + webserver = 'nginx' + elif opt == '--apache': + webserver = 'apache' + elif opt == '--overwrite': + overwrite = True + elif opt.startswith('--ip='): + ip = opt[5:] + elif opt.startswith('--domain='): + domain = opt[9:] + else: + self.usage('unknown option: ' + opt) + if domain == None: + domain = base.BaseTool.BasicStatics.hostname(True) + if ip == None: + ip = self.publicIp() + if ip == None: + self.usage('cannot detect public ip. Please use the --ip= option') + baseDir = self.getTarget('/var', 'www' + os.sep + domain) + self.ensureDirectory(baseDir) + fn = baseDir + os.sep + 'index.html' + base.StringUtils.toFile(fn, '

Ups. Verirrt?

\n') + fn = baseDir + os.sep + 'index.php' + base.StringUtils.toFile(fn, '

Ups. Verirrt?

";\n') + fn = baseDir + os.sep + 'domain.txt' + base.StringUtils.toFile(fn, domain + '\n') + if webserver == None: + if os.path.isdir('/etc/nginx'): + webserver = 'nginx' + elif os.path.isdir('/etc/apache2'): + webserver = 'apache' + if webserver == 'nginx': + available = self.getTarget('/etc/nginx', 'sites-available') + enabled = os.path.dirname(available) + os.sep + 'sites-enabled' + fn = available + os.sep + domain + if os.path.exists(fn) and not overwrite: + self.usage('{} exists. Use --overwrite to overwrite'.format(fn)) + base.StringUtils.toFile(fn, '''server {} + listen 80; + server_name {} {}; + root {}; + location / {} + allow all; + {} +{} +'''.format('{', domain, ip, baseDir, '{', '}', '}')) + linkTarget = enabled + os.sep + domain + if os.path.islink(linkTarget): + self._logger.log('deleting ' + linkTarget, 2) + os.unlink(linkTarget) + self._logger.log('creating symlink ' + linkTarget, 2) + os.symlink('../sites-available/' + domain, linkTarget) + if self._isRoot: + self._processHelper.execute(['/bin/systemctl', 'reload', 'nginx'], True) + elif webserver == 'apache': + available = self.getTarget('/etc/apache2', 'sites-available') + enabled = os.path.dirname(available) + os.sep + 'sites-enabled' + fn = available + os.sep + domain + '.conf' + if os.path.exists(fn) and not overwrite: + self.usage('{} exists. 
Use --overwrite to overwrite'.format(fn)) + base.StringUtils.toFile(fn, ''' + ServerName {} + ServerAlias {} + ServerAdmin webmaster@localhost + DocumentRoot {} + + AllowOverride all + Require all granted + Order allow,deny + allow from all + + +'''.format(domain, ip, baseDir, baseDir)) + linkTarget = enabled + os.sep + domain + '.conf' + if os.path.islink(linkTarget): + self._logger.log('deleting ' + linkTarget, 2) + os.unlink(linkTarget) + self._logger.log('creating symlink ' + linkTarget, 2) + os.symlink('../sites-available/' + domain + '.conf', linkTarget) + fn = baseDir + os.sep + 'index.html' + if self._isRoot: + self._processHelper.execute(['/bin/systemctl', 'reload', 'apache2'], True) + else: + self.usage('unknown webserver: ' + webserver) + + def dump(self): + '''Dumps a monitor. + @return: a string describing the monitor + ''' + rc = '' + for group in sorted(self._receiverGroups): + rc += self._receiverGroups[group].dump(0) + for host in sorted(self._smtpHosts): + rc += self._smtpHosts[host].dump(0) + for host in sorted(self._hosts): + rc += self._hosts[host].dump(0) + for site in sorted(self._sites): + rc += self._sites[site].dump(0) + return rc + + def diskLimitReached(self, limit, total, free): + '''Tests whether a limit is reached. + @param limit: in bytes or as percent + @param total: total amount of disk bytes + @param free: free disk bytes + @return: True: limit is reached + ''' + limitBytes = int (total * int(limit[0:-1]) / 100) if limit.endswith('%') else int(limit) + rc = limitBytes < int(total - free) + return rc + + def example(self): + '''Prints a configuration example to files and prints / returns a reference notice. + @param doPrint: True: the reference notice will be displayed. + @return: the reference notice, e.g. 
'created: /etc/pyrshell/monitor.d/default.example' + ''' + content = '''# Example config for pymonitor: +log=/var/log/local/pymonitor.log +''' + self.storeExample(content) + contentDefault = ''' +receivergroup "" { + warning: hm.neutral@gmx.de + error: hm.neutral@gmx.de +} +receivergroup "std" { + warning: hm.neutral@gmx.de + error: hm.neutral@gmx.de +} +smtphost "" { + host: smtp.gmx.de + port: 587 + sender: hm.neutral@gmx.de + user: hm.neutral@gmx.de + code: sEcReT + tls: True +} +host "" { + receivers: std + disk "" { + receivers: std + used: 85% 90% + } +} +''' + self.storeExample(contentDefault, 'default.conf', self._additionalConfigDir) + content = '''host "{}" { +address: localhost +interval: 60 +''' + host = base.BaseTool.BasicStatics.hostname(False) + content = content.replace('{}', host) + diskInfos = base.LinuxUtils.diskFree() + diskDescription = '' + for info in diskInfos: + # info: [ name, total, free ] + total = 1 if info[1] == 0 else info[1] + warnLimit = (100 * (total - info[2]) / total + 100) / 2 + errorLimit = (warnLimit + 100) / 2 + diskDescription += '\tdisk "' + info[0] + '" {\n' + diskDescription += '\t\tused: {:.0f}% {:.0f}%\n'.format(warnLimit, errorLimit) + diskDescription += '\t}\n' + content += diskDescription + infos = base.LinuxUtils.mdadmInfo() + for info in infos: + # [name, raidType, members, blocks, status + content += '\traid "' + info[0] + '" {\n' + content += '\t\traidtype: ' + info[1] + '\n' + content += '\t\tmembers: ' + info[2] + '\n' + content += '\t\tblocks: {:d}\n'.format(info[3]) + content += '\t}\n' + + infos = base.LinuxUtils.load() + content += '\tload1: {:.1f} {:.1f}\n'.format(infos[0] * 10, infos[0] * 20) + content += '\tload5: {:.1f} {:.1f}\n'.format(infos[1] * 10, infos[1] * 20) + content += '\tload10: {:.1f} {:.1f}\n'.format(infos[2] * 10, infos[2] * 20) + content += '\tprocesses: {:.0f} {:.0f}\n'.format(int(infos[4] * 1.1), int(infos[4] * 2)) + infos = base.LinuxUtils.memoryInfo() + total = infos[2] if infos[2] != 
0 else 1 + swapUsage = (100 + infos[3] * 100 / total) / 2 + content += '\tswap: {}% {}%\n'.format(swapUsage, (swapUsage+100)/2) + content += '}\n' + self.storeExample(content, host + '.conf', self._additionalConfigDir) + + def getLocalHost(self): + '''Returns the name of the host describing the localhost. + @return: the name of the local host + ''' + rc = None + for host in self._hosts: + if host != '': + if host == 'localhost': + rc = host + else: + rc = host + break + return rc + + def observe(self, hostname='localhost'): + '''Observes a host and return the new errors/warnings. + @param hostname: name of the host to observer: used for the configuration + @return: a tuple (newErrors, closedErrors) + ''' + self._logger.log('observe...', 3) + self._currentHostname = hostname + rc = self.observeCore(hostname) + self.observeDisks(hostname) + self.observeRaid(hostname) + rc += self.observeSites(hostname) + currentProblems = dict() + newProblems = dict() + closedProblems = [] + for message in rc: + parts = message.split('|') + key = parts[0] + ':' + parts[1] + if key in self._openProblems: + currentProblems[key] = self._openProblems[key] + else: + newProblems[key] = message + for key in self._openProblems: + if key not in newProblems and key not in currentProblems: + closedProblems.append(self._openProblems[key]) + self._openProblems = currentProblems + self._openProblems.update(newProblems) + return (newProblems.values(), closedProblems) + + def observerBuildMessage(self, section, location, isWarning, limitType, current, observed): + '''Builds the message used for the transporting all infos about an error/warning. + @param section: identifies the caller, e.g. 'core' + @param location: the part of the message specific for the caller, e.g. 
'core detection' + @param isWarning: False: describes an error + @param limitType: load1,load5,load10 or swap + @param current: the current value + @param observed: the property describing the raised limit, type: ObservedProperty + @return: list of notice info: notice_type|property_key|message|receivers|time, 'E|localhost:disk:/home|disk usage: free: 0 of 512.000 GiByte + ''' + receivers = '' + if observed == None: + limit = '' + else: + limit = observed._warnLimit if isWarning else observed._errorLimit + if observed._receivers != None: + receivers = observed._receivers._properties['warning' if isWarning else 'error']._value + msg = 'S' if isWarning == None else ('W' if isWarning else 'E') + msg += '|{}:{}:{}|{} '.format(self._currentHostname, section, limitType, location) + msg += 'warning' if isWarning else 'error' + msg += ' ' + str(current) + if limit != None and limit != '': + msg += ' Limit: ' + str(limit) + msg += '|' + receivers + '|' + "{:.0f}".format(time.time()) + return msg + + def observeCore(self, hostname='localhost'): + '''Checks whether a load state and swap usage should be noticed (warning or error). + @return: list of notice info: notice_type|property_key|message|receivers|time + e.g. 
'E|localhost:load5|load is to heavy: 5.10|a@bc.de|147382902.3928302 + ''' + def buildMessage(isWarning, limitType, current): + return self.observerBuildMessage('core', 'core detection', isWarning, limitType, current, host._observed[limitType]) + + def checkOne(rc, current, limitType): + if float(current) >= float(host._observed[limitType]._errorLimit): + rc.append(buildMessage(False, limitType, current)) + elif float(current) >= float(host._observed[limitType]._warnLimit): + rc.append(buildMessage(True, limitType, current)) + else: + self._lastStatus[host._name + ':core:' + limitType] = buildMessage(None, limitType, current) + + self._logger.log('observeCore...', 3) + rc = [] + if hostname in self._hosts: + infos = base.LinuxUtils.load() + host = self._hosts[hostname] + checkOne(rc, infos[0], 'load1') + checkOne(rc, infos[1], 'load5') + checkOne(rc, infos[2], 'load10') + checkOne(rc, infos[4], 'processes') + infos = base.LinuxUtils.memoryInfo() + total = infos[2] if infos[2] != 0 else 1 + usagePercent = 0 if total <= 1 else (total - infos[3]) * 100 / total + checkOne(rc, usagePercent, 'swap') + + return rc + + def observeDisks(self, hostname='localhost'): + '''Checks whether a disk usage should be noticed (warning or error). 
+ @return: list of notice info: notice_type|property_key|message|receivers|time, 'E|localhost:disk:/home|disk usage: free: 0 of 512.000 GiByte + ''' + + def buildMessage(isWarning, diskName, total, free, limitType, observed): + total = 1 if total < 1E-6 else total + current = 'free: {:.3f} GiByte ({:.1f}%) of {:.3f} GiByte'.format(free / 1024.0 / 1024 / 1024, free * 100.0 / total, total / 1024.0 / 1024 / 1024) + return self.observerBuildMessage('disk', 'disk usage', isWarning, limitType, current, observed) + + self._logger.log('observeDisks...', 3) + rc = [] + diskInfos = base.LinuxUtils.diskFree() + if hostname in self._hosts: + host = self._hosts[hostname] + for info in diskInfos: + # info: [ name, total, free ] + if info[0] not in host._disks: + # rc.append('E|'+ hostname + ':disk:' + info[0] + '|disk not found|' + host._properties['receivers']._properties['error']._value) + pass + else: + disk = host._disks[info[0]] + if self.diskLimitReached (disk._observed['used']._errorLimit, info[1], info[2]): + msg = buildMessage(False, info[0], info[1], info[2], disk._name, disk._observed['used']) + self._logger.log(msg, 2) + rc.append(msg) + elif self.diskLimitReached (disk._observed['used']._warnLimit, info[1], info[2]): + msg = buildMessage(True, info[0], info[1], info[2], disk._name, disk._observed['used']) + self._logger.log(msg, 2) + rc.append(msg) + else: + self._lastStatus[host._name + ':disk:' + info[0]] = buildMessage(None, info[0], info[1], info[2], disk._name, disk._observed['used']) + return rc + + def observeRaid(self, hostname='localhost'): + self._logger.log('observeDisks...', 3) + '''Tests whether a raid is broken. + @return: list of notice info: notice_type|property_key|message|receivers|time, + e.g. 
'E|localhost:raid|raid is broken|a@bc.de|147382902.3928302' + ''' + + def buildMessage(isWarning, limitType, raid, message): + if raid._properties['members']._value != None: + message += ' members: ' + raid._properties['members']._value + if raid._properties['blocks']._value != None: + message += ' blocks: ' + raid._properties['blocks']._value + return self.observerBuildMessage('raid', 'type: {} {}'.format(raid._properties['raidtype']._value, message), isWarning, limitType, '', None) + + self._logger.log('observeRaid...', 3) + infos = base.LinuxUtils.mdadmInfo() + host = self._hosts[hostname] + raidMap = dict() + rc = [] + for info in infos: + raidMap[info[0]] = info + for name in host._raids: + raid = host._raids[name] + if raid._name not in raidMap: + rc.append(buildMessage(False, raid._name, raid, 'missing raid device')) + else: + [name, raidType, members, blocks, status] = raidMap[raid._name] + if status == 'recover': + rc.append(buildMessage(True, raid._name, raid, 'rebuilding the raid')) + elif status == 'broken': + rc.append(buildMessage(False, raid._name, raid, 'raid is broken')) + else: + self._lastStatus[host._name + ':raid:' + raid._name] = buildMessage(None, raid._name, raid, 'OK') + return rc + + def observeSites(self, hostname): + '''Tests whether a site is reachable. + @param hostname: the hostname of 'localhost' + @return: list of notice info: notice_type|property_key|message|receivers|time, + e.g. 
'E|localhost:site:https://wiki.example.com|site is not reachable|a@bc.de|147382902.3928302' + ''' + if self._verboseLevel >= 3: + self._logger.log('observeSites...') + rc = [] + for site in self._sites: + url = self._sites[site]._properties['url']._value + client = net.HttpClient.HttpClient(self._verboseLevel, self._logger) + content = client.getContent(url, 1) + # 404 returns an empty bytes object + if content == None or content == '' or content == b'' or content.find(b'404') >= 0: + receivers = self._hosts[hostname]._properties['receivers']._properties['error']._value + msg = 'E|{}:sites:{}|site is not reachable|{}|{:.0f}'.format(hostname, site, receivers, time.time()) + rc.append(msg) + else: + self._lastStatus['{}:sites:{}'.format(hostname, site)] = 'S|{}:sites:{}|site is running||{}'.format(hostname, site, time.time()) + client.close() + return rc + + def readConfigurationFile(self, filename): + '''Reads a configuration file. + @param filename: file to read + Example of a configuration file: + host "localhost" { + address: localhost; + disk "/" { + used: 80% 90% + } + load1: 3.0 5.0 + } + ''' + + def _error(msg): + '''Puts an error with context information. + @param msg: the error message + ''' + self._logger.error('{:s}-{:d}: {:s}\n{:s}'.format(filename, lineNo, msg, line)) + + def _checkEmails(emails): + '''Tests a list of emails. 
+ @param emails: a string with email addresses separated by blanks + @return: True: emails correct + ''' + rc = True + for item in emails.split(): + if not reEmailAddress.match(item): + _error('invalid email address: ' + item) + rc = False + return rc + + def _observedValue(name, value): + rc = value[0:-1] if name == 'swap' and value[-1] == '%' else value + return rc + + with open(filename, 'r') as fp: + lineNo = 0 + containerStack = [] + currentContainer = None + reReceiverGroup = re.compile(r'receivergroup\s+"([^"]*)"\s*\{$') + reContainer = re.compile(r'(site|host|disk|raid|smtphost)\s+"([^"]*)"\s*\{$') + reProperty = re.compile(r'(\w+):\s*(.+)$') + reEmailAddress = re.compile(r'[-+\w.=!]+@[\w.]+[.][a-zA-z]+$') + for line in fp: + lineNo += 1 + line = line.strip() + if line == '' or line.startswith('#'): + continue + if line == '}': + if currentContainer == None: + _error('unexpected "}"') + else: + containerStack.pop() + currentContainer = None if len(containerStack) == 0 else containerStack[-1] + continue + matcher = reContainer.match(line) + if matcher: + aType, name = matcher.group(1), matcher.group(2) + if aType == 'host' or aType == 'smtphost' or aType == 'site': + if currentContainer != None: + _error('nested definition of ' + aType) + else: + if aType == 'host': + currentContainer = Host(name, self) + self._hosts[name] = currentContainer + elif aType == 'site': + currentContainer = WebSite(name, self) + self._sites[name] = currentContainer + else: + currentContainer = SmtpHost(name, self) + self._smtpHosts[name] = currentContainer + containerStack.append(currentContainer) + elif aType == 'disk': + if currentContainer == None: + _error('disk not inside a host definition') + elif currentContainer._keyword != 'host': + _error('disk not inside a host definition. 
Found: ' + currentContainer._keyword) + else: + disk = Disk(name, currentContainer) + currentContainer._disks[name] = disk + currentContainer = disk + containerStack.append(disk) + elif aType == 'raid': + if currentContainer == None: + _error('raid not inside a host definition') + elif currentContainer._keyword != 'host': + _error('raid not inside a host definition. Found: ' + currentContainer._keyword) + else: + raid = RaidDevice(name, currentContainer) + currentContainer._raids[name] = raid + currentContainer = raid + containerStack.append(raid) + else: + _error('unknown container type: ' + aType) + continue + matcher = reProperty.match(line) + if matcher: + name, propValue = matcher.group(1), matcher.group(2) + if currentContainer == None: + _error('property outside a container') + elif name not in currentContainer._properties and name not in currentContainer._observed: + _error('unknown property ' + name) + else: + if name in currentContainer._properties: + if name == 'receivers': + if propValue in self._receiverGroups: + currentContainer._properties[name] = self._receiverGroups[propValue] + else: + _error('unknown receivergroup: ' + propValue) + elif (name != 'error' and name != 'warning') or _checkEmails(propValue): + currentContainer._properties[name]._value = propValue + if name == 'address' and propValue != 'localhost' and not propValue.startswith('127.'): + currentContainer._name = propValue + else: + values = propValue.split() + if len(values) == 1: + currentContainer._observed[name]._errorLimit = _observedValue(name, values[0]) + currentContainer._observed[name]._receivers = currentContainer._properties['receivers'] + elif len(values) == 2: + currentContainer._observed[name]._warnLimit = _observedValue(name, values[0]) + currentContainer._observed[name]._errorLimit = _observedValue(name, values[1]) + currentContainer._observed[name]._receivers = currentContainer._properties['receivers'] + else: + if values[2] not in self._receiverGroups: + 
_error('unknown receivergroup: ' + values[2]) + else: + currentContainer._observed[name]._warnLimit = _observedValue(name, values[0]) + currentContainer._observed[name]._errorLimit = _observedValue(name, values[1]) + currentContainer._observed[name]._receivers = self._receiverGroups[values[2]] + + continue + matcher = reReceiverGroup.match(line) + if matcher: + if currentContainer != None: + _error('receiver group definition inside a container') + else: + group = ReceiverGroup(matcher.group(1), self) + self._receiverGroups[group._name] = group + currentContainer = group + containerStack.append(group) + continue + _error('unexpected input') + if len(containerStack) > 0: + _error('missing "}"') + + def reload(self): + '''Reads the configuration again. + ''' + self._logger.log('reloading configuration...') + self._readAllConfigs() + hostname = base.BaseTool.BasicStatics.hostname(False) + subject = hostname + ': reloaded' + duration = time.time() - self._start + start = datetime.datetime.fromtimestamp(self._start) + currentState = '' + states = list(self._lastStatus.values()) + states.sort() + for state in states: + info = state.split('|') + currentState += info[1] + ': ' + info[2].replace(' error', '') + '\n' + body = '{}\n\nRunning since: {} / {:.0f}d{:.0f}h{:.0f}m loops: {}\n\nOpen problems: {}\n\n{}\n\nCurrent state:\n{}\n'.format(subject, + start.strftime('%Y.%m.%d-%H:%M:%S'), + duration // 86400, duration % 86400 / 3600, duration % 3600 / 60, + self._loops, + len(self._openProblems), '\n'.join(self._openProblems), + currentState) + self.sendEmail(subject, body) + + def reloadRequest(self): + '''Requests a reload of the configuration of the running service. 
+ ''' + base.StringUtils.toFile(self._reloadRequestFile, '') + os.chmod(self._reloadRequestFile, 0o666) + entry = pwd.getpwnam('pymonitor') + uid = self._configuration.getInt('uid', None if entry == None else entry.pw_uid) + if self._isRoot and uid != None: + os.chown(self._reloadRequestFile, uid, uid) + + def run(self, argv): + '''A never ending loop with regulary started observervations. + @param hostname: the hostname to find the matching configuration + ''' + hostname = base.BaseTool.BasicStatics.hostname(False) + interval=60 + if len(argv) > 0: + hostname = argv[0] + if len(argv) > 1: + interval = self.integerArgument(argv[1], 60) + if hostname not in self._hosts: + self._logger.error('unknown host: ' + hostname) + else: + try: + time.sleep(10) + host = self._hosts[hostname] + interval = int(host._properties['interval']._value) + if interval < 1: + interval = 1 + self._logger.log('starting loop (interval: {})...'.format(interval)) + # seconds since midnight + lastClear = time.time() % 86400 + self._loops = 0 + while True: + self._loops += 1 + current = time.time() % 86400 + if current < lastClear: + # day has changed + # enforce new error messages on long term errors + self._openProblems.clear() + lastClear = current + self.checkReload() + [newProblems, closedProblems] = self.observe(hostname) + for key in newProblems: + self.sendStatusEmail(key, True) + for key in closedProblems: + self.sendStatusEmail(key, False) + time.sleep(interval) + except Exception: + exc_type, exc_value, exc_traceback = sys.exc_info() + self._logger.error(''.join(traceback.format_exception(exc_type, exc_value, exc_traceback, 8))) + self._logger.log('daemon finished') + + def sendStatusEmail(self, error, isProblem): + '''Sends one or many emails. 
+ @param errors: a list of error information + @param isProblem: True: the errors have been started False: the errors have been finished + ''' + [aType, key, message, recipients, theTime] = error.split('|') + if len(recipients) > 0: + aType2 = 'error' if aType == 'E' else 'warning' + aDate = datetime.datetime.fromtimestamp(int(float(theTime))) + dateString = aDate.strftime('%d.%m.%Y %H:%M') + [host, scope, dummy] = key.split(':') + if isProblem: + subject = '[PROBLEM] ' + key + ' is on ' + aType2 + text = '''***** Service Monitoring on {:s} ***** +Scope: {:s} +Info: {:s} +Service: {:s} +When: {:s} +'''.format(key, host, message, scope, dateString) + else: + subject = '[RECOVERY] ' + key + ' is ok' + text = '''***** Service Monitoring on {:s} ***** +Scope: {:s} +Recovered from: {:s} +Service: {:s} +When: {:s} - {:s} +'''.format(key, host, message, scope, dateString, datetime.datetime.now().strftime('%d.%m.%Y %H:%M')) + if key in self._lastStatus: + info = self._lastStatus[key].split('|') + text += ''' +current status: {} +from {} +'''.format(info[2], time.strftime('%Y.%m.%d-%H:%M:%S', time.localtime(float(info[4])))) + self.sendEmail(subject, text, recipients) + + def sendEmail(self, subject, text, recipients = None): + '''Sends an email. 
+ @param subject: the subject of the email + @param isProblem: True: the errors have been started False: the errors have been finished + ''' + email = net.EMail.EMail(subject, text) + smtp = self._smtpHosts[''] + if recipients == None: + recipients = self._receiverGroups['']._properties['warning']._value + if recipients == None or recipients == '': + self._logger.error('missing recipients in sendEmail(): subject: {} text:\n{}'.format(subject, text)) + else: + parts = recipients.split(' ') + email.setSmtpLogin(smtp._properties['host']._value, smtp._properties['port']._value, + smtp._properties['user']._value, smtp._properties['code']._value, smtp._properties['tls']._value == 'True') + sender = smtp._properties['sender']._value + cc = None if len(parts) < 2 else parts[1:] + #self._logger.debug('Email: To: {} CC: {} host: {} port: {} user: {} TLS: {}'.format( + # parts[0], '' if cc == None else ' '.join(cc), + # smtp._properties['host']._value, smtp._properties['port']._value, smtp._properties['user']._value, + # smtp._properties['tls']._value)) + try: + email.sendTo(sender, parts[0], cc) + except Exception as exc: + smtpArgs = smtp._properties['host']._value + smtp._properties['port']._value, smtp._properties['user']._value + smtp._properties['code']._value + smtp._properties['tls']._value + self._logger.error('sendmail [{}] to {} failed: {}\n{}\n{}'.format(smtpArgs, recipients, str(exc), subject, text)) + self._logger.debug('email sent to ' + recipients) + + def site(self, argv): + '''Displays the configuration of an observed website. + @param argv: program arguments, e.g. 
['--scan'] + ''' + def _handleUrl(url, lines, logger): + if not url.startswith('http'): + url = 'http://' + url + client = net.HttpClient.HttpClient(self._verboseLevel, logger) + url2 = client.handleRequest(url, 'HEAD', 10) + if client._response != None and client._response.status >= 400 and client._response.status < 500: + url += '/works' + url2 = client.handleRequest(url, 'HEAD', 10) + if client._response == None or client._response.status != 200: + logger.error('site not available: ' + url) + else: + parts = urllib.parse.urlparse(url2) + domain = parts.netloc + ix = domain.find(':') + if ix > 0: + domain = domain[0:ix] + output = 'site "' + domain + '" {\n\turl: ' + url2 + '\n}\n' + logger.log(output) + lines.append(output) + + def _scanNginx(filename, lines, logger): + rexprPort = re.compile(r'^\s*listen\s+[sl\s]*(\d+)', IGNORE_CASE) + rexprServer = re.compile(r'^\s*server_name\s+(.*);', IGNORE_CASE) + with open(filename, "r") as fp: + lastPort = 80 + lastIsSsl = False + bestUrl = None + url = None + for line in fp: + matcher = rexprPort.match(line) + if matcher: + lastPort = int(matcher.group(1)) + lastIsSsl = line.lower().find('ssl') > 0 + continue + matcher = rexprServer.match(line) + if matcher: + names = matcher.group(1).split() + url = 'http' + isHttps = lastIsSsl or (lastPort >= 400 and lastPort < 500) + if isHttps: + url += 's' + port = '' if lastPort == 80 or lastPort == 443 else ':' + str(lastPort) + url += '://' + names[0] + port + if bestUrl == None: + bestUrl = url + elif isHttps: + bestUrl = url + break + if bestUrl: + _handleUrl(bestUrl, lines, logger) + + argv, options = self.splitArgsAndOpts(argv) + done = False + lines = [] + for opt in options: + if opt == '--scan': + aDir = self.getSource('/etc/nginx/sites-enabled') + if not os.path.isdir(aDir): + self.usage('--scan: not a directory: ' + aDir) + files = os.listdir(aDir) + for node in files: + _scanNginx(aDir + '/' + node, lines, self._logger) + done = True + else: + self.usage('unknown 
option: ' + opt) + if not done: + if len(argv) < 1: + self.usage('site: missing ') + else: + for url in argv: + _handleUrl(url, lines, self._logger) + base.BaseTool.setResult(lines) + + def test(self, argv): + '''Tests the configuration. + @param argv: program arguments + ''' + dump = self.dump() + fn = '/tmp/{}.test.dump.txt'.format(self._globalOptions._appInfo._applicationName) + with open(fn, "w") as fp: + fp.write(dump) + print('=== dump stored in ' + fn) + # notice_type|property_key|message|receivers|time + receivers = self._receiverGroups['']._properties['warning']._value + now = time.time() + host = self.getLocalHost() + msg = 'W|{}:testmessage:emailtest|This is only a test to check email sending capability|{}|{}'.format(host, receivers, now) + self.sendStatusEmail(msg, False) + +def usage(): + '''Returns an info about usage. + ''' + return '''Usage: monitor [] +GLOBAL_OPTS +GLOBAL_MODES +: + create-site-server [] + creates the configuration for sites to observe + : + --domain= + used for the virtual server, e.g. hawk.hamatoma.de + --ip= + the public ip, e.g. 217.0.3.99 + --nginx + the configuration is created for the webserver NGINX. This is the default behaviour + --overwrite + the existing configuration will be overwritten + daemon + starts a never ending loop for monitoring + : + -v + -v or --verbose-level= + Verbose level: 0: no tracing 1: summary 2: standard 3: fine Default: 0 + -c or --configuration-directory= + configuraton directory with *.conf files. + Default: os.environ['MONTITOR_CONFIG'] or '/etc/pyrshell/monitor.d' + --host= + host to observe. Default: os.environ['MONTITOR_HOST'] or + -l or --log= + file for logging output. + Default: os.environ['MONTITOR_LOG'] or /var/log/local/.log + --application= + the name of the application. Default: pymonitor + example [] + prints an example configuration file to a file or stdout + install + installs the daemon as a systemd service + : + --application= + the name of the application. 
Default: pymonitor + --user= + the daemon runs as this user. Default: or 'pymonitor' + --group= + the daemon runs under this group. Default: or 'pymonitor' + --host= + the name of the host used for the daemon. Must be defined in configuration + Default: localhost + --no-auto-start + the service does not start at boot time (systemctl disable ) + -l or --log= + file for logging output (of the daemon). + Default: os.environ['MONTITOR_LOG'] or /var/log/local/.log + uninstall + --purge + remove configuration files too + --application= + the name of the application. Default: 'pymonitor' + --hold-user + the user will not be deleted. + Note: the user will be deleted only if its name is equal to the application + --hold-group + the user will not be deleted + Note: the group will be deleted only if its name is equal to the application + site [ ...] + prints the configuration for + : + the universal resource locator, e.g. https://wiki.hamatoma.de + site --scan + prints configuration filtered from NgInx configuration files in + Default: '/etc/nginx/sites-enabled' + test + tests the configuration files. + : + -c or --configuration-directory= + configuraton directory with *.conf files. + Default: os.environ['MONTITOR_CONFIG'] or '/etc/pyrshell/monitor.d' + -l or --log= + file for logging output. Default: /var/log/local/.log + --email= + email address for email sending test. + Default: warning part of the default ReceiverGroup +Examples: +pymonitor -v3 daemon +pymonitor reload +pymonitor -v3 sites --scan +pymonitor -v3 create-site-server --ip=208.33.99.5 --domain=gustl.example.com --overwrite +pymonitor -v3 create-site-server --apache --overwrite +pymonitor -v3 site http://life.sky.infeos.de/domain.txt +''' + +def main(argv): + '''The main routine. + @param argv: the program arguments, e.g. ['/usr/local/bin/pymonitor', 'run'] + ''' + serviceInfo = base.BaseTool.ServiceInfo('pymonitor', 'pymonitor', 'pymonitor', + 'A monitor for system resources, e.g. 
disk usage.', 'MONITOR', None) + appInfo = base.BaseTool.ApplicationInfo('pymonitor', 'appl/Monitor.py', usage, serviceInfo) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = Monitor(options) + if len(argv) == 0 and 'MONITOR_APPL' in os.environ: + argv = ['daemon'] + (cmd, argv) = tool.handleStandardCommands(argv) + if cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd == 'create-site-server': + tool.createSiteServer(argv) + elif cmd == 'daemon': + tool.run(argv) + elif cmd == 'site': + tool.site(argv) + elif cmd == 'test': + tool.test(argv) + elif cmd == 'reload': + tool.reloadRequest() + else: + tool.usage("unknown subcommand: " + cmd) + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/RestoreTool.py b/appl/RestoreTool.py new file mode 100755 index 0000000..05397cf --- /dev/null +++ b/appl/RestoreTool.py @@ -0,0 +1,2174 @@ +#! /usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' + +import time +import datetime +import sys +import os.path +import stat +import subprocess +import tempfile +import gzip +import traceback +import re +import shutil +import fnmatch + +sys.path.insert(0, '/usr/share/pyrshell') +import base.JavaConfig +import base.Logger +import base.BaseTool +import net.EMail +import appl.BackupBase +import base.ProcessHelper +import appl.TextTool + +class GroupInfo: + + def __init__(self, name, gid, groupLine): + self._name = name + self._groupLine = groupLine + self._gid = gid + self._comments = None + + def asText(self): + rc = '' + if self._comments != None: + rc += '\n'.join(self._comments) + '\n' + rc += self._groupLine + '\n' + return rc + +class UserInfo: + def __init__(self, name, uid, passwdLine): + self._name = name + self._passwdLine = passwdLine + self._uid = uid + self._comments = None + + def asText(self): + rc = '' + if self._comments != None: + rc += '\n'.join(self._comments) + '\n' + rc += self._passwdLine + '\n' + return rc + +class 
PartitionInfo: + def __init__(self, device, uuid, fsType, label): + '''Constructor. + @param device: the device, e.g. '/dev/sda1' + @param uuid: the UUID of the partition + @param fsType: the filesystem type, e.g. 'ext4 + @param label: None or the label of the filesystem + ''' + self._device = device + self._uuid = uuid + self._fsType = fsType + self._label = label + self._osName = 'linux' + + +class RestoreTool (appl.BackupBase.BackupBase): + + def __init__(self, globalOptions): + '''Constructor. + @param logger: the logger + ''' + base.BaseTool.BaseTool.__init__(self, globalOptions, 'restoretool.conf') + self._backupConfiguration = base.JavaConfig.JavaConfig(self._configDir + '/backup.conf', self._logger) + self._processTool = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger) + self._textTool = appl.TextTool.TextTool(self._globalOptions) + + def basics(self, dirBackup, week=None, day=None): + '''Prepares the following restauration. + @param dirBackup: a directory con + @param week: a weekno: 0..3 (weekno mod 4): locates the weekly backup + @param day: 'Mon' ... 'Sun': locates the dayly backup + ''' + base.BaseTool.BasicStatics.ensureDirectory('/opt/restore/etc_origin', self.logger) + base.BaseTool.BasicStatics.ensureDirectory('/opt/restore/etc', self.logger) + if not os.path.exists('/opt/restore/etc_origin/passwd'): + self.log('saving /etc to /opt/restore/etc_origin') + shutil.copy('/etc', '/opt/restore/etc_origin') + + def btrFs(self, argv): + '''Executes the btrfs command. + @param argv: the arguments, e.g. ['create-subvol', 'cave', 'home'] + ''' + if len(argv) < 1: + self.usage('missing ') + else: + what = argv[0] + argv = argv[1:] + if what == 'create-fs': + self.btrFsCreateFs(argv) + elif what == 'create-subvol': + self.btrFsCreateSubvolume(argv) + elif what == 'create-snapshot': + self.btrFsCreateSnapshot(argv) + else: + self.usage('unknown : ' + what) + + def btrFsCreateFs(self, argv): + '''Creates a subvolume of a btrfs filesystem. 
+ Steps: + if loop device: create image and loop device + create the rootMountpoint + create the automount interface for mounting + create-btrfs
\n'); + self._output.append('\n\n' + .format(xCol._title, '', base.StringUtils.toString(xCol.extremum(True), xCol._dataType, 2), + base.StringUtils.toString(xCol.extremum(False), xCol._dataType, 2), len(xCol._values))) + + for ix in range(len(self._columns) - 1): + yCol = self._columns[ix + 1] + self._output.append('\n' + .format(self._colors[ix % len(self._colors)], yCol._title, yCol.average(), yCol.extremum(True), + yCol.extremum(False))) + self._output.append('\n
TitelSchnittMinimumMaximum
{}:{}{}{}{} Werte
{}:{:.6f}{:.6f}{:.6f}
\n') + + def htmlStart(self, title): + '''Starts a HTML script. + ''' + self._output.append('\n\n

{}

\n'.format(title)) + + def numericLine(self, line, lineNo): + '''Evaluates a "numeric" line (a list of values) + Searches the separator and the titles (if they exists) + @param line: the line to inspect + @param lineNo: the line number + ''' + values = line.split(self._separator) + if len(values) != len(self._columns): + self._logger.error('wrong column number in line {}: {} instead of {}'.format(lineNo, len(values), len(self._columns))) + for ix in range(len(values)): + if ix < len(self._columns): + self._columns[ix].add(base.StringUtils.toString(values[ix], self._columns[ix]._dataType)) + + def polyline(self, width, height, axisAreaWidth, indexX, indexY, properties = None): + '''Converts the CSV data into a polyline. + @param width: the length of the x dimension + @param height: the length of the y dimension + @param axisAreaWidth: the width of the axis area (x and y) + @param indexX: the column index of the x data + @param indexy: the column index of the Y data + @param properties: None or additional SVG properties for polyline, e.g. 'stroke-dasharray="5,5" + ''' + self._output.append('') + + def putCsv(self, target): + '''Puts the internal columns into a CSV file + @param target: the full name of the result file + ''' + with open(target, "w") as fp: + line = '' + for col in self._columns: + line += col._title + ';' + fp.write(line[0:-1] + "\n") + for ix in range(len(self._columns[0]._values)): + line = '' + for col in self._columns: + line += col.toString(ix) + ';' + fp.write(line[0:-1] + "\n") + + def readCsv(self, source): + '''Reads a CSV file with the diagram data. + @param source: the filename, e.g. 
'diagram/data1.csv' + ''' + with open(source, "r") as fp: + lineNo = 0 + for line in fp: + line = line.strip() + lineNo += 1 + if lineNo == 1: + self.firstLine(line) + else: + self.numericLine(line, lineNo) + # Remove empty columns: + count = len(self._columns) - 1 + for ix in range(count, -1, -1): + column = self._columns[ix] + if column._max == -1E+100: + column._max = functools.reduce(lambda rc, item: base.StringUtils.toFloat(item) if base.StringUtils.toFloat(item) > rc else rc, column._values, -1E+100) + column._min = functools.reduce(lambda rc, item: base.StringUtils.toFloat(item) if base.StringUtils.toFloat(item) < rc else rc, column._values, +1E+100) + # column.normalize((1 + ix % 5) / count * 0.8) + self.returnToZero() + + def returnToZero(self): + '''Find gaps in x values and set behind every gap a "return to zero" line + example: + x;y;z + 1;99;77 + 2;100;70 + 20;90;60 + There is a gap between line 2 and 3. Minimum gap length is 1 (between line 1 and 2) + We insert "3;0;0" and "19;0;0" + Result: + x;y;z + 1;99;77 + 2;100;70 + 3;0;0 + 19;0;0 + 20;90;60 + ''' + columnX = self._columns[0] + self._minGap = +1E+100 + [last, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(0)) + for ix in range(len(columnX._values) - 1): + [current, dummy] = base.StringUtils.toFloatAndType(columnX._values[1+ix]) + if current - last < self._minGap: + self._minGap = current - last + if self._minGap < 5*60: + self._minGap = 5*60 + [last, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(-1)) + for ix in range(len(columnX._values) - 1, 1, -1): + [current, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(ix-1)) + if last - current > self._minGap: + columnX._values.insert(ix, last - self._minGap) + columnX._values.insert(ix, current + self._minGap) + for col in range(len(self._columns)): + if col > 0: + self._columns[col]._values.insert(ix, 0) + self._columns[col]._values.insert(ix, 0) + last = current + self.putCsv('/tmp/corrected.csv') + + def 
simpleLine(self, x1, y1, x2, y2, properties = None, color = None): + line = ''.format( + x1, y1, x2, y2, color if color != None else self._color, self._strokeWidth, properties if properties != None else '') + self._output.append(line) + + def simpleText(self, x, y, text): + self._output.append('{}'.format(x, y, self._color, self._fontSize, text)) + + def shrinkData(self, count): + '''Returns an array of columns with count elements per column. + Input is self._columns. + @pre: the first column contains the x data. + @post: the x values (first column) of the result are equidistant. + @post: the local extrema (minimum and maximum) will be saved + @param count: the number of items of each column of the result + @return: the array of the converted columns + ''' + xValues = self._columns[0]._values + rc = [] + if count <= 0 or len(xValues) <= count: + rc = self._columns[:] + else: + xOut = Column(self._columns[0]._title, rc) + rc.append(xOut) + step = (xValues[-1] - xValues[0]) / (count - 1) + x = xValues[0] + for ix in range(count): + xOut._values.append(x) + x += step + + for ixCol in range(len(self._columns) - 1): + yCol = self._columns[1+ixCol] + yValues = yCol._values + yOut = Column(yCol._title, rc) + rc.append(yOut) + ixLastSrc = -1 + yMiddle = 0 + for ixTrg in range(count): + xTrg = xOut._values[ixTrg] + ixLastSrc += 1 + yMin = yValues[ixLastSrc] + if ixTrg == 0: + yOut._values.append(yMin) + elif ixTrg == count - 1: + yOut._values.append(yValues[-1]) + else: + yMax = yMin + while xValues[ixLastSrc] <= xTrg: + if yValues[ixLastSrc] < yMin: + yMin = yValues[ixLastSrc] + elif yValues[ixLastSrc] > yMax: + yMax = yValues[ixLastSrc] + ixLastSrc += 1 + # 4 cases: max: min: line up: line down: + # yHigh: a i i u d + # a a i u d + # yLow: u d + # xLow xHigh + if yOut._values[ixTrg - 1] > yMax: + # y[ixTrg-1] is line down or max: + yOut._values.append(yMin if ixTrg <= 1 or yValues[ixTrg - 2] > yValues[ixTrg - 1] else yMiddle) + else: + # y[ixTrg-1] is line up or min + 
yOut._values.append(yMax if ixTrg <= 1 or yValues[ixTrg - 2] < yValues[ixTrg - 1] else yMiddle) + yMiddle = (yMax - yMin) / 2 + return rc + + def svgEnd(self): + self._output.append('\n'); + + def svgStart(self, width, height): + '''Starts the SVG block. + @param width: the width of the SVG area + @param height: the height of the SVG area + ''' + self._output.append('\n'.format(height, width)) + + def xAxis(self, width, height, axisAreaWidth, indexX): + '''Creates the x axis. + @param width: the length of the x dimension + @param height: the length of the y dimension + @param axisAreaWidth: the width of the axis area (x and y) + @param indexX: the column index of the x values + ''' + color = self._color + self._color = 'blue' + self.simpleLine(axisAreaWidth, height - axisAreaWidth, width, height - axisAreaWidth) + xCol = self._columns[indexX] + axis = AxisScale(xCol, min((width - axisAreaWidth) / 50, 20)) + y1 = height - axisAreaWidth - self._strokeWidth * 3 + y2 = height - axisAreaWidth + self._strokeWidth * 3 + for ix in range(int(axis._countScales)): + [pos, label] = axis.indexData(ix, width - axisAreaWidth) + x = axisAreaWidth + pos + self.simpleLine(x, y1, x, y2) + self.simpleText(x - 10, y2 + axisAreaWidth / 2, label) + if ix > 0: + self.simpleLine(x, y1 - 5, x, 0, 'stroke-opacity="0.1" stroke-dasharray="5,5"', 'rgb(3,3,3)') + self._color = color + + def yAxis(self, width, height, axisAreaWidth, indexY, color): + '''Creates the x axis. 
+ @param width: the length of the x dimension + @param height: the length of the y dimension + @param axisAreaWidth: the width of the axis area (x and y) + @param indexY: the column index of the y values + ''' + color2 = self._color + self._color = color + self.simpleLine(axisAreaWidth, 0, axisAreaWidth, height - axisAreaWidth) + yCol = self._columns[indexY] + axis = AxisScale(yCol, min((height - axisAreaWidth) / 50, 20)) + x1 = axisAreaWidth - self._strokeWidth * 3 + x2 = axisAreaWidth + self._strokeWidth * 3 + for ix in range(int(axis._countScales)): + [pos, label] = axis.indexData(ix, height - axisAreaWidth) + y = height - axisAreaWidth - pos + self.simpleLine(x1, y, x2, y) + self.simpleText(1+(indexY-1)*30, y, label) + if indexY == 1 and ix > 0: + self.simpleLine(x2 + 5, y, width, y, 'stroke-opacity="0.1" stroke-dasharray="5,5"', 'rgb(3,3,3)') + self._color = color2 + +def usage(): + '''Returns an info about usage + ''' + return """svgtool [] + Builds Scalable Vector Graphics embedded in HTML. +GLOBAL_OPTS +GLOBAL_MODES +: + x-y-diagram + + '-': output will be put to the stdout otherwise: the HTML will be put to this file + : + --width= + the width of the drawing area in pixel. Default: 1000 + --height= + the height of the drawing area in pixel. Default: 500 + --axis-area-width= + the width of the area containing the axis and the related labels (for x and y axis). Default: 15 + --max-average-quotient= + if max/avg(values) < maxAvgQuotient: no clipping is done. Default: 5 + --moving-average= + prepare data with "moving average": for each value a "window" (values and neigbours, symetic left + and right) is used to build the average: this average is used instead of the value + default windows width: 5 + --spread-range= + a % value: only data in this range will be displayed. 
Default: 90 + --spread-factor + if abs(extremum-endOfRange) / range <= spreadFactor: the range is expanded to the extremum + Example: data [0.5, 1, 2, 7, 99] max=7 min=1 range=7-1=6 + abs(0.5-7)/6=1.099 1.099<1.1 => _min=0.5 + abs(99-1)/6=16 16>1.1 => _max=99 + --title= + Default: Diagram +example: + svgtool -v2 x-y-diagram /tmp/sinus.csv /tmp/sinus.html --width=1920 --height=1024 "--title=Trigonometric functions from [0, 4*pi]" +""" + +def main(argv): + '''The main routine. + @param argv: the program arguments, e.g. ['/usr/local/bin/svgtool', 'run'] + ''' + if len(argv) > 2 and argv[1] == 'example': + global gSvgToolPeriod + try: + gSvgToolPeriod = int(argv[2]) + except ValueError: + pass + appInfo = base.BaseTool.ApplicationInfo('svgtool', 'appl/SvgTool.py', usage) + (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo) + tool = SvgTool(options) + rc = None + (cmd, argv) = tool.handleStandardCommands(argv) + if cmd == None: + pass + elif cmd == '': + tool.usage('missing subcommand') + elif cmd == 'x-y-diagram': + if len(argv) < 2: + tool.usage('too few arguments') + else: + rc = tool.diagram(argv, usage) + else: + tool.usage('unknown command: ' + cmd) + if rc != None: + tool.usage(rc) + tool._finish() + +if __name__ == '__main__': + main(sys.argv) diff --git a/appl/TextTool.py b/appl/TextTool.py new file mode 100755 index 0000000..a5d6e22 --- /dev/null +++ b/appl/TextTool.py @@ -0,0 +1,1313 @@ +#! 
/usr/bin/python3 +''' +Created on 08.06.2018 + +@author: hm +''' + +import sys +import os.path +import fnmatch +import re +import time +import tempfile +import shutil + +sys.path.insert(0, '/usr/share/pyrshell') +import base.BaseTool +import base.LinuxUtils +import base.FileHelper +import base.PythonToTypeScript +import base.CryptoEngine +import base.StringUtils + +textToolResult = None +textToolTool = None + +class ReplaceRegionInfo: + def __init__(self): + self._regionStart = None + self._regionEnd = None + self._replacement = None + self._startIncluded = True + self._endExcluded = False + +class ReplaceInfo: + def __init__(self, pattern, line, anchor, above, maxCount): + self._pattern = pattern + self._line = line + self._anchor = anchor + self._above = above + self._maxCountPerFile = maxCount + self._countPerFile = 0 + self._regionStart = -1 + self._regionEnd = 0 + +class TextTool (base.BaseTool.BaseTool): + + def __init__(self, globalOptions): + '''Constructor. + @param globalOptions: the basic configurations + ''' + base.BaseTool.BaseTool.__init__(self, globalOptions, 'texttool.conf') + self._out = [] + self._euid = os.geteuid() + self._egid = os.getegid() + self._list = False + self._onlyMatch = False + self._format = None + self._reverse = False + self._excluded = None + self._missing = False + self._dirMustBeWritable = False + self._fileMustBeWritable = False + self._target = None + self._hits = 0 + self._countPerLine = None + self._table = None + self._patternReference = r'[$]\d+' + self._references = [] + self._lines = [] + self._currentChanged = False + self._replaceInfo = None + self._fileCount = 0 + self._maxFileCount = 0x7ffffff + self._noName = False + self._replaceRegionInfo = None + + + def adaptConfiguration(self, argv): + '''Replaces values of given variables in a configuration. + @param variables: an array of strings with variable definitions, e.g. 
['abc=123', 'command.log = "/var/log/command.log"'] + @param configuration: the configuration content (as array of text) with comments, e.g. ["#configuration:", "abc=4", "command.log='/var/log/dummy.log'"] + @return: the changed text, e.g. ["#configuration:", "abc=123", 'command.log = "/var/log/command.log"'] + ''' + if len(argv) == 0: + self.usage('missing ') + else: + configFile = argv[0] + argv = argv[1:] + if not os.path.exists(configFile): + self.usage('missing ' + configFile) + elif len(argv) == 0: + self.usage('missing or prefix=') + else: + configuration = base.StringUtils.fromFile(configFile) + arg1 = argv[0] + argv = argv[1:] + if arg1.startswith('prefix='): + prefix = arg1[7:] + variables = self.findVariables(prefix) + else: + variablesFile = arg1 + if not os.path.exists(variablesFile): + self.usage('missing ' + variablesFile) + else: + variables = base.StringUtils.fromFile(variablesFile).split('\n') + self._out = self.adaptVariables(variables, configuration.split('\n')) + if self._hits > 0: + self._logger.log('{} variable(s) changed in {}'.format(self._hits, configFile), 1) + self.createBackup(configFile, base.FileHelper.pathToNode(os.path.dirname(configFile))) + base.StringUtils.toFile(configFile, '\n'.join(self._out)) + + def adaptVariables(self, variables, configuration, logSummary = False): + '''Replaces values of given variables in a configuration. + @post: self._hits: the number of changed variables + + @param variables: an array of strings with variable definitions + e.g. ['abc=123', 'command.log = "/var/log/command.log"'] + @param configuration: the configuration content (as array of text) with comments + e.g. ["#configuration:", "abc=4", "command.log='/var/log/dummy.log'"] + @param logSummary: True: the number of hits will be logged + @return: the changed text, e.g. 
["#configuration:", "abc=123", 'command.log = "/var/log/command.log"'] + ''' + rc = [] + variables2 = dict() + self._hits = 0 + for line in variables: + parts = line.split('=', 2) + if len(parts) <= 1: + continue + key = parts[0].strip() + variables2[key] = line + keys = variables2.keys() + self._hasChanged = False + for line in configuration: + parts = line.split('=', 1) + if len(parts) <= 1: + rc.append(line) + continue + key = parts[0].strip() + if not key in keys: + rc.append(line) + else: + value = parts[1].strip() + value2 = variables2[key].split('=', 1)[1].strip() + if value == value2: + self._logger.log('already defined: {}: {}'.format(key, value), 3) + rc.append(line) + else: + self._hits += 1 + self._logger.log('{}: {} => {}'.format(key, value, value2), 2) + rc.append(variables2[key]) + self._hasChanged = True + if self._hits > 0 and self._verboseLevel > 1 and logSummary: + self._logger.log('{} variables changed'.format(self._hits)) + return rc + + def buildExamples(self): + '''Builds the filed used for the examples in the usage message. 
+ ''' + def build(fn, content): + self._logger.log('creating ' + fn, 1) + base.StringUtils.toFile(fn, content) + + baseDir= '/usr/share/pyrshell/examples/' + configDir = self.ensureDirectory(baseDir + 'config') + dataDir = self.ensureDirectory(baseDir + 'data') + dataDir2 = self.ensureDirectory(baseDir + 'data/subdir') + safeDir = self.ensureDirectory(baseDir + 'safe') + build(configDir + os.sep + 'php_minimal.conf', '''memory_limit = 2048M +upload_max_filesize = 512M +max_file_uploads = 100 +post_max_size = 512M +max_execution_time = 600 +max_input_time = 600 +''') + fn = safeDir + os.sep + 'php.ini' + self._logger.log('copying ' + dataDir + os.sep + 'php.ini', 1) + shutil.copy2(fn, dataDir) + build(dataDir + os.sep + 'sum.txt', '''water: 7.32 EUR +cheese: 13.08 EUR +total: 20.40 EUR +''') + build(dataDir + os.sep + 'ip.out.txt', '''1: lo: mtu 65536 qdisc noqueue state UNKNOWN group default qlen 1000 + link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00 + inet 127.0.0.1/8 scope host lo + valid_lft forever preferred_lft forever + inet6 ::1/128 scope host + valid_lft forever preferred_lft forever +2: enp2s0: mtu 1500 qdisc fq_codel state UP group default qlen 1000 + link/ether 8c:16:45:92:9a:c6 brd ff:ff:ff:ff:ff:ff + inet 192.168.2.50/24 brd 192.168.2.255 scope global enp2s0 + valid_lft forever preferred_lft forever + inet6 fe80::8e16:45ff:fe92:9ac6/64 scope link + valid_lft forever preferred_lft forever +3: wlp4s0: mtu 1500 qdisc noop state DOWN group default qlen 1000 + link/ether 50:5b:c2:e9:38:01 brd ff:ff:ff:ff:ff:ff +''') + build(dataDir + os.sep + 'today.sh', '''#! 
/bin/bash +DATE=31.2.2100 +echo $DATE +''') + build(dataDir + os.sep + 'first.addr', '''jonny@gmx.de +Jonny.Hamburger@web.de +info@whow.com +''') + build(dataDir + os.sep + 'second.addr', '''info@whow.com +jonny@gmx.de +info@bla.com +''') + build(dataDir + os.sep + 'test.addr', '''info@whow.com +jonny@gmx.de +info@bla.com +''') + build(dataDir + os.sep + 'shop.addr', '''jonny@gmx.de +info@bla.com +''') + build(dataDir2 + os.sep + 'third.addr', '''jonny@gmx.de +info@bla.com +''') + build(configDir + os.sep + 'german.txt', '''No\tNr +First Name\tVorname +Last Name\tNachname +''') + build(dataDir + os.sep + 'addr.csv', '''No;First Name;Last Name +1;Henry;Miller +2;Berta;Eastern +''') + + def buildTargetname(self, nameInput): + '''Builds the targetname from a pattern and the input filename. + @param nameInput: the input filename + @return: the target filename + ''' + if self._target == '!full!': + rc = nameInput + '~' + else: + parts = base.FileHelper.splitFilename(nameInput) + rc = self._target + rc = rc.replace('!full!', nameInput) + rc = rc.replace('!path!', parts['path']) + rc = rc.replace('!node!', parts['node']) + rc = rc.replace('!fn!', parts['fn']) + rc = rc.replace('!ext!', parts['ext']) + return rc + + def currentFind(self, regExpr, ixStart=0, ixEnd=None): + '''Searches a regular expression in the current file. + @param regExpr: a regular expression (re.RegExpr) + @param ixStart: the index of the first line to inspect + @param ixEnd: None or the successor index of the last line to inspect (_lines[ixEnd] will not be inspected) + @return: None: not found otherwise: the index of the first hit + ''' + rc = None + if ixStart == None: + ixStart = 0 + if ixEnd == None: + ixEnd = len(self._lines) + for ix in range(ixStart, ixEnd): + line = self._lines[ix] + if regExpr.search(line): + rc = ix + break + return rc + + def currentFind2(self, regExpr, ixStart=0, ixEnd=None): + '''Searches a regular expression in the current file and return the line index and the matcher. 
+ @param regExpr: a string or a regular expression (re.RegExpr) + @param ixStart: the index of the first line to inspect + @param ixEnd: None or the successor index of the last line to inspect (_lines[ixEnd] will not be inspected) + @return: tuple (index, matcher) the index line and the matcher. (None, None): not found + ''' + if type(regExpr) == str: + regExpr = self.regExprCompile(regExpr, 'currentFind2') + rc = (None, None) + if ixEnd == None: + ixEnd = len(self._lines) + for ix in range(ixStart, ixEnd): + line = self._lines[ix] + matcher = regExpr.search(line) + if regExpr.search(line): + rc = (ix, matcher) + break + return rc + + def currentInsertAnchored(self, line, anchor, below=True, start=None, end=None): + '''Inserts a line in front of or below a given anchor. + If the anchor is not found the line is added at the end. + + @param line: the line to insert + @param anchor: a regular expression to search: text or regexpr object + @param below: True the insert position is below the anchor line + @param start: None or the first index to inspect + @param end: None or the successor index to inspect (_lines[end] is not inspected) + @return: True: success (key found) + ''' + found = False + if start == None: + start = 0 + if end == None: + end = len(self._lines) + if anchor != None: + if type(anchor) == str: + anchor = self.regExprCompile(anchor, 'currentInsertAnchored') + ixFound = None + for ix in range(start, end): + current = self._lines[ix] + if anchor.search(current): + ixFound = ix + 1 if below else ix + break + if ixFound != None: + self._lines.insert(ixFound, line) + found = True + if not found: + self._lines.insert(end, line) + self._currentChanged = True + return found + + def currentReplace(self, regExpr, line, anchor=None, below=False, start=None, end=None): + '''Replaces a line containing given regular expression or inserts it. + If not found: + if an anchor is given: the anchor is searched. + If found, insertion is done. 
+ If not found: nothing is done + if no anchor is given: + line is inserted at the end + @param regExpr: a pattern string or an re.regexpr object describing the key content of the line to replace + @param line: the line to replace (or insert) + @param anchor: None or a regular expression (string or re.RegExpr) describing an insert point (if not found) + @param below: True: the insert point is below the anchor + @param start: None or the first index to inspect + @param end: None or the successor index to inspect (_lines[end] is not inspected) + @return: True: success + False: anchor != None: anchor not found, nothing replaced + ''' + if type(regExpr) == str: + regExpr = re.compile(regExpr) + found = False + if start == None: + start = 0 + if end == None: + end = len(self._lines) + for ix in range(start, end): + current = self._lines[ix] + if regExpr.search(current) != None: + found = True + if self._lines[ix] != line: + self._currentChanged = True + self._logger.log('replaced: {} -> {}'.format(self._lines[ix], line), 3) + else: + self._logger.log('unchanged: {}'.format(self._lines[ix], line), 4) + self._lines[ix] = line + break + if not found and anchor != None: + if type(anchor) == str: + anchor = re.compile(anchor) + for ix in range(len(self._lines)): + current = self._lines[ix] + if anchor.search(current) != None: + found = True + ixNew = ix if below else ix + 1 + self._logger.log('insert into [{}: {}'.format(ixNew, line), 3) + self._lines.insert(ixNew, line) + self._currentChanged = True + break + if anchor == None and not found: + self._lines.insert(end, line) + self._currentChanged = True + found = True + return found + + def currentSimpleInsert(self, full, key, line, anchor, below=True, mustExist=False): + '''Inserts a line in front of or below a given anchor. + If the anchor is not found the line is added at the end. 
+ + @param full: the filename + @param key: the key for the line to replace + @param line: the line to replace/insert + @param anchor: a regular expression to mark the insert position + @param below: True the insert position is below the anchor line + @param mustExist: True: the routine will be aborted if the file does not exist + @return: True: success (key found) + ''' + rc = False + if self.readCurrent(full, mustExist): + if not self.currentSetLine(key, line): + self.currentInsertAnchored(line, anchor, below) + self.writeCurrent(full) + rc = True + return rc + + def currentSetLine(self, key, value, count=1, start=None, end=None): + '''Sets a line marked by key. + @param key: a regular expression to search + @param line: line to replace + @param count: the key is search count times in the given range [start, end[ + @param start: None or the first index to inspect + @param end: None or the successor index to inspect (_lines[end] is not inspected) + @return: True: success (key found) + ''' + found = False + regKey = self.regExprCompile(key, 'currentSetLine') if type(key) == str else key + firstIx = start + if count == None: + count = 0x7fffffff + for current in range(count): + ix = self.currentFind(regKey, firstIx, end) + if ix == None: + break + if self._lines[ix] != value: + self._currentChanged = True + self._lines[ix] = value + firstIx = ix + 1 + found = True + return found + + def example(self): + example = '''# texttool example configuration +log.file=/var/log/local/texttool.log +php.upload_max_filesize=624M +php.max_file_uploads=102 +php.post_max_size=448M +php.max_execution_time=900 +php.max_input_time=630 +''' + self.storeExample(example) + + def extractExcluded(self, opt): + '''Sets the excluded option. + @param opt: the option value, e.g. 
';*.txt;text*' + ''' + excluded = [] + if re.match(r'[*?\[\]]', opt[0]): + self.usage('wrong first char (meta character: ?*[]/) in: ' + opt) + patterns = opt[1:].split(opt[0]) + for pattern in patterns: + excluded.append((os.path.dirname(pattern), os.path.basename(pattern))) + return excluded + + def findRegion(self, regExprFirst, firstIncluding, regExprLast = None, lastIncluding = True, regExprBlock = None, onlyOneFirst = True): + '''Finds a sequence of lines with given first and last line. + @param regExprFirst: a regular expression (as string or re.RegExpr) to define the first line + @param firstIncluding: True: the first line is part of the region False: the line below the first line starts the region + @param regExprLast: a regular expression (as string or re.RegExpr) to define the last line + @param lastIncluding: True: the last line is part of the region False: the line above the last line starts the region + @param regExprBlock: None or a regular expression (as string or re.RegExpr) to define the block between start and last + regExprLast and regExprBlock are exclusive: only one should be not None + @param onlyOneFirst: False: if the regExprFirst is found it will tested not anymore + True: after finding the regExprFirst the line is tested again: if found it belongs to the block + @return: a tuple (ixStart, ixEnd) or (-1, x) if not found + ''' + if type(regExprFirst) == str: + regExprFirst = re.compile(regExprFirst) + if type(regExprLast) == str: + regExprLast = re.compile(regExprLast) + if type(regExprBlock) == str: + regExprBlock = re.compile(regExprBlock) + self._regionStart = -1 + self._regionEnd = 0 + for ix in range(len(self._lines)): + line = self._lines[ix] + if self._regionStart < 0 and regExprFirst.search(line): + self._regionStart = ix if firstIncluding else ix + 1 + elif self._regionStart >= 0: + if not onlyOneFirst and regExprFirst.search(line): + continue + if regExprLast != None and regExprLast.search(line): + self._regionEnd = ix + 1 if 
# NOTE(review): reconstructed from a line-mangled git patch of appl/TextTool.py.
# These definitions are methods of the TextTool class; the class header and the
# first part of findRegion() lie outside this chunk and could not be preserved.

def findVariables(self, prefix, configuration=None):
    '''Returns an array of variables defined in a configuration.
    Example: the configuration contains 'php.max_memory=2M'.
    The prefix is 'php.', the result is [... 'max_memory=2M' ...].
    @param prefix: the common prefix to filter the definitions in the configuration
    @param configuration: None: the internal configuration is taken
        otherwise: the configuration to inspect
    @return: an array (may be empty) with the specified variables
    '''
    variables = []
    if configuration is None:
        configuration = self._configuration
    # mask the regexp meta characters of the prefix:
    # @doc a direct re.sub(r'...', r'\\\1', ...) is not possible, hence the
    # two step replacement via a marker string
    pattern = re.sub(r'([\[\]{}().+*])', r'~!slash!~\1', prefix)
    pattern = pattern.replace('~!slash!~', '\\')
    keys = configuration.getKeys('^' + pattern)
    for key in keys:
        if key.startswith(prefix):
            # fixed: the value must be read from the inspected configuration,
            # not always from the internal one
            line = '{}={}'.format(key[len(prefix):], configuration.getString(key))
            variables.append(line)
    return variables

def grep(self, argv):
    '''Searches a regular expression in files matching a wildcard expression.
    @param argv: the arguments, e.g. ['jonny', '*.txt']
    '''
    (argv, options) = self.splitArgsAndOpts(argv)
    pattern = argv[0]
    filePattern = argv[1]
    argv = argv[2:]
    excluded = None
    self._list = False
    self._onlyMatching = False
    self._format = None
    self._out = []
    self._withLineNo = False
    # fixed: these three were only assigned when the related option was given,
    # but grepOneFile() reads them unconditionally
    self._missing = False
    self._noName = False
    self._reverse = False
    maxDepth = 999
    ignoreCase = False
    for opt in options:
        if opt.startswith('--excluded='):
            excluded = self.extractExcluded(opt[11:])
        elif opt.startswith('--format='):
            self._format = opt[9:]
            if self._format == '':
                self.usage('format may not be empty')
        elif opt == '-i' or opt == '--ignore-case':
            ignoreCase = True
        elif opt == '--line-no':
            self._withLineNo = True
        elif opt == '--list' or opt == '-l':
            self._list = True
        elif opt.startswith('--max-depth='):
            maxDepth = self.integerOption(opt)
        elif opt == '--missing':
            self._missing = True
        elif opt == '--no-name':
            self._noName = True
        elif opt == '-o' or opt == '--only-matching':
            self._onlyMatching = True
        elif opt == '-v' or opt == '--reverse':
            self._reverse = True
        else:
            self.usage('unknown option: ' + opt)
    self._regSearch = self.regExprCompile(pattern, 'pattern', not ignoreCase)
    if self._format is not None:
        # collect the group numbers referenced as $1, $2 ... in the format,
        # highest first so '$12' is replaced before '$1'
        self._formatGroups = [int(x) for x in re.findall(r'\$([0-9]+)', self._format)]
        self._formatGroups.sort(reverse=True)
    self.traverse(filePattern, 'grep', maxDepth, excluded)
    msg = '\n'.join(self._out)
    base.BaseTool.setResult(msg)
    if msg != '':
        print(msg)
    # self._exitWith0 is set by the global options (outside this chunk)
    if base.BaseTool.result() == '' and not self._exitWith0:
        sys.exit(10)

def _appendHit(self, full, lineNo, text):
    '''Appends one hit line to the output, honoring --no-name and --line-no.
    @param full: the path and node of the inspected file
    @param lineNo: the 1 based line number of the hit
    @param text: the text to display (whole line or matched part)
    '''
    if self._noName:
        self._out.append(text)
    elif self._withLineNo:
        self._out.append('{}-{}: {}'.format(full, lineNo, text))
    else:
        self._out.append('{}: {}'.format(full, text))

def grepOneFile(self, full):
    '''Searches the pattern in a given file.
    @param full: the path and node of the file to inspect
    '''
    self._logger.log('inspecting {} ...'.format(full), 2)
    found = False
    with open(full, 'rb') as fp:
        lineNo = 0
        for rawLine in fp:
            lineNo += 1
            line = base.FileHelper.fromBytes(rawLine.rstrip())
            matcher = self._regSearch.search(line)
            if matcher is None:
                if self._reverse:
                    self._appendHit(full, lineNo, line)
                continue
            found = True
            if self._missing or self._list:
                # --missing: one hit disqualifies the file (handled below)
                # --list: the filename is printed once, at the first hit
                if self._list:
                    self._out.append(full)
                break
            if self._onlyMatching:
                # fixed: --only-matching must print the match, not the whole
                # line, also when --no-name is given
                self._appendHit(full, lineNo, matcher.group(0))
            elif self._format is not None:
                theFormat = (self._format.replace('!full!', full)
                             .replace('!node!', os.path.basename(full))
                             .replace('!no!', str(lineNo)))
                for groupNo in self._formatGroups:
                    # fixed: lastindex is None for patterns without groups
                    if matcher.lastindex is not None and groupNo <= matcher.lastindex:
                        theFormat = theFormat.replace('$' + str(groupNo), matcher.group(groupNo))
                self._out.append(theFormat)
            elif not self._reverse:
                self._appendHit(full, lineNo, line)
    # fixed: --missing lists the files NOT containing the pattern, which is
    # only known after the whole file has been scanned
    if self._missing and not found:
        self._out.append(full)

def handleOneDir(self, directory, pattern, action, depth, maxDepth=999):
    '''Searches for files matching a pattern and processes them with an action.
    @param directory: the directory to process
    @param pattern: only files matching this pattern will be processed
    @param action: the action, e.g. 'replace-or-insert'
    @param depth: the current subdirectory nesting level
    @param maxDepth: maximum subdirectory nesting level. < 0: not limited
    @return: True: success False: stop the processing
    '''
    rc = True
    if depth == 0:
        self._fileCount = 0
    self._logger.log('processing ' + directory + '/ ...', 2)
    if not os.path.isdir(directory):
        self.usage('not a directory: ' + directory)
    elif maxDepth < 0 or depth <= maxDepth:
        for node in os.listdir(directory):
            full = directory + os.sep + node
            try:
                info = os.stat(full)
            except FileNotFoundError:
                # fixed: the file vanished between listdir() and stat(); this
                # is not a permission problem
                self._logger.log('not found: ' + full, 2)
                continue
            except OSError as exc2:
                self._logger.log('os error on {}: {}'.format(full, str(exc2)), 2)
                continue
            if os.path.isdir(full):
                if maxDepth < 0 or depth < maxDepth:
                    # fixed: the writable check used isReadable() and the
                    # condition was not negated
                    if self._dirMustBeWritable and not base.LinuxUtils.isWritable(info, self._euid, self._egid):
                        self._logger.log('no dir write permission: ' + full, 2)
                    elif self.notExcluded(directory, node):
                        accepted = (base.LinuxUtils.isExecutable(info, self._euid, self._egid)
                                    and base.LinuxUtils.isReadable(info, self._euid, self._egid))
                        if accepted:
                            rc = self.handleOneDir(full, pattern, action, depth + 1, maxDepth)
                        else:
                            # fixed: was logged unconditionally, even for
                            # accepted subdirectories
                            self._logger.log('no permission: ' + full, 2)
            elif fnmatch.fnmatch(node, pattern) and self.notExcluded(directory, node):
                if (not self._isRoot and self._fileMustBeWritable
                        and not base.LinuxUtils.isWritable(info, self._euid, self._egid)):
                    self._logger.log('no write permission: ' + full, 2)
                    continue
                if not base.LinuxUtils.isReadable(info, self._euid, self._egid):
                    self._logger.log('no permission: ' + full, 2)
                else:
                    rc = self.handleOneFile(full, action)
                if self._fileCount >= self._maxFileCount:
                    self._logger.log('maximal file count reached: ' + str(self._fileCount), 3)
                    rc = False
                    break
    return rc

def handleOneFile(self, filename, action):
    '''Processes one file depending on the given action.
    @param filename: the full name of the file to process
    @param action: the action, e.g. 'replace-or-insert'
    @return: True: success False: stop the processing
    @raise Exception: the action is unknown
    '''
    if action == 'replace-or-insert':
        rc = self.replaceOrInsertOneFile(filename)
    elif action == 'replace':
        rc = self.replaceOneFile(filename)
    elif action == 'replace-region':
        rc = self.replaceRegionOneFile(filename)
    elif action == 'grep':
        rc = self.grepOneFile(filename)
    else:
        # fixed: removed the dead assignment 'rc = False' before the raise
        raise Exception('handleOneFile(): unknown action ' + action)
    return rc

def notExcluded(self, path, node):
    '''Tests whether a file or a directory is NOT marked as "excluded".
    @param path: the path without node
    @param node: the file's node
    @return: True: the file is not excluded
    '''
    rc = True
    if self._excluded is not None:
        for pair in self._excluded:
            # excluded when (path pattern given AND path matches) OR node matches
            # ('and' binds tighter than 'or')
            rc = not (pair[0] != '' and fnmatch.fnmatch(path, pair[0]) or fnmatch.fnmatch(node, pair[1]))
            if not rc:
                break
    if not rc:
        self._logger.log('excluded: ' + path + os.sep + node, 2)
    return rc

def random(self, argv):
    '''Prints a pseudo random number.
    syntax: random { [ <minimum> ] <maximum> | <options> }
    @param argv: the program arguments, e.g. ['100', '1000', '--seed=ThisIsMyPassword']
    '''
    argv, options = self.splitArgsAndOpts(argv)
    # fixed: more than 2 arguments left minValue/maxValue unbound (NameError)
    minValue, maxValue = '0', '2147483648'
    if len(argv) == 1:
        maxValue = argv[0]
    elif len(argv) >= 2:
        minValue, maxValue = argv[0], argv[1]
    # fixed: the error messages used the wrong argv index; self._usage did not exist
    if base.StringUtils.asInt(minValue) is None:
        self.usage('minimum is not an integer: ' + minValue)
    elif base.StringUtils.asInt(maxValue) is None:
        self.usage('maximum is not an integer: ' + maxValue)
    generator = base.CryptoEngine.CryptoEngine(self._logger)
    statusFile = None
    seedString = None
    clearSeconds = 60
    for opt in options:
        if opt.startswith('--seed='):
            seedString = opt[7:]
        elif opt.startswith('--status-file='):
            statusFile = opt[14:]
        elif opt.startswith('--clear-status-after='):
            clearSeconds = self.integerOption(opt, 60)
        else:
            self.usage('unknown option: ' + opt)
    if seedString is None:
        seedString = str(time.time())
    if statusFile is None:
        seedName = hex(generator.hash('Hi' + generator.saveSeed()))[2:]
        statusFile = '{}{}texttool.{}.seed'.format(tempfile.gettempdir(), os.sep, seedName)
    content = ''
    if clearSeconds != 0 and os.path.exists(statusFile):
        statInfo = os.stat(statusFile)
        # the stored generator status expires after clearSeconds
        if time.time() - statInfo.st_mtime <= clearSeconds:
            content = base.StringUtils.fromFile(statusFile)
    if content != '':
        generator.restoreSeed(content)
    # NOTE(review): setSeedFromString() is applied after restoreSeed() as in
    # the original source — confirm that it does not override a restored seed
    generator.setSeedFromString(seedString)
    base.BaseTool.setResult(str(generator.nextInt(int(maxValue), int(minValue))))

def readCurrent(self, full, mustExist=True):
    '''Reads a given file into the internal line buffer self._lines.
    @param full: the filename
    @param mustExist: True: a missing file is an error
    @return: True: success
    '''
    self._currentChanged = False
    rc = True
    self._lines = []
    self._currentFile = full
    if not os.path.exists(full):
        if mustExist:
            self._logger.error('missing ' + full)
            rc = False
    else:
        self._logger.log('reading {}...'.format(full), 3)
        with open(full, 'rb') as fp:
            # NOTE(review): clearing the replacement table while reading looks
            # like a copy&paste remainder — kept for backward compatibility
            self._table = []
            for rawLine in fp:
                self._lines.append(base.FileHelper.fromBytes(rawLine.rstrip(b'\n')))
    return rc

def removeRegion(self, first=None, last=None):
    '''Removes the lines of the region [first, last[.
    @param first: None or the index of the first line to remove.
        None: self._regionStart is taken
    @param last: None or the index below the last line to remove (exclusive).
        None: self._regionEnd is taken
    '''
    if first is None:
        first = self._regionStart
    if last is None:
        last = self._regionEnd
    if first < 0:
        first = 0
    # fixed: the end index is exclusive (see findRegion()/the log format), so
    # it may be len(self._lines) at most; the old clamp to len-1 lost the last line
    if last > len(self._lines):
        last = len(self._lines)
    if first < last:
        self._logger.log('removing lines [{}..{}[...'.format(first, last))
        self._lines = self._lines[0:first] + self._lines[last:]
        # fixed: without this flag writeCurrent() silently discards the removal
        self._currentChanged = True

def replace(self, argv):
    '''Searches a regular expression (or plain strings from a table) in files
    matching a wildcard expression and replaces the hits.
    @param argv: the arguments, e.g. ['jonny', 'eve', '*.txt']
    '''
    (argv, options) = self.splitArgsAndOpts(argv)
    self._table = None
    self._regSearch = None
    self._replacement = None
    self._patternSearch = None
    # fixed: per-run state was never (re-)initialized: _hits accumulated over
    # calls, _out / _references / _countPerLine could be unset (AttributeError)
    self._hits = 0
    self._out = []
    self._references = []
    self._referencePrefix = '$'
    self._patternReference = r'[$]\d+'
    self._countPerLine = None
    excluded = None
    maxDepth = 1024
    if len(argv) < 2:
        self.usage('too few arguments')
    elif argv[0].startswith('@'):
        table = argv[0][1:]
        self.readTable(table)
        argv = argv[1:]
    else:
        if len(argv) < 3:
            self.usage('too few arguments')
        else:
            self._patternSearch = argv[0]
            self._regSearch = self.regExprCompile(self._patternSearch, 'pattern')
            self._replacement = argv[1]
            argv = argv[2:]
    filePattern = argv[0]
    argv = argv[1:]
    if len(argv) == 0:
        self._target = '!full!'
    else:
        self._target = argv[0]
        argv = argv[1:]
    for opt in options:
        if opt.startswith('--excluded='):
            excluded = self.extractExcluded(opt[11:])
        elif opt.startswith('--prefix-back-reference=') or opt.startswith('-p'):
            # fixed: the short form '-pC' was sliced as opt[1:] ('pC') and a
            # second, unreachable '-p' branch existed
            cc = opt[24:] if opt.startswith('--') else opt[2:]
            if cc == '':
                self.usage('empty prefix is forbidden: ' + opt)
            else:
                self._referencePrefix = cc
                self._patternReference = r'[{}]\d+'.format(cc)
        elif opt.startswith('--count='):
            self._countPerLine = self.integerOption(opt)
        elif opt.startswith('--max-depth'):
            maxDepth = self.integerOption(opt, 1024)
        else:
            self.usage('unknown option: ' + opt)
    dirMustBeWritable = self._target.find('!full!') >= 0 or self._target.find('!path!') >= 0
    fileMustBeWritable = self._target == '!full!'
    if self._replacement is not None:
        references = re.findall(self._patternReference, self._replacement)
        if references:
            # highest group first, so '$12' is replaced before '$1'
            self._references = sorted(set(int(x[1:]) for x in references), reverse=True)
            if self._patternSearch.count('(') < self._references[0]:  # )
                self.usage('missing "(" for groups in "{}". Highest back reference {} in "{}" has no group'.format(
                    self._patternSearch, self._references[0], self._replacement))
    if filePattern.startswith('='):
        # the input is given inline, not as files
        for line in filePattern[1:].split('\n'):
            self._out.append(self.replaceLine(line))
    else:
        self.traverse(filePattern, 'replace', maxDepth, excluded, dirMustBeWritable, fileMustBeWritable)
    msg = '\n'.join(self._out)
    base.BaseTool.setResult(msg)
    self._logger.log('hits: {}'.format(self._hits), 1)
    if msg != '' and self._verboseLevel > 0:
        print(base.BaseTool.result())
    if msg == '' and not self._exitWith0:
        sys.exit(10)

def replaceLine(self, line):
    '''Handles the replacement(s) in one line.
    @param line: the input line to inspect
    @return: the (possibly changed) line
    '''
    outline = line
    if self._regSearch is not None:
        count = 0
        pos = 0
        prefix = getattr(self, '_referencePrefix', '$')
        again = True
        while again:
            again = False
            matcher = self._regSearch.search(outline, pos)
            if matcher is not None:
                self._hits += 1
                replacement = self._replacement
                for group in self._references:
                    # fixed: matcher.group() is the call that can raise
                    # IndexError; the old code guarded only the format() call
                    try:
                        repl = matcher.group(group)
                    except IndexError:
                        self.usage('too few "(" for groups in search pattern.')
                        repl = ''
                    # fixed: honor a prefix given with -p / --prefix-back-reference
                    replacement = replacement.replace(prefix + str(group), repl)
                outline = outline[0:matcher.start()] + replacement + outline[matcher.end():]
                # fixed: continue behind the inserted replacement; the old
                # 'pos = matcher.end()' was wrong when the replacement length
                # differs from the match length
                pos = matcher.start() + len(replacement)
                if matcher.end() == matcher.start() and not replacement:
                    pos += 1  # guard against endless loops on zero width matches
                count += 1
                again = self._countPerLine is None or count < self._countPerLine
    else:
        for (key, replacement) in self._table:
            if not key:
                continue  # an empty key would loop forever
            pos = 0
            count = 0
            again = True
            while again:
                again = False
                ix = outline.find(key, pos)
                if ix >= 0:
                    self._hits += 1
                    outline = outline[0:ix] + replacement + outline[ix + len(key):]
                    pos = ix + len(replacement)
                    count += 1
                    again = self._countPerLine is None or count < self._countPerLine
    return outline

def replaceOrInsert(self, argv):
    '''Replaces a line matching a pattern or inserts a given line into files.
    syntax: replace-or-insert <pattern> <line> <file-pattern> [<opts>]
    @param argv: the command arguments, e.g.
        [r'count\s*=', 'count=1', '/etc/pyrshell/dirs.d/*.conf', r'--below-anchor=\[special\]']
    '''
    argv, options = self.splitArgsAndOpts(argv)
    if len(argv) < 3:
        self.usage('missing arguments')
    else:
        pattern = self.regExprCompile(argv[0], '<pattern>')
        line = argv[1]
        filePattern = argv[2]
        anchor = None
        above = False
        isOK = True
        maxDepth = 999
        excluded = None
        self._maxFileCount = 0x7ffffff
        createIfMissing = False
        for opt in options:
            if opt.startswith('--above-anchor='):
                anchor = self.regExprCompile(opt[15:], 'above anchor')
                above = True
            elif opt.startswith('--excluded='):
                excluded = self.extractExcluded(opt[11:])
            elif opt.startswith('--below-anchor='):
                anchor = self.regExprCompile(opt[15:], 'below anchor')
            elif opt.startswith('--file-count'):
                self._maxFileCount = self.integerOption(opt)
                if self._maxFileCount < 0:
                    isOK = False
                    break
            elif opt.startswith('--max-depth'):
                maxDepth = self.integerOption(opt)
                if maxDepth < 0:
                    isOK = False
                    break
            elif opt == '--create-if-missing':
                createIfMissing = True
            else:
                self.usage('unknown option: ' + opt)
        self._replaceInfo = ReplaceInfo(pattern, line, anchor, above, None)
        if isOK:
            if not base.FileHelper.hasWildcards(filePattern):
                # fixed: the old code set self._maxDepth, but traverse() is
                # called with the local variable maxDepth
                maxDepth = 0
                if createIfMissing:
                    base.FileHelper.ensureFileExists(filePattern, '', self._logger)
            self.traverse(filePattern, 'replace-or-insert', maxDepth, excluded, False, True)

def replaceOrInsertOneFile(self, filename):
    '''Handles one file for the command 'replace-or-insert'.
    @param filename: the file to process
    @return: True: success False: stop processing
    '''
    info = self._replaceInfo
    self.currentSimpleInsert(filename, info._pattern, info._line, info._anchor, not info._above)
    self._fileCount += 1
    return True

def replaceOneFile(self, full):
    '''Handles one file for the command 'replace'.
    @param full: the filename to process
    @return: True: success False: stop processing
    '''
    target = self.buildTargetname(full)
    changed = False
    # a table from readTable() holds bytes: process the file binary in that case
    asBinary = self._table is not None
    with open(full, 'rb') as fp, open(target, 'wb' if asBinary else 'w') as fpOut:
        self._logger.log('inspecting {} ...'.format(full), 3)
        lineNo = 0
        for rawLine in fp:
            line = rawLine if asBinary else base.FileHelper.fromBytes(rawLine)
            line = line.rstrip(b'\n' if asBinary else '\n')
            lineNo += 1
            out = self.replaceLine(line)
            if out != line:
                changed = True
            fpOut.write(out + (b'\n' if asBinary else '\n'))
    if not changed:
        self._logger.log('leaving unchanged: ' + full, 4)
        # fixed: without hits the temporary copy leaked when replacing in place
        if self._target == '!full!' and target != full and os.path.exists(target):
            os.unlink(target)
    else:
        self._logger.log('replaced {} hit(s) in {}'.format(self._hits, full), 4)
        if self._target == '!full!':
            os.unlink(full)
            os.rename(target, full)
    self._fileCount += 1
    return True

def replaceRegion(self, replacement, regExprAnchor=None):
    '''Replaces the last found region (self._regionStart..self._regionEnd) with the given text.
    @param replacement: the text replacing the region
    @param regExprAnchor: None or a regular expression (string or compiled) of
        the line to replace if no region has been found
    '''
    lines = replacement.split('\n')
    if self._regionStart < 0:
        # no region found: fall back to the anchor line
        ix = None
        if isinstance(regExprAnchor, str):
            regExprAnchor = re.compile(regExprAnchor)
        if regExprAnchor is not None:
            ix = self.currentFind(regExprAnchor)
        if ix is None:
            self._lines += lines
        else:
            self._lines = self._lines[0:ix] + lines + self._lines[ix + 1:]
        self._currentChanged = True
    elif replacement != '\n'.join(self._lines[self._regionStart:self._regionEnd]):
        self._currentChanged = True
        self._lines = self._lines[0:self._regionStart] + lines + self._lines[self._regionEnd:]
        self._logger.log('replacing with {} line(s)'.format(replacement.count('\n') + 1), 3)
    else:
        self._logger.log('replacement is equal to region', 3)

def replaceRegionOneFile(self, full):
    '''Handles one file for the command 'replace-region'.
    @param full: the filename to process
    @return: True: success False: stop processing
    '''
    info = self._regionReplaceInfo
    # fixed: do not search/replace/write when reading failed
    if self.readCurrent(full, True):
        self.findRegion(info._regionStart, info._startIncluded, info._regionEnd, info._endExcluded, None, True)
        self.replaceRegion(info._replacement)
        self.writeCurrent(full)
    self._fileCount += 1
    return True

def replaceRegionCmd(self, argv):
    '''Replaces a region (a sequence of lines) in files with a replacement text.
    syntax: replace-region <start-pattern> <end-pattern> <replacement> <file-pattern> [<opts>]
    @param argv: the command arguments, e.g.
        ['<body>', '</body>', '<body>site locked</body>', 'index.php', '--start-excluded']
    '''
    argv, options = self.splitArgsAndOpts(argv)
    if len(argv) < 4:
        self.usage('missing arguments')
    else:
        self._regionReplaceInfo = info = ReplaceRegionInfo()
        info._regionStart = self.regExprCompile(argv[0], '<start-pattern>')
        info._regionEnd = self.regExprCompile(argv[1], '<end-pattern>')
        info._replacement = argv[2]
        filePattern = argv[3]
        excluded = None
        maxDepth = None
        for opt in options:
            if opt == '--start-excluded':
                info._startIncluded = False
            elif opt == '--end-included':
                info._endExcluded = True
            elif opt.startswith('--max-depth='):
                maxDepth = self.integerOption(opt)
            elif opt.startswith('--excluded='):
                excluded = self.extractExcluded(opt[11:])
            else:
                self.usage('unknown option: ' + opt)
        self.traverse(filePattern, 'replace-region', maxDepth, excluded, True, True)

def readTable(self, full):
    '''Reads the file containing the key/replacement pairs (TAB separated).
    @param full: the filename
    '''
    self._table = []
    with open(full, 'rb') as fp:
        for line in fp:
            line = line.rstrip(b'\n')
            if not line:
                continue
            pair = line.split(b'\t', 1)
            # fixed: a line without TAB produced a 1 element "pair" which
            # crashed later with a ValueError on unpacking
            if len(pair) != 2:
                self._logger.error('missing TAB separator in {}: {}'.format(full, line))
            else:
                self._table.append(pair)

def writeCurrent(self, full=None, forceWrite=False):
    '''Writes the internal line buffer self._lines into a file.
    @param full: the filename. None: the file the buffer was read from is taken
    @param forceWrite: True: the file is written even if the buffer is unchanged
    '''
    if forceWrite or self._currentChanged:
        if full is None:
            full = self._currentFile
        self._logger.log('writing {}...'.format(full), 3)
        # fixed: removed the spurious 'self._table = []' (copy&paste from
        # readCurrent()); writing must not clear the replacement table
        with open(full, 'w') as fp:
            for line in self._lines:
                fp.write(line + '\n')
        # the buffer and the file are in sync now
        self._currentChanged = False

def traverse(self, fullPattern, action, maxDepth, excluded=None, dirMustBeWritable=False, fileMustBeWritable=False):
    '''Traverses the directory tree and executes the given action on matching files.
    @param fullPattern: only files matching this pattern will be processed.
        If it contains a path that is the start directory, otherwise the
        current directory is used
    @param action: the action, e.g. 'replace-or-insert'
    @param maxDepth: the maximum subdirectory nesting level. None: not limited
    @param excluded: None or an array of patterns for file exclusion
    @param dirMustBeWritable: True: only writable directories are processed
    @param fileMustBeWritable: True: only writable files are processed
    '''
    self._excluded = excluded
    self._dirMustBeWritable = dirMustBeWritable
    self._fileMustBeWritable = fileMustBeWritable
    # fixed: grep/replace never set the limit used by handleOneDir()
    if not hasattr(self, '_maxFileCount'):
        self._maxFileCount = 0x7ffffff
    startDir = os.path.dirname(fullPattern)
    if maxDepth is None:
        maxDepth = -1
    self.handleOneDir(startDir if startDir != '' else '.', os.path.basename(fullPattern), action, 0, maxDepth)

def usage():
    '''Returns an info about the usage of the text tool.
    NOTE(review): the original help text lost its <...> placeholders in this
    chunk of the patch (restored below as far as derivable) and is cut off
    after the --status-file entry; the 'script' mode and everything after it
    lies outside the visible part.
    '''
    return r"""usage: texttool [<opts>] <mode> [<params>]
 Text manipulation tool
<file>: file to manipulate
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 adapt[-configuration] <file> <data-file>
   Sets specified variables in a configuration file to given values.
   The variables are defined in a separate file.
   <file>
    the file to change
   <data-file>
    the file containing the values to change. One variable per line, e.g. 'verbose=True'
 adapt[-configuration] <file> prefix=<prefix>
   Sets specified variables in a configuration file to given values.
   The variables are defined in the texttool configuration file.
   <file>
    the file to change
   <prefix>
    the prefix to filter the keys in the internal configuration file, e.g. 'php.'
 build-examples
   Populates the example directory: Then the below examples can be used unmodified.
 e(xecute) <statements> [<file>]
   Executes the <statements> on the file <file>.
   If <file> is missed the statements must contain a "read <file>" statement.
   If <file> is '-' the input is read from stdin.
 grep <pattern> <file-pattern> [<opts>]
   Search the <pattern> (a regular expression) in all files matching <file-pattern>
   <file-pattern>
    file name pattern, with wildcards *, ? [chars] and [!not chars]
   <opts>:
   --excluded=<patterns>
    shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
   --format=<format>
    prints the format: Wildcards: $1 first group (delimited by a parenthesis pair). First from left
    placeholders: !full! (path and node), !node! (node only) !no! (line number), e.g. "!full!-!no!: $1 $3"
   -i or --ignore-case
    the search is case insensitive
   --line-no
    the line number is displayed (behind the filename)
   -l --list
    prints the filename only
   --max-depth=<depth>
    the maximum nesting level of the directory searched in
   --missing
    lists the full filenames not containing <pattern>
   --no-name
    the filename (and linenumber) is not displayed on top of the line
   -o or --only-matching
    prints the matching string only
   -v or --reverse
    prints lines not matching the given regular expression
   exit code is 10 if no hit is found
 python-to-c <python-file> <target-file>
   Converts python source code into C like syntax (C++, C, TypeScript). This is a 80% tool: many corrections must be done manually.
   <python-file>: the file with the python source code
   <target-file>: the file with the C like syntax
 replace { <pattern> <replacement> | @<table> } <file-pattern> [<target>] [<opts>]
   Search the <pattern> (a regular expression) in all files matching <file-pattern> and replace it by a given string
   <pattern>
    a regular expression
   <replacement>
    may contain $<no> for back references ($1 replaces group 1...)
   @<table>
    a text file containing the strings and replacements separated by a TAB, one pair per line, e.g. "@table.txt"
   <file-pattern>
    '=<content>': the input is given as string, e.g. "=Needle"
    file name pattern, with wildcards *, ? [chars] and [!not chars].
    If the file name pattern starts with '=', take '[=]<rest>' for distinction
   <target>
    if given the name of the target file, can contain macros !full!, !path!, !node!, !fn!, !ext!
    e.g. "!path!/!fn!.replaced!ext!"
    example: !full!: /tmp/abc.def !path!: /tmp/ !node!: abc.def !fn!: abc !ext!: .def
    default: !full! (replaces the source file)
   <opts>:
   --excluded=<patterns>
    shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
   --max-depth=<depth>
    0: only the directory given by <file-pattern> will be processed.
    otherwise: the maximal nesting level of subdirectories to be processed
   exit code is 10 if no hit is found
 replace-or-insert <pattern> <line> <file-pattern> [<opts>]
   Search for the regular expression <pattern> in files.
   If found the place is replaced by <line>.
   Else the anchor is searched. If found the line is inserted
   <pattern>:
    a regular expression defining the line. Should contain content from <line>
   <line>:
    precondition: this line is in the content of the files
   <file-pattern>:
    a file pattern with wildcards '*' and '?'. All files matching this pattern will be processed. In all subdirectories too
   <opts>:
   --above-anchor=<anchor>
    if <pattern> is not found the line is inserted above the first line containing this regular expression
   --below-anchor=<anchor>
    if <pattern> is not found the line is inserted below the first line containing this regular expression
   --create-if-missing
    if <file-pattern> does not exist and the name does not contain wildcards it will be created
   --excluded=<patterns>
    shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
   --file-count=<count>
    only <count> files will be processed
   --max-depth=<depth>
    0: only the directory given by <file-pattern> will be processed.
    otherwise: the maximal nesting level of subdirectories to be processed
 replace-region <start-pattern> <end-pattern> <replacement> <file-pattern> [<opts>]
   Replace a region (some sequential lines) in a file
   <start-pattern>
    a regular expression describing the start of the region (this line is part of the region)
    if not found nothing is replaced
   <end-pattern>
    a regular expression describing the end of the region (this line is not part of the region)
    if this pattern is not found the file end is the region end
   <replacement>
    the replacement text: will be inserted instead of the old region
   <file-pattern>
    this files will be inspected
   <opts>:
   --start-excluded
    <start-pattern> is not part of the region
   --end-included
    <end-pattern> is part of the region
   --max-depth=<depth>
    maximum nesting level
   --excluded=<patterns>
    file name pattern to exclude, e.g.':*.txt:*.bak'
 random { [ <minimum> ] <maximum> | <options> }
   Prints a pseudo random number between 0 or <minimum> (inclusive) and <maximum> or 2**31 (excluded).
   If <seed> is not given a time related seed is taken. Otherwise the rand generator starts with the given seed.
   <opts>:
   --seed=<seed>
    the rand generator starts with the given seed.
   --status-file=<file>
    the status of the random generator is stored in this file. Default: /tmp/texttool.<hash>.seed or /tmp/texttool.default.seed
 script"""
    # NOTE(review): the help text ends abruptly at the 'script' mode because
    # this chunk of the source is truncated here.