--- /dev/null
+.project
+.pydevproject
+.settings
+*.pyc
+2*.sh
+/unpackShell.sh
+examples/data/
+examples/config
+/upd-monitor.sh
+/2dragon
--- /dev/null
#! /bin/bash
# Builds the distribution tarball and deploys it.
# NOTE(review): All2Tmp and Cmd2All are external helpers (not part of this
# repository view) - presumably "copy to all hosts" and "run on all hosts".
TAR=/tmp/rsh.tgz
FN=unpackShell.sh
# create the archive /tmp/rsh.tgz (see MkTar.sh)
./MkTar.sh
All2Tmp all $TAR
# publish the archive on the web server as well
rsync -av $TAR hm@dromedar:/home/www/public
# generate the script executed on the remote side: it unpacks/updates there
cat <<EOS >$FN
#! /bin/bash
cd /usr/share/pyrshell
./upd.sh
EOS
chmod +x $FN
Cmd2All all $FN

--- /dev/null
#! /bin/bash
# Environment for running the monitor from the development tree:
# the development checkout comes first in PYTHONPATH, then the
# system python3.5 directories.
export PYTHONPATH=/home/ws/py/pyrshell:/home/ws/py/pyrshell/base:/usr/lib/python35.zip:/usr/lib/python3.5:/usr/lib/python3.5/plat-x86_64-linux-gnu:/usr/lib/python3.5/lib-dynload:/usr/local/lib/python3.5/dist-packages:/usr/lib/python3/dist-packages:/home/ws/py/pyrshell
# host whose monitor instance is addressed
export MONITOR_HOST=hawk
export MONITOR_CONFIG=/etc/pymonitor/config.d
export MONITOR_APPL=pymonitor
export MONITOR_LOGFILE=/var/log/local/pymonitor.log
--- /dev/null
#! /bin/bash
# Installs the pyrshell applications.
# MODE controls how many tools are installed: min < std < all.
MODE=$1
if [ "$MODE" != 'min' -a "$MODE" != 'all' -a "$MODE" != 'std' ]; then
	echo "Usage: InstallAll [MODE]"
	echo "Installs all applications"
	echo "MODE: min std all"
else
	# the base tools: installed in every mode
	appl/DbTool.py -v3 install
	appl/DirTool.py -v3 install
	appl/RestoreTool.py -v3 install
	appl/SvgTool.py -v3 install
	appl/TextTool.py -v3 install
	appl/ZipTool.py -v3 install
	# the standard tools: installed for 'std' and 'all'
	if [ "$MODE" = 'std' -o "$MODE" = 'all' ]; then
		appl/BackupTool.py -v3 install
		appl/Monitor.py -v3 install
	fi
	# the full set: only installed for 'all'
	if [ "$MODE" = 'all' ]; then
		appl/UrlChecker.py -v3 install
		appl/WebDashClient.py -v3 install
		appl/WebDashServer.py -v3 install
		appl/WikiTool.py -v3 install
		appl/FtpTool.py -v3 install
	fi
fi
--- /dev/null
+#!/usr/local/bin/python2.7
+# encoding: utf-8
+'''
+MakeCloud -- shortdesc
+
+MakeCloud is a description
+
+It defines classes_and_methods
+
+@author: user_name
+
+@copyright: 2018 organization_name. All rights reserved.
+
+@license: license
+
+@contact: user_email
+@deffield updated: Updated
+'''
+
+import sys
+import os
+
+from argparse import ArgumentParser
+from argparse import RawDescriptionHelpFormatter
+
+__all__ = []
+__version__ = 0.1
+__date__ = '2018-03-13'
+__updated__ = '2018-03-13'
+
+DEBUG = 1
+TESTRUN = 0
+PROFILE = 0
+
class CLIError(Exception):
    '''Generic exception to raise and log different fatal errors.'''
    def __init__(self, msg):
        # fix: the generated template called super(CLIError).__init__(type(self)),
        # which creates an unbound super object and never reaches
        # Exception.__init__; initialize the base class with the message instead
        super(CLIError, self).__init__(msg)
        # message as shown by str()/unicode(), prefixed as an error
        self.msg = "E: %s" % msg
    def __str__(self):
        return self.msg
    def __unicode__(self):
        return self.msg
+
+def main(argv=None): # IGNORE:C0111
+ '''Command line options.'''
+
+ if argv is None:
+ argv = sys.argv
+ else:
+ sys.argv.extend(argv)
+
+ program_name = os.path.basename(sys.argv[0])
+ program_version = "v%s" % __version__
+ program_build_date = str(__updated__)
+ program_version_message = '%%(prog)s %s (%s)' % (program_version, program_build_date)
+ program_shortdesc = __import__('__main__').__doc__.split("\n")[1]
+ program_license = '''%s
+
+ Created by user_name on %s.
+ Copyright 2018 organization_name. All rights reserved.
+
+ Licensed under the Apache License 2.0
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Distributed on an "AS IS" basis without warranties
+ or conditions of any kind, either express or implied.
+
+USAGE
+''' % (program_shortdesc, str(__date__))
+
+ try:
+ # Setup argument parser
+ parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter)
+ parser.add_argument("-r", "--recursive", dest="recurse", action="store_true", help="recurse into subfolders [default: %(default)s]")
+ parser.add_argument("-v", "--verbose", dest="verbose", action="count", help="set verbosity level [default: %(default)s]")
+ parser.add_argument("-i", "--include", dest="include", help="only include paths matching this regex pattern. Note: exclude is given preference over include. [default: %(default)s]", metavar="RE" )
+ parser.add_argument("-e", "--exclude", dest="exclude", help="exclude paths matching this regex pattern. [default: %(default)s]", metavar="RE" )
+ parser.add_argument('-V', '--version', action='version', version=program_version_message)
+ parser.add_argument(dest="paths", help="paths to folder(s) with source file(s) [default: %(default)s]", metavar="path", nargs='+')
+
+ # Process arguments
+ args = parser.parse_args()
+
+ paths = args.paths
+ verbose = args.verbose
+ recurse = args.recurse
+ inpat = args.include
+ expat = args.exclude
+
+ if verbose > 0:
+ print("Verbose mode on")
+ if recurse:
+ print("Recursive mode on")
+ else:
+ print("Recursive mode off")
+
+ if inpat and expat and inpat == expat:
+ raise CLIError("include and exclude pattern are equal! Nothing will be processed.")
+
+ for inpath in paths:
+ ### do something with inpath ###
+ print(inpath)
+ return 0
+ except KeyboardInterrupt:
+ ### handle keyboard interrupt ###
+ return 0
+ except Exception, e:
+ if DEBUG or TESTRUN:
+ raise(e)
+ indent = len(program_name) * " "
+ sys.stderr.write(program_name + ": " + repr(e) + "\n")
+ sys.stderr.write(indent + " for help use --help")
+ return 2
+
+if __name__ == "__main__":
+ if DEBUG:
+ sys.argv.append("-h")
+ sys.argv.append("-v")
+ sys.argv.append("-r")
+ if TESTRUN:
+ import doctest
+ doctest.testmod()
+ if PROFILE:
+ import cProfile
+ import pstats
+ profile_filename = 'MakeCloud_profile.txt'
+ cProfile.run('main()', profile_filename)
+ statsfile = open("profile_stats.txt", "wb")
+ p = pstats.Stats(profile_filename, stream=statsfile)
+ stats = p.strip_dirs().sort_stats('cumulative')
+ stats.print_stats()
+ statsfile.close()
+ sys.exit(0)
+ sys.exit(main())
\ No newline at end of file
--- /dev/null
#! /bin/bash
# Creates the distribution tarball /tmp/rsh.tgz from the source directories.
TAR=/tmp/rsh.tgz
# remove the archive of a former run
test -f $TAR && rm -f $TAR
tar czf $TAR appl base net unittest doc examples InstallAll
# show size and timestamp for a quick plausibility check
ls -ld $TAR

--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import datetime
+import sys
+import os.path
+import stat
+import subprocess
+import tempfile
+import gzip
+import fnmatch
+import re
+import traceback
+
+from platform import node
+import shutil
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.Logger
+import base.MemoryLogger
+import base.StringUtils
+import base.JavaConfig
+import base.BaseTool
+import net.EMail
+
+class BackupBase (base.BaseTool.BaseTool):
+
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global command line options (passed to BaseTool,
            which also loads the configuration 'backup.conf')
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'backup.conf')
        # target directory of the previous run; set while processing 'job.once'
        self._lastTargetDir = None
+
+ def allDatabases(self, metadataToo=False):
+ '''Returns all mysql databases.
+ @param metadataToo: True: mysql is returned too
+ @param user: a user having the rights to read the database mysql
+ @param password: the user's password
+ @return: array with databases
+ '''
+ if '_mysql' in sys.modules:
+ mode = 'intern'
+ import _mysql
+ else:
+ mode = self._configuration.getString('mysql.mode', 'file')
+
+ rc = []
+ user = self._configuration.getString('mysql.user')
+ password = self._configuration.getString('mysql.code')
+ if user == None or password == None:
+ self._logger.error('missing mysql.user and/or mysql.code in configuration (backup.conf)')
+ elif mode == 'file':
+ path = '/var/lib/mysql'
+ nodes = os.listdir(path)
+ for node in nodes:
+ full = path + os.sep + node
+ if node != 'sys' and os.path.isdir(full):
+ if not metadataToo and (node == 'mysql' or node == 'information_schema' or node == 'performance_schema'):
+ continue
+ rc.append(node)
+ elif mode == 'extern':
+ argv = ['/usr/bin/mysql', '-u', user, '-p' + password, 'mysql']
+ errorFile = tempfile.gettempdir() + os.sep + 'backuptool.err.txt'
+ with open(errorFile, 'w') as fpError:
+ proc = subprocess.Popen(argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=fpError.fileno())
+ proc.communicate(b'show databases;\nquit;\n')
+ lineNo = 0
+ while True:
+ lineNo += 1
+ line = proc.stdout.readline().decode()
+ if line == '':
+ break
+ if lineNo < 4:
+ continue
+ if line.startswith('|'):
+ db = line.strip('| \n')
+ if not metadataToo and (db == 'mysql' or db == 'information_schema' or db == 'performance_schema'):
+ continue
+ rc.append(db)
+ self.errorFileToLog(errorFile, 'database mysql')
+ os.unlink(errorFile)
+ else:
+ db =_mysql.connect('localhost', user, password, 'mysql')
+ db.query('show databases;')
+ dbs = db.store_result()
+ results = dbs.fetch_row(0)
+ for item in results:
+ value = item[0]
+ if not metadataToo and (value == b'mysql' or value == b'information_schema' or value == b'performance_schema'):
+ continue
+ rc.append(value.decode())
+ return rc
+
+ def findConfig(self, variable, value, subdir):
+ '''Finds a configuration file given by a "variable" and a value.
+ @param variable: a configuration variable
+ @param value: the value to search
+ @param subdir: the subdirectory inside the backup configuration, e.g. 'webapps.d'
+ @return: None: not found otherwise: the JavaConfig instance of the found configuration
+ '''
+ rc = None
+ configDir = self._configDir + os.sep + subdir
+ if not os.path.isdir(configDir):
+ self._logger.error('missing {:s} in {:s}'.format(subdir, self._configDir))
+ else:
+ nodes = os.listdir(configDir)
+ for node in nodes:
+ if node.endswith('.conf'):
+ full = configDir + os.sep + node
+ config = base.JavaConfig.JavaConfig(full, self._logger)
+ if config.getString(variable) == value:
+ rc = config
+ break
+ return rc
+
    def findDbInfo(self, path, defaultUser, defaultPassword):
        '''Searches the info about the database in a given directory.
        Inspects the well known configuration files of several web applications
        (Winfothek, WordPress, NextCloud, shop, timetracking, MediaWiki)
        and extracts database name, user and password with line grepping.
        @param path: the directory to search
        @param defaultUser: returned as user if no known configuration file is found
        @param defaultPassword: returned as password if no known configuration file is found
        @return: a tuple (db, user, password); db is 'unknowndb' if nothing was found
        '''
        configWordPress = path + os.sep + 'wp-config.php'
        configShop = path + os.sep + 'config.php'
        configTimetracking = path + '/config/sys/main.php'
        configNextCloud = path + '/config/config.php'
        configMediaWiki = path + '/LocalSettings.php'
        configWinfothek = path + '/resources/winfothek.conf'
        if os.path.exists(configWinfothek):
            # .dburl=mysql:host=localhost;dbname=winfothek
            # .dbuser=winfothek
            db = base.BaseTool.BasicStatics.grep('.dburl=mysql:', configWinfothek)[0].split('dbname=')[1].strip()
            user = base.BaseTool.BasicStatics.grep('.dbuser=', configWinfothek)[0].split('.dbuser=')[1].strip()
            password = base.BaseTool.BasicStatics.grep('.dbcode=', configWinfothek)[0].split('.dbcode=')[1].strip()
        elif os.path.exists(configWordPress):
            # define('DB_NAME', 'testdb'); // Der Name der Datenbank, die du benutzt.
            db = base.BaseTool.BasicStatics.grep('DB_NAME', configWordPress)[0].split(')')[0].split(',')[1].strip().strip(",');").strip('"')
            user = base.BaseTool.BasicStatics.grep('DB_USER', configWordPress)[0].split(')')[0].split(',')[1].strip().strip(",');").strip('"')
            password = base.BaseTool.BasicStatics.grep('DB_PASSWORD', configWordPress)[0].split(');')[0].split("DB_PASSWORD',")[1].strip().strip(",');").strip('"')
        elif os.path.exists(configNextCloud):
            # PHP array syntax: 'dbname' => 'value',
            db = base.BaseTool.BasicStatics.grep('dbname', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"')
            user = base.BaseTool.BasicStatics.grep('dbuser', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"')
            password = base.BaseTool.BasicStatics.grep('dbpassword', configNextCloud)[0].split('=>')[1].strip().strip(",');").strip('"')
        elif os.path.exists(configShop):
            db = base.BaseTool.BasicStatics.grep('dbname', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"')
            user = base.BaseTool.BasicStatics.grep('username', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"')
            password = base.BaseTool.BasicStatics.grep('password', configShop)[0].split('=>')[1].strip().strip(',').strip("'").strip('"')
        elif os.path.exists(configTimetracking):
            # the database name is part of the 'dsn' entry
            db = base.BaseTool.BasicStatics.grep("'dsn'", configTimetracking)[0].split('dbname=')[1].strip().strip(',').strip("'").strip('"').strip(',')
            user = base.BaseTool.BasicStatics.grep("'user'", configTimetracking)[0].split('=>')[1].strip().strip(',').strip("'").strip('"')
            password = base.BaseTool.BasicStatics.grep("'pwd'", configTimetracking)[0].split('=>')[1].strip().strip(',').strip("'").strip('"')
        elif os.path.exists(configMediaWiki):
            # $wgDBuser = "forum";
            db = base.BaseTool.BasicStatics.grep("wgDBname", configMediaWiki)[0].split('wgDBname =')[1].strip().strip(';').strip('"')
            user = base.BaseTool.BasicStatics.grep("wgDBuser", configMediaWiki)[0].split('wgDBuser =')[1].strip().strip(';').strip('"')
            password = base.BaseTool.BasicStatics.grep("wgDBpassword", configMediaWiki)[0].split('wgDBpassword =')[1].strip().strip(';').strip('"')
        else:
            # no known configuration found: fall back to the defaults
            db = 'unknowndb'
            user = defaultUser
            password = defaultPassword
        return (db, user, password)
+
+ def relativeBackupPath(self, mode, timestamp=None):
+ '''Returns the (date based) relative backup path, e.g. '/dayly/Sun'
+ '''
+ relPath = os.sep + mode + os.sep
+ if mode == 'dayly':
+ relPath += ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][time.localtime(timestamp).tm_wday]
+ elif mode == 'weekly':
+ aTuple = time.localtime(timestamp)
+ # %W: week number with Mon as first day of the week
+ weekNo = int(time.strftime('%W', aTuple))
+ no = int(weekNo) % 4
+ relPath += str(no)
+ elif mode == 'monthly':
+ relPath += str(time.localtime(timestamp).tm_mon)
+ else:
+ relPath = relPath[0:-1]
+ return relPath
+
    def replaceMacros(self, text):
        '''Replaces macros (written as '%<name>%') with its values.
        Known macros: %etc%, %backup.base%, %backup.current%, %backup.last%,
        %backup.current.minipath%, %backup.current.node%
        @param text: a text with macros, e.g. '%etc%/dirs.d/*.conf'
        @return: the text with replaced macros, e.g. '/etc/pyrshell/dirs.d/*.conf'
        '''
        rc = text.replace('%etc%', self._configDir)
        if text.find('%backup') >= 0:
            rc = rc.replace('%backup.base%', self._baseTargetDir)
            rc = rc.replace('%backup.current%', self._targetDir)
            if rc.find('%backup.last%') >= 0:
                if self._lastTargetDir == None:
                    # fall back to the current directory if no former run is known
                    self._logger.error('%backup.last% is undefined. we replace it with %backup.current%: ' + self._targetDir)
                    rc = rc.replace('%backup.last%', self._targetDir)
                else:
                    rc = rc.replace('%backup.last%', self._lastTargetDir)
            node = os.path.basename(self._targetDir)
            if node == 'often':
                # 'often' has no date dependent subdirectory
                minipath = node
            else:
                # e.g. 'dayly/Mon'
                minipath = os.path.basename(os.path.dirname(self._targetDir)) + os.sep + node
            rc = rc.replace('%backup.current.minipath%', minipath)
            rc = rc.replace('%backup.current.node%', node)
        return rc
+
+ def synchronize(self, source, target, deleteToo):
+ '''Synchronizes a directory from another.
+ Copies the newer or not existing files from the source to the target, recursive
+ @param source: the source directory
+ @param target: the target directory
+ @param deleteToo: files in target but not existing in source will be deleted
+ '''
+ def logError(arg):
+ self._logger.error('error while removing directory {:s}: {:s}'.format(fullTarget, arg))
+ sourceNodes = os.listdir(source)
+ targetNodes = os.listdir(target)
+ dirs = []
+ for node in sourceNodes:
+ fullSource = source + os.sep + node
+ fullTarget = target + os.sep + node
+ doCopy = False
+ infoSource = os.lstat(fullSource)
+ if stat.S_ISDIR(infoSource.st_mode):
+ dirs.append(node)
+ else:
+ try:
+ infoTarget = os.lstat(fullTarget)
+ doCopy = infoTarget.st_mtime > infoSource.st_mtime
+ except OSError:
+ doCopy = True
+ if doCopy:
+ self._logger.log('copying {:s}'.format(fullSource), 3)
+ shutil.copy(fullSource, fullTarget)
+ if deleteToo:
+ if node in targetNodes:
+ targetNodes.remove(node)
+ if deleteToo:
+ for node in targetNodes:
+ fullTarget = target + os.sep + node
+ self._logger.log('deleting {:s}'.format(fullTarget), 3)
+ if os.path.isdir(fullTarget):
+ shutil.rmtree(fullTarget)
+ else:
+ os.unlink(fullTarget)
+ for node in dirs:
+ trgDir = target + os.sep + node
+ if os.path.exists(trgDir):
+ if not os.path.isdir(trgDir):
+ os.unlink(trgDir)
+ os.mkdir(trgDir)
+ else:
+ os.mkdir(trgDir)
+ self.synchronize(source + os.sep + node, trgDir, deleteToo)
+
class ConfigurationSet():
    '''Administrates a set of configuration files stored in a given directory.
    The set is described by a pattern like '/etc/x.d/*.conf:db:user': the part
    before the first ':' selects the files, the remaining parts name the
    variables of interest.
    '''
    def __init__(self, pattern, logger):
        '''Constructor.
        @param pattern: a file pattern to identify the configuration files.
        @param logger: the logger
        '''
        self._logger = logger
        parts = pattern.split(':')
        self._pattern = parts[0]
        self._variables = parts[1:]
        self._configurations = []
        self._path = os.path.dirname(self._pattern)
        self._namePattern = os.path.basename(self._pattern)
        self._lastHandledFile = None
        if not os.path.isdir(self._path):
            self._logger.error('not a directory: ' + self._path)
        else:
            # collect all file names matching the pattern
            for entry in os.listdir(self._path):
                if fnmatch.fnmatch(entry, self._namePattern):
                    self._configurations.append(entry)

    def nextFile(self):
        '''Generator method returning the next configuration file of the set.
        @yields: the full path of the next file
        '''
        for entry in self._configurations:
            yield self._path + os.sep + entry

    def nextParameters(self):
        '''Generator method returning the parameters stored in the next configuration file of the set.
        @yields: an array with the values of the configured variables of the next file
        '''
        for filename in self.nextFile():
            self._lastHandledFile = filename
            config = base.JavaConfig.JavaConfig(filename, self._logger)
            if len(self._variables) == 0:
                self._logger.error('no variables defined in ' + self._pattern)
            values = []
            for variable in self._variables:
                if config.getString(variable) is None:
                    self._logger.error('variable {:s} not defined in {:s}'.format(variable, filename))
                values.append(config.getString(variable))
            yield values
+
if __name__ == '__main__':
    # the module is a library: nothing to do when started directly
    pass
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import datetime
+import sys
+import os.path
+import gzip
+import re
+import traceback
+import subprocess
+import shutil
+from base.ProcessHelper import ProcessHelper
+from base.MemoryLogger import MemoryLogger
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.Logger
+import base.StringUtils
+import base.BaseTool
+import net.EMail
+import appl.BackupBase
+import appl.DbTool
+
+class BackupTool (appl.BackupBase.BackupBase):
+
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global command line options (passed to BackupBase)
        '''
        appl.BackupBase.BackupBase.__init__(self, globalOptions)
        # suppress well known harmless stderr messages of the called tools
        self._logger.setErrorFilter(re.compile(r'/bin/tar: .*(file changed as we read it|tar: Removing leading|mysqldump:.*Warning.*Using a password)'), True)
+
    def btrfsInfo(self):
        '''Collects information about all btrfs partitions.
        Each btrfs partition (found via blkid) is mounted readonly on a
        temporary mount point and inspected with the btrfs tool.
        @return: '' (no btrfs partition found) or a human readable info text
        '''
        rc = ''
        lines = self._processHelper.executeInputOutput(['/usr/sbin/blkid'])
        devs = []
        for line in lines:
            if line.find('TYPE="btrfs"') >= 0:
                # first word is the device with a trailing ':', e.g. '/dev/sda1:'
                devs.append(line.split(' ')[0][0:-1])
        mountDir = '/media/tmp'
        self.ensureDirectory(mountDir)
        files = os.listdir(mountDir);
        if len(files) > 0:
            # NOTE(review): a non empty mount directory is taken as
            # "something is still mounted" - verify this heuristic
            self._processHelper.execute(['/bin/umount', mountDir], False)
        for dev in devs:
            self._processHelper.execute(['mount', '-o', 'ro', dev, mountDir], False)
            lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'subvol', 'list', mountDir])
            if rc == '':
                # write the header only once
                rc = '\n=== btrfs-Info\n'
            rc += '= btrfs: ' + dev + '\n' + '\n'.join(lines)
            lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'device', 'usage', mountDir])
            rc += '= device usage: ' + dev + '\n' + '\n'.join(lines)
            lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'device', 'stats', mountDir])
            rc += '= device stats: ' + dev + '\n' + '\n'.join(lines)
            lines = self._processHelper.executeInputOutput(['/usr/bin/btrfs', 'filesystem', 'df', mountDir])
            rc += '= filesytem df: ' + dev + '\n' + '\n'.join(lines)
            self._processHelper.execute(['/bin/umount', mountDir], False)
        self._logger.log('devs: {}'.format(len(devs)), 2)
        return rc
+
    def createConfigExample(self):
        '''Creates an example configuration.
        Writes the main configuration, an example webapps.d entry and the
        standard dirs.d entries into the configuration directory.
        '''
        self.ensureDirectory(self._configDir)
        dirWebApps = self._configDir + os.sep + 'webapps.d'
        self.ensureDirectory(dirWebApps)
        dirDirs = self._configDir + os.sep + 'dirs.d'
        self.ensureDirectory(dirDirs)
        filename = self._configDir + os.sep + 'backup.conf'
        # NOTE(review): the example name is chosen when backup.conf has NO
        # content - the condition looks inverted; verify the semantics of
        # StringUtils.hasContent(filename, '#')
        if not base.StringUtils.hasContent(filename, '#'):
            filename = self._configDir + os.sep + 'backup.example'
        base.StringUtils.toFile(filename, '''# backup example configuration
target.path=/media/backup
log.file=/var/log/local/backup.log
log.mirror=/media/backup/log/backup.log
zip.volume.size=10g
# jobs to do: Fix is job.<mode> (job.dayly...)
job.often=job.often.dirs job.chown job.rsync
job.often.dirs=&saveDirByZipLatest 1 @%etc%/dirs.d/minimal.*.conf:directory
job.dayly=job.sysinfo job.save.webapps job.sys job.minimal.1 job.chown job.once
job.weekly=job.sysinfo job.save.webapps job.sys job.full job.db.all job.chown
job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded
job.full=&saveDirByTar @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.1=&saveDirByZipLatest 7 @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.2=&saveDirByZipLatest 7 @%etc%/webapps.d/*.conf:directory
job.db=&saveMysql @%etc%/webapps.d/*.conf:db:user:password:sql.file
job.db.all=&saveAllDatabases
job.sysinfo=&systemInfo
job.rsync=&rsync setDromedarDayly %backup.current%
job.once=job.once.zip job.once.rsync job.chown
job.once.rsync=&rsync setOnce %backup.last%/
job.once.zip=&saveDirByZip @%etc%/webapps.d/*.conf:directory:excluded
job.save.webapps=&saveWebapps @%etc%/webapps.d/*.conf:db:user:password:directory
# job.gitlab.backup=&gitlabBackup
job.chown=&chown bupsrv.bupsrv %backup.current%
# Reading metadata from mysql:
mysql.user=backup
mysql.code=Secret.Phrase
mysql.mode=file
# rsync set for dayly synchronisation
setDromedarDayly.user=extbup
setDromedarDayly.target=bupcaribou@dromedar:/opt/extbup/caribou/often/%factor%
setDromedarDayly.factor=4

setOnce.user=root
setOnce.target=%backup.base%/once
setOnce.factor=1

# Email data for error report:
# Receiver of the error messages: may be a blank separated list of email addresses
send.always=False
location={:s}
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.neutral@gmx.de
smtp.user=hm.neutral@gmx.de
smtp.code=sEcReT
smtp.tls=True
'''.format(self.hostname(True)))
        self._logger.log('created: ' + filename)
        filenameWebApp = dirWebApps + os.sep + 'testdb.example'
        base.StringUtils.toFile(filenameWebApp, '''# Example created by backuptool
db=testdb
user=testuser
password=NotVerySecret
sql.file=testdb
directory=/opt/webapp/testapp
''')
        self._logger.log('created: ' + filenameWebApp)
        BackupTool.createDirsD(dirDirs, self._logger, True)
+
    @staticmethod
    def createDirsD(dirDirs, logger, varLogToo):
        '''Creates the standard files in dirs.d.
        @param dirDirs: the full path of the directory dirs.d
        @param logger: None or the logger
        @param varLogToo: True: the configuration file to store /var/log will be created
        Note: the files can be read only by root
        '''
        def createConfig(name, content):
            # helper: writes the file only if it does not exist yet
            if not os.path.exists(name):
                base.StringUtils.toFile(name, content)
                if logger != None:
                    logger.log('created: ' + name)
        createConfig(dirDirs + os.sep + 'sys.etc.conf', '''# Example created by backuptool
directory=/etc
excluded=
''')
        createConfig(dirDirs + os.sep + 'sys.home.bin.conf', '''# Example created by backuptool
directory=/home/bin
excluded=
''')
        createConfig(dirDirs + os.sep + 'sys.usr.local.conf', '''# Example created by backuptool
directory=/usr/local
excluded=
''')
        if varLogToo:
            createConfig(dirDirs + os.sep + 'std.var.log.conf', '''# Example created by backuptool
directory=/var/log
excluded=
''')
        createConfig(dirDirs + os.sep + 'std.srv.conf', '''# Example created by backuptool
directory=/srv
excluded=
''')
+
+ def createLockAndReadyFile(self, mode):
+ '''Creates the names of the lock file and the ready file.
+ @param mode: 'dayly', 'weekly', 'monthly'
+ @return: an array [lockFile, readyFile]
+ '''
+ lockFile = "{}{}.{}.lck".format(self._baseTargetDir, os.sep, mode)
+ readyFile = "{}{}.{}.finished".format(self._baseTargetDir, os.sep, mode)
+ return [lockFile, readyFile]
+
    def createWebapp(self, password, patterns):
        '''Creates the webapps.d files given by the nginx configurations.
        @param password: the default password used if none can be found in the webapp
        @param patterns: a list of patterns to select the nginx configurations, e.g. ["*.de", "*.com"]
        '''
        nodes = []
        sourceDir = '/etc/nginx/sites-enabled'
        trgDir = self._configDir + os.sep + 'webapps.d/'
        for pattern in patterns:
            nodes += base.BaseTool.BasicStatics.findFiles(sourceDir, pattern, False)
        for node in nodes:
            if self._verboseLevel >= 3:
                self._logger.log('working on ' + node)
            # first line containing 'root' names the document root, e.g. 'root /var/www/x/;'
            root = base.BaseTool.BasicStatics.grep('root', sourceDir + os.sep + node)[0].strip()
            root = root.split()[1].replace('/;', '').replace(';', '')
            self._logger.log('root: ' + root)
            # NOTE(review): the parameter "password" is overwritten here by the
            # value found in the webapp configuration - verify that is intended
            (db, user, password) = self.findDbInfo(root, 'backup', password)
            if db == 'unknowndb':
                # fallback: the webapp directory may contain a marker file with the db name
                dbFile = root + os.sep + '.dbname'
                if os.path.exists(dbFile):
                    db = base.StringUtils.fromFile(dbFile).strip()
            self._logger.log('db: {:s} user: {:s} pw: {:s}'.format(db, user, password))
            base.StringUtils.toFile(trgDir + node + '.conf', '''db={:s}
user={:s}
password={:s}
sql.file={:s}
directory={:s}
'''.format(db, user, password, node + '_' + db, root))
+
    def doJob(self, config, statement):
        '''Interpretes one statement.
        A statement is either a command (starting with '&') or a blank
        separated list of configuration variables containing sub statements
        (processed recursively).
        @param config: the configuration (source of the variables)
        @param statement: the statement to execute
        '''
        statement = statement.strip()
        tokens = statement.split(' ')
        if statement.startswith('&'):
            # a command: dispatch to the job method; tokens[1:] are the arguments
            if tokens[0] == '&saveDirByTar':
                self.jobSaveDirByTar(tokens[1:])
            elif tokens[0] == '&saveDirByZip':
                self.jobSaveDirByZip(tokens[1:])
            elif tokens[0] == '&saveDirByZipLatest':
                self.jobSaveDirByZipLatest(tokens[1:])
            elif tokens[0] == '&deleteFile':
                self.jobDeleteFile(tokens[1:])
            elif tokens[0] == '&saveMysql':
                self.jobSaveMysql(tokens[1:])
            elif tokens[0] == '&saveAllDatabases':
                self.jobSaveAllDatabases()
            elif tokens[0] == '&systemInfo':
                self.systemInfo(self._targetDir)
            elif tokens[0] == '&synchronize':
                self.jobSynchronize(tokens[1:])
            elif tokens[0] == '&gitlabBackup':
                self.jobGitlabBackup()
            elif tokens[0] == '&saveWebApps':
                # NOTE(review): the example configuration writes '&saveWebapps'
                # (lower case 'a') - one of the two spellings looks wrong
                self.jobSaveWebApps(tokens[1:])
            elif tokens[0] == '&chown':
                self.jobChown(tokens[1:])
            elif tokens[0] == '&rsync':
                self.jobRSynchronize(tokens[1:])
            else:
                self._logger.error('unknown command: ' + tokens[0])
        else:
            # a list of variables: each one contains a sub statement
            for token in tokens:
                savePath = None
                if token == 'job.once':
                    # 'job.once' writes into the separate directory <base>/once;
                    # the current target is restored after processing
                    savePath = self._lastTargetDir = self._targetDir
                    self._targetDir = self._baseTargetDir + os.sep + 'once'
                    self.ensureDirectory(self._targetDir)
                statement2 = config.getString(token)
                if statement2 == None:
                    self._logger.error('unknown variable {:s} in {:s} while working on "{:s}"'.format(
                        token, config._filename, statement))
                else:
                    self.doJob(config, statement2)
                if savePath != None:
                    self._targetDir = savePath
+
+ def doBackup(self, mode):
+ '''Does the backup process controlled by configuration files.
+ @param mode: 'dayly', 'weekly' ...
+ '''
+ logFile = self._configuration.getString('log.file')
+ if logFile != None and (not hasattr(self._logger, '_logfile') or os.path.abspath(logFile) != os.path.abspath(self._logger._logfile)):
+ logger2 = base.Logger.Logger(logFile, self._logger._verbose)
+ logger2.transferErrors(self._logger)
+ self._logger = logger2
+ logFile2 = self._configuration.getString('log.mirror')
+ if logFile2 != None:
+ logger3 = base.Logger.Logger(logFile, False)
+ self._logger.setMirror(logger3)
+ self._mode = mode
+ self._baseTargetDir = self._configuration.getString('target.path')
+ self._targetDir = self._baseTargetDir
+ try:
+ if self._configuration.getString('job.' + mode) == None:
+ self._logger.error('missing job.{:s} in {:s} Wrong mode?'.format(mode, self._configuration._filename))
+ elif self._targetDir == None:
+ self._logger.error('missing "{:s}" in {:s}'.format(self._targetDir, self._configuration._filename))
+ elif not os.path.isdir(self._targetDir):
+ self._logger.error('{:s} is not a directory. See "target.path" in '.format(self._targetDir, self._configuration._filename))
+ else:
+ relPath = self.relativeBackupPath(mode)
+ self._targetDir += relPath
+ if self.ensureDirectory(self._targetDir) != None:
+ if mode == 'dayly' or mode == 'weekly' or mode == 'monthly':
+ linkTarget = os.path.dirname(self._targetDir) + os.sep + 'current'
+ if os.path.exists(linkTarget):
+ os.unlink(linkTarget)
+ os.symlink(os.path.basename(self._targetDir), linkTarget)
+ statement = self._configuration.getString('job.' + mode );
+ if statement == None:
+ self._logger.error('missing job.' + mode + ' in ' + self._configuration._filename);
+ else:
+ [lockFile, readyFile] = self.createLockAndReadyFile(mode)
+ if os.path.exists(lockFile):
+ self._logger.error('backup [{}] is already running: {} exists'.format(mode, lockFile))
+ base.StringUtils.toFile(lockFile, '')
+ base.BaseTool.BasicStatics.ensureFileDoesNotExist(readyFile, None, self._logger)
+ self.doJob(self._configuration, statement)
+ base.StringUtils.toFile(readyFile, '')
+ base.BaseTool.BasicStatics.ensureFileDoesNotExist(lockFile, None, self._logger)
+ except Exception as exc:
+ self._logger.error('backup aborted with exception: ' + str(exc))
+ traceback.print_exc()
+ subject = None
+ if self._logger._errors > 0:
+ subject = 'Backup {:s} failed on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
+ elif self._configuration.getString('send.always', 'False').startswith('T'):
+ subject = 'Backup {:s} finished on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
+ if subject != None:
+ now = datetime.datetime.now()
+ body = now.strftime('%Y.%m.%d %H:%M:%S') + ' Backup {:s} finished with {:d} error(s)\n\n'.format(mode, self._logger._errors)
+ body += '\n'.join(self._logger._firstErrors)
+ self.sendStatusEmail(subject, body)
+
    def example(self):
        '''Passes the example configuration content to storeExample().
        '''
        example = '''# backup example configuration
target.path=/media/backup
log.file=/var/log/local/backup.log
log.mirror=/media/backup/log/backup.log
zip.volume.size=10g
# jobs to do: Fix is job.<mode> (job.dayly...)
job.often=job.often.dirs job.chown job.rsync
job.often.dirs=&saveDirByZipLatest 1 @%etc%/dirs.d/minimal.*.conf:directory
job.dayly=job.sysinfo job.sys job.minimal.1 job.chown
job.weekly=job.sysinfo job.sys job.full job.db.all job.chown
job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded
job.full=&saveDirByTar @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.1=&saveDirByZipLatest 7 @%etc%/dirs.d/std.*.conf:directory:excluded
job.minimal.2=&saveDirByZipLatest 7 @%etc%/webapps.d/*.conf:directory
job.db=&saveMysql @%etc%/webapps.d/*.conf:db:user:password:sql.file
job.db.all=&saveAllDatabases
job.sysinfo=&systemInfo
job.rsync=&rsync setDromedarDayly %backup.current%

# job.gitlab.backup=&gitlabBackup
job.chown=&chown bupsrv.bupsrv %backup.current%
# Reading metadata from mysql:
mysql.user=backup
mysql.code=Secret.Phrase
mysql.mode=file
# rsync set for dayly synchronisation
setDromedarDayly.user=extbup
setDromedarDayly.target=bupcaribou@dromedar:/opt/extbup/caribou/often/%factor%
setDromedarDayly.factor=4
# Email data for error report:
# Receiver of the error messages: may be a blank separated list of email addresses
send.always=False
location={:s}
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.neutral@gmx.de
smtp.user=hm.neutral@gmx.de
smtp.code=sEcReT
smtp.tls=True
'''.format(base.BaseTool.BasicStatics.hostname(True))
        self.storeExample(example)
+
+ def jobGitlabBackup(self):
+ # gitlab_rails['backup_path'] = "/var/opt/gitlab/backups"
+ start = time.time()
+ backupPath = '/var/opt/gitlab/backups'
+ regExpr = re.compile(r'gitlab_rails\[\'backup_path\'\]\s*=\s*"(.*?)"')
+ lines = base.StringUtils.grepInFile('/etc/gitlab/gitlab.rb', regExpr, 1)
+ if len(lines) != 1:
+ self._logger.error('cannot find backup path in /etc/gitlab/gitlab.rb')
+ else:
+ matcher = regExpr.search(lines[0])
+ if matcher:
+ backupPath = matcher.group(1)
+ self.clearDirectory(backupPath)
+ executable = '/usr/bin/gitlab-rake'
+ if not os.path.exists(executable):
+ self._logger.error('missing {}: could not save gitlab'.format(executable))
+ else:
+ argv = [executable, 'gitlab:backup:create']
+ self._processHelper.execute(argv, True)
+ backupFile = base.BaseTool.BasicStatics.firstFile(backupPath, '*.tar')
+ if backupFile == None:
+ self._logger.error('missing backup file in ' + backupPath)
+ else:
+ self.logFile(backupFile, 'gitlab backup file: %f %s %t (%r)', start)
+ target = self._targetDir + os.sep + 'gitlab_backup.tar'
+ nameFile = self._targetDir + os.sep + 'gitlab_name.txt'
+ base.StringUtils.toFile(nameFile + '\n#above: the original name of the backup file\n# current name: gitlab_backup.tar', backupFile)
+ self._logger.log('name saved into {}'.format(nameFile), 2)
+ try:
+ shutil.copy(backupFile, target)
+ os.unlink(backupFile)
+ self._logger.log('{} moved to {}'.format(backupFile, target), 3)
+ except OSError as exc:
+ self._logger.error('cannot move archive {:s} to {:s}: {:s}'.format(backupFile, target, str(exc)))
+
+ def jobChown(self, argv):
+ '''Changes the owner of some directories (recursive).
+ @param argv: contains the owner[.group] and the directories
+ '''
+ if len(argv) < 1:
+ self._logger.error('jobChown: missing owner')
+ elif re.match(r'\w+([.:]\w+)?$', argv[0]) == None:
+ self._logger.error('jobChown: wrong user[.group]: ' + argv[0])
+ elif len(argv) < 2:
+ self._logger.error('jobChown: missing directory')
+ elif self._beingRoot:
+ owner = argv[0]
+ self._logger.log('jobChown ' + ' '.join(argv), 2)
+ for name in argv[1:]:
+ path = self.replaceMacros(name)
+ if not os.path.isdir(path):
+ self._logger.error('jobChown: not a directory: ' + path)
+ else:
+ self._logger.log('chown {:s} {:s}'.format(owner, path), 3)
+ argv = ['/bin/chown', '-R', owner, path]
+ self._processHelper.execute(argv, True)
+ self._logger.log('chmod -R g+w {:s}'.format(path), 3)
+ argv = ['/bin/chmod', '-R', 'g+rw', path]
+ self._processHelper.execute(argv, True)
+
+ def jobDeleteFile(self, argv):
+ '''Saves some directories in some tar archivs.
+ @param argv: contains the source directories
+ '''
+ for name in argv:
+ if os.path.exists(name):
+ self._logger.log('deleting ' + name, 3)
+ os.unlink(name)
+ if os.path.exists(name):
+ self._logger.error('cannot delete ' + name)
+ else:
+ self._logger.log('deleted: ' + name)
+
+ def jobRSynchronize(self, argv):
+ '''Executes the program rsync to copy files to another host.
+ @param argv: contains the name of a parameter set and a list of source files/dirs
+ e.g.['setDromedarDayly', '%backup.current%']
+ '''
+ self._logger.log('jobRSynchronize ' + ' '.join(argv), 2)
+ ix = 0
+ setName = argv[0]
+ user = self._configuration.getString(setName + '.user', '')
+ target = self._configuration.getString(setName + '.target', '')
+ factor = self._configuration.getInt(setName + '.factor', 0)
+ if user == '' or target == '' or factor == '':
+ self._logger.error('rsync set {} incomplete: user: {} target: {} factor: {}'.format(setName, user, target, '' if factor == 0 else str(factor) ))
+ else:
+ time = datetime.datetime.now().time()
+ hour = int(round(time.hour*60+time.minute / 60.0)) % factor
+ target = self.replaceMacros(target.replace('%factor%', str(hour)))
+ cmd = ['/usr/bin/sudo', '-u']
+ cmd.append(user)
+ cmd.append('/usr/bin/rsync')
+ cmd.append('-a')
+ if self._verboseLevel >= 2:
+ cmd.append('-v')
+ cmd.append('--progress')
+ argv = argv[1:]
+ while ix < len(argv):
+ source = self.replaceMacros(argv[ix])
+ cmd.append(source)
+ ix += 1
+ cmd.append(target)
+ self._processHelper.execute(cmd, self._verboseLevel > 0)
+
+ def jobSaveAllDatabases(self):
+ '''Saves all databases of a mysql system.
+ '''
+ self._logger.log('saving all databases', 2)
+ dbTool = appl.DbTool.DbTool(self._globalOptions)
+ admin = dbTool._configuration.getString('admin.user')
+ code = dbTool._configuration.getString('admin.code')
+ dbTool.exportAllDatabases(admin, code, self._targetDir)
+
    def jobSaveAllDatabases2(self):
        '''Saves all databases of a mysql system, one gzipped SQL dump per database.

        For each database the credentials are taken from the matching webapp
        configuration (webapps.d) if available, otherwise from the global
        mysql.user / mysql.code settings. Output: <targetDir>/<name>.sql.gz.
        '''
        self._logger.log('saving all databases', 2)
        dbs = self.allDatabases(True)
        user = self._configuration.getString('mysql.user')
        password = self._configuration.getString('mysql.code')
        dbTool = appl.DbTool.DbTool(self._globalOptions)
        if dbs != None:
            for db in dbs:
                name = db
                currentUser = user
                currentPassword = password
                # prefer the credentials of the webapp owning this database
                config = self.findConfig('db', db, 'webapps.d')
                if config != None:
                    path = config.getString('directory')
                    if path != None:
                        # dump name: <webapp-dir-basename>_<db>
                        name = os.path.basename(path) + '_' + db
                    currentUser = config.getString('user')
                    currentPassword = config.getString('password')
                    if currentPassword == None:
                        # incomplete webapp configuration: fall back to the global account
                        currentUser = user
                        currentPassword = password
                if self._verboseLevel >= 3:
                    self._logger.log('saving db ' + db)
                start = time.time()
                outputFile = self._targetDir + os.sep + name + '.sql.gz'
                dbTool.exportDatabase(db, currentUser, currentPassword, outputFile)
                self.logFile(outputFile, '%f: %s %t created in %r', start)
+
+ def jobSaveDirByTar(self, argv):
+ '''Saves some directories in some tar archivs.
+ @param argv: contains the source directories
+ '''
+ self._logger.log('jobSaveDirByTar' + ' '.join(argv), 2)
+
+ for source in argv:
+ if not source.startswith('@'):
+ self._logger.log('saving directory {:s} into tar'.format(source), 3)
+ name = self.fullPathToName(source)
+ self.saveDirectoryByTar(name, source, self._targetDir, None)
+ else:
+ pattern = self.replaceMacros(source[1:])
+ configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
+ for arguments in configSet.nextParameters():
+ opts = None
+ if len(arguments) < 1:
+ self._logger.error('jobSaveDirByTar: configuration pattern does not contain at least 1 variable (directory[, excluded]): {} file: {}'
+ .format(pattern, configSet._lastHandledFile))
+ break
+ else:
+ path = arguments[0]
+ if len(arguments) > 1:
+ excluded = arguments[1]
+ if excluded != None and len(excluded) > 2:
+ listExcluded = excluded[1:].split(excluded[0])
+ for entry in listExcluded:
+ if entry != '':
+ if opts == None:
+ opts = ['--exclude=' + entry]
+ else:
+ opts.append('--exclude=' + entry)
+ name = self.fullPathToName(path)
+ self._logger.log('saving directory {:s} into tar'.format(source), 3)
+ self.saveDirectoryByTar(name, path, self._targetDir, opts)
+
+ def jobSaveDirByZip(self, argv):
+ '''Saves some directories in some tar archives.
+ Note: target is taken from self._targetDir
+ @param argv: contains the source directories
+ '''
+ if len(argv) < 1:
+ self._logger.error('SaveDirByZip: missing source')
+ else:
+ self._logger.log('jobSaveDirByZip' + ' '.join(argv), 2)
+ for source in argv:
+ if not source.startswith('@'):
+ name = self.fullPathToName(source)
+ self._logger.log('saving {:s} into {:s}'.format(source, name), 3)
+ self.saveDirectoryByZip(name, source, self._targetDir, None)
+ else:
+ pattern = self.replaceMacros(source[1:])
+ configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
+ for arguments in configSet.nextParameters():
+ opts = None
+ if len(arguments) < 1:
+ self._logger.error('jobSaveDirByZip: configuration pattern does not contain at least 1 variable (directory[, excluded]): : {} file: {}'
+ .format(pattern, configSet._lastHandledFile))
+ break
+ else:
+ path = arguments[0]
+ if len(arguments) > 1:
+ excluded = arguments[1]
+ if excluded != None and len(excluded) > 2:
+ listExcluded = excluded[:1].split(excluded[0])
+ for entry in listExcluded:
+ if entry == '':
+ continue
+ if opts == None:
+ opts = ['-x', entry]
+ else:
+ opts.append(entry)
+ name = self.fullPathToName(path)
+ self._logger.log('saving {:s} into {:s} {:s}'.format(path, name,
+ '' if opts == None else ' '.join(opts)), 3)
+ self.saveDirectoryByZip(name, path, self._targetDir, opts)
+
+ def jobSaveDirByZipLatest(self, argv):
+ '''Saves some directories in some tar archivs but only the "latest" files.
+ @param argv: contains the source directories
+ '''
+ doIt = False
+ if len(argv) < 2:
+ self._logger.error('missing source for SaveDirByZipLatest')
+ else:
+ try:
+ interval = int(argv[0])
+ doIt = True
+ except ValueError:
+ self._logger.error('SaveDirByZipLatest: argument 1 is not an integer: ' + argv[0])
+ if doIt:
+ self._logger.log('jobSaveDirByZipLatest' + ' '.join(argv), 2)
+ for source in argv[1:]:
+ if not source.startswith('@'):
+ name = self.fullPathToName(source)
+ self._logger.log('saving {:s} into {:s}'.format(source, name), 3)
+ self.saveDirectoryByZip(name, source, self._targetDir, None, interval)
+ else:
+ pattern = self.replaceMacros(source[1:])
+ configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
+ for arguments in configSet.nextParameters():
+ if len(arguments) < 1:
+ self._logger.error('jobSaveDirByZipLatest: configuration pattern does not contain 1 variable (directory): : {} file: {}'
+ .format(pattern, configSet._lastHandledFile))
+ break
+ else:
+ if len(arguments) > 1:
+ self._logger.log("{} contains ignored parameter(s) below {}".format(configSet._lastHandledFile, arguments[0]))
+ path = arguments[0]
+ name = self.fullPathToName(path)
+ self._logger.log('saving {:s} into {:s}'.format(path, name), 3)
+ self.saveDirectoryByZip(name, path, self._targetDir, None, interval)
+
    def jobSaveMysql(self, argv):
        '''Dumps a mysql database into a gzipped sql file per configuration entry.
        @param argv: contains a list of configuration patterns,
            e.g. ['@%etc%/webapps.d/*.conf:db:user:password:sql.file']
        '''
        self._logger.log('jobSaveMysql ' + ' '.join(argv), 2)
        dbTool = appl.DbTool.DbTool(self._globalOptions)
        for pattern in argv:
            # only '@'-prefixed entries (configuration patterns) are processed
            if pattern.startswith('@'):
                pattern = self.replaceMacros(pattern[1:])
                configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
                for arguments in configSet.nextParameters():
                    if len(arguments) != 4:
                        self._logger.error('jobSaveMysql: configuration pattern does not contain 4 variables (db,user,pw,sqlfile): : {} file: {}'
                            .format(pattern, configSet._lastHandledFile))
                        break
                    else:
                        # configuration filename without the '.conf' suffix
                        domain = os.path.basename(configSet._lastHandledFile)[0:-5]
                        self._logger.log('saving db {:s}'.format(arguments[0]), 3)
                        # self.saveMysql(arguments[3], arguments[0], arguments[1], arguments[2], self._targetDir)
                        dbTool.exportDatabase(arguments[0], arguments[1], arguments[2], self._targetDir + os.sep + domain + '.sql.gz')
+
    def jobSynchronize(self, argv):
        '''Synchronizes a directory from another.
        @param argv: contains configuration patterns and/or <source> <target> pairs,
            e.g. ['@%etc%/dirs.d/rsync_*.conf:source:target:deletetoo'];
            a preceding '--delete' (or '--delete=false') switches the delete mode
            for the following entry
        '''
        self._logger.log('jobSynchronize ' + ' '.join(argv), 2)
        ix = 0
        deleteToo = False
        while ix < len(argv):
            pattern = argv[ix]
            ix += 1
            if pattern.startswith('--delete'):
                if ix >= len(argv):
                    # the option has no following pattern: nothing more to do
                    break
                deleteToo = True
                # '--delete=f...' (e.g. --delete=false) switches deletion off again
                pattern = pattern[8:].lower()
                if pattern.startswith('=f'):
                    deleteToo = False
                pattern = argv[ix]
                ix += 1
            if pattern.startswith('@'):
                pattern = self.replaceMacros(pattern[1:])
                configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
                for arguments in configSet.nextParameters():
                    if len(arguments) < 2:
                        self._logger.error('jobSynchronize: configuration pattern does not contain 2 variables (source,target[,deleteToo]): : {} file: {}'
                            .format(pattern, configSet._lastHandledFile))
                        break
                    else:
                        deleteToo2 = deleteToo
                        # a third configuration variable overrides the delete mode ('T'/'t' = true)
                        if (len(arguments) > 2):
                            deleteToo2 = arguments[2].startswith('T') or arguments[2].startswith('t')
                        source = self.replaceMacros(arguments[0])
                        target = self.replaceMacros(arguments[1])
                        self.ensureDirectory(target)
                        self.synchronize(source, target, deleteToo2)
            else:
                # plain pair: <source> <target>
                source = self.replaceMacros(pattern)
                if ix >= len(argv):
                    self._logger.error('jobSynchronize: missing target to source ' + pattern)
                else:
                    target = self.replaceMacros(argv[ix])
                    ix += 1
                    self.ensureDirectory(target)
                    self.synchronize(source, target, deleteToo)
+
+ def jobSaveWebApps(self, argv):
+ '''for all webapps: dump the database into the directory <base>/db
+ @param argv: contains a list of configuration patterns,
+ e.g.['@%etc%/webapps.d/*.conf:db:user:password:sql.file:directory']
+ '''
+ self._logger.log('jobSaveWebapps ' + ' '.join(argv), 2)
+ for pattern in argv:
+ if pattern.startswith('@'):
+ pattern = self.replaceMacros(pattern[1:])
+ configSet = appl.BackupBase.ConfigurationSet(pattern, self._logger)
+ for arguments in configSet.nextParameters():
+ if len(arguments) != 4:
+ self._logger.error('jobSaveMysql: configuration pattern does not contain 4 variables (db,user,pw,directory): : {} file: {}'
+ .format(pattern, configSet._lastHandledFile))
+ break
+ else:
+ # config file without '.conf':
+ domain = os.path.basename(configSet._lastHandledFile)[0:-5]
+ db = arguments[0]
+ user = arguments[1]
+ code = arguments[2]
+ target = arguments[3]
+ if not os.path.isdir(target):
+ self._logger.error('missing directory {} for {}'.format(target, domain))
+ else:
+ self._logger.log('saving db {:s}'.format(arguments[0]), 3)
+ tool = appl.DbTool.DbTool(self._globalOptions)
+ target += os.sep + 'db'
+ self.ensureDirectory(target)
+ tool.exportDatabase(db, user, code, target + os.sep + domain + '.sql.gz')
+
+ def sendStatusEmail(self, subject, body):
+ '''Sends an email.
+ @param subject the email's subject
+ @param body the emails's content
+ '''
+ recipient = self._configuration.getString('admin.email')
+ sender = self._configuration.getString('smtp.sender'),
+ host = self._configuration.getString('smtp.host')
+ port = self._configuration.getString('smtp.port')
+ user = self._configuration.getString('smtp.user')
+ code = self._configuration.getString('smtp.code')
+ value = self._configuration.getString('smtp.tls')
+ withTls = value != None and value.lower().startswith('t')
+ if not (recipient and sender and host and port and user and code):
+ self._logger.error('email configuration is wrong')
+ else:
+ net.EMail.sendStatusEmail(recipient, subject, body, sender, host, port, user, code, withTls, self._logger)
+
+ def snapshot(self, argv):
+ '''Makes a snapshot.
+ @param argv: program arguments, e.g. ['dayly', '/media/cloud/mycloud/data', '--create-if-missing']
+ '''
+ (argv, opts) = self.splitArgsAndOpts(argv)
+ if len(argv) < 2:
+ self.usage('missing arguments')
+ else:
+ mode = argv[0]
+ dataDir = argv[1]
+ baseDir = os.path.dirname(dataDir)
+ snapshotBase = baseDir + os.sep + mode
+ argv = argv[2:]
+ createIfMissing = False
+ if len(opts) > 0:
+ for opt in opts:
+ if opt == '--create-if-missing':
+ createIfMissing = True
+ else:
+ self.usage('unknown option: ' + opt)
+ if mode not in ['dayly', 'weekly', 'monthly']:
+ self.usage('invalid mode {}: expected: dayly or weekly or monthly'.format(mode))
+ elif not os.path.isdir(dataDir):
+ self.usage('not a data directory: ' + dataDir)
+ elif baseDir == '':
+ self.usage('missing the parent directory: ' + dataDir)
+ elif not base.FileHelper.distinctPaths(dataDir, snapshotBase):
+ self.usage('nested directories: {} [] / {} [{}]'.format(dataDir, os.path.realpath(dataDir),
+ snapshotBase, os.path.realpath(snapshotBase)))
+ else:
+ if not os.path.isdir(snapshotBase):
+ if not createIfMissing:
+ self.usage('not a snapshot directory: ' + snapshotBase)
+ else:
+ self.ensureDirectory(snapshotBase)
+ tool = appl.DirTool.DirTool(self._globalOptions)
+ snapshotDir = baseDir + self.relativeBackupPath(mode)
+ if os.path.exists(snapshotDir):
+ shutil.rmtree(snapshotDir, False)
+ if os.path.exists(snapshotDir):
+ os.rename(snapshotDir, snapshotDir + '.{:.0}'.format(time.time()))
+ tool.snapshot([dataDir, snapshotDir])
+
+ def smartInfo(self):
+ '''Assembles the SMART infos about some disk devices.
+ @return a list of lines describing the SMART info
+ '''
+ devices = self._configuration.getString('smart.devices').split(',')
+ output = ['= SMART info']
+ for device in devices:
+ output.append('== ' + device)
+ lines = self._processHelper.executeInputOutput(['/usr/sbin/smartctl', '-a', device])
+ output += lines
+ return output
+
+ def systemInfo(self, target):
+ '''Assembles interesting informations about the current linux system.
+ Manually installed packets
+ disk devices
+ mounts
+ @param target: the target directory
+ '''
+ start = time.time()
+ # zcat /var/log/apt/history.log.*.gz | cat - /var/log/apt/history.log | grep -Po '^Commandline: apt-get install (?!.*--reinstall)\K.*' > $fnManuallyInstalled
+ path = '/var/log/apt'
+ nodes = os.listdir(path)
+ packets = []
+ for node in nodes:
+ if node.startswith('history.log') and node.endswith('.gz'):
+ full = path + os.sep + node
+ with gzip.open(full, 'r') as fp:
+ for line in fp:
+ if line.startswith(b'Commandline: apt-get install'):
+ packets += line[29:].decode().strip().replace(' ', ' ').replace(' ', ' ').split(' ')
+ with open('/var/log/apt/history.log', 'r') as fp:
+ for line in fp:
+ if line.startswith('Commandline: apt-get install'):
+ packets += line[29:].strip().replace(' ', ' ').replace(' ', ' ').split(' ')
+
+ mounts = []
+ with open('/proc/mounts', 'r') as fp:
+ for line in fp:
+ parts = line.split(' ')
+ if parts[0].find('/') >= 0:
+ mounts.append(line)
+ lsblkInfo = subprocess.check_output('/bin/lsblk')
+ fdiskInfo = subprocess.check_output(['/sbin/fdisk', '-l'])
+ networkInfo = subprocess.check_output(['/sbin/ip', 'addr'])
+ routeInfo = subprocess.check_output(['/sbin/ip', 'route', 'show'])
+ with open('/proc/diskstats', 'r') as fp:
+ diskStats = fp.read()
+ lvmInstalled = self._beingRoot and os.path.exists('/sbin/vgdisplay')
+ if lvmInstalled and self._beingRoot:
+ pvInfoShort = subprocess.check_output('/sbin/pvs')
+ pvInfo = subprocess.check_output('/sbin/pvdisplay')
+ vgInfoShort = subprocess.check_output('/sbin/vgs')
+ vgInfo = subprocess.check_output('/sbin/vgdisplay')
+ lvInfoShort = subprocess.check_output('/sbin/lvs')
+ lvInfo = subprocess.check_output('/sbin/lvdisplay')
+ fnOutput = target + os.sep + 'system.info.gz'
+ if os.geteuid() == 0:
+ btrfsInfo = self.btrfsInfo()
+ smartInfo = self.smartInfo()
+ with gzip.open(fnOutput, 'wb') as fp:
+ fp.write(b'=== manually installed packets:\n')
+ for packet in packets:
+ if packet != '':
+ fp.write('apt-get install -y {:s}\n'.format(packet).encode())
+ fp.write(b'\n=== lsblk:\n' + lsblkInfo)
+ fp.write(b'\n=== fdisk:\n' + fdiskInfo)
+ fp.write(b'\n=== mounts:\n' + ''.join(mounts).encode())
+ fp.write(b'\n=== diskstats:\n' + diskStats.encode())
+ if os.path.exists('/proc/mdstat'):
+ with open('/proc/mdstat', 'r') as fp2:
+ mdstat = fp2.read()
+ fp.write(b'\n=== mdstat:\n' + mdstat.encode())
+ if lvmInstalled and self._beingRoot:
+ fp.write(b'\n=== pvs:\n' + pvInfoShort)
+ fp.write(b'\n=== vgs:\n' + vgInfoShort)
+ fp.write(b'\n=== lvs:\n' + lvInfoShort)
+ fp.write(b'\n=== pvdisplay:\n' + pvInfo)
+ fp.write(b'\n=== vgdisplay:\n' + vgInfo)
+ fp.write(b'\n=== lvdisplay:\n' + lvInfo)
+ if btrfsInfo != '':
+ fp.write(btrfsInfo.encode('utf-8'))
+ fp.write(b'\n=== ip addr:\n' + networkInfo)
+ fp.write(b'\n=== ip route:\n' + routeInfo)
+ self.logFile(fnOutput, '%f: %s %t created in %r', start)
+
+ def testCompleteness(self, target):
+ '''Tests whether the backup is complete.
+ '''
+ target = self._targetDir
+ dbs = self.allDatabases()
+ for db in dbs:
+ name = db
+ config = self.findConfig('db', db, 'webapps.d')
+ if config != None:
+ path = config.getString('directory')
+ if path != None:
+ name = os.path.basename(path) + '_' + db
+ outputFile = target + os.sep + name + '.sql.gz'
+ if not os.path.exists(outputFile):
+ self._logger.error('missing SQL save: ' + outputFile)
+ else:
+ self._logger.log('SQL file found: ' + outputFile, 3)
+
    def testReady(self, mode):
        '''Tests whether a backup run has been finished normally.
        If not an error is logged and a status email is sent.
        @param mode: 'dayly', 'weekly', 'monthly'
        '''
        self._logger.log('Testing whether the last backup mode [{}] has been finished normally'.format(mode), 1)
        self._baseTargetDir = self._configuration.getString('target.path')
        if self.ensureDirectory(self._baseTargetDir) != None:
            [lockFile, readyFile] = self.createLockAndReadyFile(mode)
            if os.path.exists(readyFile):
                self._logger.log('ready file found: {}'.format(readyFile), 1)
            else:
                if not os.path.exists(lockFile):
                    error = 'backup [{}] not ready but no lockfile exists'.format(mode)
                else:
                    # the lock file's mtime is the start time of the (unfinished) backup
                    date = datetime.datetime.fromtimestamp(os.stat(lockFile).st_mtime)
                    error = 'backup [{}] not ready. Start time: {}'.format(mode, date.strftime('%Y.%m.%d %H:%M:%S'))
                self._logger.error(error)
                subject = 'Backup {:s} not ready on {:s}'.format(mode, self._configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
                self.sendStatusEmail(subject, error)
+
def usage():
    '''Returns an info about usage.
    @return: the usage message of the backuptool; the placeholders GLOBAL_OPTS
        and GLOBAL_MODES are expanded elsewhere (BaseTool)
    '''
    return """usage: backuptool [<global_opts>] <mode>
 Dayly/weekly/monthly backup including system info and mysql backup
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 completeness
 tests whether the last backup task was successfully completed
 dayly
 dayly actions
 monthly
 monthly actions
 snapshot mode data-dir [--create-if-missing]
 makes a snapshot for the given mode and directory
 mode: dayly or weekly or monthly
 data-dir: the directory laying in the base directory containing 'dayly' ...
 create-if-missing:the mode specific base (data-dir../dayly ...) will be created if needed
 test-email
 Sends an email to test email configuration
 test-ready <mode>
 Tests whether the last backup with <mode> has been finished successfully
 webapps <password-backup-user> <pattern-nginx-sites-enabled1> ...
 Creates a configuration from nginx configuration
 weekly
 weekly actions
Examples:
backuptool -q backup dayly
backuptool -r -v3 backup weekly
 """
+
def main(argv):
    '''The main routine: dispatches the subcommands of the backuptool.
    @param argv: the program arguments, e.g. ['/usr/local/bin/backuptool', '-v3', 'backup', 'dayly']
    '''
    appInfo = base.BaseTool.ApplicationInfo('backup', 'appl/BackupTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = BackupTool(options)
    # rc: None or an error message to be reported via usage()
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd == None:
        # already handled as a standard command
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'backup':
        # default mode: dayly
        if len(argv) == 0:
            mode = 'dayly'
        else:
            mode = argv[0]
        rc = tool.doBackup(mode)
    elif cmd == 'webapps':
        if len(argv) < 2:
            tool.usage('missing password and/or pattern(s)')
        else:
            rc = tool.createWebapp(argv[0], argv[1:])
    elif cmd == 'test-email':
        tool.sendStatusEmail('Test email sending', 'Seems to work: email sending');
    elif cmd == 'test-ready':
        if len(argv) < 1:
            tool.usage('missing <mode> for command test-ready')
        else:
            mode = argv[0]
            if mode != 'dayly' and mode != 'weekly' and mode != 'monthly':
                tool.usage('unknown mode for command test-ready: ' + mode)
            else:
                tool.testReady(mode)
    elif cmd == 'completeness':
        # NOTE(review): called without the "target" parameter — verify that
        # testCompleteness() accepts a call without arguments
        tool.testCompleteness()
    elif cmd == 'snapshot':
        tool.snapshot(argv)
    else:
        tool.usage('unknown command: ' + cmd)
    if rc != None:
        tool.usage(rc)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+import time
+import sys
+import os.path
+import math
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.CryptoEngine
+
class Statistics:
    '''Collects the benchmark counters: operation counts, byte totals and
    accumulated wall-clock times for reading and writing.
    '''
    def __init__(self):
        # mirrors BenchTool._readOnly; False by default
        self._readOnly = False
        # all numeric counters start at 0
        for attr in ('_readCount', '_bytesRead', '_writeCount',
                     '_bytesWritten', '_timeRead', '_timeWrite'):
            setattr(self, attr, 0)
+
class BenchTool (base.BaseTool.BaseTool):
    '''Benchmark tool: stresses the file system (random reads/writes) and/or the CPU.'''

    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global program options (verbosity, configuration...)
        '''
        self._globalOptions = globalOptions
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'benchtool.conf')
        # directories / files collected by findFiles()
        self._dirs = []
        self._files = []
        # files created by the benchmark (bounded by _maxWritten)
        self._written = []
        # a statistic line is printed every _factorPrint-th loop
        self._factorPrint = 10000
        self._random = base.CryptoEngine.CryptoEngine(self._logger)
        self._random.setSeedRandomly()
        # ratio of read operations; the rest are writes
        self._ratioRead = 0.80
        self._maxWritten = 1000
        self._statistics = Statistics()
        # block size for reading/writing [bytes]
        self._bufferLength = 8192
        self._buffer = self._random.nextString(self._bufferLength, 'ascii95')
        # in Blocks
        self._maxFileLength = 4092
        # ratio of "small" written files (at most one block)
        self._ratioSmallFiles = 0.8
        # sequence number for buildName()
        self._fileNo = 0
        self._maxLoops = 0x7fffffffffff
        # NOTE(review): duplicate assignment — _maxWritten is already set above
        self._maxWritten = 1000
        self._readOnly = False

    def benchFiles(self):
        '''Runs the benchmark loop: randomly reads and (unless read-only) writes files.
        Prints the statistics every _factorPrint-th loop and at the end.
        '''
        loopNo = 0
        while loopNo < self._maxLoops:
            loopNo += 1
            if self._readOnly:
                self.benchRead()
            else:
                # read with probability self._ratioRead, otherwise write
                current = self._random.nextInt(100000)
                if current < 100000*self._ratioRead:
                    self.benchRead()
                else:
                    self.benchWrite()
            if loopNo % self._factorPrint == 0:
                self.fileStatistics()
        self.fileStatistics()

    def benchRead(self):
        '''Reads a randomly choosen file from the tree in blocks of _bufferLength
        bytes and updates the statistics.
        '''
        self._statistics._readCount += 1
        ix = self._random.nextInt(len(self._files))
        start = time.time()
        with open(self._files[ix], 'rb') as fp:
            again = True
            while again:
                buffer = fp.read(self._bufferLength)
                if len(buffer) == 0:
                    again = False
                else:
                    self._statistics._bytesRead += len(buffer)
        self._statistics._timeRead += time.time() - start

    def benchWrite(self):
        '''Writes a randomly choosen file and updates the statistics.
        If the maximum of written files is reached a randomly choosen one is deleted.
        Note: the docstring formerly claimed "Reads..." (copy&paste error).
        '''
        self._statistics._writeCount += 1
        if len(self._written) >= self._maxWritten:
            ix = self._random.nextInt(len(self._written))
            os.unlink(self._written[ix])
            del self._written[ix]
        # NOTE(review): this overwrites an existing file of the scanned tree;
        # buildName() is never used and self._written is never filled — confirm intention
        ix = self._random.nextInt(len(self._files))
        start = time.time()
        with open(self._files[ix], 'w') as fp:
            if self._random.nextInt(100000) < 100000 * self._ratioSmallFiles:
                # small files: at most one block
                uBound = self._random.nextInt(self._bufferLength, 16)
                fp.write(self._buffer[0:uBound])
                self._statistics._bytesWritten += uBound
            else:
                # large files: up to _maxFileLength blocks
                for ix in range(self._random.nextInt(self._maxFileLength, 4)):
                    fp.write(self._buffer)
                    self._statistics._bytesWritten += self._bufferLength
        self._statistics._timeWrite += time.time() - start

    def buildName(self):
        '''Builds a unique filename inside the given file tree.
        @return: a filename built from a random directory and a sequence number
        '''
        self._fileNo += 1
        path = self._dirs[self._random.nextInt(len(self._dirs))]
        name = '{}/x!%{}.bench'.format(path, self._fileNo)
        return name

    def calculate(self, argv):
        '''Calculate benchmark: keeps the CPU busy with floating point math.
        @param argv: the program arguments, e.g. ['--max-loops=1000']
        '''
        def div(a,b):
            # "safe" division: clamps tiny divisors to 1E-20
            # NOTE(review): rc is undefined if an exception other than a tiny b occurs
            try:
                if abs(b) < 1E-20:
                    b = 1E-20
                rc = a / b
            except ZeroDivisionError as exc:
                print(str(exc))
            return rc

        args, options = self.splitArgsAndOpts(argv)
        for opt in options:
            if opt.startswith('--max-loops='):
                self._maxLoops = self.integerOption(opt)
            elif opt.startswith('--max-written='):
                self._maxWritten = self.integerOption(opt)
            elif opt.startswith('--factor-print='):
                self._factorPrint = self.integerOption(opt)
            else:
                self.usage('unknown option: ' + opt)
        loops = 0
        x = 1.03
        while loops < self._maxLoops:
            loops += 1
            # arbitrary floating point stress; x is clamped to +-1E10 each round
            for ix in range(10000):
                x = x * ix + math.sqrt(abs(x)*3)
                x -= abs(math.pow(0.1 + abs(x), 0.33))
                x *= math.sin(x+div(loops, div(loops, loops * 5.3333))) + math.log(1+abs(x*x))
                a = 0.03 + abs(min(20 - div(x, (abs(x) + 10)), abs(div(x, (x + math.cos(div(x + 99, x + 3)))))))
                x -= math.exp(a)
                x = max(min(x, 1E10), -1E10) + div(ix + 77, -ix -37) + self._random.nextInt(100000)/100000
            if loops % self._factorPrint == 0:
                self._logger.log('Calculate: {} loops: {:.3f} k'.format(x, loops / 1000.0))
            # short break: keeps the system responsive
            time.sleep(0.001)
        self._result = x

    def files(self, argv):
        '''File benchmark: reads and writes files below a given directory.
        @param argv: the program arguments, e.g. ['/home', '--read-only']
        '''
        args, options = self.splitArgsAndOpts(argv)
        for opt in options:
            if opt.startswith('--max-loops='):
                self._maxLoops = self.integerOption(opt)
            elif opt.startswith('--max-written='):
                self._maxWritten = self.integerOption(opt)
            elif opt.startswith('--factor-print='):
                self._factorPrint = self.integerOption(opt)
            else:
                # NOTE(review): the documented option --read-only is not handled here,
                # so it ends up in this error branch — verify
                self.usage('unknown option: ' + opt)
        # NOTE(review): argv (not args) is inspected below, so options are counted too
        if len(argv) < 1:
            self.usage('missing directory')
        else:
            baseDir = argv[0]
            if not os.path.isdir(baseDir):
                self.usage('not a directory: ' + baseDir)
            else:
                self.findFiles(baseDir)
                self.benchFiles()

    def fileStatistics(self):
        '''Logs the current read/write statistics (file counts, MBytes and rates).'''
        readMb = self._statistics._bytesRead / 1E6
        writeMb = self._statistics._bytesWritten / 1E6
        # + 0.000001: avoids a division by zero if no time has been measured yet
        self._logger.log('read : files: {:4d} MBytes: {:10.3f} rate: {:10.3f} MByte/s'.format(
            self._statistics._readCount, readMb, readMb / (self._statistics._timeRead + 0.000001)))
        self._logger.log('write: files: {:4d} MBytes: {:10.3f} rate: {:10.3f} MByte/s'.format(
            self._statistics._writeCount, writeMb, writeMb / (self._statistics._timeWrite + 0.000001)))

    def findFiles(self, baseDir):
        '''Builds the list of the files in the given directory tree (recursive).
        @param baseDir: the base directory of the file tree
        '''
        for node in os.listdir(baseDir):
            full = baseDir + os.sep + node
            if os.path.isdir(full):
                self._dirs.append(full)
                self.findFiles(full)
            else:
                self._files.append(full)
                # progress message from time to time
                if len(self._files) % self._factorPrint == 0:
                    self._logger.log('files/dirs found: {}/{}'.format(len(self._files), len(self._dirs)), 2)

    def example(self):
        '''Writes an example configuration file.'''
        example = '''# benchtool example configuration
'''
        self.storeExample(example)
+
def usage():
    '''Returns an info about usage.
    @return: the usage message of the benchtool; the placeholders GLOBAL_OPTS
        and GLOBAL_MODES are expanded elsewhere (BaseTool)
    '''
    return """usage: benchtool [<global_opts>] <mode> <args>
 Benchmarks and/or burn in.
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 files <baseDir> [--read-only] [--max-loops=<loops>] [--max-written=<written>] [--factor-print=<factor>]
 Read and write randomly files.
 calculate
 Calculates some things

Example:
benchtool -v0 files /media/pool
benchtool files /home --read-only
"""
+
def main(argv):
    '''The main routine: dispatches the subcommands of the benchtool.
    @param argv: the program arguments, e.g. ['/usr/local/bin/dirtool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('benchtool', 'appl/BenchTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = BenchTool(options)
    # standard commands (install, example...) are handled by the base class
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd == None:
        # already handled as a standard command
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'files':
        tool.files(argv)
    elif cmd == 'calculate':
        tool.calculate(argv)
    else:
        tool.usage("unknown command: " + cmd)
    base.BaseTool.setLatestTool(tool)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+DbTool: Scalable Vector Graphics tool
+
+@author: hm
+'''
+import os.path
+import sys
+import re
+import time
+import tempfile
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.JavaConfig
+import base.ProcessHelper
+
# module-level exchange slot for results produced by DbTool commands
DbToolResult = None

def setResult(data):
    '''Stores data in the module-level result slot.
    @param data: the value to store
    '''
    global DbToolResult
    DbToolResult = data

def result():
    '''Returns the value previously stored with setResult().
    @return: the stored value, or None if nothing has been stored yet
    '''
    return DbToolResult
+
def removeFromArrayIfExists(anArray, item):
    '''Removes all occurrences of item from anArray (in place).
    @param anArray: the list to modify
    @param item: the value to remove (compared with ==)
    '''
    # slice assignment keeps the list object identity (callers see the change)
    anArray[:] = [entry for entry in anArray if entry != item]
+
+class DbTool (base.BaseTool.BaseTool):
+ '''Translates from one wiki syntax into another.
+ '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'dbtool.conf')
        # suppress the well-known mysqldump warning about passwords on the command line
        self._logger.setErrorFilter(re.compile(r'mysqldump:.*Warning.*Using a password'), True)
        self._processTool = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger)
        # True: delete user/db without confirmation: use only in unittests!
        self._forceDeleting = False
+
+ def archiveWebapp(self, argv):
+ '''Deletes a web application:
+ Saving of the database, creation of a tar archive with directory and sql script.
+ Removing database and directory.
+ @param argv: program arguments, e.g. ['huber.de', '/media/backup/archive']
+ '''
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ else:
+ argv, options = self.splitArgsAndOpts(argv)
+ if len(options) != 0:
+ usage('unknown option(s): ' ' + '.join(options))
+ (admin, adminCode) = defaultAdmin(argv, 2, self)
+ domain = argv[0]
+ dirArchive = argv[1]
+ fnConfig = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf')
+ fnArchive = dirArchive + os.sep + domain + '.tgz'
+ if not os.path.isdir(dirArchive):
+ self.usage('missing archive dir: ' + dirArchive)
+ elif os.path.exists(fnArchive):
+ self.usage('archive already exists: ' + fnArchive)
+ elif not os.path.exists(fnConfig):
+ self.usage('missing ' + fnConfig)
+ else:
+ config = base.JavaConfig.JavaConfig(fnConfig, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ dirWebapp = config.getString('directory')
+ if not os.path.isdir(dirWebapp):
+ self.usage('missing directory of {}: {}'.format(domain, dirWebapp))
+ else:
+ dirDb = dirWebapp + os.sep + 'db'
+ self.ensureDirectory(dirDb)
+ fnSql = dirDb + os.sep + domain + '.sql.gz'
+ self.exportDatabase(db, admin, adminCode, fnSql)
+ size = os.path.getsize(fnSql)
+ fnNginx = self.getTarget('/etc/nginx/sites-available', domain)
+ base.FileHelper.copyIfExists(fnNginx, dirDb + os.sep + domain + '.nginx', None, self._logger)
+ base.FileHelper.copyIfExists(fnConfig, dirDb + os.sep + domain + '.conf', None, self._logger)
+ self._logger.log('{} exported DB {}'.format(base.StringUtils.formatSize(size), fnSql))
+ nodes = os.listdir(dirDb)
+ self._logger.log('content of {}:\n{}'.format(dirDb, ' '.join(nodes)))
+ self._processHelper.execute(['/usr/bin/du', '-sh', dirWebapp], True)
+ self.saveDirectoryByTar(domain, dirWebapp, dirArchive, None)
+ size = os.path.getsize(fnArchive)
+ self._logger.log('{}: archive {}'.format(base.StringUtils.formatSize(size), fnArchive))
+ fnScript = base.FileHelper.tempFile('Remove.' + domain)
+ base.StringUtils.toFile(fnScript, '''#! /bin/bash
+dbtool -v3 delete-db-and-user {} {} --no-saving $1
+rm -Rf {} {} {}
+rm -f /etc/ssl/certs/{}.pem /etc/ssl/private/{}.pem /etc/nginx/sites-enabled/{}
+'''.format(db, user, dirWebapp, fnNginx, fnConfig, domain, domain, domain))
+ self._logger.log('ToDo: bash {}'.format(fnScript))
+
+ def buildArgvMysql(self, db, user, passwd):
+ '''Creates the argument vector to start the command mysql
+ @param db: the db to handle
+ @param user: the user for login
+ @param passwd: the password of user: None or '-': no password given
+ @return: the program arguments
+ '''
+ argv = ['/usr/bin/mysql', '-u', user]
+ if passwd != None and len(passwd) > 0 and passwd != '-':
+ argv.append('-p' + passwd)
+ argv.append(db)
+ return argv;
+
+ def createAdmin(self, admin, adminPassword, readOnly, superuser='root', superuserPassword=None):
+ '''Creates an user able to process all databases
+ @param admin: new user
+ @param adminPassword: the password of user
+ @param readOnly: True: the new user may not change any database
+ @param superuser: a user which can write to mysql
+ @param superuserPassword: the password of user
+ '''
+ argv = self.buildArgvMysql('mysql', superuser, superuserPassword)
+ sql = '''GRANT {} ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}'{};
+flush privileges;'''.format('SELECT, SHOW VIEW' if readOnly else 'ALL', admin, adminPassword, '' if readOnly else ' WITH GRANT OPTION')
+ self._logger.log('creating admin {}...'.format(admin), 1)
+ self._logger.log(sql, 4)
+ self._processTool.executeInput(argv, self._verboseLevel >= 2, sql)
+
+ # create-and-import-all-webapps <dir-backup> <admin> <admin-passwd>
+ def createAndImportAllWebApps(self, dirBackup, adminUser, adminPasswd):
+ '''Creates for all webapps the DB and the user stored in the configuration and imports the SQL file.
+ @param backupDir: the directory containing the SQL files for import
+ @param adminUser: a user which can read mysql
+ @param adminPasswd: the password of adminUser
+ '''
+ path = os.path.dirname(self.getTarget(self._configDir + '/webapps.d', 'dummy'))
+ files = os.listdir(path)
+ count = 0
+ for filename in files:
+ if filename.endswith('.conf'):
+ count += 1
+ config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ passwd = config.getString('password')
+ domain = filename[0:-5]
+ sqlFileNode = config.getString('sql.file', 'sql.file_is_not_defined')
+ sqlFile = dirBackup + os.sep + sqlFileNode + '.sql.gz'
+ if db == None or user == None or passwd == None:
+ self._logger.error('missing auth data in ' + filename)
+ else:
+ self.createDbAndUser(db, user, passwd, adminUser, adminPasswd)
+ self.importWebApp(domain, sqlFile)
+ self._logger.log("= {} file(s) scanned".format(count), 2)
+
+ def createAndImportWebApp(self, domain, backupDir, adminUser, adminPasswd):
+ '''Creates for all webapps the DB and the user stored in the configuration.
+ @param adminUser: a user which can read mysql
+ @param adminPasswd: the password of adminUser
+ '''
+ path = os.path.dirname(self.getTarget(self._configDir + '/webapps.d', 'dummy'))
+ filename = path + os.sep + domain + '.conf'
+ config = base.JavaConfig.JavaConfig(filename, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ passwd = config.getString('password')
+ sqlFileNode = config.getString('sql.file', 'undefined_sql.file') + '.sql'
+ sqlFile = backupDir + os.sep + sqlFileNode + '.gz'
+ if not os.path.isdir(backupDir):
+ self.usage('not a directory: ' + backupDir)
+ elif not os.path.exists(sqlFile):
+ self.usage('missing SQL file: ' + sqlFile)
+ elif db == None or user == None or passwd == None:
+ self._logger.error('missing auth data in ' + filename)
+ else:
+ self.createDbAndUser(db, user, passwd, adminUser, adminPasswd)
+ self.importWebApp(domain, sqlFile)
+
+ def createBackupAdmin(self, admin, adminPasswd):
+ '''Creates the backup admin stored in backup.conf.
+ @param admin: a user which can write to mysql
+ @param adminPasswd: the password of user
+ '''
+ config = base.JavaConfig.JavaConfig('/etc/pyrshell/backup.conf', self._logger)
+ user = config.getString('mysql.user')
+ passw = config.getString('mysql.code')
+ if user != None and passw != None:
+ self.createBackupUser(user, passw, admin, adminPasswd)
+
+ # create-db-and-user <db> <user> <passwd> <admin> <admin-pw>
+ def createDbAndUser(self, db, user, passwd, adminUser, adminPasswd):
+ '''Creates an user able to read all databases (not write)
+ @param db: name of the new database
+ @param user: new user
+ @param passwd: the password of user
+ @param adminUser: a user which can write to mysql
+ @param adminPasswd: the password of adminUser
+ '''
+ argv = self.buildArgvMysql('mysql', adminUser, adminPasswd)
+ sql = '''GRANT ALL ON {}.* TO '{}'@'localhost' IDENTIFIED BY '{}' WITH GRANT OPTION;
+flush privileges;
+create database if not exists {};'''.format(db, user, passwd, db)
+ self._logger.log(' '.join(argv) + '\n' + sql)
+ self._logger.log('creating db {} and user {}...'.format(db, user), 1)
+ self._processTool .executeInput(argv, self._verboseLevel >= 2, sql)
+
+ def createBackupUser(self, backupUser, backupPassword, user='root', userPassword=None):
+ '''Creates an user able to read all databases (not write)
+ @param backupUser: new user
+ @param backupPassword: the password of user
+ @param user: a user which can write to mysql
+ @param userPassword: the password of user
+ '''
+ argv = self.buildArgvMysql('mysql', user, userPassword)
+ sql = '''GRANT USAGE ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}';
+GRANT SELECT, LOCK TABLES ON mysql.* TO '{}'@'localhost';
+GRANT SELECT, LOCK TABLES, SHOW VIEW, EVENT, EXECUTE, TRIGGER ON *.* TO '{}'@'localhost' IDENTIFIED BY '{}';
+flush privileges;'''.format(backupUser, backupPassword, backupUser, backupUser, backupPassword)
+ self._logger.log('creating readonly user {}...'.format(backupUser), 1)
+ self._processTool.executeInput(argv, self._verboseLevel >= 2, sql)
+
+ def createWebAppConfiguration(self, argv):
+ '''Creates a configuration file for a web application.
+ @param argv: the program arguments, e.g. ['example.com', '/home/example.com', 'dbexample', 'uexample', 'TopSecret']
+ '''
+ if len(argv) < 5:
+ self.usage('too few arguments')
+ else:
+ domain = argv[0]
+ directory = argv[1]
+ db = argv[2]
+ user = argv[3]
+ password = argv[4]
+ if re.match(r'^[\w.-]+$', domain) == None:
+ self.usage('illegal characters in domain: ' + domain)
+ if re.match(r'^[\w.-]+$', db) == None:
+ self.usage('illegal characters in db: ' + db)
+ if not os.path.isdir(directory):
+ self.usage('directory does not exists: ' + directory)
+ fn = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf')
+ base.StringUtils.toFile(fn, '''db={}
+user={}
+password={}
+sql.file={}
+directory={}
+excluded=
+'''.format(db, user, password, domain + '_' + db, directory))
+
+ def deleteDb(self, db, admin, passwd, doConfirm = True, doSave = True):
+ '''Deletes the database.
+ @param db: the db to delete
+ @param admin: a user with enough rights to delete the user
+ @param passwd: the password of the admin
+ @param doConfirm: True: the deletion must be confirmed (by typing the database name)
+ @param doSave: True: the database is saved into the tempororary directory
+ '''
+ argv = self.buildArgvMysql('mysql', admin, passwd)
+ answer = db
+ if doConfirm and not self._forceDeleting and not base.BaseTool.unitTestIsActive():
+ print('Do you want to delete db {}? Please enter the db name again: '.format(db))
+ answer = input()
+ if answer != db:
+ self._logger.error('confirmation failed')
+ else:
+ self._logger.log('deleting database {}...'.format(db), 1)
+ sql = 'drop database `{}`;'.format(db)
+ self._processTool .executeInput(argv, self._verboseLevel >= 2, sql)
+
+ def deleteDbAndUser(self, argv):
+ '''Deletes the database.
+ @param argv: program arguments, e.g. ['wordpress', 'jonny', '--no-save', '--no-confirmation']
+ '''
+ if len(argv) < 1:
+ self.usage('too few arguments')
+ else:
+ argv, options = self.splitArgsAndOpts(argv)
+ doConfirmation = True
+ doSaving = True
+ for opt in options:
+ if opt == '--no-confirmation':
+ doConfirmation = False
+ elif opt == '--no-saving':
+ doSaving = False
+ else:
+ self.usage('unknown option: ' + opt)
+ (admin, code) = defaultAdmin(argv, 2, self)
+ db = argv[0]
+ user = argv[1] if len(argv) > 1 else '-'
+ if doSaving:
+ sqlFile = base.FileHelper.tempFile(db + '.sql.' + str(int(time.time())) + '.gz')
+ self._logger.log('saving {} ...'.format(sqlFile), 1)
+ self.exportDatabase(db, admin, code, sqlFile)
+ self.deleteDb(db, admin, code, doConfirmation, doSaving)
+ if user != '-':
+ self.deleteUser(user, admin, code, doConfirmation)
+
+ def deleteUser(self, user, admin, passwd, doConfirm = True):
+ '''Deletes the database user.
+ @param user: the user to delete
+ @param admin: a user with enough rights to delete the user
+ @param passwd: the password of the admin
+ @param doConfirm: True: the deletion must be confirmed (by typing the user name)
+ '''
+ argv = self.buildArgvMysql('mysql', admin, passwd)
+ answer = user
+ if doConfirm and not self._forceDeleting and not (user == 'testadmin' and base.BaseTool.unitTestIsActive()):
+ print('Do you want to delete db user {}? Please enter the username again: '.format(user))
+ answer = input()
+ if answer != user:
+ self._logger.error('confirmation failed')
+ else:
+ sql = '''delete from user where user='{}';
+flush privileges;'''.format(user)
+ self._logger.log('deleting database user {}...'.format(user), 1)
+ self._processTool .executeInput(argv, self._verboseLevel >= 2, sql)
+
+ def example(self):
+ '''Creates a example configuration.
+ '''
+ example = '''# dbtool example configuration
+log.file=/var/log/local/dbtool.log
+admin.user=admin
+admin.code=TopSecret
+ '''
+ self.storeExample(example)
+
+ def exportAllDatabases(self, user, code, target):
+ '''Exports a database.
+ @param db: the DB's name
+ @param user: the DB user
+ @param code: '' or the password
+ @param target: the target file
+ '''
+ # def findAllDbs(self, adminUser, adminPasswd, internalTo = False):
+ if user == None:
+ user = self._configuration.getString('admin.user')
+ code = self._configuration.getString('admin.code')
+ if user == None:
+ # configuration of backuptool:
+ user = self._configuration.getString('mysql.user')
+ code = self._configuration.getString('mysql.code')
+ if user == None:
+ self._logger.error('exportAllDatabases(): missing admin user')
+ else:
+ dbs = self.findAllDbs(user, code)
+ for db in dbs:
+ trg = target + os.sep + db + '.sql.gz'
+ self.exportDatabase(db, user, code, trg)
+
+ def exportDatabase(self, db, user, code, target):
+ '''Exports a database.
+ @param db: the DB's name
+ @param user: the DB user
+ @param code: '' or the password
+ @param target: the target file
+ '''
+ argv = ['/usr/bin/mysqldump', '--default-character-set=utf8mb4', '--single-transaction', '-u', user]
+ if code != '':
+ argv.append('-p' + code)
+ argv.append(db)
+ if target.endswith('.gz'):
+ self._processHelper.executeScript('''#! /bin/bash
+/usr/bin/mysqldump --default-character-set=utf8mb4 --single-transaction -u{} '-p{}' '{}' | gzip -c > {}
+'''.format(user, code, db, target))
+ else:
+ self._processHelper.executeScript('''#! /bin/bash
+/usr/bin/mysqldump --default-character-set=utf8mb4 --single-transaction -u{} '-p{}' '{}' > {}
+'''.format(user, code, db, target))
+
+ def exportWebApp(self, argv):
+ '''Exports a web application's database.
+ @param argv: program arguments, e.g. ['x01.interfacemaker.com', '/tmp/x01.sql.gz']
+ '''
+ domain = argv[0]
+ argv = argv[1:]
+ if len(argv) == 0:
+ target = tempfile.gettempdir() + os.sep + domain + '.sql'
+ else:
+ target = argv[0]
+ argv = argv[1:]
+ fnConfig = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf')
+ if not os.path.exists(fnConfig):
+ self.usage('missing ' + fnConfig)
+ else:
+ config = base.JavaConfig.JavaConfig(fnConfig, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ code = config.getString('password')
+ if db == None or user == None or code == None:
+ self.usage('incomplete data in ' + fnConfig)
+ else:
+ self.exportDatabase(db, user, code, target)
+
+ def filterRecords(self, argv):
+ '''Filters some records from a given table in a given SQL file into a given output SQL file.
+ @param argv: the program arguments, e.g. ['mysql.sql', 'user', 'users.sql', '--exclude=,root,']
+ '''
+ argv, options = self.splitArgsAndOpts(argv)
+ filterFunc = None
+ regExclude = None
+ for opt in options:
+ if opt.startswith('--exclude='):
+ regExclude = self.regExprCompile(opt[10:].encode('utf-8'), 'filterRecord', False)
+ else:
+ self.usage('unknown option: ' + opt)
+ if regExclude != None:
+ filterFunc = lambda record, excludeExpr: excludeExpr.search(record) == None
+ fnIn = argv[0]
+ table = argv[1]
+ fnOut = argv[2]
+ if not os.path.exists(fnIn):
+ self.usage('missing input file: ' +fnIn)
+ elif re.search(r'\W', table) != None:
+ self.usage('illegal char in table name: ' + table)
+ else:
+ self.filterRecordsInSqlFile(table, fnIn, fnOut, filterFunc, regExclude)
+
+ def filterRecordsInSqlFile(self, table, sqlInput, sqlOutput, filterFunc, paramFilter):
+ '''Parses a SQL file and searches for a given table. These tables will be written to another file:
+ @param table: name of the table to extract
+ @param sqlInput: name of the input file (created from mysqldump)
+ @param sqlOutput: name of the output file
+ @param filterFunc: none or a method to filter the record
+ if objectFilter != None filter() is a method of objectFilter
+ otherwise filter() is .filter is a function
+ signature: filter(record): bool. If it returns True the record is written
+ @param paramFilter: 2nd parameter of filterFunc()
+ '''
+ if not os.path.exists(sqlInput):
+ self.usage('missing input SQL file: ' + sqlInput)
+ else:
+ with open(sqlInput, 'rb') as fpInput, open(sqlOutput, 'wb') as fpOutput:
+ state = None
+ table1 = b' ' + table.encode('utf-8') + b' ';
+ table2 = b'`' + table.encode('utf-8') + b'`';
+ countRecords = 0
+ for line in fpInput:
+ ixStart = 0
+ if state == None:
+ if line.startswith(b'DROP TABLE IF EXISTS'):
+ state = 'searchTable'
+ else:
+ fpOutput.write(line)
+ elif state == 'searchTable' and (line.startswith(b'CREATE') or line.startswith(b'create')):
+ if line.find(table1) > 0 or line.find(table2) > 0:
+ state = 'foundTable'
+ firstInsert = True
+ else:
+ state = 'searchTable'
+ elif state == 'foundTable':
+ if line.startswith(b'INSERT') or line.startswith(b'insert'):
+ countValues = 0
+ if firstInsert:
+ fpOutput.write(b'LOCK TABLES `!` WRITE;\n'.replace(b'!', table.encode('utf-8')))
+ firstInsert = False
+ ixStart = line.find(b'VALUES')
+ if ixStart < 0:
+ ixStart = line.find(b'values')
+ if ixStart < 0:
+ self._logger.error('missing "VALUES":' + line.encode('utf-8')[0:80])
+ ixStart = 0
+ else:
+ ixStart += 6
+ if countValues == 0:
+ fpOutput.write(b'\n')
+ else:
+ fpOutput.write(b',\n')
+ fpOutput.write(line[0:ixStart])
+ state = 'inInserts'
+ elif line.startswith(b'UNLOCK') or line.startswith(b'unlock'):
+ state = 'end'
+ fpOutput.write(b'UNLOCK TABLES;\n')
+ if state == 'inInserts':
+ if line.startswith(b'UNLOCK') or line.startswith(b'unlock') or line.startswith(b'DROP') or line.startswith(b'drop') or line.startswith(b'CREATE') or line.startswith(b'create'):
+ state = 'end'
+ fpOutput.write(b';\nUNLOCK TABLES;\n')
+ else:
+ if ixStart == 0:
+ fpOutput.write(b',')
+ while True:
+ record = None
+ ix = line.find(b'),(', ixStart)
+ if ix >= 0:
+ record = line[ixStart:ix+1]
+ ixStart = ix + 2
+ else:
+ record = line[ixStart:].strip(b'\n\r;,')
+ ok = filterFunc(record, paramFilter)
+ if ok:
+ if countValues == 0:
+ fpOutput.write(b'\n')
+ else:
+ fpOutput.write(b',\n')
+ fpOutput.write(record)
+ countValues += 1
+ countRecords += 1
+ if ix < 0:
+ break
+ self._logger.log('found records: {}'.format(countRecords), 2)
+
+ # all-dbs <admin> <admin-passwd> [<internal-too>]
+ def findAllDbs(self, adminUser, adminPasswd, internalTo = False):
+ '''Logs the database names (without internal dbs like mysql)
+ @param adminUser: a user which can read mysql
+ @param adminPasswd: the password of adminUser
+ @param internalTo: False: internal databases (mysql, ...) are ignored
+ @return: a list of all database names
+ '''
+ argv = self.buildArgvMysql('mysql', adminUser, adminPasswd)
+ sql = '''show databases;'''
+ rc = self._processTool.executeInputOutput(argv, sql)
+ if len(rc) > 0 and rc[0] == 'Database':
+ rc = rc[1:]
+ removeFromArrayIfExists(rc, '')
+ if not internalTo:
+ removeFromArrayIfExists(rc, 'mysql')
+ removeFromArrayIfExists(rc, 'information_schema')
+ removeFromArrayIfExists(rc, 'performance_schema')
+ return rc
+
+ # all-dbs <admin> <admin-passwd> [<internal-too>]
+ def findAllUsers(self, adminUser, adminPasswd):
+ '''Logs the database names (without internal dbs like mysql)
+ @param adminUser: a user which can read mysql
+ @param adminPasswd: the password of adminUser
+ @param internalTo: False: internal databases (mysql, ...) are ignored
+ @return: a list of all database names
+ '''
+ argv = self.buildArgvMysql('mysql', adminUser, adminPasswd)
+ sql = '''select user from user;'''
+ self._logger.log(' '.join(argv) + '\n' + sql, 2)
+ rc = self._processTool.executeInputOutput(argv, sql)
+ setResult(rc)
+ return rc
+
+ # create-webapp-dbs <admin> <admin-passwd>
+ def createWebAppDbsAndUser(self, adminUser, adminPasswd):
+ '''Creates for all webapps the DB and the user stored in the configuration.
+ @param adminUser: a user which can read mysql
+ @param adminPasswd: the password of adminUser
+ '''
+ self.createBackupAdmin(adminUser, adminPasswd)
+ path = self._configDir + '/webapps.d'
+ files = os.listdir(path)
+ count = 0
+ for filename in files:
+ if filename.endswith('.conf'):
+ count += 1
+ config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ passwd = config.getString('password')
+ if db == None or user == None or passwd == None:
+ self._logger.error('missing auth data in ' + filename)
+ else:
+ self.createDbAndUser(db, user, passwd, adminUser, adminPasswd)
+ self._logger.log("= {} file(s) scanned".format(count), 2)
+
+ # import-all-webapps <sql-directory>
+ def importAllWebappDbs(self, sqlDirectory):
+ '''Imports all webapp dbs from files in a given directory.
+ Most of the parameters are read from the configuration files.
+ @param sqlDirectory: the directory containing the SQL files to import
+ '''
+ path = self._configDir + '/webapps.d'
+ files = os.listdir(path)
+ count = 0
+ for filename in files:
+ if filename.endswith('.conf'):
+ count += 1
+ config = base.JavaConfig.JavaConfig(path + os.sep + filename, self._logger)
+ db = config.getString('db')
+ user = config.getString('user')
+ passwd = config.getString('password')
+ sqlFilePrefix = config.getString('sql.file')
+ if db == None or user == None or passwd == None or sqlFilePrefix == None:
+ self._logger.error('missing needed data in ' + filename)
+ else:
+ fnSql = sqlDirectory + '/' + sqlFilePrefix + '.sql'
+ if not os.path.exists(fnSql):
+ fnSql2 = fnSql + '.gz'
+ if os.path.exists(fnSql2):
+ fnSql = fnSql2
+ else:
+ self._logger.error('sql not found: ' + fnSql)
+ fnSql = None
+ if fnSql != None:
+ cmd = '/bin/zcat' if fnSql.endswith('.gz') else '/bin/cat'
+ self._logger.log('importing {} into {}...'.format(fnSql, db), 1)
+ self._processTool.executeInChain([cmd, fnSql], None, ['/usr/bin/mysql', '-u', user, '-p' + passwd, db])
+
+ # import-webapp <domain> <sql-file>
+ def importWebApp(self, domain, sqlFile):
+ '''Imports a SQL file into the db of a webapp.
+ @param domain: the domain of the webapp, e.g. 'wolke.infeos.eu'
+ @param sqlFile: the file to import
+ '''
+ filename = self.getTarget(self._configDir + '/webapps.d', domain + '.conf')
+ if not os.path.exists(sqlFile):
+ self._logger.error('sql file does not exist: ' + sqlFile)
+ elif not os.path.exists(filename):
+ self._logger.error('not found: ' + filename)
+ else:
+ configuration = base.JavaConfig.JavaConfig(filename, self._logger)
+ db = configuration.getString('db')
+ user = configuration.getString('user')
+ passwd = configuration.getString('password')
+ if db == None or user == None or passwd == None:
+ self._logger.error('missing needed data in ' + filename)
+ else:
+ cmd = '/bin/zcat' if sqlFile.endswith('.gz') else '/bin/cat'
+ argv1 = [cmd, sqlFile]
+ argv2 = ['/usr/bin/mysql', '-u', user, '-p' + passwd, db]
+ self._logger.log('importing {} into {}...'.format(sqlFile, db), 1)
+ self._processTool.executeInChain(argv1, None, argv2)
+
+ def saveWebApp(self, domain, archive):
+ '''Saves the db into a subdirectory db and stores the home of the webapp into a tar achive.
+ @param domain: the domain of the webapp, e.g. 'cloud.infeos.de'
+ @param archive: the name of the tar achive (will be created)
+ '''
+ fn = self.getTarget('/etc/pyrshell/webapps.d', domain + '.conf')
+ if not os.path.exists(fn):
+ self.usage('unknown web application ' + domain)
+ config = base.JavaConfig.JavaConfig(fn, self._logger)
+ homeWebApp = config.getString('directory')
+ opts = config.getString('excluded')
+ options = []
+ if opts != None and opts != '':
+ sep = opts[0]
+ for opt in sep.split(opts[1:]):
+ options.append('--exclude=' + opt)
+ if not os.path.isdir(homeWebApp):
+ self.usage('home of {} not found: {}'.format(domain, homeWebApp))
+ dbDir = self.ensureDirectory(homeWebApp + os.sep + 'db')
+ if dbDir == None:
+ self.error('save-webapp aborted: cannot create subdir db')
+ else:
+ fnDb = dbDir + os.sep + domain + '.sql.gz'
+ self.exportWebApp([domain, fnDb])
+ self._logger.log('db saved to {}: {}'.format(fnDb, base.StringUtils.formatSize(os.path.getsize(fnDb))), 2)
+ name = os.path.basename(archive)
+ if name.endswith('.tgz'):
+ name = name[0:-4]
+ self.saveDirectoryByTar(name, homeWebApp, os.path.dirname(archive), None if len(options) == 0 else options)
+
+ # show-tables <db> <user> <password> [<count-records>]
+ def showTables(self, db, user, passwd, countRecords):
+ '''Displays th tables of the given database.
+ @param db: the name of the db
+ @param user: a db user with access to db
+ @param passwd: the password of user
+ @param countRecords: True: the number of records is displayed too
+ '''
+ argv = self.buildArgvMysql(db, user, passwd)
+ sql = 'show tables;';
+ lines = self._processTool.executeInputOutput(argv, sql)
+ tables = lines[1:]
+ removeFromArrayIfExists(tables, '')
+ self._logger.log('= database {}:'.format(db))
+ if not countRecords:
+ self._logger.log('\n'.join(tables))
+ else:
+ DbToolResult = tables
+ for table in tables:
+ sql = 'select count(*) from {};'.format(table)
+ result = self._processTool.executeInputOutput(argv, sql)
+ if len(result) > 1:
+ count = result[1]
+ self._logger.log("{}: {}".format(table, count))
+
+ # show-webapps
+ def showWebApps(self):
+ '''Displays th tables of the given database.
+ @param domain: the domain of the webapp, e.g. 'wolke.infeos.eu'
+ '''
+ path = self._configDir + '/webapps.d'
+ files = os.listdir(path)
+ DbToolResult = []
+ for item in files:
+ if item.endswith('.conf'):
+ DbToolResult.append(item[:-5])
+ self._logger.log(item[:-5])
+
+ # show-webapp-config <domain>
+ def showWebAppConfiguration(self, domain):
+ '''Displays th tables of the given database.
+ @param domain: the domain of the webapp, e.g. 'wolke.infeos.eu'
+ '''
+ filename = self._configDir + '/webapps.d/' + domain + '.conf'
+ if not os.path.exists(filename):
+ self._logger.error('webapp configuration not found: ' + filename)
+ else:
+ content = base.StringUtils.fromFile(filename)
+ DbToolResult = content.split('\n')
+ self._logger.log(filename + ':\n' + content)
+
+def usage():
+ '''Returns the usage text of the dbtool application.
+ @return: a multi line string describing all commands, their arguments and examples
+ '''
+ return """dbtool [<global-opts>] <command>
+ Info and manipulation of (mysql) databases.
+GLOBAL_OPTS
+GLOBAL_MODES
+<command>:
+ all-dbs [<admin> [<admin-passwd> [<internal-too>]]]
+ list all databases.
+ <internal-too>: 'true': internal databases like mysql are listed to
+ all-users [<admin> [<admin-passwd>]]
+ list all databases.
+ archive-webapp <domain> <dir-archive> [<admin> [<admin-passwd>]]
+ archives a webapp (directory, db, configuration) into a tar archive
+ create-admin [--read-only | -r] <admin> <admin-password> [<superuser> [<superuser-password>]]
+ creates an user which can access to all databases
+ create-and-import-webapp <domain> <dir-backup> [<superuser> [<superuser-password>]]
+ creates the DB and user, an imports the *.sql lying in the <dir-backup>
+ create-and-import-all-webapps <dir-backup>
+ creates the DBs and users for all web applications and imports the *.sql lying in the <dir-backup>
+ create-backup-user <user> <passwd> [<superuser> [<superuser-password>]]
+ creates an user which can access to all databases but readonly only
+ create-db-and-user <db> <user> <passwd> [<superuser> [<superuser-password>]]
+ creates a database (if not exists) and a user who can access to this db only
+ create-webapp-dbs [<superuser> [<superuser-password>]]
+ creates for all webapps db and a user (stored in the webapp configuration)
+ create-webapp-configuration <domain> <directory> <db> [ <user> [<password>]]
+ creates a configuration file in */webapps.d for a web application
+ delete-db-and-user <db> [<user> [<superuser> [<superuser-password>]]] [--no-saving] [--no-confirmation]
+ deletes the database <db> and (if given) a database user <user> connected as user <superuser>
+ --no-saving: the database is not saved before
+ --no-confirmation: no confirmation (retyping db/user name) is done
+ delete-user <user> [<superuser> [<superuser-password>]]
+ deletes the database user <user> connected as user <superuser>
+ export-all-dbs <directory> [<superuser> [<superuser-password>]]
+ exports all databases into a directory
+ export-db <db> <superuser> <superuser-password> <target>
+ exports a db into a sql file
+ export-webapp <domain> [<sqlfile>]
+ exports the database of the <domain>. user and password are taken from configuration
+ <sqlfile>: the output file. Default: /tmp/<domain>.sql
+ filter-records <source-sql> <table> <target-sql> [--exclude=<regexpr>]
+ reads an SQL file, searches for insert statements of the table <table> and writes a SQL file with this records
+ <source-sql>: name of the SQL file to read (formatted like from mysqldump)
+ <target-sql>: name of the result file
+ <regexpr>: a regular expression of the records which should not be copied
+ import-webapp <domain> <sql-file>
+ imports the sql-file (may be compressed) into the db of the webapp related to the domain
+ import-all-webapps <sql-directory>
+ import all webapps. <sql-directory> contains the sql files
+ save-webapp <domain> <tar-file>
+ store the database content in a subdirectory db and create a tar archive of the webapp directory
+ <tar-file>: a tar archive with the webapplication (files + db)
+ show-all-db-tables <user> <password> [<count-records>]
+ shows all databases with all tables. if <count-records> == true: the number of records is showed too
+ show-tables <db> [<superuser> [<superuser-password> [<count-records>]]]
+ shows the tables of the db. if <count-records> == true: the number of records is showed too
+ show-webapp-config <domain>
+ shows the configuration data of the web application for the <domain>
+ show-webapps
+ shows the domains of all webapps
+example:
+ dbtool -v3 archive-webapp huber.de /media/backup/archive
+ dbtool -v3 show-tables wordpress wp_user TopSecret [<count-records>]
+ dbtool -v3 create-backup-user wordpress wp_user TopSecret root NotKnown
+ dbtool -v3 create-webapp-configuration example.com /home/example.com dbexample usrexample NeverUsed
+ dbtool -v3 create-and-import-all-webapps /backup/dayly/Mon
+ dbtool -v3 create-and-import-webapp www.example.com /backup/dayly/Mon dbadmin ExtremeSecret
+ dbtool -v3 delete-db-and-user testdb testusr --no-saving --no-confirmation
+ dbtool -v3 filter-records mysql.sql user users.sql --exclude=,root,
+"""
+
+def defaultAdmin(argv, indexAdmin, tool):
+ '''Returns the default admin and its password.
+ If argv does not contains the data the configuration will be asked.
+ If nothing is found, ('root', '') is returned.
+ @param argv the argument vector
+ @param indexAdmin: the index of the admin in argv, the password has index+1
+ @param tool: the DirTool instance (for configuration data)
+ @returns: a tuple (admin, password)
+ '''
+ admin = tool._configuration.getString('admin.user') if len(argv) < indexAdmin + 1 or argv[indexAdmin] == '-' else argv[indexAdmin]
+ passwd = tool._configuration.getString('admin.code') if len(argv) < indexAdmin + 2 or argv[indexAdmin + 1] == '-' else argv[indexAdmin+1]
+ if admin == None:
+ admin = 'root'
+ if passwd == None:
+ passwd = ''
+ return (admin, passwd)
+
def main(argv):
    '''The main routine: evaluates the global options and dispatches the subcommand.
    @param argv: the program arguments, e.g. ['/usr/local/bin/dbtool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('dbtool', 'appl/DbTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = DbTool(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'archive-webapp':
        tool.archiveWebapp(argv)
    elif cmd == 'all-dbs':
        # all-dbs <admin> <admin-passwd> [<internal-too>]
        (admin, code) = defaultAdmin(argv, 0, tool)
        # NOTE(review): argv[4] looks odd for the 3rd positional argument — verify against usage()
        lines = tool.findAllDbs(admin, code, len(argv) > 4 and argv[4].upper().startswith('T'))
        setResult(lines)
        if tool._verboseLevel > 0:
            print('\n'.join(lines))
    elif cmd == 'all-users':
        (admin, code) = defaultAdmin(argv, 3, tool)
        lines = tool.findAllUsers(admin, code)
        setResult(lines)
        if tool._verboseLevel > 0:
            print('\n'.join(lines))
    elif cmd == 'create-db-and-user':
        # create-db-and-user <db> <user> <passwd> <admin> <admin-pw>
        if len(argv) < 3:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 3, tool)
            tool.createDbAndUser(argv[0], argv[1], argv[2], admin, code)
    elif cmd == 'create-admin':
        # create-admin <user> <passwd> <user-adm> <passwd-adm>
        argv, opts = tool.splitArgsAndOpts(argv)
        readOnly = False
        for opt in opts:
            if opt == '-r' or opt == '--read-only':
                readOnly = True
            else:
                # fixed: called the module level "usage" instead of the tool method
                tool.usage('unknown option: ' + opt)
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createAdmin(argv[0], argv[1], readOnly, admin, code)
    elif cmd == 'create-backup-user':
        # create-backup-user <user> <passwd> <user-adm> <passwd-adm>
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createBackupUser(argv[0], argv[1], admin, code)
    elif cmd == 'create-and-import-webapp':
        # create-and-import-webapp <domain> <sql-file>
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 2, tool)
            tool.createAndImportWebApp(argv[0], argv[1], admin, code)
    elif cmd == 'create-and-import-all-webapps':
        # create-and-import-all-webapps <domain>
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.createAndImportAllWebApps(argv[0], admin, code)
    elif cmd == 'create-webapp-dbs':
        # create-webapp-dbs <admin> <admin-passwd>
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.createWebAppDbsAndUser(argv[0], '' if len(argv) < 2 else argv[1])
    elif cmd == 'create-webapp-configuration':
        tool.createWebAppConfiguration(argv)
    elif cmd == 'delete-db-and-user':
        # delete-db-and-user <db> <user> <admin> <passwd-adm>
        tool.deleteDbAndUser(argv)
    elif cmd == 'delete-user':
        # delete-user <user> <admin> <passwd-adm>
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.deleteUser(argv[0], admin, code)
    elif cmd == 'export-all-dbs':
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            (admin, code) = defaultAdmin(argv, 1, tool)
            tool.exportAllDatabases(admin, code, argv[0])
    elif cmd == 'export-db':
        if len(argv) < 4:
            tool.usage('too few arguments')
        else:
            tool.exportDatabase(argv[0], argv[1], '' if len(argv) <= 2 else argv[2], argv[3])
    elif cmd == 'export-webapp':
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.exportWebApp(argv)
    elif cmd == 'filter-records':
        if len(argv) < 3:
            tool.usage('too few arguments')
        else:
            tool.filterRecords(argv)
    elif cmd == 'import-webapp':
        # import-webapp <domain> <sql-file>
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            tool.importWebApp(argv[0], argv[1])
    elif cmd == 'import-all-webapps':
        # import-all-webapps <sql-directory>
        if len(argv) < 1:
            tool.usage('too few arguments')
        else:
            tool.importAllWebappDbs(argv[0])
    elif cmd == 'save-webapp':
        # save-webapp <domain> <tar-file>
        if len(argv) < 2:
            tool.usage('missing arguments')
        else:
            # fixed: saveWebApp() was called even after the usage message (IndexError)
            lines = tool.saveWebApp(argv[0], argv[1])
            setResult(lines)
    elif cmd == 'show-tables':
        # show-tables <db> <user> <password> [<count-records>]
        (user, code) = defaultAdmin(argv, 0, tool)
        lines = tool.showTables(argv[0], user, code, len(argv) > 3 and argv[3].upper().startswith('T'))
        setResult(lines)
    elif cmd == 'show-all-db-tables':
        # show-all-db-tables <user> <password> [<count-records>]
        (user, code) = defaultAdmin(argv, 0, tool)
        # fixed: guard tested len(argv) > 2 but indexed argv[3] (IndexError for 3 arguments)
        lines = tool.showAllDbsAndTables(argv[0], user, len(argv) > 2 and argv[2].upper().startswith('T'))
        setResult(lines)
    elif cmd == 'show-webapp-config':
        # show-webapp-config <domain>
        if len(argv) < 1:
            # fixed: called the module level "usage" instead of the tool method
            tool.usage('too few arguments')
        else:
            lines = tool.showWebAppConfiguration(argv[0])
            setResult(lines)
    elif cmd == 'show-webapps':
        # show-webapps
        tool.showWebApps()
    else:
        tool.usage("unknown command: " + cmd)
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
# Script entry point: pass the complete argument vector (including the program name).
if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import sys
+import os.path
+import stat
+import shutil
+import datetime
+import tarfile
+import re
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.StringUtils
+import base.BaseTool
+import base.LinuxUtils
+import base.FileHelper
+import base.ProcessHelper
+
class ProgramOptions:
    '''Stores the common program options shared by the DirTool commands.
    '''
    def __init__(self, parent):
        '''Constructor.
        @param parent: the DirTool instance (supplies the logger, verbosity and option helpers)
        '''
        self._parent = parent
        self._verboseLevel = parent._verboseLevel
        self._logger = parent._logger
        self._maxDepth = 999
        self._fileOnly = False
        self._dirOnly = False
        self._excluded = None           # compiled regular expression of excluded nodes or None
        self._included = None           # compiled regular expression of included nodes or None
        self._ignoreCase = False
        self._regExprCaseMode = 0
        self._dirInfo = base.FileHelper.DirInfo()
        self._blocksize = 1024*1024*16
        self._testDate = True
        self._testContent = False       # fixed: was never initialized but set by --test-content
        self._maxDifferenceCount = None
        self._silent = False
        self._short = False
        self._infoCount = 5
        self._minSize = None
        self._maxOldest = self._infoCount
        self._maxYoungest = self._infoCount
        self._maxSmallest = self._infoCount
        self._maxLargest = self._infoCount
        self._filesOnly = False
        self._dirsOnly = False
        self._humanReadable = True
        self._orderDateSize = False

    def parseOptions(self, options):
        '''Stores the given options.
        @param options: the list of program options, e.g. ['--max-depth=3']
        '''
        # fixed: the original removed entries from "options" while iterating over it
        for opt in options:
            if opt.startswith('--pattern='):
                # fixed: was stored in the unused attribute _includes
                self._included = self._parent.regExprCompile(opt[10:], self._ignoreCase)
            elif opt.startswith('--excluded='):
                # fixed: was stored in _includes instead of _excluded
                self._excluded = self._parent.regExprCompile(opt[11:], self._ignoreCase)
            elif opt.startswith('--max-depth='):
                self._maxDepth = self._parent.integerOption(opt)
            elif opt == '--file-only' or opt == '-f':
                # both attribute spellings are kept in sync: the containers test _fileOnly
                self._fileOnly = self._filesOnly = True
            elif opt == '--dir-only' or opt == '-d':
                self._dirOnly = self._dirsOnly = True
            elif opt == '-i' or opt == '--ignore-case':
                # fixed: a duplicated branch with swapped prefixes ('--i' / '-ignore-case') removed
                self._ignoreCase = True
            elif opt.startswith('--blocksize'):
                # fixed: integerOption() lives in the parent tool, not in this class
                self._blocksize = self._parent.integerOption(opt, 1024*1024*16)
            elif opt == '--ignore-time':
                self._testDate = False
            elif opt.startswith('--max-differences'):
                self._maxDifferenceCount = self._parent.integerOption(opt, 20)
            elif opt == '-q' or opt == '--quiet':
                self._silent = True
            elif opt == '-s' or opt == '--short':
                self._short = True
            elif opt == '-t' or opt == '--test-content':
                self._testContent = True
            elif opt.startswith('-n') or opt.startswith('--count'):
                self._infoCount = self._parent.integerOption(opt)
            elif opt.startswith('--min-size='):
                self._minSize = self._parent.integerOption(opt)
            elif opt.startswith('--max-oldest='):
                self._maxOldest = self._parent.integerOption(opt)
            elif opt.startswith('--max-youngest='):
                self._maxYoungest = self._parent.integerOption(opt)
            elif opt.startswith('--max-smallest='):
                self._maxSmallest = self._parent.integerOption(opt)
            elif opt.startswith('--max-largest='):
                self._maxLargest = self._parent.integerOption(opt)
            elif opt == '--byte-size' or opt == '--mbyte-size':
                self._humanReadable = False
            elif opt == '--order-date-size':
                self._orderDateSize = True
            else:
                self._parent.usage('unknown option ' + opt)
+
class FileInfo:
    '''Holds the metadata of one file or directory inside a container.
    '''
    def __init__(self):
        '''Constructor.
        fixed: was named "init" and therefore never called as constructor.
        '''
        self._name = None       # the node name without path
        self._size = None       # size in bytes, -1 for directories
        self._modified = None   # the modification timestamp

    def set(self, name, size, modified):
        '''Sets the attributes.
        @param name: the name without path
        @param size: the size in bytes, -1 for directories
        @param modified: the modify datetime
        '''
        self._name = name
        self._size = size
        self._modified = modified
+
class FileContainer:
    '''Base class of file containers (directory trees, tar archives...).
    Subclasses must implement nextNode(), matches() and changeDir().
    '''
    def __init__(self, options):
        '''Constructor.
        @param options: the program options (a ProgramOptions instance)
        '''
        self._kind = None
        self._options = options
        self._currentFile = FileInfo()
        self._action = 'list'
        self._relPath = ''
        self._base = '.'
        # fixed: actionList() appends here in quiet mode but the list was never created
        self._lines = []
        self._dirInfo = base.FileHelper.DirInfo()

    def actionList(self):
        '''Lists the metadata of the current file/directory
        (set by the subclass in matches() via self._node / self._currentFile).
        '''
        if self._currentFile._size < 0:
            # directories carry the size -1
            size = '<dir>'
        elif self._options._humanReadable:
            size = "{:>8s}".format(base.StringUtils.formatSize(self._currentFile._size))
        else:
            size = '{:13.6f} MB'.format(self._currentFile._size / 1000000)
        fdate = datetime.datetime.fromtimestamp(self._currentFile._modified)
        dateString = fdate.strftime("%Y.%m.%d %H:%M:%S")
        if self._options._orderDateSize:
            line = '{:s} {:>12s} {:s}{}{}{}'.format(dateString, size, self._base, os.sep, self._relPath, self._node)
        else:
            line = '{:>12s} {:s} {:s}{}{}{}'.format(size, dateString, self._base, os.sep, self._relPath, self._node)
        if self._options._silent:
            # quiet mode: collect instead of print
            self._lines.append(line)
        else:
            print(line)

    def onSelected(self):
        '''Handles one file/directory which passed the filters.
        '''
        if self._currentFile._size >= 0:
            # a size >= 0 marks a file
            self._dirInfo._fileCount += 1
            self._dirInfo._fileSizes += self._currentFile._size
        if self._action == 'list':
            self.actionList()

    def traverse(self, relPath, depth):
        '''Traverses the container tree and does the wanted job.
        @param relPath: the relative path
        @param depth: the nesting level: 0: the first level
        '''
        while self.nextNode():
            if not self.matches():
                # fixed: the file/directory counters were swapped (size >= 0 marks a file)
                if self._currentFile._size >= 0:
                    self._dirInfo._ignoredFiles += 1
                else:
                    self._dirInfo._ignoredDirs += 1
            else:
                self.onSelected()
                if self._isDir:
                    if depth >= self._options._maxDepth:
                        self._dirInfo._ignoredDirs += 1
                    else:
                        self._dirInfo._dirCount += 1
                        self._nextDir = relPath + os.sep + self._node
                        self.changeDir(self._nextDir)
                        self.traverse(self._nextDir, depth + 1)
+
class TarContainer (FileContainer):
    '''A file container backed by a (possibly compressed) tar archive.
    '''
    def __init__(self, archive, options):
        '''Constructor.
        @param archive: the name of the tar archive
        @param options: the program options
        '''
        FileContainer.__init__(self, options)
        self._tar = tarfile.open(archive, 'r:*')
        self._members = self._tar.getmembers()
        self._currentMember = -1

    def _finish(self):
        '''Deconstructor: releases the archive handle.
        '''
        self._tar.close()

    def changeDir(self, relPath):
        '''Starts handling of the next directory.
        Nothing to do: tar members carry their full relative path.
        @param relPath: the relative path from the base
        '''
        pass

    def matches(self):
        '''Tests whether the current file is not excluded by the option controlled filtering.
        @return: True: the current file is not excluded
        '''
        member = self._members[self._currentMember]
        self._node = member.name
        self._isDir = member.isdir()
        # shortcut selection by node type
        # fixed: tested the non-existing attribute self._options._isDir
        rc = self._options._fileOnly and not self._isDir
        rc = rc or self._options._dirOnly and self._isDir
        if not rc:
            # NOTE(review): the 2nd match() argument is re's "pos" parameter — verify the intent
            rc = self._options._included is None or self._options._included.match(self._node, self._options._regExprCaseMode)
            # fixed: the exclude filter was combined with "or" and therefore never rejected anything
            rc = rc and (self._options._excluded is None or not self._options._excluded.match(self._node, self._options._regExprCaseMode))
        self._currentFile.set(self._node, -1 if self._isDir else member.size, member.mtime)
        return rc

    def nextNode(self):
        '''Setups the next file in the container.
        @return: False: no more file is available.
        '''
        self._currentMember += 1
        return self._currentMember < len(self._members)
+
+
class DirectoryContainer (FileContainer):
    '''A file container backed by a directory tree in the filesystem.
    '''
    def __init__(self, path, options):
        '''Constructor.
        @param path: the base directory
        @param options: the program options
        '''
        FileContainer.__init__(self, options)
        self._base = path
        self._currentDir = path
        self._nodes = os.listdir(path)
        self._currentNode = -1

    def changeDir(self, relPath):
        '''Starts handling of the next directory.
        NOTE(review): this overwrites _nodes/_currentNode which the caller
        (FileContainer.traverse) still iterates — verify nested traversal.
        @param relPath: the relative path from the base
        '''
        self._currentDir = self._base + relPath
        self._nodes = os.listdir(self._currentDir)
        self._currentNode = -1

    def matches(self):
        '''Tests whether the current file is not excluded by the option controlled filtering.
        @return: True: the current file is not excluded
        '''
        self._node = self._nodes[self._currentNode]
        # fixed: nodes were always addressed relative to the base directory,
        # even while traversing a subdirectory
        self._full = self._currentDir + os.sep + self._node
        self._isDir = os.path.isdir(self._full)
        rc = self._options._fileOnly and not self._isDir
        rc = rc or self._options._dirOnly and self._isDir
        if not rc:
            rc = self._options._included is None or self._options._included.match(self._node, self._options._regExprCaseMode)
            # fixed: the exclude filter was combined with "or" and therefore never rejected anything
            rc = rc and (self._options._excluded is None or not self._options._excluded.match(self._node, self._options._regExprCaseMode))
        if rc:
            if not self._isDir:
                # NOTE(review): onSelected() counts files too — possible double counting; verify
                self._dirInfo._fileCount += 1
                # fixed: the DirInfo attribute is named _fileSizes (was _sizesFiles)
                self._dirInfo._fileSizes += os.path.getsize(self._full)
        else:
            # fixed: the counters live in self._dirInfo, not in the container
            if self._isDir:
                self._dirInfo._ignoredDirs += 1
            else:
                self._dirInfo._ignoredFiles += 1
        self._currentFile.set(self._node, -1 if self._isDir else os.path.getsize(self._full), os.path.getmtime(self._full))
        return rc

    def nextNode(self):
        '''Setups the next file in the container.
        @return: False: no more file is available.
        '''
        self._currentNode += 1
        return self._currentNode < len(self._nodes)
+
+class DirTool (base.BaseTool.BaseTool):
+
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global program options (verbosity, logger...)
        '''
        self._globalOptions = globalOptions
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'dirtool.conf')
        self._useRename = True
        # parallel lists of the currently "most interesting" files (see handleOneFile):
        self._infos = []
        self._filenames = []
        self._maxDepth = 2048
        # node type filters for the date/size searches:
        self._withDirs = True
        self._withFiles = True
        # maximal count of reported files, configurable via 'count.default':
        self._count = int(self._configuration.getString('count.default', '20'))
        self._dateSize = True
        self._humanReadableSize = True
        # effective user/group id: used for permission checks while traversing:
        self._euid = os.geteuid()
        self._egid = os.getegid()
        self._testDate = True
        # two timestamps closer than this value (in seconds) count as equal:
        self._timePrecision = 2.0
        # read block size for the content comparison:
        self._blocksize = 1024*1024*16
        self._differenceCount = 0
        self._maxDifferenceCount = 100
        self._differences = None
        self._short = False
        self._silent = False
        self._testContent = False
        self._dirInfo = None
+
    def buildExamples(self):
        '''Prepares the system to execute the examples of the usage message.
        Builds /tmp/%examples/dir1 with some files and subdirectories and a copy
        dir2 which differs in deleted, added, renamed and modified entries.
        '''
        def setTime(fn, modified):
            # sets the modification time from a '%Y-%m-%d %H:%M:%S' string
            modified2 = datetime.datetime.strptime(modified, '%Y-%m-%d %H:%M:%S')
            base.FileHelper.setModified(fn, None, modified2)

        def build(fn, content = 'line1', modified = '2018-01-03 07:03:53'):
            # creates a file with the given content and modification time
            self._logger.log('creating ' + fn, 1)
            base.StringUtils.toFile(fn, content)
            setTime(fn, modified)

        baseDir = '/tmp/%examples'
        # self.ensureDirectory(baseDir)
        baseDir1 = baseDir + os.sep + 'dir1'
        self.ensureDirectory(baseDir1)
        # dir1: five files with varying content length and dates:
        for no in range(5):
            build(baseDir1 + os.sep + 'file{}.txt'.format(no + 1),
                'content of the file file{}.txt'.format(no) + '\nbla bla' * (no + 1), '2018-{:02d}-03 07:03:5{}'.format(no+1, no))
        no = 47
        # three subdirectories, each with three data files ("no" only varies sizes/dates):
        for dirNo in range(1, 4):
            no += 3
            subDir = baseDir1 + os.sep + 'subdir{}'.format(dirNo)
            self.ensureDirectory(subDir)
            for fileNo in range(4, 7):
                no *= 7
                build(subDir + os.sep + 'data_{}.txt'.format(fileNo + 1),
                    'content of the file file{}.txt'.format(fileNo) + '\nbla bla' * (no % 5 + 1),
                    '2017-{:02d}-{} 07:03:5{}'.format(fileNo+1, dirNo, dirNo))
            setTime(subDir, '2018-{:02d}-{:02d} 07:03:53'.format(no%12+1, dirNo + 7))

        setTime(baseDir1, '2017-01-02 04:17:22')
        # dir2: a copy of dir1 which is then modified to create differences:
        baseDir2 = baseDir + os.sep + 'dir2'
        shutil.rmtree(baseDir2, True)
        shutil.copytree(baseDir1, baseDir2)

        # deleted files:
        os.unlink(baseDir + '/dir2/file2.txt')
        os.unlink(baseDir + '/dir2/subdir1/data_6.txt')

        # a directory replaced by a file of the same name (type difference):
        shutil.rmtree(baseDir + '/dir2/subdir2')
        build(baseDir + '/dir2/subdir2', 'is a file instead of the name')

        # additional files:
        build(baseDir + '/dir2/new.txt', 'line1\nline2\nline3')
        build(baseDir + '/dir2/subdir1/new2.txt', 'line1\nline2\nline3')

        # files differing only in the modification date:
        base.FileHelper.setModified(baseDir + '/dir2/file1.txt', None, datetime.datetime.strptime('2018-01-03 09:03:53', '%Y-%m-%d %H:%M:%S'))
        base.FileHelper.setModified(baseDir + '/dir2/subdir1/data_5.txt', None, datetime.datetime.strptime('2018-01-03 09:03:53', '%Y-%m-%d %H:%M:%S'))

        # files differing in content and date:
        build(baseDir + '/dir2/file4.txt', 'newer content in file4.txt', '2018-10-03 09:03:53')
        build(baseDir + '/dir2/subdir3/data5.txt', 'line1\nline2', '2018-10-03 09:07:53')

        # files differing in content only (same size and date):
        fn = baseDir + '/dir2/file5.txt'
        content = base.StringUtils.fromFile(fn)
        aTime = os.path.getmtime(fn)
        build(fn, content.replace('e', 'E'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(aTime)))

        fn = baseDir + '/dir2/subdir3/data_6.txt'
        content = base.StringUtils.fromFile(fn)
        aTime = os.path.getmtime(fn)
        build(fn, content.replace('e', 'E'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(aTime)))

        # adjust the directory timestamps of dir2 (subdir2 is now a file):
        for dirNo in range(1, 4):
            if dirNo != 2:
                subDir = baseDir2 + os.sep + 'subdir{}'.format(dirNo)
                setTime(subDir, '2018-{}-03 09:03:53'.format(1 + (dirNo + 7) % 12))
        setTime(baseDir2, '2018-04-03 09:03:53')
+
+
+ def compare(self, argv):
+ '''Compares two directory trees.
+ @param dir1: the first directory to compare
+ @param dir2: the second directory to compare
+ '''
+ (argv, opts) = self.splitArgsAndOpts(argv)
+ if len(argv) < 2:
+ self.usage('missing arguments')
+ else:
+ dir1 = argv[0]
+ dir2 = argv[1]
+ argv = argv[2:]
+ if not os.path.isdir(dir1):
+ self.usage('dir1 is not a directory: ' + dir1)
+ elif not os.path.isdir(dir1):
+ self.usage('dir2 is not a directory: ' + dir2)
+ else:
+ self._blocksize = 1024*1024*16
+ self._differenceCount = 0
+ self._maxDifferenceCount = 100
+ self._differences = []
+ self._differenceCount = 0
+ self._silent = False
+ self._short = False
+ self._testContent = False
+ self._testDate = True
+ caseSensitive = True
+ exclude = None
+ pattern = None
+ for opt in opts:
+ if opt.startswith('--blocksize'):
+ self._blocksize = self.integerOption(opt, 1024*1024*16)
+ elif opt.startswith('--pattern='):
+ pattern = opt[10:]
+ elif opt.startswith('--exclude='):
+ exclude = opt[10:]
+ elif opt == '-i' or opt == '--ignore-case':
+ caseSensitive = False
+ elif opt == '--ignore-time':
+ self._testDate = False
+ elif opt.startswith('--max-differences'):
+ self._maxDifferenceCount = self.integerOption(opt, 20)
+ elif opt == '-q' or opt == '--quiet':
+ self._silent = True
+ elif opt == '-s' or opt == '--short':
+ self._short = True
+ elif opt == '-t' or opt == '--test-content':
+ self._testContent = True
+ self._exclude = None if exclude == None else self.regExprCompile(exclude, 'compare-exclude', caseSensitive)
+ self._pattern = None if pattern == None else self.regExprCompile(pattern, 'compare-pattern', caseSensitive)
+ self._dirInfo = base.FileHelper.DirInfo()
+ self.compareDir(dir1, dir2)
+ base.BaseTool.setResult(self._differences)
+ if self._verboseLevel > 0:
+ info = self._dirInfo
+ tail = '' if info._fileSizes == 0 else ' with {}'.format( base.StringUtils.formatSize(info._fileSizes))
+ self._logger.log('inspected {} dir(s) and {} file(s){}'.format(
+ info._dirCount, info._fileCount, tail))
+
+ def compareDir(self, dir1, dir2):
+ '''Compares all files two directories.
+ @param dir1: the first directory to compare
+ @param dir2: the 2nd directory to compare
+ @return: True: success False: stop processing
+ '''
+ rc = False
+ self._dirInfo._dirCount += 1
+ nodes1 = os.listdir(dir1)
+ nodes2 = os.listdir(dir2)
+ for node in nodes1:
+ full1 = dir1 + os.sep + node
+ isDir1 = os.path.isdir(full1)
+ if not isDir1 and self._pattern != None and self._pattern.match(node) == None:
+ self._logger.log('ignored (pattern): ' + node, 4)
+ self._dirInfo._ignoredFiles += 1
+ continue
+ if self._exclude != None and self._exclude.match(node):
+ if isDir1:
+ self._dirInfo._ignoredDirs += 1
+ else:
+ self._dirInfo._ignoredFiles += 1
+ self._logger.log('ignored (exclude): ' + node, 4)
+ continue
+ if node not in nodes2:
+ self.compareLog('{} {}'.format('+' if self._short else 'additional:' , dir1 + os.sep + node))
+ else:
+ full2 = dir2 + os.sep + node
+ type1 = base.FileHelper.fileType(full1)
+ type2 = base.FileHelper.fileType(full2)
+ if type1 != type2:
+ self.compareLog('{} {} / {} {}'.format('!' if self._short else 'different types:', type1, type2, full1))
+ elif type1 == 'dir':
+ self.compareDir(full1, full2)
+ else:
+ self.compareFile(full1, full2)
+ rc = self._differenceCount < self._maxDifferenceCount
+ if not rc:
+ break
+ if rc:
+ for node in nodes2:
+ if self._pattern != None and self._pattern.match(node):
+ self._logger.log('ignored (pattern): ' + node, 4)
+ continue
+ if self._exclude != None and self._exclude.match(node):
+ self._logger.log('ignored (exclude): ' + node, 4)
+ continue
+ if node not in nodes1:
+ self.compareLog('{} {}'.format('-' if self._short else 'missing counterpart:' , dir1 + os.sep + node))
+ rc = self._differenceCount < self._maxDifferenceCount
+ if not rc:
+ break
+ return rc
+
    def compareFile(self, file1, file2):
        '''Compares two files by size, optionally date and optionally content.
        @param file1: the first file to compare
        @param file2: the 2nd file to compare
        @return: True: both files are equal
        '''
        self._dirInfo._fileCount += 1
        info1 = os.lstat(file1)
        info2 = os.lstat(file2)
        # cheapest test first: the size
        rc = info1.st_size == info2.st_size
        if not rc:
            self.compareLog('{} {} / {} {}'.format('S' if self._short else 'size:', info1.st_size, info2.st_size, file1))
        elif self._testDate:
            # timestamps closer than _timePrecision seconds count as equal
            rc = abs(info1.st_mtime - info2.st_mtime) < self._timePrecision
            if not rc:
                self.compareLog('{} {} / {} {}'.format('T' if self._short else 'date:',
                    time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(info1.st_mtime)),
                    time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(info2.st_mtime)),
                    file1))
        if rc and self._testContent:
            # blockwise binary comparison; stops at the first difference
            with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
                sumSize = 0
                while rc:
                    block1 = fp1.read(self._blocksize)
                    block2 = fp2.read(self._blocksize)
                    rc = len(block1) == len(block2)
                    if not rc:
                        self.compareLog('{} {} / {} {}'.format('L' if self._short else 'length:', sumSize + len(block1), sumSize + len(block2), file1))
                    else:
                        rc = block1 == block2
                        if not rc:
                            # search the exact position of the first differing byte
                            for ix in range(len(block1)):
                                if block1[ix] != block2[ix]:
                                    desc = 'C [{}]' if self._short else 'different at position [{}]: '
                                    self.compareLog('{} {}'.format(desc.format(1 + sumSize + ix), file1))
                                    break
                    sumSize += len(block1)
                    if len(block1) == 0:
                        # end of file reached
                        break
                self._dirInfo._fileSizes += sumSize
        return rc
+
+ def compareLog(self, message):
+ '''Logs a difference of file tree comparism.
+ @param message: the difference as string
+ '''
+ self._differenceCount += 1
+ if self._differenceCount < self._maxDifferenceCount:
+ if self._differences != None:
+ self._differences.append(message)
+ if not self._silent:
+ self._logger.log(message)
+
    def traverseContainer(self, relPath, depth):
        '''Placeholder: not implemented yet (intentionally does nothing).
        @param relPath: the relative path inside the container
        @param depth: the nesting level
        '''
+
+ def check(self, argv):
+ '''Searches for "wrong" files:
+ @param argv: command arguments, e.g. ['/home', '/etc' ]
+ '''
+ def checkFile(name):
+ try:
+ with open(name, 'rb') as fp:
+ while fp.read():
+ pass
+ except Exception as exc:
+ self._logger.error(name + ': ' + str(exc))
+ def checkDir(full):
+ try:
+ for node in os.listdir(full):
+ if not os.path.isdir(full):
+ checkFile(full + os.sep + node)
+ except Exception as exc:
+ self._logger.error(full + ': ' + str(exc))
+ # === def check
+ for arg in argv:
+ if not os.path.isdir(arg):
+ checkFile(arg)
+ else:
+ checkDir(arg)
+
+ def dirInfo(self, argv):
+ '''Lists summary info and "extreme" files (e.g. the youngest files) of a directory tree
+ @param argv: the program arguments, e.g. ['/home', '--max-depth=7']
+ '''
+ argv, options = self.splitArgsAndOpts(argv)
+ filePattern = dirPattern = maxDepth = None
+ minSize = 1
+ maxYoungest = maxLargest = maxOldest = count = maxSmallest = None
+ dirsOnly = filesOnly = False
+ trace = 0
+ while len(options) > 0 and options[0].startswith('-'):
+ opt = options[0]
+ options = options[1:]
+ if opt.startswith('--pattern='):
+ dirPattern = opt[10:]
+ elif opt.startswith('--max-depth='):
+ maxDepth = self.integerOption(opt)
+ elif opt.startswith('-n') or opt.startswith('--count'):
+ count = self.integerOption(opt)
+ elif opt.startswith('--min-size='):
+ minSize = self.integerOption(opt)
+ elif opt.startswith('--max-oldest='):
+ maxOldest = self.integerOption(opt)
+ elif opt.startswith('--max-youngest='):
+ maxYoungest = self.integerOption(opt)
+ elif opt.startswith('--max-smallest='):
+ maxSmallest = self.integerOption(opt)
+ elif opt.startswith('--max-largest='):
+ maxLargest = self.integerOption(opt)
+ elif opt.startswith('--trace'):
+ trace = self.integerOption(opt)
+ elif opt == '--file-only' or opt == '-f':
+ filesOnly = True
+ elif opt == '--dir-only' or opt == '-d':
+ dirsOnly = True
+ if count != None:
+ if maxYoungest == None:
+ maxYoungest = count
+ if maxLargest == None:
+ maxLargest = count
+ if maxOldest == None:
+ maxOldest = count
+ if maxSmallest == None:
+ maxSmallest = count
+ if maxYoungest == None:
+ maxYoungest = 5
+ if maxOldest == None:
+ maxOldest = 5
+ if maxLargest == None:
+ maxLargest = 5
+ if maxSmallest == None:
+ maxSmallest = 5
+ info = None
+ if len(argv) == 0:
+ argv = ['.']
+ for source in argv:
+ info = base.FileHelper.directoryInfo(source, filePattern, dirPattern, maxDepth,
+ info, maxYoungest, maxLargest, maxOldest, maxSmallest, minSize, dirsOnly, filesOnly, trace)
+ result = ['Directories: {} Files: {} / {}'.format(info._dirCount, info._fileCount, base.StringUtils.formatSize(info._fileSizes))]
+ result.append('Ignored: {} file(s) / {} dir(s)'.format(info._ignoredFiles, info._ignoredDirs))
+ if info._maxSmallest != None and info._maxSmallest > 0:
+ result.append('The smallest files:')
+ for item in info._smallest:
+ full = item.split(':')[1]
+ statInfo = os.lstat(full)
+ name = full
+ if stat.S_ISLNK(statInfo.st_mode):
+ name += ' -> ' + os.readlink(full)
+ result.append(base.FileHelper.listFile(statInfo, name, True, True))
+ if info._maxOldest != None and info._maxOldest > 0:
+ result.append('The oldest files:')
+ for item in info._oldest:
+ full = item.split(':')[1]
+ statInfo = os.lstat(full)
+ name = full
+ if stat.S_ISLNK(statInfo.st_mode):
+ name += ' -> ' + os.readlink(full)
+ result.append(base.FileHelper.listFile(statInfo, name, True, True))
+ if info._maxLargest != None and info._maxLargest > 0:
+ result.append('The largest files:')
+ for item in info._largest:
+ full = item.split(':')[1]
+ statInfo = os.lstat(full)
+ if stat.S_ISLNK(statInfo.st_mode):
+ full = '-> ' + os.readlink(full)
+ result.append(base.FileHelper.listFile(statInfo, full, True, True))
+ if info._maxYoungest != None and info._maxYoungest > 0:
+ result.append('The youngest files:')
+ for item in info._youngest:
+ full = item.split(':')[1]
+ statInfo = os.lstat(full)
+ name = full
+ if stat.S_ISLNK(statInfo.st_mode):
+ name += ' -> ' + os.readlink(full)
+ result.append(base.FileHelper.listFile(statInfo, name, True, True))
+ base.BaseTool.setResult(result)
+ self._rawOutput = '\n'.join(result)
+ if self._verboseLevel > 0:
+ print(self._rawOutput)
+ return result
+
    def example(self):
        '''Writes a minimal example configuration file for dirtool.
        '''
        example = '''# dirtool example configuration
log.file=/var/log/local/dirtool.log
'''
        self.storeExample(example)
+
+ def findOptions(self, argv, isDate):
+ '''Evaluates the options relevant for date/size searchings.
+ @param argv: the program arguments to inspect
+ @param isDate: True: mode is oldest or youngest
+ @return: string: error message
+ array: the argument vector without the options.
+ '''
+ rc = None
+ if len(argv) == 0:
+ return [os.sep]
+ else:
+ while len(argv) > 0:
+ arg = argv[0]
+ if not arg.startswith('-'):
+ break
+ if isDate and (arg == '-f' or arg == '--files-only'):
+ self._withDirs = False
+ self._withFiles = True
+ argv = argv[1:]
+ elif isDate and (arg == '-d' or arg == '--dirs-only'):
+ self._withDirs = True
+ self._withFiles = False
+ argv = argv[1:]
+ elif arg == '-b' or arg == '--byte-size':
+ self._humanReadableSize = False
+ argv = argv[1:]
+ else:
+ rc = 'unknown option: ' + arg
+ break
+ if rc == None:
+ rc = argv
+ return rc
+
    def handleOneFile(self, filename, statInfo, moreInteresting):
        '''Compares a given file with the previously found files (self._filenames / self._infos,
        kept ordered from most to least interesting) and inserts it if meaningful.
        @param filename: the full name of the current file
        @param statInfo: the info of the current file
        @param moreInteresting: a lambda function f(statFile1, statFile2, mayBeEqual): bool returning whether file1 replaces file2
        '''
        if len(self._filenames) == 0:
            # first candidate: insert unconditionally
            self._filenames.append(filename)
            self._infos.append(statInfo)
        else:
            if len(self._filenames) < self._count:
                # the result list is not full yet: insert always
                last = self._infos[-1]
                if moreInteresting(last, statInfo, True):
                    # less interesting than the current last entry: append at the end
                    self._filenames.append(filename)
                    self._infos.append(statInfo)
                else:
                    # find the first entry the new file is more interesting than:
                    for ix in range(len(self._infos)):
                        if moreInteresting(statInfo, self._infos[ix], True):
                            self._filenames.insert(ix, filename)
                            self._infos.insert(ix, statInfo)
                            break
            else:
                # the result list is full: replace an entry
                # more interesting than the last?
                if moreInteresting(statInfo, self._infos[-1], False):
                    # more interesting than the first?
                    if moreInteresting(statInfo, self._infos[0], True):
                        self._filenames.insert(0, filename)
                        self._infos.insert(0, statInfo)
                    else:
                        for ix in range(len(self._infos)):
                            if moreInteresting(statInfo, self._infos[ix], False):
                                self._filenames.insert(ix, filename)
                                self._infos.insert(ix, statInfo)
                                break
                    # Remove the last entry (the list grew by the insert above):
                    del self._filenames[self._count]
                    del self._infos[self._count]
        self.testOrder(moreInteresting)
+
+ def handleTree(self, directory, depth, moreInteresting):
+ '''Finds recursively the n most interesting files in a directory tree.
+ @param directory: the directory to inspect
+ @param depth: the depth of the directory in the directory tree
+ @param moreInteresting: a lambda function f(statFile1, statFile2, mayBeEqual): bool returning whether file1 replaces file2
+ '''
+ self._logger.log(directory + os.sep + ' ...', 3)
+ if depth == 0:
+ try:
+ statInfo = os.lstat(directory)
+ accepted = (base.LinuxUtils.isExecutable(statInfo, self._euid, self._egid)
+ and base.LinuxUtils.isReadable(statInfo, self._euid, self._egid))
+ if accepted:
+ self.handleOneFile(directory, statInfo, moreInteresting)
+ else:
+ self._logger.log('no permission: ' + directory, 2)
+ except FileNotFoundError:
+ self._logger.log('no permission: ' + directory, 2)
+ directory = None
+ dirs = []
+ nodes = None
+ if directory != None:
+ try:
+ nodes = os.listdir(directory)
+ except PermissionError:
+ self._logger.log('no permission: ' + directory, 2)
+ directory = None
+ if directory != None:
+ for node in nodes:
+ if node != '.' and node != '..':
+ full = directory + os.sep + node
+ try:
+ statInfo = os.lstat(full)
+ except FileNotFoundError:
+ self._logger.log('no permission: ' + directory + os.sep + node, 2)
+ continue
+ isDir = stat.S_ISDIR(statInfo.st_mode)
+ if self._withDirs and isDir or self._withFiles and not isDir:
+ accepted = (base.LinuxUtils.isExecutable(statInfo, self._euid, self._egid)
+ and base.LinuxUtils.isReadable(statInfo, self._euid, self._egid))
+ if accepted:
+ self.handleOneFile(full, statInfo, moreInteresting)
+ self._logger.log('no permission: ' + directory + os.sep + node, 2)
+ if isDir:
+ dirs.append(node)
+ if depth <= self._maxDepth:
+ for node in dirs:
+ self.handleTree(directory + os.sep + node, depth + 1, moreInteresting)
+
+ def imageResize(self, argv):
+ '''resizes images.
+ @param argv: program arguments, e.g. ['src', 'trg', '--max-width=1024', '--max-height=768']
+ '''
+ argv, opts = self.splitArgsAndOpts(argv)
+ maxWidth = None
+ maxHeight = None
+ for opt in opts:
+ if opt.startswith('--max-width='):
+ maxWidth = self.integerOption(opt)
+ elif opt.startswith('--max-height='):
+ maxHeight = self.integerOption(opt)
+ else:
+ self.usage('unknown option: ' + opt)
+ if not os.path.exists('/usr/bin/identify') or not os.path.exists('/usr/bin/convert'):
+ self.usage('missing commands identify or convert: please install imagemagick')
+ elif len(argv) < 2:
+ self.usage('missing arguments')
+ else:
+ source = argv[0]
+ target = argv[1]
+ if not os.path.isdir(source):
+ self.usage('<source-dir> is not a directory: ' + source)
+ elif not os.path.isdir(target):
+ self.usage('<target-dir> is not a directory: ' + target)
+ else:
+ nodes = os.listdir(source)
+ regExtend = re.compile(r'\.(jpg|png|gif)$', re.I)
+ regDimension = re.compile(' (\d+)x(\d+) ')
+ for node in nodes:
+ src = source + os.sep + node
+ if regExtend.search(src) != None:
+ info = self._processHelper.executeInputOutput(['identify', src], None, False)
+ matcher = regDimension.search(info[0])
+ if matcher != None:
+ width = int(matcher.group(1))
+ height = int(matcher.group(2))
+ newWidth = None
+ newHeight = None
+ if width > height:
+ # landscape
+ if maxWidth != None and width > maxWidth:
+ newWidth = maxWidth
+ newHeight = int(newWidth * height / width)
+ else:
+ # portrait
+ if maxHeight != None and height > maxHeight:
+ newHeight = maxHeight
+ newWidth = int(newHeight * width / height)
+ if newWidth != None:
+ trg = target + os.sep + node
+ start = time.time()
+ oldSize = os.path.getsize(src)
+ self._processHelper.execute(['convert', src, '-resize', '{}x{}'.format(newWidth, newHeight), trg], None, False)
+ base.FileHelper.setModified(trg, os.path.getmtime(src))
+ newSize = os.path.getsize(trg)
+ duration = time.time() - start
+ self._logger.log('{} [{}x{}]: {:.1f}% ({:.3f} sec)'.format(node, width, height, newSize * 100 / oldSize, duration), 1)
+
+ def jobLargest(self, argv):
+ '''Find the n youngest files in one or more directories.
+ @param argv: options and directories
+ @return: None or an error message
+ '''
+ argv.append('--max-youngest=0')
+ argv.append('--max-smallest=0')
+ argv.append('--max-oldest=0')
+ rc = self.dirInfo(argv)
+ return rc
+
+ def jobOldest(self, argv):
+ '''Find the n oldest files in one or more directories.
+ @param argv: options and directories
+ @return: None or an error message
+ '''
+ argv.append('--max-largest=0')
+ argv.append('--max-smallest=0')
+ argv.append('--max-youngest=0')
+ rc = self.dirInfo(argv)
+ return rc
+
+ def jobSmallest(self, argv):
+ '''Find the n smallest files in one or more directories.
+ @param argv: options and directories
+ @return: None or an error message
+ '''
+ argv.append('--max-largest=0')
+ argv.append('--max-youngest=0')
+ argv.append('--max-oldest=0')
+ rc = self.dirInfo(argv)
+ return rc
+
+ def jobYoungest(self, argv):
+ '''Find the n youngest files in one or more directories.
+ @param argv: options and directories
+ @return: None or an error message
+ '''
+ argv.append('--max-largest=0')
+ argv.append('--max-smallest=0')
+ argv.append('--max-oldest=0')
+ rc = self.dirInfo(argv)
+ return rc
+
    def list(self, argv):
        '''Lists a directory or a container (tar archive...).
        Aggregates the statistics of all sources into one DirInfo instance and
        stores/prints a two line summary.
        @param argv: the program arguments, e.g. ['/home', '--max-depth=7']
        @return: the summary lines (list of strings)
        '''
        argv, options = self.splitArgsAndOpts(argv)
        opts = ProgramOptions(self)
        opts.parseOptions(options)
        info = None  # NOTE(review): unused local
        if len(argv) == 0:
            # no source given: list the current directory
            argv = ['.']
        dirInfo = base.FileHelper.DirInfo()
        for source in argv:
            container = None
            aClass, subClass = base.FileHelper.fileClass(source)
            if aClass == 'container':
                if subClass == 'tar' or subClass == 'tgz' or subClass == 'tbz':
                    container = TarContainer(source, opts)
                elif subClass == 'dir':
                    container = DirectoryContainer(source, opts)
                else:
                    # NOTE(review): error() is called printf-style here while the
                    # other calls pass a single string -- confirm the logger supports it
                    self._logger.error('unknown archive type: {} / {}', subClass, source)
            else:
                self._logger.error('not a directory or an archive: ' + source)
            if container != None:
                # all containers share one DirInfo: the counters are accumulated
                container._dirInfo = dirInfo
                container.traverse('', 0)
        result = ['Directories: {} Files: {} / {}'.format(dirInfo._dirCount, dirInfo._fileCount, base.StringUtils.formatSize(dirInfo._fileSizes))]
        result.append('Ignored: {} file(s) / {} dir(s)'.format(dirInfo._ignoredFiles, dirInfo._ignoredDirs))
        base.BaseTool.setResult(result)
        self._rawOutput = '\n'.join(result)
        if self._verboseLevel > 0:
            print(self._rawOutput)
        return result
+
+ def listFiles(self):
+ '''Print the file list of the found files.
+ '''
+ for ix in range(len(self._filenames)):
+ # statInfo, full, orderDateSize = True, humanReadable = True
+ base.FileHelper.listFile(self._infos[ix], self._filenames[ix], self._dateSize, self._humanReadableSize)
+
+ def snapshot(self, argv):
+ '''Duplicates a source directory tree into a target with hard links: needs space only for directory info (meta data).
+ @precondition: source and target must lay in a common filesystem to allow making hardlinks.
+ @param argv: the program arguments, e.g. ['/media/server/data', '/media/server/backup/Monday']
+ '''
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ else:
+ source = argv[0]
+ target = argv[1]
+ if not os.path.isdir(source):
+ self.usage('source is not a directory: ' + source)
+ elif not os.path.isdir(os.path.dirname(target)):
+ self.usage('parent of target is not a directory: ' + target)
+ elif os.path.isdir(target):
+ self.usage('target already exists: ' + target)
+ else:
+ process = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger)
+ process.execute(['/bin/cp', '-al', source, target], True, False)
+
+ def synchronize(self, argv):
+ '''Copies all changed/new files from a base directory to a "mirror".
+ But no file of the mirror will be deleted: they will be moved to a "safe".
+ @param argv: the program arguments, e.g. ['/media/data', '/media/backup', '/media/safe']
+ '''
+ if len(argv) < 3:
+ self.usage('too few arguments')
+ elif (not os.path.isdir(argv[0])):
+ self.usage('base is not a directory: ' + argv[0])
+ elif (not os.path.isdir(argv[1])):
+ self.usage('mirror is not a directory: ' + argv[1])
+ elif (not os.path.isdir(argv[2])):
+ self.usage('base is not a directory: ' + argv[2])
+ else:
+ try:
+ self.synchronizeOneDir(argv[0], argv[1], argv[2], '')
+ except Exception as exc:
+ self._logger.error('the mirroring has failed: ' + str(exc))
+ raise exc
+
    def synchronizeOneDir(self, baseDir, mirrorDir, safeDir, relPath):
        '''Copies all changed/new files from a baseDir directory to a "mirrorDir".
        But no file of the mirrorDir will be deleted: they will be moved to a "safeDir".
        Recurses into subdirectories existing on both sides.
        @param baseDir: the source directory
        @param mirrorDir: the target directory
        @param safeDir: the directory for "deleted" files from the target
        @param relPath: defines the effective paths to process: will be added to baseDir, mirrorDir and safeDir
            example: source/py
        '''
        baseFull = baseDir
        mirrorFull = mirrorDir
        safeFull = safeDir
        if relPath != '':
            baseFull += os.sep + relPath
            mirrorFull += os.sep + relPath
            safeFull += os.sep + relPath
        self._logger.log('processing ' + baseFull, 2)
        baseNodes = os.listdir(baseFull)
        mirrorNodes = os.listdir(mirrorFull)
        # move deleted files from mirrorDir to safeDir:
        for node in mirrorNodes:
            if node not in baseNodes:
                self.synchronizeMove(mirrorDir, safeDir, relPath, node)
        # copy new/changed files from baseDir to mirrorDir
        for node in baseNodes:
            source = baseFull + os.sep + node
            target = mirrorFull + os.sep + node
            if not os.path.exists(target):
                if os.path.isdir(source):
                    # refuse to copy a tree into itself (e.g. mirror nested in base)
                    if not base.FileHelper.distinctPaths(source, target):
                        self._logger.error('nested directories: {} / {} [{} / {}]'.format(
                            source, target, os.path.realpath(source), os.path.realpath(target)))
                    else:
                        shutil.copytree(source, target)
                else:
                    self._logger.log('{} -> {}'.format(source, target), 4)
                    shutil.copy2(source, target)
            else:
                if base.FileHelper.fileType(source) != base.FileHelper.fileType(target):
                    # the node changed its type (e.g. file -> dir): rescue the mirror entry
                    self.synchronizeMove(mirrorDir, safeDir, relPath, node)
                elif not os.path.isdir(source):
                    infoSource = os.lstat(source)
                    infoTarget = os.lstat(target)
                    # > 2 seconds difference counts as modified -- presumably to tolerate
                    # coarse filesystem timestamp granularity; TODO confirm
                    if abs(infoSource.st_mtime - infoTarget.st_mtime) > 2:
                        self.synchronizeMove(mirrorDir, safeDir, relPath, node)
                        self._logger.log('{} => {}'.format(source, target), 4)
                        shutil.copy2(source, target)
                        # verify the copy by comparing mtime and size again
                        infoTarget = os.lstat(target)
                        if infoSource.st_mtime != infoTarget.st_mtime or infoSource.st_size != infoTarget.st_size:
                            self._logger.error('copy failed: {}/{}: {}/{} {}/{}'.format(
                                relPath, node, infoSource.st_mtime, infoTarget.st_mtime,
                                infoSource.st_size, infoTarget.st_size))
                else:
                    prefix = relPath + os.sep if relPath != '' else ''
                    self.synchronizeOneDir(baseDir, mirrorDir, safeDir, prefix + node)
+
    def synchronizeMove(self, mirror, safe, relPath, node):
        '''Moves a file/directory from mirror to safe.
        @param mirror: the source directory
        @param safe: the target directory
        @param relPath: defines the effective paths (in mirror and safe)
        @param node: the node (filename without path) of the source and target file
        '''
        relPath2 = os.sep + relPath if relPath != '' else ''
        source = mirror + relPath2 + os.sep + node
        dirTarget = safe + relPath2
        if not os.path.exists(dirTarget):
            os.makedirs(dirTarget, 0o777)
        if not os.path.exists(dirTarget):
            self._logger.error('cannot create: ' + dirTarget)
        else:
            target = dirTarget + os.sep + node
            self._logger.log('moving {} => {}'.format(source, target), 3)
            if os.path.exists(target):
                # keep the already saved version: rename it with a timestamp suffix
                self._logger.error('target exists in safe: {} => {}'.format(source, target))
                os.rename(target, target + '.' + str(time.time()))
            if self._useRename:
                # same filesystem: a cheap rename is sufficient
                os.rename(source, target)
            elif os.path.isdir(source):
                # NOTE(review): the copy branches leave the source in the mirror --
                # presumably it is removed elsewhere; confirm
                shutil.copytree(source, target)
            else:
                shutil.copy2(source, target)
+
+ def testOrder(self, moreInteresting):
+ for ix in range(len(self._infos) - 1):
+ if not moreInteresting(self._infos[ix], self._infos[ix+1], True):
+ print(self._filenames[ix] + '\n' + self._filenames[ix + 1] + '\n')
+
def usage():
    '''Returns the help text of dirtool.
    Fixed: largest/oldest descriptions said "youngest", broken '-s' line,
    undocumented snapshot mode, and examples using nonexistent commands
    ('image-convert', 'latest'), a unittest leftover ('self._dir1') and
    a '--exlude' typo.
    '''
    return """usage: dirtool [<global_opts>] <mode> <args>
 Offers some services in directory trees.
 Note: a container is a directory or an archive (tar, zip).
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 build-examples
  prepares the system for executing the examples below
 check <dir1> [<dir2...]
  reads all files of the source directories (and its subdirectories) to find access problems
 compare <dir1> <dir2> <opts>
  compares two directory trees
  <opt>:
  --max-differences=<count>
   the search is stopped if <count> differences has been found
  --blocksize=<size>
   files are read in this chunk size. Default: 16 MiByte
  --exclude=<regular-expr>
   files and dirs matching this pattern are ignored (not compared)
  -i or --ignore-case
   used for --pattern and/or --exclude
  --ignore-time
   only different size or content will be counted as difference
  --pattern=<regular-expr>
   only files and dirs matching this pattern are compared
  -q or --quiet
   only the summary is displayed
  -s or --short
   the prefix of a difference notice is reduced to one char
  -t or --test-content
   the file content is inspected for differences
 image-resize <source-dir> <target-dir> [--max-width=<width>] [--max-height=<height>]
  resizes *.jpg or *.png images
 info <path1> [ <path2> ... ] <opts>
  displays a summary info about the given directories
 largest <path1> [ <path2> ... ]
  list the n largest files in the directory tree(s)
 list <container> [<container2> ...] <opt>
  show the metadata of the files of the container
  <opt>:
  --order-date-size
   the displayed data: <date> <size> <name> instead of <size> <date> <name>
  --byte-size or --mbyte-sizes
   the size is displayed in MBytes (instead of "human readable" with different units
 oldest <path1> [ <path2> ... ]
  list the n oldest files in the directory tree(s)
 smallest <path1> [ <path2> ... ] [--min-size=<min-size>]
  list the n smallest files (but >= min-size) in the directory tree
 snapshot <source-dir> <target-dir>
  duplicates the source tree into the target using hard links (both must lay in one filesystem)
 sync <base> <mirror> <safe>
  copy modified/new files from <base> to mirror.
  no file from mirror will be deleted/replaced: instead it will be moved to safe
 youngest [<opt_date>] <path1> [ <path2> ... ]
  list the n youngest files in the directory tree(s)
 <opts>:
  -n=<count> or --count=<count>
   number of displayed entries (replaces max-largest or max-youngest or max-oldest or max-smallest)
  -d or --dir-only
   the counters respects directory only
  -f or --file-only
   only files land in the youngest array
  --max-depth=<depth>
   the maximal nesting depth of directories. 0: only the called directory is inspected
  --max-largest=<count>
   the maximal length of the array for the largest files
  --max-oldest=<count>
   the maximal length of the array for the oldest files
  --max-smallest=<count>
   the maximal length of the array for the smallest files
  --max-youngest=<count>
   the maximal length of the array for the youngest files
  --pattern=<wildcard-expr>
   filename pattern, e.g. "*.png"
Example:
dirtool check /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2
dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2
dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 --ignore-time --short --test-content
dirtool compare /usr/share/pyrshell/examples/dir1 /usr/share/pyrshell/examples/dir2 -s --exclude=sub.*2 "--pattern=[^u]*[1-3]" --ignore-case
dirtool info /usr/share/pyrshell/examples/dir1 --max-largest=2 --max-youngest=3 --file-only
dirtool info /usr/share/pyrshell/examples/dir1 --count=0 --max-youngest=3 --max-oldest=4 --dir-only
dirtool -v4 largest /usr/share/pyrshell/examples/dir1 '--pattern=.*1.*'
dirtool -v4 youngest /usr/share/pyrshell/examples/dir1 --max-depth=0
dirtool -v4 oldest /usr/share/pyrshell/examples/dir1 --file-only
dirtool youngest /home/jonny -n5
dirtool -v3 list /usr/share/pyrshell/unittest/data/example.tgz --dir-only
dirtool -v3 list /usr/share/pyrshell/unittest/data --order-date-size --file-only -mbytes-size
dirtool -v3 image-resize /pic /out --max-width=1920 --max-height=1080
"""
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/dirtool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('dirtool', 'appl/DirTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = DirTool(options)
    (cmd, argv) = tool.handleStandardCommands(argv)
    # sub commands taking the remaining arguments:
    dispatcher = {
        'check': tool.check,
        'compare': tool.compare,
        'info': tool.dirInfo,
        'largest': tool.jobLargest,
        'list': tool.list,
        'oldest': tool.jobOldest,
        'smallest': tool.jobSmallest,
        'snapshot': tool.snapshot,
        'sync': tool.synchronize,
        'youngest': tool.jobYoungest,
        'image-resize': tool.imageResize,
    }
    if cmd == None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'build-examples':
        tool.buildExamples()
    elif cmd in dispatcher:
        dispatcher[cmd](argv)
    else:
        tool.usage("unknown command: " + cmd)
    base.BaseTool.setLatestTool(tool)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import sys
+import os.path
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.JavaConfig
+import net.FtpEngine
+
class FtpTool (base.BaseTool.BaseTool):
    '''Implements a command interpreter for FTP.
    '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: an instance of GlobalOptions; forwarded to BaseTool
            together with the configuration file name 'ftptool.conf'
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'ftptool.conf')
        # the FTP engine (net.FtpEngine.FtpEngine); set by setServer(), None while unconnected
        self._client = None

    def example(self):
        '''Builds an example configuration:
        the main file ftptool.conf.example and one server definition in ftp.d/.
        '''
        exampleConfig = self._configDir + os.sep + 'ftptool.conf.example'
        base.StringUtils.toFile(exampleConfig, 'log.file=/tmp/ftptool.log')
        print('created: ' + exampleConfig)
        subdir = self._configDir + os.sep + 'ftp.d'
        base.BaseTool.BasicStatics.ensureDirectory(subdir)
        exampleConfig = subdir + os.sep + 'example.conf'
        base.StringUtils.toFile(exampleConfig, 'host=hamatoma.de\nport=21\nuser=jonny\ncode=Secret\n')
        print('created: ' + exampleConfig)

    def setServer(self, name):
        '''Connects the tool to a configured FTP server.
        Reads <configDir>/ftp.d/<name>.conf and creates the FTP engine.
        On any error self._client stays None.
        @param name: the server name (basename of the configuration file)
        '''
        full = self._configDir + os.sep + 'ftp.d' + os.sep + name + '.conf'
        self._client = None
        if not os.path.exists(full):
            self._logger.error('server {:s} is not configured: Please create {}'.format(name, full))
        else:
            self._server = base.JavaConfig.JavaConfig(full, self._logger)
            host = self._server.getString('host')
            port = self._server.getString('port')
            user = self._server.getString('user')
            pw = self._server.getString('code')
            if host == None or port == None or user == None or pw == None:
                self._logger.error('missing FTP authority data for server {:s}. See {:s}'.format(name, full))
            else:
                try:
                    port2 = int(port)
                    self._client = net.FtpEngine.FtpEngine(host, port2, user, pw, self._logger, self._verboseLevel)
                except ValueError:
                    self._logger.error('port must be an integer: ' + port)
                except Exception as exc:
                    # port2 is always bound here: int(port) failures are caught above
                    self._logger.error('connection failed: {:s}:{:d} [{:s}]'.format(host, port2, str(exc)))

    def close(self):
        '''Frees the resources.
        '''
        if self._client != None:
            self._client.close()
+
def usage():
    '''Returns an info about the usage of ftptool.
    Fixed: the first help line ended with a stray '"' and the rmtree
    description started with a stray ': '.
    '''
    return """usage: ftptool <globalOpts> <task> [<server> [<arg1>...]]
GLOBAL_OPTS
GLOBAL_MODES
<server>:
 the name of a server which is defined in %etc%/ftp.d/<server>.conf
<task>:
 du (or diskusage) <server> [<startDir> [<depth>]]
  calculate file size (over all files and directories)
  <startDir>: start directory, default: '/'
  <depth>: only directories lower this depth will be displayed. default: 0
 info
  print welcome message and features
 compare <server> <ftpStart> <localStart>
  compare a local file tree with an ftp file tree
  <ftpStart>: ftp start directory
  <localStart>: local start directory
 lstree <server> <startDir1> [<startDir2>...]
  lists the directory infos of <startDirX>
 rmtree <server> [<startDir1> [<startDir2>...]]
  deletes one or more directories recursive
  <startDirN>: directories to remove
example:
ftptool du jonny / 3
ftptool info contabo
ftptool rmtree contabo /storage /jonny/trash
"""
+
def buildClient(configurationBase):
    '''Returns the path of the ftptool configuration file.
    Fixed: the path was computed but discarded (function had no effect).
    @param configurationBase: the configuration directory
    @return: the full path of 'ftptool.conf' inside configurationBase
    '''
    return configurationBase + os.sep + 'ftptool.conf'
+
def addServer(options, argv):
    '''Adds a server configuration to the configuration directory.
    Fixed: argv[4] was read already when len(argv) == 4 (IndexError), the port
    was never converted to int (dead ValueError handler), usage() was called
    with an argument although it takes none, and the written file lacked the
    '.conf' suffix which FtpTool.setServer() expects.
    @param options: an instance of GlobalOptions
    @param argv: the arguments, [<servername>, <host>, <user>, <password>[, <port>]]
    '''
    if len(argv) < 4:
        print('+++ missing argument(s): expected: <servername> <host> <user> <password> [<port>]')
        print(usage())
    else:
        configFile = options._configDir + os.sep + 'ftp.d' + os.sep + argv[0] + '.conf'
        try:
            port = 21 if len(argv) < 5 else int(argv[4])
            base.StringUtils.toFile(configFile, 'host={:s}\nport={}\nuser={}\ncode={}\n'.format(argv[1], port, argv[2], argv[3]))
            print('created: ' + configFile)
        except ValueError:
            print('+++ port must be an integer, e.g. 21, not ' + argv[4])
+
def main(argv):
    '''The main routine of ftptool.
    Fixed: the compare branch printed an undefined name 'server' and called
    'tool._client._client.compare' (one '_client' too many), the rmtree branch
    used 'tool.tool._client' (AttributeError), and a 'to few' typo.
    @param argv: the program arguments, e.g. ['/usr/local/bin/ftptool', 'du', 'contabo', '/']
    '''
    appInfo = base.BaseTool.ApplicationInfo('ftptool', 'appl/FtpTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = FtpTool(options)
    if len(argv) > 1:
        tool.setServer(argv[1])
    (cmd, argv) = tool.handleStandardCommands(argv)
    if tool._client == None:
        # not connected: setServer() has already logged the reason
        pass
    elif cmd == None:
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd in ['du', 'diskusage']:
        startDir = '/' if len(argv) <= 0 else argv[0]
        tool._client._printDepth = -1 if len(argv) <= 1 else int(argv[1])
        (size, files, dirs) = tool._client.diskUsage(startDir, 0)
        unit = 'm' if size < 1000000000 else 'g'
        size2 = size / 1000000000.0 if unit == 'g' else size / 1000000.0
        digits = '9' if unit == 'g' else '6'
        tool._logger.log(startDir + (": {:d} bytes [{:." + digits + "f} {:s}b] files: {:d} dirs: {:d}").format(size, size2, unit, files, dirs))
    elif cmd in ['info']:
        tool._client.serverInfo()
    elif cmd in ['compare']:
        if len(argv) < 2:
            tool.usage('too few arguments')
        elif not os.path.isdir(argv[1]):
            tool.usage('not a directory: ' + argv[1])
        else:
            (size, files, dirs) = tool._client.compare(argv[0], argv[1], 0)
            unit = 'm' if size < 1000000000 else 'g'
            size2 = size / 1000000000.0 if unit == 'g' else size / 1000000.0
            digits = '9' if unit == 'g' else '6'
            # report the compared ftp start directory
            print(argv[0] + (": {:d} bytes [{:." + digits + "f} {:s}b] files: {:d} dirs: {:d}").format(size, size2, unit, files, dirs))
    elif cmd in ['rmtree']:
        if len(argv) < 1:
            tool.usage('too few arguments: missing directory')
        else:
            for directory in argv:
                tool._client.removeTree(directory)
    elif cmd in ['lstree']:
        if len(argv) < 1:
            tool.usage('too few arguments: missing directory')
        else:
            if len(argv) < 2:
                depth = 9999
            else:
                try:
                    depth = int(argv[1])
                except ValueError:
                    tool._logger.error('depth must be an integer: ' + argv[1])
                    depth = -1
            if depth >= 0:
                tool._client._maxDepth = depth
                tool._client.listTree(argv[0], 0)
    tool.close()
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 27.04.2018
+
+@author: hm
+'''
+import sys
+import os.path
+import re
+import time
+import datetime
+import traceback
+import pwd
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.LinuxUtils
+import net.EMail
+import net.HttpClient
+import urllib
+
+IGNORE_CASE = re.I
class Property:
    '''A named value inside a configuration container.'''

    def __init__(self, name, value=None):
        '''Constructor.
        @param name: the property name
        @param value: the value (None, str, int or float)
        '''
        self._id = name
        self._value = value

    def copy(self, source):
        '''Copies the value from a template.
        @param source: the source, type: Property
        '''
        if source._value != None:
            self._value = source._value

    def dump(self, indent):
        '''Dumps a property.
        Fixed: the float format spec was missing its closing brace ('{:.3f')
        which raised a ValueError; removed a no-op self._id reassignment.
        @param indent: indent level
        @return: a string describing the property
        '''
        if self._value == None:
            value = '<None>'
        elif type(self._value) == float:
            value = '{:.3f}'.format(self._value)
        elif type(self._value) == int:
            value = str(self._value)
        else:
            value = self._value
        return ' ' * indent + self._id + ': ' + value + "\n"
+
class ObservedProperty:
    '''A property with warning and error limits which is watched by the monitor.'''

    def __init__(self, name, warnLimit, errorLimit, receivers):
        '''Constructor.
        @param name: the name
        @param warnLimit: a higher value raises a warning
        @param errorLimit: a higher value raises an error
        @param receivers: the receiver group to notify
        '''
        self._id = name
        self._warnLimit = warnLimit
        self._errorLimit = errorLimit
        self._receivers = receivers

    def copy(self, source):
        '''Takes over all non-None settings of a template.
        @param source: the template, type: ObservedProperty
        '''
        for attr in ('_warnLimit', '_errorLimit', '_receivers'):
            value = getattr(source, attr)
            if value != None:
                setattr(self, attr, value)

    def dump(self, indent):
        '''Describes the observed property as one text line.
        @param indent: indent level
        @return: a string describing the property
        '''
        return '{}{}: {} {} "{}"\n'.format(
            ' ' * indent, self._id, self._warnLimit, self._errorLimit, self._receivers._name)
+
class Container:
    '''Base class of the configuration containers (host, disk, raid, site ...).
    A container owns plain properties and observed properties (values with limits).
    '''

    def __init__(self, aType, name, parent, keyword=None):
        '''Constructor.
        @param aType: the container type: 'host', 'disk'
        @param name: the name of the container. '': the default container
        @param parent: type Monitor or another container
        @param keyword: the keyword in the configuration; None: lowercase of aType
        '''
        self._type = aType
        self._name = name
        self._properties = dict()
        self._observed = dict()
        self._parent = parent
        self._keyword = aType.lower() if keyword is None else keyword

    def copy(self, source):
        '''Takes over the observed properties found in a template container.
        @param source: the template, type: Container
        '''
        for key, prop in self._observed.items():
            if key in source._observed:
                prop.copy(source._observed[key])

    def dump(self, indent):
        '''Describes the container and its properties as text.
        @param indent: indent level
        @return: a string describing the container
        '''
        parts = [' ' * indent + self._type + ' "' + self._name + '":\n']
        for key in sorted(self._properties):
            parts.append(self._properties[key].dump(indent + 3))
        for key in sorted(self._observed):
            parts.append(self._observed[key].dump(indent + 3))
        return ''.join(parts)
+
class WebSite(Container):
    '''Container describing an observed web site (configuration keyword "site").'''
    def __init__(self, name, monitor):
        '''Constructor.
        @param name: name of the site
        @param monitor: the parent with type Monitor
        '''
        Container.__init__(self, 'WebSite', name, monitor, 'site')
        # the observed URL; empty until set from the configuration
        self._properties = {
            'url': Property('url', ''),
        }
+
class ReceiverGroup(Container):
    '''Container holding the notification receivers for warnings and errors.'''

    def __init__(self, name, monitor):
        '''Constructor.
        @param name: name of the group. '': the default group
        @param monitor: the parent with type Monitor
        '''
        Container.__init__(self, 'ReceiverGroup', name, monitor)
        # receiver addresses for each severity; empty until configured
        self._properties = {
            'warning': Property('warning', ''),
            'error': Property('error', '')
        }
+
class Disk(Container):
    '''Container describing an observed disk (filesystem) of a host.'''

    def __init__(self, name, host):
        '''Constructor.
        @param name: name of the disk, if empty the default disk is assumed
        @param host: the parent with type Host
        '''
        Container.__init__(self, 'Disk', name, host)
        if '' not in host._parent._hosts:
            # no default host exists yet: this disk acts as its own template
            defaultDisk = self
            receiverGroup = ReceiverGroup('', host._parent)
        else:
            # take the defaults from the equally named disk of the default host,
            # falling back to the default host's default disk
            if name in host._parent._hosts['']._disks:
                defaultDisk = host._parent._hosts['']._disks[name]
            else:
                defaultDisk = host._parent._hosts['']._disks['']
            receiverGroup = defaultDisk._properties['receivers']
        # default limits: warn at 80% usage, error at 90%
        self._observed = {
            'used': ObservedProperty('used', '80%', '90%', receiverGroup),
        }
        self._properties = {
            'receivers': receiverGroup
        }
        self.copy(defaultDisk)
+
class RaidDevice(Container):
    '''Container describing a software raid device (configuration keyword "raid").'''
    def __init__(self, name, host):
        '''Constructor.
        @param name: the device name, e.g. 'md0'
        @param host: the parent, type: Host
        '''
        Container.__init__(self, 'RaidDevice', name, host, 'raid')
        # raid devices inherit the receivers of their host
        receiverGroup = host._properties['receivers']
        self._properties = {
            'receivers': receiverGroup,
            'raidtype': Property('raidtype'),
            'members': Property('members'),
            'blocks': Property('blocks')
        }
class Host(Container):
    '''Container describing an observed host with its disks and raid devices.'''

    def __init__(self, name, monitor):
        '''Constructor.
        @param name: name of the host, if empty default host is assumed
        @param monitor: the parent with type Monitor
        '''
        Container.__init__(self, 'Host', name, monitor)
        receiverGroup = monitor._receiverGroups['']
        if name == '':
            # the default host gets its own default disk
            disk = Disk('', self)
        else:
            # NOTE(review): non-default hosts share the default host's default
            # disk object (no copy) -- confirm this aliasing is intended
            disk = monitor._hosts['']._disks['']
        self._disks = {
            '': disk
        }
        self._raids = dict()
        self._properties = {
            'address': Property('address'),
            'receivers' : receiverGroup,
            'interval' : Property('interval', 60)
        }
        # observed values with their default warn/error limits
        self._observed = {
            'load1': ObservedProperty('load1', 10, 20, receiverGroup),
            'load5': ObservedProperty('load5', 10, 20, receiverGroup),
            # NOTE(review): named 'load10' -- keep in sync with the config parser
            'load10': ObservedProperty('load10', 10, 20, receiverGroup),
            'processes': ObservedProperty('processes', 500, 1000, receiverGroup),
            'swap': ObservedProperty('swap', 100.01, 100.01, receiverGroup),
        }
        if name != '' and '' in monitor._hosts:
            self.copy(monitor._hosts[''])

    def copy(self, source):
        '''Copies the observed properties and disk settings from a source.
        @param source: the source, type: Host
        '''
        Container.copy(self, source)
        for name in self._disks:
            if name in source._disks:
                self._disks[name].copy(source._disks[name])

    def dump(self, indent):
        '''Dumps a host.
        @param indent: indent level
        @return: a string describing the host and its disks
        '''
        rc = "===\n" + Container.dump(self, 0)
        for disk in sorted(self._disks):
            rc += self._disks[disk].dump(indent + 3)
        return rc
+
class SmtpHost (Container):
    '''Container holding the SMTP account data for sending notification mails.'''

    def __init__(self, name, monitor):
        '''Constructor.
        @param name: the name of the smtp host. '': the default smtp host
        @param monitor: the parent, type: Monitor
        '''
        Container.__init__(self, 'SmtpHost', name, monitor)
        self._properties = {
            'host': Property('host'),
            'port' : Property('port', '587'),   # default: the SMTP submission port
            'sender' : Property('sender'),
            'user' : Property('user'),
            'code' : Property('code'),          # presumably the password -- confirm
            'tls' : Property('tls', 'True')
        }
+
+class Monitor (base.BaseTool.BaseTool):
+ '''Implements a monitor for hosts to detect service faults.
+ '''
+
+ def __init__(self, globalOptions, additionalConfigDir = None):
+ '''Constructor.
+ @param globalOptions: a instance of GlobalOptions
+ '''
+ config = '/etc/pyrshell/monitor.d' if additionalConfigDir == None else additionalConfigDir
+ base.BaseTool.BasicStatics.ensureDirectory(config)
+ base.BaseTool.BaseTool.__init__(self, globalOptions, 'pymonitor.conf', False, config)
+ self._start = time.time()
+ self._loops = 0
+ self._openProblems = dict()
+ self._openProblems = []
+ # self._configDir = dirConfiguration
+ self._reloadRequestFile = '/tmp/{}.reload.request'.format(globalOptions._appInfo._applicationName)
+ self._readAllConfigs()
+ self._lastStatus = dict()
+
+ def _readAllConfigs(self):
+ self._sites = dict()
+ self._hosts = dict()
+ self._smtpHosts = dict()
+ self._receiverGroups = dict()
+ self._receiverGroups[''] = ReceiverGroup('', self)
+ defaultConf = self._additionalConfigDir + os.sep + 'default.conf'
+ if os.path.exists(defaultConf):
+ self.readConfigurationFile(defaultConf)
+ else:
+ self._hosts[''] = Host('', self)
+ nodes = os.listdir(self._additionalConfigDir)
+ for node in nodes:
+ if node.endswith('.conf') and node != 'default.conf' and node != 'pymonitor.conf':
+ self.readConfigurationFile(self._additionalConfigDir + os.sep + node)
+
+ def checkReload(self):
+ '''Tests whether a reload request exists.
+ If yes the configuration will be read again.
+ '''
+ fn = self._configDir + '/reload.request'
+ if os.path.exists(self._reloadRequestFile):
+ self.reload()
+ os.unlink (self._reloadRequestFile)
+ if os.path.exists(self._reloadRequestFile):
+ self._logger.error('cannot delete ' + fn)
+
    def createSiteServer(self, argv):
        '''Creates the configuration for an observed site on the server side:
        a document root with marker files and a nginx/apache virtual host
        which is enabled via symlink.
        @param argv: the program arguments, e.g. ['--nginx']
        '''
        webserver = 'nginx'
        argv, options = self.splitArgsAndOpts(argv)
        ip = None
        domain = None
        overwrite = False
        for opt in options:
            if opt == '--nginx':
                webserver = 'nginx'
            elif opt == '--apache':
                webserver = 'apache'
            elif opt == '--overwrite':
                overwrite = True
            elif opt.startswith('--ip='):
                ip = opt[5:]
            elif opt.startswith('--domain='):
                domain = opt[9:]
            else:
                self.usage('unknown option: ' + opt)
        if domain == None:
            # fall back to the full qualified hostname
            domain = base.BaseTool.BasicStatics.hostname(True)
        if ip == None:
            ip = self.publicIp()
            if ip == None:
                self.usage('cannot detect public ip. Please use the --ip=<ip> option')
        baseDir = self.getTarget('/var', 'www' + os.sep + domain)
        self.ensureDirectory(baseDir)
        # marker files served by the observed site
        fn = baseDir + os.sep + 'index.html'
        base.StringUtils.toFile(fn, '<html><body><p>Ups. Verirrt?</p></body></html>\n')
        fn = baseDir + os.sep + 'index.php'
        base.StringUtils.toFile(fn, '<?php\necho "<html><body><p>Ups. Verirrt?</p></body></html>";\n')
        fn = baseDir + os.sep + 'domain.txt'
        base.StringUtils.toFile(fn, domain + '\n')
        if webserver == None:
            # NOTE(review): unreachable -- webserver is initialized to 'nginx' above
            if os.path.isdir('/etc/nginx'):
                webserver = 'nginx'
            elif os.path.isdir('/etc/apache2'):
                webserver = 'apache'
        if webserver == 'nginx':
            available = self.getTarget('/etc/nginx', 'sites-available')
            enabled = os.path.dirname(available) + os.sep + 'sites-enabled'
            fn = available + os.sep + domain
            if os.path.exists(fn) and not overwrite:
                self.usage('{} exists. Use --overwrite to overwrite'.format(fn))
            # '{' and '}' are passed as format arguments to get literal braces
            base.StringUtils.toFile(fn, '''server {}
    listen 80;
    server_name {} {};
    root {};
    location / {}
        allow all;
    {}
{}
'''.format('{', domain, ip, baseDir, '{', '}', '}'))
            linkTarget = enabled + os.sep + domain
            if os.path.islink(linkTarget):
                self._logger.log('deleting ' + linkTarget, 2)
                os.unlink(linkTarget)
            self._logger.log('creating symlink ' + linkTarget, 2)
            os.symlink('../sites-available/' + domain, linkTarget)
            if self._isRoot:
                self._processHelper.execute(['/bin/systemctl', 'reload', 'nginx'], True)
        elif webserver == 'apache':
            available = self.getTarget('/etc/apache2', 'sites-available')
            enabled = os.path.dirname(available) + os.sep + 'sites-enabled'
            fn = available + os.sep + domain + '.conf'
            if os.path.exists(fn) and not overwrite:
                self.usage('{} exists. Use --overwrite to overwrite'.format(fn))
            base.StringUtils.toFile(fn, '''<VirtualHost *:80>
    ServerName {}
    ServerAlias {}
    ServerAdmin webmaster@localhost
    DocumentRoot {}
    <Directory {}>
        AllowOverride all
        Require all granted
        Order allow,deny
        allow from all
    </Directory>
</VirtualHost>
'''.format(domain, ip, baseDir, baseDir))
            linkTarget = enabled + os.sep + domain + '.conf'
            if os.path.islink(linkTarget):
                self._logger.log('deleting ' + linkTarget, 2)
                os.unlink(linkTarget)
            self._logger.log('creating symlink ' + linkTarget, 2)
            os.symlink('../sites-available/' + domain + '.conf', linkTarget)
            fn = baseDir + os.sep + 'index.html'
            if self._isRoot:
                self._processHelper.execute(['/bin/systemctl', 'reload', 'apache2'], True)
        else:
            self.usage('unknown webserver: ' + webserver)
+
+ def dump(self):
+ '''Dumps a monitor.
+ @return: a string describing the monitor
+ '''
+ rc = ''
+ for group in sorted(self._receiverGroups):
+ rc += self._receiverGroups[group].dump(0)
+ for host in sorted(self._smtpHosts):
+ rc += self._smtpHosts[host].dump(0)
+ for host in sorted(self._hosts):
+ rc += self._hosts[host].dump(0)
+ for site in sorted(self._sites):
+ rc += self._sites[site].dump(0)
+ return rc
+
+ def diskLimitReached(self, limit, total, free):
+ '''Tests whether a limit is reached.
+ @param limit: in bytes or as percent
+ @param total: total amount of disk bytes
+ @param free: free disk bytes
+ @return: True: limit is reached
+ '''
+ limitBytes = int (total * int(limit[0:-1]) / 100) if limit.endswith('%') else int(limit)
+ rc = limitBytes < int(total - free)
+ return rc
+
    def example(self):
        '''Writes example configuration files for pymonitor.
        Creates the base example, a 'default.conf' with receiver groups and
        SMTP data, and a host specific file derived from the current system
        state (disks, raids, load, swap).
        '''
        content = '''# Example config for pymonitor:
log=/var/log/local/pymonitor.log
'''
        self.storeExample(content)
        contentDefault = '''
receivergroup "" {
 warning: hm.neutral@gmx.de
 error: hm.neutral@gmx.de
}
receivergroup "std" {
 warning: hm.neutral@gmx.de
 error: hm.neutral@gmx.de
}
smtphost "" {
 host: smtp.gmx.de
 port: 587
 sender: hm.neutral@gmx.de
 user: hm.neutral@gmx.de
 code: sEcReT
 tls: True
}
host "" {
 receivers: std
 disk "" {
 receivers: std
 used: 85% 90%
 }
}
'''
        self.storeExample(contentDefault, 'default.conf', self._additionalConfigDir)
        # note: str.replace() is used below instead of format() because the
        # template itself contains literal braces
        content = '''host "{}" {
address: localhost
interval: 60
'''
        host = base.BaseTool.BasicStatics.hostname(False)
        content = content.replace('{}', host)
        diskInfos = base.LinuxUtils.diskFree()
        diskDescription = ''
        for info in diskInfos:
            # info: [ name, total, free ]
            total = 1 if info[1] == 0 else info[1]  # avoid division by zero
            # limits halfway between the current usage and 100% (and between
            # the warning limit and 100% for errors)
            warnLimit = (100 * (total - info[2]) / total + 100) / 2
            errorLimit = (warnLimit + 100) / 2
            diskDescription += '\tdisk "' + info[0] + '" {\n'
            diskDescription += '\t\tused: {:.0f}% {:.0f}%\n'.format(warnLimit, errorLimit)
            diskDescription += '\t}\n'
        content += diskDescription
        infos = base.LinuxUtils.mdadmInfo()
        for info in infos:
            # [name, raidType, members, blocks, status
            content += '\traid "' + info[0] + '" {\n'
            content += '\t\traidtype: ' + info[1] + '\n'
            content += '\t\tmembers: ' + info[2] + '\n'
            content += '\t\tblocks: {:d}\n'.format(info[3])
            content += '\t}\n'

        # load limits: 10 / 20 times the current load averages
        infos = base.LinuxUtils.load()
        content += '\tload1: {:.1f} {:.1f}\n'.format(infos[0] * 10, infos[0] * 20)
        content += '\tload5: {:.1f} {:.1f}\n'.format(infos[1] * 10, infos[1] * 20)
        content += '\tload10: {:.1f} {:.1f}\n'.format(infos[2] * 10, infos[2] * 20)
        content += '\tprocesses: {:.0f} {:.0f}\n'.format(int(infos[4] * 1.1), int(infos[4] * 2))
        infos = base.LinuxUtils.memoryInfo()
        total = infos[2] if infos[2] != 0 else 1  # avoid division by zero
        # NOTE(review): assumes infos[3] is the used swap in bytes — confirm
        # against base.LinuxUtils.memoryInfo()
        swapUsage = (100 + infos[3] * 100 / total) / 2
        content += '\tswap: {}% {}%\n'.format(swapUsage, (swapUsage+100)/2)
        content += '}\n'
        self.storeExample(content, host + '.conf', self._additionalConfigDir)
+
+ def getLocalHost(self):
+ '''Returns the name of the host describing the localhost.
+ @return: the name of the local host
+ '''
+ rc = None
+ for host in self._hosts:
+ if host != '':
+ if host == 'localhost':
+ rc = host
+ else:
+ rc = host
+ break
+ return rc
+
    def observe(self, hostname='localhost'):
        '''Observes a host and return the new errors/warnings.
        @param hostname: name of the host to observe: used for the configuration
        @return: a tuple (newErrors, closedErrors)
        '''
        self._logger.log('observe...', 3)
        self._currentHostname = hostname
        # collect the messages of all detectors
        rc = self.observeCore(hostname) + self.observeDisks(hostname) + self.observeRaid(hostname)
        rc += self.observeSites(hostname)
        currentProblems = dict()
        newProblems = dict()
        closedProblems = []
        for message in rc:
            # message layout: notice_type|property_key|message|receivers|time
            parts = message.split('|')
            key = parts[0] + ':' + parts[1]
            if key in self._openProblems:
                # already known: keep the original (older) message
                currentProblems[key] = self._openProblems[key]
            else:
                newProblems[key] = message
        # problems which are no longer reported are closed now
        for key in self._openProblems:
            if key not in newProblems and key not in currentProblems:
                closedProblems.append(self._openProblems[key])
        self._openProblems = currentProblems
        self._openProblems.update(newProblems)
        return (newProblems.values(), closedProblems)
+
    def observerBuildMessage(self, section, location, isWarning, limitType, current, observed):
        '''Builds the message used for the transporting all infos about an error/warning.
        @param section: identifies the caller, e.g. 'core'
        @param location: the part of the message specific for the caller, e.g. 'core detection'
        @param isWarning: False: describes an error; None: describes a status ('S') message
        @param limitType: load1,load5,load10 or swap
        @param current: the current value
        @param observed: the property describing the raised limit, type: ObservedProperty
        @return: list of notice info: notice_type|property_key|message|receivers|time, 'E|localhost:disk:/home|disk usage: free: 0 of 512.000 GiByte
        '''
        receivers = ''
        if observed == None:
            limit = ''
        else:
            limit = observed._warnLimit if isWarning else observed._errorLimit
            if observed._receivers != None:
                receivers = observed._receivers._properties['warning' if isWarning else 'error']._value
        # first field: 'S' for status, 'W' for warning, 'E' for error
        msg = 'S' if isWarning == None else ('W' if isWarning else 'E')
        msg += '|{}:{}:{}|{} '.format(self._currentHostname, section, limitType, location)
        # NOTE(review): for a status message (isWarning == None) this appends
        # the word 'error' — confirm whether that is intended
        msg += 'warning' if isWarning else 'error'
        msg += ' ' + str(current)
        if limit != None and limit != '':
            msg += ' Limit: ' + str(limit)
        # append receivers and a timestamp (seconds, no fraction)
        msg += '|' + receivers + '|' + "{:.0f}".format(time.time())
        return msg
+
    def observeCore(self, hostname='localhost'):
        '''Checks whether a load state and swap usage should be noticed (warning or error).
        @param hostname: name of the host to observe: used for the configuration
        @return: list of notice info: notice_type|property_key|message|receivers|time
        e.g. 'E|localhost:load5|load is to heavy: 5.10|a@bc.de|147382902.3928302
        '''
        def buildMessage(isWarning, limitType, current):
            # delegates to the generic builder; 'host' is bound in the enclosing scope
            return self.observerBuildMessage('core', 'core detection', isWarning, limitType, current, host._observed[limitType])

        def checkOne(rc, current, limitType):
            # appends an error or warning to rc if a limit is exceeded,
            # otherwise records an 'OK' status message in self._lastStatus
            if float(current) >= float(host._observed[limitType]._errorLimit):
                rc.append(buildMessage(False, limitType, current))
            elif float(current) >= float(host._observed[limitType]._warnLimit):
                rc.append(buildMessage(True, limitType, current))
            else:
                self._lastStatus[host._name + ':core:' + limitType] = buildMessage(None, limitType, current)

        self._logger.log('observeCore...', 3)
        rc = []
        if hostname in self._hosts:
            infos = base.LinuxUtils.load()
            host = self._hosts[hostname]
            checkOne(rc, infos[0], 'load1')
            checkOne(rc, infos[1], 'load5')
            checkOne(rc, infos[2], 'load10')
            checkOne(rc, infos[4], 'processes')
            infos = base.LinuxUtils.memoryInfo()
            total = infos[2] if infos[2] != 0 else 1  # avoid division by zero
            # NOTE(review): example() treats infos[3] * 100 / total as the swap
            # usage while here (total - infos[3]) is used — confirm whether
            # infos[3] holds 'free' or 'used' swap
            usagePercent = 0 if total <= 1 else (total - infos[3]) * 100 / total
            checkOne(rc, usagePercent, 'swap')

        return rc
+
    def observeDisks(self, hostname='localhost'):
        '''Checks whether a disk usage should be noticed (warning or error).
        @param hostname: name of the host to observe: used for the configuration
        @return: list of notice info: notice_type|property_key|message|receivers|time, 'E|localhost:disk:/home|disk usage: free: 0 of 512.000 GiByte
        '''

        def buildMessage(isWarning, diskName, total, free, limitType, observed):
            # guard against division by zero on an empty/unreadable filesystem
            total = 1 if total < 1E-6 else total
            current = 'free: {:.3f} GiByte ({:.1f}%) of {:.3f} GiByte'.format(free / 1024.0 / 1024 / 1024, free * 100.0 / total, total / 1024.0 / 1024 / 1024)
            return self.observerBuildMessage('disk', 'disk usage', isWarning, limitType, current, observed)

        self._logger.log('observeDisks...', 3)
        rc = []
        diskInfos = base.LinuxUtils.diskFree()
        if hostname in self._hosts:
            host = self._hosts[hostname]
            for info in diskInfos:
                # info: [ name, total, free ]
                if info[0] not in host._disks:
                    # disks without a configuration entry are deliberately ignored
                    # rc.append('E|'+ hostname + ':disk:' + info[0] + '|disk not found|' + host._properties['receivers']._properties['error']._value)
                    pass
                else:
                    disk = host._disks[info[0]]
                    if self.diskLimitReached (disk._observed['used']._errorLimit, info[1], info[2]):
                        msg = buildMessage(False, info[0], info[1], info[2], disk._name, disk._observed['used'])
                        self._logger.log(msg, 2)
                        rc.append(msg)
                    elif self.diskLimitReached (disk._observed['used']._warnLimit, info[1], info[2]):
                        msg = buildMessage(True, info[0], info[1], info[2], disk._name, disk._observed['used'])
                        self._logger.log(msg, 2)
                        rc.append(msg)
                    else:
                        # below both limits: record an 'OK' status message
                        self._lastStatus[host._name + ':disk:' + info[0]] = buildMessage(None, info[0], info[1], info[2], disk._name, disk._observed['used'])
        return rc
+
+ def observeRaid(self, hostname='localhost'):
+ self._logger.log('observeDisks...', 3)
+ '''Tests whether a raid is broken.
+ @return: list of notice info: notice_type|property_key|message|receivers|time,
+ e.g. 'E|localhost:raid|raid is broken|a@bc.de|147382902.3928302'
+ '''
+
+ def buildMessage(isWarning, limitType, raid, message):
+ if raid._properties['members']._value != None:
+ message += ' members: ' + raid._properties['members']._value
+ if raid._properties['blocks']._value != None:
+ message += ' blocks: ' + raid._properties['blocks']._value
+ return self.observerBuildMessage('raid', 'type: {} {}'.format(raid._properties['raidtype']._value, message), isWarning, limitType, '', None)
+
+ self._logger.log('observeRaid...', 3)
+ infos = base.LinuxUtils.mdadmInfo()
+ host = self._hosts[hostname]
+ raidMap = dict()
+ rc = []
+ for info in infos:
+ raidMap[info[0]] = info
+ for name in host._raids:
+ raid = host._raids[name]
+ if raid._name not in raidMap:
+ rc.append(buildMessage(False, raid._name, raid, 'missing raid device'))
+ else:
+ [name, raidType, members, blocks, status] = raidMap[raid._name]
+ if status == 'recover':
+ rc.append(buildMessage(True, raid._name, raid, 'rebuilding the raid'))
+ elif status == 'broken':
+ rc.append(buildMessage(False, raid._name, raid, 'raid is broken'))
+ else:
+ self._lastStatus[host._name + ':raid:' + raid._name] = buildMessage(None, raid._name, raid, 'OK')
+ return rc
+
+ def observeSites(self, hostname):
+ '''Tests whether a site is reachable.
+ @param hostname: the hostname of 'localhost'
+ @return: list of notice info: notice_type|property_key|message|receivers|time,
+ e.g. 'E|localhost:site:https://wiki.example.com|site is not reachable|a@bc.de|147382902.3928302'
+ '''
+ if self._verboseLevel >= 3:
+ self._logger.log('observeSites...')
+ rc = []
+ for site in self._sites:
+ url = self._sites[site]._properties['url']._value
+ client = net.HttpClient.HttpClient(self._verboseLevel, self._logger)
+ content = client.getContent(url, 1)
+ # 404 returns an empty bytes object
+ if content == None or content == '' or content == b'' or content.find(b'404') >= 0:
+ receivers = self._hosts[hostname]._properties['receivers']._properties['error']._value
+ msg = 'E|{}:sites:{}|site is not reachable|{}|{:.0f}'.format(hostname, site, receivers, time.time())
+ rc.append(msg)
+ else:
+ self._lastStatus['{}:sites:{}'.format(hostname, site)] = 'S|{}:sites:{}|site is running||{}'.format(hostname, site, time.time())
+ client.close()
+ return rc
+
+ def readConfigurationFile(self, filename):
+ '''Reads a configuration file.
+ @param filename: file to read
+ Example of a configuration file:
+ host "localhost" {
+ address: localhost;
+ disk "/" {
+ used: 80% 90%
+ }
+ load1: 3.0 5.0
+ }
+ '''
+
+ def _error(msg):
+ '''Puts an error with context information.
+ @param msg: the error message
+ '''
+ self._logger.error('{:s}-{:d}: {:s}\n{:s}'.format(filename, lineNo, msg, line))
+
+ def _checkEmails(emails):
+ '''Tests a list of emails.
+ @param emails: a string with email addresses separated by blanks
+ @return: True: emails correct
+ '''
+ rc = True
+ for item in emails.split():
+ if not reEmailAddress.match(item):
+ _error('invalid email address: ' + item)
+ rc = False
+ return rc
+
+ def _observedValue(name, value):
+ rc = value[0:-1] if name == 'swap' and value[-1] == '%' else value
+ return rc
+
+ with open(filename, 'r') as fp:
+ lineNo = 0
+ containerStack = []
+ currentContainer = None
+ reReceiverGroup = re.compile(r'receivergroup\s+"([^"]*)"\s*\{$')
+ reContainer = re.compile(r'(site|host|disk|raid|smtphost)\s+"([^"]*)"\s*\{$')
+ reProperty = re.compile(r'(\w+):\s*(.+)$')
+ reEmailAddress = re.compile(r'[-+\w.=!]+@[\w.]+[.][a-zA-z]+$')
+ for line in fp:
+ lineNo += 1
+ line = line.strip()
+ if line == '' or line.startswith('#'):
+ continue
+ if line == '}':
+ if currentContainer == None:
+ _error('unexpected "}"')
+ else:
+ containerStack.pop()
+ currentContainer = None if len(containerStack) == 0 else containerStack[-1]
+ continue
+ matcher = reContainer.match(line)
+ if matcher:
+ aType, name = matcher.group(1), matcher.group(2)
+ if aType == 'host' or aType == 'smtphost' or aType == 'site':
+ if currentContainer != None:
+ _error('nested definition of ' + aType)
+ else:
+ if aType == 'host':
+ currentContainer = Host(name, self)
+ self._hosts[name] = currentContainer
+ elif aType == 'site':
+ currentContainer = WebSite(name, self)
+ self._sites[name] = currentContainer
+ else:
+ currentContainer = SmtpHost(name, self)
+ self._smtpHosts[name] = currentContainer
+ containerStack.append(currentContainer)
+ elif aType == 'disk':
+ if currentContainer == None:
+ _error('disk not inside a host definition')
+ elif currentContainer._keyword != 'host':
+ _error('disk not inside a host definition. Found: ' + currentContainer._keyword)
+ else:
+ disk = Disk(name, currentContainer)
+ currentContainer._disks[name] = disk
+ currentContainer = disk
+ containerStack.append(disk)
+ elif aType == 'raid':
+ if currentContainer == None:
+ _error('raid not inside a host definition')
+ elif currentContainer._keyword != 'host':
+ _error('raid not inside a host definition. Found: ' + currentContainer._keyword)
+ else:
+ raid = RaidDevice(name, currentContainer)
+ currentContainer._raids[name] = raid
+ currentContainer = raid
+ containerStack.append(raid)
+ else:
+ _error('unknown container type: ' + aType)
+ continue
+ matcher = reProperty.match(line)
+ if matcher:
+ name, propValue = matcher.group(1), matcher.group(2)
+ if currentContainer == None:
+ _error('property outside a container')
+ elif name not in currentContainer._properties and name not in currentContainer._observed:
+ _error('unknown property ' + name)
+ else:
+ if name in currentContainer._properties:
+ if name == 'receivers':
+ if propValue in self._receiverGroups:
+ currentContainer._properties[name] = self._receiverGroups[propValue]
+ else:
+ _error('unknown receivergroup: ' + propValue)
+ elif (name != 'error' and name != 'warning') or _checkEmails(propValue):
+ currentContainer._properties[name]._value = propValue
+ if name == 'address' and propValue != 'localhost' and not propValue.startswith('127.'):
+ currentContainer._name = propValue
+ else:
+ values = propValue.split()
+ if len(values) == 1:
+ currentContainer._observed[name]._errorLimit = _observedValue(name, values[0])
+ currentContainer._observed[name]._receivers = currentContainer._properties['receivers']
+ elif len(values) == 2:
+ currentContainer._observed[name]._warnLimit = _observedValue(name, values[0])
+ currentContainer._observed[name]._errorLimit = _observedValue(name, values[1])
+ currentContainer._observed[name]._receivers = currentContainer._properties['receivers']
+ else:
+ if values[2] not in self._receiverGroups:
+ _error('unknown receivergroup: ' + values[2])
+ else:
+ currentContainer._observed[name]._warnLimit = _observedValue(name, values[0])
+ currentContainer._observed[name]._errorLimit = _observedValue(name, values[1])
+ currentContainer._observed[name]._receivers = self._receiverGroups[values[2]]
+
+ continue
+ matcher = reReceiverGroup.match(line)
+ if matcher:
+ if currentContainer != None:
+ _error('receiver group definition inside a container')
+ else:
+ group = ReceiverGroup(matcher.group(1), self)
+ self._receiverGroups[group._name] = group
+ currentContainer = group
+ containerStack.append(group)
+ continue
+ _error('unexpected input')
+ if len(containerStack) > 0:
+ _error('missing "}"')
+
    def reload(self):
        '''Reads the configuration again and sends a summary email.
        '''
        self._logger.log('reloading configuration...')
        self._readAllConfigs()
        hostname = base.BaseTool.BasicStatics.hostname(False)
        subject = hostname + ': reloaded'
        # uptime of the daemon in seconds
        duration = time.time() - self._start
        start = datetime.datetime.fromtimestamp(self._start)
        currentState = ''
        states = list(self._lastStatus.values())
        states.sort()
        for state in states:
            # state layout: notice_type|property_key|message|receivers|time
            info = state.split('|')
            currentState += info[1] + ': ' + info[2].replace(' error', '') + '\n'
        # body: uptime (days/hours/minutes), loop count, open problems, current state
        body = '{}\n\nRunning since: {} / {:.0f}d{:.0f}h{:.0f}m loops: {}\n\nOpen problems: {}\n\n{}\n\nCurrent state:\n{}\n'.format(subject,
            start.strftime('%Y.%m.%d-%H:%M:%S'),
            duration // 86400, duration % 86400 / 3600, duration % 3600 / 60,
            self._loops,
            len(self._openProblems), '\n'.join(self._openProblems),
            currentState)
        self.sendEmail(subject, body)
+
+ def reloadRequest(self):
+ '''Requests a reload of the configuration of the running service.
+ '''
+ base.StringUtils.toFile(self._reloadRequestFile, '')
+ os.chmod(self._reloadRequestFile, 0o666)
+ entry = pwd.getpwnam('pymonitor')
+ uid = self._configuration.getInt('uid', None if entry == None else entry.pw_uid)
+ if self._isRoot and uid != None:
+ os.chown(self._reloadRequestFile, uid, uid)
+
    def run(self, argv):
        '''A never ending loop with regulary started observervations.
        @param argv: program arguments: [<hostname> [<interval>]]
        <hostname> is used to find the matching configuration
        '''
        hostname = base.BaseTool.BasicStatics.hostname(False)
        interval=60
        if len(argv) > 0:
            hostname = argv[0]
        if len(argv) > 1:
            # NOTE(review): this value is unconditionally overwritten below by
            # the host's configured 'interval' property — confirm which wins
            interval = self.integerArgument(argv[1], 60)
        if hostname not in self._hosts:
            self._logger.error('unknown host: ' + hostname)
        else:
            try:
                # give the system some time to settle after service start
                time.sleep(10)
                host = self._hosts[hostname]
                interval = int(host._properties['interval']._value)
                if interval < 1:
                    interval = 1
                self._logger.log('starting loop (interval: {})...'.format(interval))
                # seconds since midnight
                lastClear = time.time() % 86400
                self._loops = 0
                while True:
                    self._loops += 1
                    current = time.time() % 86400
                    if current < lastClear:
                        # day has changed
                        # enforce new error messages on long term errors
                        self._openProblems.clear()
                    lastClear = current
                    # handle a pending reload request, if any
                    self.checkReload()
                    [newProblems, closedProblems] = self.observe(hostname)
                    for key in newProblems:
                        self.sendStatusEmail(key, True)
                    for key in closedProblems:
                        self.sendStatusEmail(key, False)
                    time.sleep(interval)
            except Exception:
                # log the full traceback: the daemon must not die silently
                exc_type, exc_value, exc_traceback = sys.exc_info()
                self._logger.error(''.join(traceback.format_exception(exc_type, exc_value, exc_traceback, 8)))
        self._logger.log('daemon finished')
+
    def sendStatusEmail(self, error, isProblem):
        '''Sends one email about a new or a recovered problem.
        @param error: the error information: notice_type|property_key|message|receivers|time
        @param isProblem: True: the error has been started False: the error has been finished
        '''
        [aType, key, message, recipients, theTime] = error.split('|')
        if len(recipients) > 0:
            # 'E' is an error; 'W' and 'S' are both rendered as 'warning'
            aType2 = 'error' if aType == 'E' else 'warning'
            aDate = datetime.datetime.fromtimestamp(int(float(theTime)))
            dateString = aDate.strftime('%d.%m.%Y %H:%M')
            # key layout: host:scope:item, e.g. 'localhost:disk:/home'
            [host, scope, dummy] = key.split(':')
            if isProblem:
                subject = '[PROBLEM] ' + key + ' is on ' + aType2
                # NOTE(review): the format arguments put the full key behind
                # 'Monitoring on' and the host behind 'Scope:' — looks swapped,
                # confirm the intended layout
                text = '''***** Service Monitoring on {:s} *****
Scope: {:s}
Info: {:s}
Service: {:s}
When: {:s}
'''.format(key, host, message, scope, dateString)
            else:
                subject = '[RECOVERY] ' + key + ' is ok'
                text = '''***** Service Monitoring on {:s} *****
Scope: {:s}
Recovered from: {:s}
Service: {:s}
When: {:s} - {:s}
'''.format(key, host, message, scope, dateString, datetime.datetime.now().strftime('%d.%m.%Y %H:%M'))
            # append the last known status of this key, if available
            if key in self._lastStatus:
                info = self._lastStatus[key].split('|')
                text += '''
current status: {}
from {}
'''.format(info[2], time.strftime('%Y.%m.%d-%H:%M:%S', time.localtime(float(info[4]))))
            self.sendEmail(subject, text, recipients)
+
+ def sendEmail(self, subject, text, recipients = None):
+ '''Sends an email.
+ @param subject: the subject of the email
+ @param isProblem: True: the errors have been started False: the errors have been finished
+ '''
+ email = net.EMail.EMail(subject, text)
+ smtp = self._smtpHosts['']
+ if recipients == None:
+ recipients = self._receiverGroups['']._properties['warning']._value
+ if recipients == None or recipients == '':
+ self._logger.error('missing recipients in sendEmail(): subject: {} text:\n{}'.format(subject, text))
+ else:
+ parts = recipients.split(' ')
+ email.setSmtpLogin(smtp._properties['host']._value, smtp._properties['port']._value,
+ smtp._properties['user']._value, smtp._properties['code']._value, smtp._properties['tls']._value == 'True')
+ sender = smtp._properties['sender']._value
+ cc = None if len(parts) < 2 else parts[1:]
+ #self._logger.debug('Email: To: {} CC: {} host: {} port: {} user: {} TLS: {}'.format(
+ # parts[0], '' if cc == None else ' '.join(cc),
+ # smtp._properties['host']._value, smtp._properties['port']._value, smtp._properties['user']._value,
+ # smtp._properties['tls']._value))
+ try:
+ email.sendTo(sender, parts[0], cc)
+ except Exception as exc:
+ smtpArgs = smtp._properties['host']._value + smtp._properties['port']._value, smtp._properties['user']._value + smtp._properties['code']._value + smtp._properties['tls']._value
+ self._logger.error('sendmail [{}] to {} failed: {}\n{}\n{}'.format(smtpArgs, recipients, str(exc), subject, text))
+ self._logger.debug('email sent to ' + recipients)
+
    def site(self, argv):
        '''Displays the configuration of an observed website.
        @param argv: program arguments, e.g. ['--scan']
        '''
        def _handleUrl(url, lines, logger):
            # probes the url via HEAD and appends a 'site' configuration
            # snippet to lines if the site answers
            if not url.startswith('http'):
                url = 'http://' + url
            client = net.HttpClient.HttpClient(self._verboseLevel, logger)
            url2 = client.handleRequest(url, 'HEAD', 10)
            if client._response != None and client._response.status >= 400 and client._response.status < 500:
                # retry with a probe page on client errors
                url += '/works'
                url2 = client.handleRequest(url, 'HEAD', 10)
            if client._response == None or client._response.status != 200:
                logger.error('site not available: ' + url)
            else:
                # derive the domain (without port) from the effective url
                parts = urllib.parse.urlparse(url2)
                domain = parts.netloc
                ix = domain.find(':')
                if ix > 0:
                    domain = domain[0:ix]
                output = 'site "' + domain + '" {\n\turl: ' + url2 + '\n}\n'
                logger.log(output)
                lines.append(output)

        def _scanNginx(filename, lines, logger):
            # extracts the best url (https preferred) from one NGINX site file
            # NOTE(review): IGNORE_CASE is not defined in this chunk —
            # presumably a module level alias of re.IGNORECASE; verify
            rexprPort = re.compile(r'^\s*listen\s+[sl\s]*(\d+)', IGNORE_CASE)
            rexprServer = re.compile(r'^\s*server_name\s+(.*);', IGNORE_CASE)
            with open(filename, "r") as fp:
                lastPort = 80
                lastIsSsl = False
                bestUrl = None
                url = None
                for line in fp:
                    matcher = rexprPort.match(line)
                    if matcher:
                        lastPort = int(matcher.group(1))
                        lastIsSsl = line.lower().find('ssl') > 0
                        continue
                    matcher = rexprServer.match(line)
                    if matcher:
                        names = matcher.group(1).split()
                        url = 'http'
                        # ports 4xx are treated as https variants too
                        isHttps = lastIsSsl or (lastPort >= 400 and lastPort < 500)
                        if isHttps:
                            url += 's'
                        # standard ports are omitted from the url
                        port = '' if lastPort == 80 or lastPort == 443 else ':' + str(lastPort)
                        url += '://' + names[0] + port
                        if bestUrl == None:
                            bestUrl = url
                        elif isHttps:
                            bestUrl = url
                            break
                if bestUrl:
                    _handleUrl(bestUrl, lines, logger)

        argv, options = self.splitArgsAndOpts(argv)
        done = False
        lines = []
        for opt in options:
            if opt == '--scan':
                # scan all enabled NGINX sites instead of explicit urls
                aDir = self.getSource('/etc/nginx/sites-enabled')
                if not os.path.isdir(aDir):
                    self.usage('--scan: not a directory: ' + aDir)
                files = os.listdir(aDir)
                for node in files:
                    _scanNginx(aDir + '/' + node, lines, self._logger)
                done = True
            else:
                self.usage('unknown option: ' + opt)
        if not done:
            if len(argv) < 1:
                self.usage('site: missing <url>')
            else:
                for url in argv:
                    _handleUrl(url, lines, self._logger)
        base.BaseTool.setResult(lines)
+
    def test(self, argv):
        '''Tests the configuration.
        Dumps the parsed configuration into a file and sends a test email.
        @param argv: program arguments
        '''
        dump = self.dump()
        fn = '/tmp/{}.test.dump.txt'.format(self._globalOptions._appInfo._applicationName)
        with open(fn, "w") as fp:
            fp.write(dump)
        print('=== dump stored in ' + fn)
        # notice_type|property_key|message|receivers|time
        receivers = self._receiverGroups['']._properties['warning']._value
        now = time.time()
        host = self.getLocalHost()
        msg = 'W|{}:testmessage:emailtest|This is only a test to check email sending capability|{}|{}'.format(host, receivers, now)
        self.sendStatusEmail(msg, False)
+
def usage():
    '''Returns an info about usage.
    @return: the usage message of the pymonitor application
    '''
    # fixed: the environment variable names read 'MONTITOR_*' (typo) while the
    # application exports MONITOR_*; also 'configuraton' -> 'configuration'
    return '''Usage: monitor <mode> [<opts>]
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 create-site-server [<opts>]
  creates the configuration for sites to observe
  <opt>:
  --domain=<domain>
   used for the virtual server, e.g. hawk.hamatoma.de
  --ip=<ip>
   the public ip, e.g. 217.0.3.99
  --nginx
   the configuration is created for the webserver NGINX. This is the default behaviour
  --overwrite
   the existing configuration will be overwritten
 daemon <opts>
  starts a never ending loop for monitoring
  <opts>:
  -v<verbose-level>
   -v<level> or --verbose-level=<level>
   Verbose level: 0: no tracing 1: summary 2: standard 3: fine Default: 0
  -c<dir> or --configuration-directory=<dir>
   configuration directory with *.conf files.
   Default: os.environ['MONITOR_CONFIG'] or '/etc/pyrshell/monitor.d'
  --host=<host>
   host to observe. Default: os.environ['MONITOR_HOST'] or <hostname>
  -l<file> or --log=<file>
   file for logging output.
   Default: os.environ['MONITOR_LOG'] or /var/log/local/<application>.log
  --application=<application>
   the name of the application. Default: pymonitor
 example [<file>]
  prints an example configuration file to a file or stdout
 install <opts>
  installs the daemon as a systemd service
  <opts>:
  --application=<application>
   the name of the application. Default: pymonitor
  --user=<user>
   the daemon runs as this user. Default: <application> or 'pymonitor'
  --group=<group>
   the daemon runs under this group. Default: <application> or 'pymonitor'
  --host=<host>
   the name of the host used for the daemon. Must be defined in configuration
   Default: localhost
  --no-auto-start
   the service does not start at boot time (systemctl disable <application>)
  -l<file> or --log=<log>
   file for logging output (of the daemon).
   Default: os.environ['MONITOR_LOG'] or /var/log/local/<application>.log
 uninstall <opts>
  --purge
   remove configuration files too
  --application=<application>
   the name of the application. Default: 'pymonitor'
  --hold-user
   the user will not be deleted.
   Note: the user will be deleted only if its name is equal to the application
  --hold-group
   the user will not be deleted
   Note: the group will be deleted only if its name is equal to the application
 site <url1> [<url2> ...]
  prints the configuration for <urlN>
  <urlN>:
   the universal resource locator, e.g. https://wiki.hamatoma.de
 site --scan
  prints configuration filtered from NgInx configuration files in <directory>
   Default: '/etc/nginx/sites-enabled'
 test <opts>
  tests the configuration files.
  <opts>:
  -c<dir> or --configuration-directory=<dir>
   configuration directory with *.conf files.
   Default: os.environ['MONITOR_CONFIG'] or '/etc/pyrshell/monitor.d'
  -l<file> or --log=<file>
   file for logging output. Default: /var/log/local/<application>.log
  --email=<email>
   email address for email sending test.
   Default: warning part of the default ReceiverGroup
Examples:
pymonitor -v3 daemon
pymonitor reload
pymonitor -v3 sites --scan
pymonitor -v3 create-site-server --ip=208.33.99.5 --domain=gustl.example.com --overwrite
pymonitor -v3 create-site-server --apache --overwrite
pymonitor -v3 site http://life.sky.infeos.de/domain.txt
'''
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/pymonitor', 'run']
    '''
    serviceInfo = base.BaseTool.ServiceInfo('pymonitor', 'pymonitor', 'pymonitor',
        'A monitor for system resources, e.g. disk usage.', 'MONITOR', None)
    appInfo = base.BaseTool.ApplicationInfo('pymonitor', 'appl/Monitor.py', usage, serviceInfo)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = Monitor(options)
    # started without arguments by the service manager: run the daemon
    if len(argv) == 0 and 'MONITOR_APPL' in os.environ:
        argv = ['daemon']
    (cmd, argv) = tool.handleStandardCommands(argv)
    # map each subcommand to its handler
    dispatch = {
        'create-site-server': lambda: tool.createSiteServer(argv),
        'daemon': lambda: tool.run(argv),
        'site': lambda: tool.site(argv),
        'test': lambda: tool.test(argv),
        'reload': lambda: tool.reloadRequest(),
    }
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd in dispatch:
        dispatch[cmd]()
    else:
        tool.usage("unknown subcommand: " + cmd)
    tool._finish()
+
# Script entry point: delegate to main() with the raw program arguments.
if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import datetime
+import sys
+import os.path
+import stat
+import subprocess
+import tempfile
+import gzip
+import traceback
+import re
+import shutil
+import fnmatch
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.JavaConfig
+import base.Logger
+import base.BaseTool
+import net.EMail
+import appl.BackupBase
+import base.ProcessHelper
+import appl.TextTool
+
class GroupInfo:
    '''Holds one entry of a group database together with optional comment lines.'''

    def __init__(self, name, gid, groupLine):
        '''Constructor.
        @param name: the group name
        @param gid: the numeric group id
        @param groupLine: the full line as found in the group file
        '''
        self._name = name
        self._gid = gid
        self._groupLine = groupLine
        # optional list of comment lines preceding the entry
        self._comments = None

    def asText(self):
        '''Returns the entry as a text block.
        @return: the comment lines (if any) followed by the group line, newline terminated
        '''
        header = '' if self._comments is None else '\n'.join(self._comments) + '\n'
        return header + self._groupLine + '\n'
+
class UserInfo:
    '''Holds one entry of a passwd database together with optional comment lines.'''

    def __init__(self, name, uid, passwdLine):
        '''Constructor.
        @param name: the user name
        @param uid: the numeric user id
        @param passwdLine: the full line as found in the passwd file
        '''
        self._name = name
        self._uid = uid
        self._passwdLine = passwdLine
        # optional list of comment lines preceding the entry
        self._comments = None

    def asText(self):
        '''Returns the entry as a text block.
        @return: the comment lines (if any) followed by the passwd line, newline terminated
        '''
        header = '' if self._comments is None else '\n'.join(self._comments) + '\n'
        return header + self._passwdLine + '\n'
+
class PartitionInfo:
    '''Describes one disk partition: device, UUID, filesystem type and label.'''

    def __init__(self, device, uuid, fsType, label):
        '''Constructor.
        @param device: the device, e.g. '/dev/sda1'
        @param uuid: the UUID of the partition
        @param fsType: the filesystem type, e.g. 'ext4'
        @param label: None or the label of the filesystem
        '''
        self._device = device
        self._uuid = uuid
        self._fsType = fsType
        self._label = label
        # the related operating system; fixed to 'linux' until detected otherwise
        self._osName = 'linux'
+
+
+class RestoreTool (appl.BackupBase.BackupBase):
+
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global program options, e.g. verbose level and log file
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'restoretool.conf')
        # configuration of the backup tool: needed to locate the backup archives
        self._backupConfiguration = base.JavaConfig.JavaConfig(self._configDir + '/backup.conf', self._logger)
        # helper for executing external processes
        self._processTool = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger)
        # helper for text file manipulation
        self._textTool = appl.TextTool.TextTool(self._globalOptions)
+
+ def basics(self, dirBackup, week=None, day=None):
+ '''Prepares the following restauration.
+ @param dirBackup: a directory con
+ @param week: a weekno: 0..3 (weekno mod 4): locates the weekly backup
+ @param day: 'Mon' ... 'Sun': locates the dayly backup
+ '''
+ base.BaseTool.BasicStatics.ensureDirectory('/opt/restore/etc_origin', self.logger)
+ base.BaseTool.BasicStatics.ensureDirectory('/opt/restore/etc', self.logger)
+ if not os.path.exists('/opt/restore/etc_origin/passwd'):
+ self.log('saving /etc to /opt/restore/etc_origin')
+ shutil.copy('/etc', '/opt/restore/etc_origin')
+
+ def btrFs(self, argv):
+ '''Executes the btrfs command.
+ @param argv: the arguments, e.g. ['create-subvol', 'cave', 'home']
+ '''
+ if len(argv) < 1:
+ self.usage('missing <what>')
+ else:
+ what = argv[0]
+ argv = argv[1:]
+ if what == 'create-fs':
+ self.btrFsCreateFs(argv)
+ elif what == 'create-subvol':
+ self.btrFsCreateSubvolume(argv)
+ elif what == 'create-snapshot':
+ self.btrFsCreateSnapshot(argv)
+ else:
+ self.usage('unknown <what>: ' + what)
+
+ def btrFsCreateFs(self, argv):
+ '''Creates a subvolume of a btrfs filesystem.
+ Steps:
+ if loop device: create image and loop device
+ create the rootMountpoint
+ create the automount interface for mounting
+ create-btrfs <device> <label> [<mount-path>] [--image=<path>:<size> ] [--force]
+ @param argv: the program arguments, e.g. ['/dev/loop33', 'fs.unittest', '/tmp/fs.unittest', '--size=150M']
+ '''
+ (argv, options) = self.splitArgsAndOpts(argv)
+ force = False
+ size = None
+ image = None
+ mediaDir = self.getTarget('/media')
+ for opt in options:
+ if opt.startswith('--image='):
+ arg = opt[8:]
+ expr = self.regExprCompile(r'^(.*):(\d+[gtm])$', 'btrFsCreateSubvolume()', False)
+ if expr != None:
+ matcher = expr.match(arg)
+ if matcher == None:
+ self.usage('wrong format {}. example: --image=/space/fs.img:100G'.format(arg))
+ else:
+ image, size = (matcher.group(1), matcher.group(2))
+ if size[-1].upper() == 'M' and int(size[0:-1]) < 115:
+ self.usage('invalid size: {} minimum is 115M'.format(size))
+ elif opt == '--force':
+ force = True
+ else:
+ self.usage('unknown option: ' + opt)
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ elif image != None and not argv[0].startswith('/dev/loop'):
+ self.usage('--image found, but device does not start with "/dev/loop": ' + argv[0])
+ else:
+ device = argv[0]
+ labelRoot = argv[1]
+ mountPoint = argv[2] if len(argv) > 2 else mediaDir + os.sep + labelRoot
+ if os.path.exists(mountPoint) and not force:
+ self.usage('{} already exists. Use --force if needed. '.format(mountPoint))
+ elif re.match('^[-\w._]+$', labelRoot) == None:
+ self.usage('illegal characters in subvolume: ' + labelRoot)
+ else:
+ if force and os.path.exists(mountPoint):
+ self._logger.log('try to create the btrfs volume {} which already exists (--force found)'.format(labelRoot))
+ else:
+ self.ensureDirectory(mountPoint)
+ if image != None and size != None:
+ self._processHelper.execute(['truncate', '--size=' + size, image], True)
+ script = self.getTarget('/etc/pyrshell/boot.d', 'btrfs.{}.sh'.format(labelRoot))
+ self._logger.log('creating {} ...'.format(script), 1)
+ base.StringUtils.toFile(script , '''#! /bin/bash
+losetup {} {}
+'''.format(device, image))
+ os.chmod(script, 0o755)
+ self._processHelper.execute(['losetup', device, image], True)
+ self._processHelper.execute(['/bin/bash', script], True)
+ etcDir = self.getTarget('/etc')
+ fn = etcDir + os.sep + 'auto.btrfs.' + labelRoot
+ base.FileHelper.ensureFileExists(fn, '', self._logger)
+ self._textTool.replaceOrInsert(['^' + mountPoint, '{}\t-fstype=btrfs\t:{}'.format(mountPoint, device), fn, '--create-if-missing', '--max-depth=0'])
+ master = etcDir + os.sep + 'auto.master'
+ writeToMaster = True
+ if not os.path.exists(master):
+ self._logger.error('missing ' + master)
+ if force:
+ base.FileHelper.ensureFileExists(master)
+ else:
+ writeToMaster = False
+ if writeToMaster:
+ self._textTool.replaceOrInsert(['.*' + fn, '/-\t' + fn, master, '--create-if-missing', '--max-depth=0'])
+ if self._isRoot:
+ self._processHelper.execute(['btrfs', 'quota', 'enable', mountPoint], True)
+ cmd = 'btrfs subvolume list MOUNT | cut -d" " -f2 | xargs "-I{}" -n1 btrfs qgroup "create 0/{}" ";"'.replace('MOUNT', mountPoint)
+ self._processHelper.execute([cmd], True, 'shell')
+ self._processHelper.execute(['systemctl', 'reload', 'autofs'], True)
+
+ def btrFsCreateSnapshot(self, argv):
+ '''Duplicates a subvolume as snapshot.
+ Steps:
+ @param argv: the program arguments, e.g. ['fs.system', 'home', '--mode=dayly', '-r']
+ '''
+ (argv, options) = self.splitArgsAndOpts(argv)
+ readonly = False
+ mode = None
+ for opt in options:
+ if opt.startswith('--mode='):
+ mode = opt[7:]
+ if mode not in ['dayly', 'weekly', 'monthly', 'date', 'now']:
+ self.usage('unknown mode: ' + mode + ' use dayly|weekly|monthly|date|now')
+ elif opt == '--read-only' or opt == '-r':
+ readonly = True
+ else:
+ self.usage('unknown option: ' + opt)
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ else:
+ labelRoot = argv[0]
+ labelSource = argv[1]
+ if len(argv) < 3:
+ if mode == None:
+ self.usage('missing <label-snapshot> or --mode')
+ labelSnapshot = None
+ else:
+ if mode != None:
+ self.usage('do not use <label-snapshot> and --mode at the same time')
+ else:
+ mode = 'special'
+ labelSnapshot = argv[2]
+ if mode != None:
+ now = datetime.datetime.now()
+ labelSnapshot = labelSource + '.'
+ if mode == 'dayly':
+ labelSnapshot += now.strftime('%a').lower()
+ elif mode == 'weekly':
+ labelSnapshot += str(int(now.strftime('%W')) % 4)
+ elif mode == 'monthly':
+ labelSnapshot += now.strftime('%m')
+ elif mode == 'date':
+ labelSnapshot += now.strftime('%Y.%m.%d')
+ elif mode == 'now':
+ labelSnapshot += now.strftime('%Y.%m.%d-%H-%M-%S')
+ elif mode == 'special':
+ labelSnapshot = argv[2]
+ else:
+ usage('unknown mode: ' + mode)
+ mountPath = self.getTarget('/media', 'snapshots/{}/{}/{}'.format(labelSource, mode, labelSnapshot))
+ self.ensureDirectory(os.path.dirname(mountPath))
+ mountSource = self.getTarget('/media/{}/{}'.format(labelRoot, labelSource))
+ if os.path.isdir(mountPath):
+ self._processHelper.execute(['btrfs', 'subvol', 'delete', mountPath], True)
+ args = ['btrfs', 'subvol', 'snapshot', mountSource, mountPath]
+ if readonly:
+ args.insert(3, '-r')
+ self._processHelper.execute(args, True)
+
+
+ def btrFsCreateSubvolume(self, argv):
+ '''Installs a btrfs filesystem.
+ @param argv: the program arguments, e.g. ['wk.fs', 'backup', '--size=100G']
+ '''
+ (argv, options) = self.splitArgsAndOpts(argv)
+ force = False
+ size = None
+ mountOptions = None
+ transfer = False
+ for opt in options:
+ if opt.startswith('--size='):
+ size = opt[7:]
+ if re.match(r'^\d+[gGtTmM$', size) == None:
+ self.usage('illegal size: {} example: 100G'.format(size))
+ elif opt.startswith('--options='):
+ mountOptions = opt[10:]
+ for item in mountOptions.split(','):
+ if item == 'compress=zlib' or item == 'compress=lzo':
+ pass
+ elif item == 'nodatacow':
+ pass
+ else:
+ self.usage('unknown mount option: ' + item)
+ elif opt == '--move-files':
+ transfer = True
+ elif opt == '--force':
+ force = True
+ else:
+ self.usage('unknown option: ' + opt)
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ else:
+ labelRoot = argv[0]
+ mountRoot = self.getTarget('/media', labelRoot)
+ labelSubVol = argv[1]
+ mountPointCreation = mountRoot + os.sep + labelSubVol
+ mountPoint2 = None if len(argv) < 3 else argv[2]
+ files = 0
+ if not transfer and mountPoint2 != None and os.path.exists(mountPoint2):
+ files = len(os.listdir(mountPoint2))
+ if not os.path.exists(mountRoot):
+ self.usage('root of btrfs not found: ' + labelRoot)
+ elif re.match('^[-\w._]+$', labelSubVol) == None:
+ self.usage('illegal characters in subvolume: ' + labelSubVol)
+ elif os.path.exists(mountPointCreation) and not force:
+ self.usage('subvolume already exists: ' + labelSubVol)
+ elif files > 0:
+ self.usage('{} files/dirs will be hidden (use --move-files)'.format(files))
+ else:
+ if force and os.path.exists(mountPointCreation):
+ self._logger.log('try to create a subvolume which already exists (--force found)')
+ self._processHelper.execute(['btrfs', 'subvolume', 'create', mountPointCreation], True)
+ if mountPoint2 != None:
+ dirOld = None
+ if transfer and os.path.isdir(mountPoint2):
+ dirOld = '{}.{}'.format(mountPoint2, int(time.time()))
+ os.rename(mountPoint2, dirOld)
+ self.ensureDirectory(mountPoint2)
+ fn = self.getTarget('/etc', 'fstab')
+ self._textTool.readCurrent(fn, True)
+ ix, matcher = self._textTool.currentFind2(re.compile(r'^([^# ]\S+)\s+{}\s'.format(mountRoot)))
+ if matcher == None:
+ self._logger.error('not found in /etc/fstab: ' + mountRoot)
+ else:
+ dev = matcher.group(1)
+ key = '{} {}'.format(dev, mountPoint2)
+ options = 'subvol={},defaults,noatime,space_cache,autodefrag'.format(labelSubVol)
+ if mountOptions != None:
+ options += ',' + mountOptions
+ self._logger.log('mount options: ' + options, 1)
+ line = '{} btrfs {} 0 0'.format(key, options)
+ self._textTool.currentInsertAnchored(line, key)
+ self._textTool.writeCurrent()
+ self._processHelper.execute(['mount', '-v', mountPoint2], True)
+ if dirOld != None:
+ for item in os.listdir(dirOld):
+ self._logger.log('moving {} to {}'.format(item, mountPoint2), 2)
+ shutil.move(dirOld + os.sep + item, mountPoint2 + os.sep + item)
+ def example(self):
+ example = '''# texttool example configuration
+log.file=/var/log/local/restoretool.log
+php.upload_max_filesize=624M
+php.max_file_uploads=102
+php.post_max_size=448M
+php.max_execution_time=900
+php.max_input_time=630
+'''
+ self.storeExample(example)
+
+ def expandIp(self, ip, template = None):
+ '''Expands an shortened ip to a valid IP address.
+ @param ip: the (shortened) ip
+ @param template: None or a template to build the IP address
+ @return: the valid ip
+ '''
+ if ip == None and template != None:
+ ip = '.'.join(template.split('.')[0:-1]) + '.1'
+ else:
+ parts = ip.split('.')
+ templ = None if template == None else template.split('.')
+ if len(parts) == 1:
+ if template == None:
+ ip = '10.10.10.' + ip
+ else:
+ ip = '.'.join(templ[0:-1]) + '.' + ip
+ elif len(parts) == 2:
+ if template == None:
+ ip = parts[0] + '.10.10.' + parts[1]
+ else:
+ ip = parts[0] + '.' + '.'.join(templ[1:-1]) + '.' + parts[1]
+ if re.match(r'^(\d{1,3}\.){3}\d{1,3}$', ip) == None:
+ self.usage('not a valid IP address: ' + ip)
+ return ip
+
+ def init(self, argv):
+ '''Executes the init command.
+ @param argv: the arguments, e.g. ['php']
+ '''
+ if len(argv) < 1:
+ self.usage('missing <what>')
+ else:
+ what = argv[0]
+ argv = argv[1:]
+ if what == 'etc':
+ self._logger.log('setting vm.swappiness...')
+ fn = self.createPath('/etc', 'sysctl.conf')
+ self._textTool.currentSimpleInsert(fn, '^vm.swappiness', 'vm.swappiness = 20', None)
+ elif what == 'dirs':
+ base.BaseTool.BasicStatics.ensureDirectory('/media/tmp')
+ base.BaseTool.BasicStatics.ensureDirectory('/media/trg')
+ base.BaseTool.BasicStatics.ensureDirectory('/media/src')
+ base.BaseTool.BasicStatics.ensureDirectory('/var/log/local')
+ base.BaseTool.BasicStatics.ensureDirectory('/var/cache/local')
+ elif what == 'linuxserver':
+ self.linuxServer()
+ elif what == 'local-bin':
+ tarNode = 'local_bin.tgz'
+ tar = '/tmp/' + tarNode
+ url = self._configuration.getString('url.download', 'https://public.hamatoma.de')
+ self._processTool.execute(['/usr/bin/wget', '-O', tar, url + '/' + tarNode], True)
+ self.restoreDirectoryByTar(tar, '/usr/local/bin', None, True, None, False)
+ elif what == 'apache':
+ self.installApache(argv)
+ elif what == 'nginx':
+ self.installNginx(argv)
+ elif what == 'php':
+ self.installPhp(argv)
+ elif what == 'mariadb':
+ packets = 'mariadb-common mariadb-server mariadb-client'.split(' ')
+ self._processTool.execute(['/usr/bin/apt-get', '-y', 'install' ] + packets, True)
+ elif what == 'cms':
+ packets = 'imagemagick php-redis redis-tools redis-server'.split(' ')
+ self._processTool.execute(['/usr/bin/apt-get', '-y', 'install'] + packets, True)
+ elif what == 'letsencrypt':
+ self.installLetsencrypt()
+ elif what == 'grub':
+ self.initGrub(argv)
+ else:
+ self.usage('unknown <what>: ' + what)
+
+ def installApache(self, argv):
+ '''Installs the Apache Webserver.
+ @param argv: program arguments, e.g. ['--ports=81,444']
+ '''
+ argv, options = self.splitArgsAndOpts(argv)
+ ports = [80, 443]
+ for opt in options:
+ if opt.startswith('--ports='):
+ if re.match(r'^\d+,\d+$', opt[8:]) == None:
+ self.usage('not 2 comma separated ports in: ' + opt)
+ else:
+ parts = opt[8:].split(',')
+ ports = [int(parts[0]), int(parts[1])]
+ if self._isRoot:
+ packets = 'apache2 libapache2-mod-php libapache2-mod-php5.6 libapache2-mod-php7.0 libapache2-mod-php7.1 libapache2-mod-php7.2 libapache2-mod-php7.3'
+ args = ['/usr/bin/apt-get', '-y', 'install' ] + packets.split(' ')
+ self._processTool.execute(args, True)
+ service = 'apache2'
+ fn = self.getFilenameOrCopy('/etc/apache2', 'ports.conf')
+ self._textTool.readCurrent(fn, True)
+ (start, end) = self._textTool.findRegion('^', True, '^<', False)
+ if start >= 0:
+ self._textTool.currentReplace(r'Listen\s', 'Listen {}'.format(ports[0]), None, False, start, end)
+ (start, end) = self._textTool.findRegion('<IfModule ssl_module>', True, '</IfModule', True)
+ if start >= 0:
+ self._textTool.currentReplace(r'\s*Listen\s', '\tListen {}'.format(ports[1]), None, False, start, end)
+ (start, end) = self._textTool.findRegion('<IfModule mod_gnutls.c>', True, '</IfModule', True)
+ if start >= 0:
+ self._textTool.currentReplace(r'\s*Listen\s', '\tListen {}'.format(ports[1]), None, False, start, end)
+ self._textTool.writeCurrent(fn)
+ if self._isRoot:
+ self._processTool.execute(['a2enmod', 'rewrite', service], True)
+ self._processTool.execute(['systemctl', 'enable', service], True)
+ self._processTool.execute(['systemctl', 'start', service], True)
+ self._processTool.execute(['systemctl', 'status', service], True)
+
    def initGrub(self, argv):
        '''Extends the grub/grub.cfg with switches to all other found grub.cfg files on other disks.
        Scans the block devices (blkid), inspects each mountable filesystem for
        a grub directory and inserts/replaces "=> ..." menu entries in grub.cfg.
        @param argv: arguments (supported option: --dry)
        '''
        argv, opts = self.splitArgsAndOpts(argv)
        dry = False
        for opt in opts:
            if opt == '--dry':
                dry = True
            else:
                self.usage('unknown option: ' + opt)
        fnGrubConfig = self.getTarget('/boot/grub', 'grub.cfg')
        if not os.path.exists(fnGrubConfig):
            self._logger.error('missing ' + fnGrubConfig)
        else:
            # look for block devices with mountable filesystems containing /boot/grub or /grub
            # unit test mode: the target is redirected away from /boot and the
            # blkid output comes from a prepared file instead of the command
            underTest = not fnGrubConfig.startswith('/boot')
            if underTest:
                fnDevices = self.getTarget('/boot/grub', 'blkid.out')
                lines = base.StringUtils.fromFile(fnDevices, '\n')
            else:
                self._processTool.executeInputOutput(['blkid'], None)
                lines = self._processTool._rawOutput.decode().split('\n')
            # blkid line format: /dev/sda1: LABEL="..." UUID="..." TYPE="..."
            regLine = re.compile(r'(/dev/\S+): (.*)')
            regLabel = re.compile(r'LABEL="([^"]+)"')
            regUUID = re.compile(r'\bUUID="([^"]+)"')
            regType = re.compile(r'\bTYPE="([^"]+)"')
            self._gptDisks = []
            sections = []
            for line in lines:
                matcher = regLine.match(line)
                if matcher != None:
                    dev = matcher.group(1)
                    # whole-disk entries with a GPT partition table: remember
                    # the disk (needed for the part_gpt module) and skip it
                    if line.find('PTTYPE="gpt"') >= 0:
                        self._gptDisks.append(dev)
                        continue
                    attr = matcher.group(2)
                    matcher = regLabel.search(attr)
                    label = None if matcher == None else matcher.group(1)
                    matcher = regUUID.search(attr)
                    uuid = None if matcher == None else matcher.group(1)
                    matcher = regType.search(attr)
                    fsType = None if matcher == None else matcher.group(1)
                    partInfo = PartitionInfo(dev, uuid, fsType, label)
                    if fsType == 'btrfs':
                        # btrfs: every subvolume may contain a grub directory
                        self.initGrubSearchBtrFs(sections, partInfo)
                    elif fsType != None and fsType != 'vfat' and fsType != 'ntfs':
                        found = True
                        if not underTest:
                            self.initGrubMount(partInfo, None)
                            found = self.initGrubIsBootDir(self._mountPoint)
                            # found may be None!
                            if found == True:
                                partInfo._osName = self.initGrubFindOsInfo(self._mountPoint)
                            self.initGrubUnmount()
                        if found:
                            self.initGrubAddMenuEntry(sections, partInfo, '')
            if len(sections) > 0:
                self._textTool.readCurrent(fnGrubConfig)
                # remove the menu entries of a former run ('menuentry "=>...')
                again = True
                while again:
                    start, end = self._textTool.findRegion(r'^menuentry "=>', True, r'menuentry|submenu', False)
                    # NOTE(review): 'start > 0' skips a match in the first line;
                    # other callers test 'start >= 0' - confirm the intent
                    again = start > 0
                    if again:
                        self._logger.log('removing ' + self._textTool._lines[start], 2)
                        self._textTool.removeRegion(start, end)
                # insert the new entries behind the first menuentry region
                start, end = self._textTool.findRegion(r'^menuentry', False, r'menuentry|submenu', False)
                self._textTool._lines.insert(end, '\n'.join(sections))
                if dry:
                    self._logger.log('no changes if --dry found')
                else:
                    self._textTool.writeCurrent(None, True)
+
+ def initGrubFindOsInfo(self, rootDir):
+ '''Tries to find out the OS installed on the given partition
+ @param rootDir: the name of the mount point of the partition
+ @return: 'linux' or the name of the installed operating system
+ '''
+ rc = 'linux'
+ etc = rootDir + os.sep + 'etc'
+ if os.path.isdir(etc):
+ nodes = os.listdir(etc)
+ for node in nodes:
+ if node.endswith('release'):
+ fn = etc + os.sep + node
+ if not os.path.isdir(fn):
+ osInfo = base.StringUtils.fromFile(fn, '\n')[0]
+ ix = osInfo.find('=')
+ if ix > 0:
+ osInfo = osInfo[ix+1:]
+ rc = osInfo.strip('"')
+ break
+ return rc
+
+ def initGrubAddMenuEntry(self, sections, partInfo, prefix):
+ '''Adds one menu entry to the array.
+ @param sections: IN/OUT: the list of menu entries
+ @param partInfo: the data of the partition
+ @param prefix: prefix of the path containing the grub directory, e.g. '/fs.system/boot'
+ '''
+ partName = 'part_msdos'
+ if len(self._gptDisks) > 0:
+ for disk in self._gptDisks:
+ if partInfo._device.startswith(disk):
+ partName = 'part_gpt'
+ break
+ fsName = 'btrfs' if partInfo._fsType == 'btrfs' else 'ext2'
+ section = '''menuentry "=> {} on {}" {}
+ insmod {}
+ insmod {}
+ search --no-floppy --fs-uuid --set=root {}
+ configfile {}/grub/grub.cfg
+{}
+'''.format(partInfo._osName, partInfo._device, '{', partName, fsName, partInfo._uuid, prefix, '}')
+ self._logger.log('{}: OS: {} label: {} type: {} location: {}/grub'.format(partInfo._device, partInfo._osName, partInfo._label, partInfo._fsType, prefix), 1)
+ sections.append(section)
+
+ def initGrubIsBootDir(self, path):
+ '''Tests whether a path has a grub directory or not.
+ @param path: path to inspect
+ @return: None: not a grub directory
+ True: the path contains a grub directory with prefix '/boot'
+ False: the path contains a grub directory without prefix '/boot'
+ '''
+ rc = os.path.exists(path + '/boot/grub/grub.cfg')
+ if not rc:
+ rc = os.path.exists(path + '/grub/grub.cfg')
+ if not rc:
+ rc = None
+ return rc
+
    def initGrubMount(self, partInfo, options):
        '''Mounts the device in the partInfo read-only on a temporary mount point.
        Sets self._mountPoint: either the temporary directory or, if the device
        is already mounted elsewhere, the existing mount point.
        @param partInfo: the partition info
        @param options: None or additional mount options (appended to 'ro')
        '''
        self._mountPoint = '/media/tmp.grub'
        self.ensureDirectory(self._mountPoint)
        opts = 'ro' if options == None else 'ro,' + options
        # prefer mounting by UUID, fall back to the device name
        device = None if partInfo._uuid == None else 'UUID=' + partInfo._uuid
        device = partInfo._device if device == None else device
        self._processTool.execute(['mount', '-o', opts, device, self._mountPoint], True)
        if len(self._processTool._error) > 0:
            # "already mounted" is not an error: reuse the existing mount point
            matcher = re.search(r'already mounted on (.*).$', self._processTool._error[0])
            if matcher != None:
                self._mountPoint = matcher.group(1)
                self._logger.log('using other mountpoint: ' + self._mountPoint)
+
+ def initGrubSearchBtrFs(self, sections, partInfo):
+ '''Adds one menu entry to the array.
+ @param sections: IN/OUT: the list of menu entries
+ @param partInfo: the data of the partition
+ '''
+ self.initGrubMount(partInfo, 'subvolid=5')
+ path = self._mountPoint
+ nodes = os.listdir(path)
+ for node in nodes:
+ full = path + os.sep + node
+ isBootDir = self.initGrubIsBootDir(full)
+ if isBootDir != None:
+ partInfo._osName = self.initGrubFindOsInfo(full)
+ self.initGrubAddMenuEntry(sections, partInfo, '/' + (node if not isBootDir else node + '/boot'))
+ self.initGrubUnmount()
+
+
+ def initGrubUnmount(self, ):
+ '''Unmounts the device in the partInfo.
+ '''
+ if self._mountPoint == '/media/tmp.group':
+ self._processTool.execute(['umount', self._mountPoint], True)
+ self._mountPoint = None
+
+ def installNetworkFindInterface(self, pattern):
+ '''Finds the interface from the existing one.
+ Finds the network devices and return the first matching the pattern
+ @param pattern: None or a pattern e.g. 'eth*'
+ @return: None: no interface available otherwise: the first interface matching the pattern
+ '''
+ rc = None
+ self._processTool.executeInputOutput(['ip', 'addr', 'show'], None)
+ lines = self._processTool._rawOutput.decode().split('\n')
+ if pattern == None:
+ pattern = '*'
+ for line in lines:
+ if len(line) == 0 or line[0].isspace():
+ continue
+ # 3: wlp4s0: <BROADCAST
+ dev = line.split(' ')[1][0:-1]
+ if dev == 'lo':
+ continue
+ if fnmatch.fnmatch(dev, pattern):
+ if rc == None:
+ rc = dev
+ elif not rc.startswith('e') and dev.startswith('e'):
+ rc = dev
+ elif not rc.startswith('w') and not rc.startswith('e') and dev.startswith('w'):
+ rc = dev
+ if rc == None:
+ self.usage('unknown network interface: ' + pattern)
+ return rc
+
+ def installNginx(self, argv):
+ '''Initializes the webserver NGINX.
+ @param argv: program arguments, e.g. '--well-known=/var/www/letsencrypt'
+ '''
+ root = '/var/www/letsencrypt'
+ argv, options = self.splitArgsAndOpts(argv)
+ for opt in options:
+ if opt.startswith('--well-known='):
+ root = opt[13:]
+ else:
+ self.usage('unknown option: ' + opt)
+ if self._isRoot:
+ self._processTool.execute(['/usr/bin/apt-get', '-y', 'install', 'nginx-full', 'ssl-cert', 'ca-certificates'], True)
+ self._textTool.currentSimpleInsert('/etc/nginx/nginx.conf', 'client_max_body_size', '''
+ client_max_body_size 512M;
+ ## Detect when HTTPS is used
+ map $scheme $fastcgi_https {
+ default off;
+ https on;
+ }
+ #client_body_temp_path /space/php_temp;
+ fastcgi_read_timeout 3600s;
+ fastcgi_request_buffering off;
+''', 'include /etc/nginx/conf.d', False, True)
+ self.ensureDirectory(root)
+ self.ensureDirectory(root + os.sep + '.well-known')
+ fn = self.getTarget('/etc/nginx/snippets', 'letsencrypt.conf')
+ base.FileHelper.ensureFileExists(fn, createLetsEncryptConf(root))
+ service = 'nginx'
+ if self._isRoot:
+ self._processTool.execute(['systemctl', 'enable', service], True)
+ self._processTool.execute(['systemctl', 'start', service], True)
+ self._processTool.execute(['systemctl', 'status', service], True)
+
    def installPhp(self, argv):
        '''Installs PHP (FPM variant) with a standard set of modules.
        For a specific version the vendor repository (ppa:ondrej / sury.org)
        is added first, depending on the detected distribution.
        @param argv: program arguments: optionally the PHP version, e.g. ['7.3']
        '''
        phpVersion = None if len(argv) <= 0 else argv[0]
        packets = 'php-fpm php-cli php-json php-curl php-imap php-gd php-mysql php-xml php-zip php-intl php-imagick '
        packets += 'php-mbstring php-memcached memcached php-xdebug php-igbinary php-msgpack'
        if phpVersion != None:
            # a specific version needs the external package repository
            if self._ubuntuVersion != None:
                self._processTool.execute(['add-apt-repository', '-y', 'ppa:ondrej/php'], True)
            elif self._debianVersion != None:
                if not os.path.exists('/etc/apt/sources.list.d/php.list'):
                    self._processTool.executeScript('''#! /bin/bash
wget -q https://packages.sury.org/php/apt.gpg -O- | sudo apt-key add -
echo "deb https://packages.sury.org/php/ {} main" | sudo tee /etc/apt/sources.list.d/php.list
'''.format(self.debianName()))
                    self.updateApt(True)
            else:
                self._logger.error('not DEBIAN, not Ubuntu. I am confused')
                return
            if phpVersion in ['7.2', '7.3', '7.0', '7.1', '5.6']:
                # turn 'php-fpm' into e.g. 'php7.2-fpm' for all packages
                packets = packets.replace('-', str(phpVersion) + '-')
                if phpVersion == '5.6':
                    packets += ' php5.6-mcrypt php5.6-opcache php5.6-readline php5.6-sqlite3'
        args = ['/usr/bin/apt-get', '-y', 'install' ] + packets.split(' ')
        self._processTool.execute(args, True)
        service = 'php{}-fpm'.format('' if phpVersion == None else phpVersion)
        self._processTool.execute(['systemctl', 'enable', service], True)
        self._processTool.execute(['systemctl', 'start', service], True)
        self._processTool.execute(['systemctl', 'status', service], True)
+
+ def jobGitlabRestore(self):
+ argv = ['/usr/bin/gitlab-rake', 'gitlab:backup:create']
+ proc = subprocess.Popen(argv, stderr=subprocess.PIPE)
+ while True:
+ line = proc.stderr.readline()
+ if line == b'':
+ break
+ self._logger.error(line.decode())
+
    def jobSaveAllDatabases(self):
        '''Saves all databases of a mysql system.
        Uses the credentials of a matching webapp configuration (webapps.d)
        when available, otherwise the global mysql user/code of the configuration.
        '''
        self._logger.log('saving all databases', 2)
        dbs = self.allDatabases(True)
        user = self._configuration.getString('mysql.user')
        password = self._configuration.getString('mysql.code')
        if dbs != None:
            for db in dbs:
                # defaults: the db name itself and the global credentials
                name = db
                currentUser = user
                currentPassword = password
                config = self.findConfig('db', db, 'webapps.d')
                if config != None:
                    path = config.getString('directory')
                    if path != None:
                        # prefix the dump name with the webapp directory name
                        name = os.path.basename(path) + '_' + db
                    currentUser = config.getString('user')
                    currentPassword = config.getString('password')
                    if currentPassword == None:
                        # incomplete webapp config: fall back to the global credentials
                        currentUser = user
                        currentPassword = password
                if self._verboseLevel >= 3:
                    self._logger.log('saving db ' + db)
                self.saveMysql(name, db, currentUser, currentPassword, self._targetDir)
+
    def linuxServer(self):
        '''Makes the basic action on a linux server.
        Installs base packages, the 'pyboot' boot script service and checks
        the configured timezone.
        '''
        self.init(['etc'])
        self.init(['dirs'])
        packets = 'htop iotop curl tmux git etckeeper bzip2 zip unzip nfs-common nfs-kernel-server nmap rsync sudo apt-transport-https net-tools ntp btrfs-compsize'.split(' ')
        self._processTool.execute(['/usr/bin/apt-get', '-y', 'install' ] + packets, True)
        # create a systemd service 'pyboot' which executes the shell scripts
        # of /etc/pyrshell/boot.d at boot time
        appInfo = base.BaseTool.ApplicationInfo('pyboot', None, None, None)
        tool = base.BaseTool.InstallTool(appInfo, self._globalOptions)
        tool.createSystemDScript('pyboot', 'pyboot', 'root', 'root', 'Starts shell scripts from /etc/pyrshell/boot.d at boot time.')
        bootDir = '/etc/pyrshell/boot.d'
        self.ensureDirectory(bootDir)
        script = self.getTarget('/usr/local/bin', 'pyboot')
        base.StringUtils.toFile(script, '''#! /bin/bash
DIR={}
LOG=/var/log/local/boot.log
export PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
date "+%Y.%m.%d-%H:%M:%S start" >>$LOG
cd $DIR
for script in *.sh; do
  if [ "$script" != "*.sh" ]; then
    date "+%Y.%m.%d-%H:%M:%S $script" >>$LOG
    ./$script $*
  fi
done
date "+%Y.%m.%d-%H:%M:%S end" >>$LOG
systemctl stop pyboot
exit 0
'''.format(bootDir), self._logger)
        os.chmod(script, 0o755)
        self._processTool.execute(['/bin/systemctl', 'enable', 'pyboot'], True)
        self._processTool.execute(['/bin/systemctl', 'status', 'pyboot'], True)
        # Europe/Berlin is the expected timezone of the maintained servers
        currentZone = base.StringUtils.fromFile('/etc/timezone').strip()
        if currentZone != 'Europe/Berlin':
            self._logger.log('=== Curious timezone: {} Please execute: dpkg-reconfigure tzdata'.format(currentZone), 1)
+
    def network(self, argv):
        '''Installs network entities.
        net { info | static [ip [interface [gateway]]] | virt-manager netname | name-server [ip] }
        info: displays relevant infos
        ip: examples: 10.10.10.100 or 100
        interface: a pattern for the interface, e.g. eth0 or ens* Default: the first device given by the system except 'lo'
        gateway: Default: the above ip with end number 1
        @param argv: the program arguments, e.g. ['100', 'eth1' ]
        '''
        # default sub command if none is given
        cmd = "info"
        if len(argv) > 0:
            cmd = argv[0]
            argv = argv[1:]
        if cmd == 'info':
            # collect and print (device, ip) pairs parsed from 'ip addr show'
            self._processTool.executeInputOutput(['ip', 'addr', 'show'], None)
            lines = self._processTool._rawOutput.decode().split('\n')
            device = None
            rc = []
            for line in lines:
                # device header, e.g. "3: wlp4s0: <BROADCAST..."
                # NOTE(review): \d matches one digit only, so more than 9
                # interfaces would not all be recognized - confirm the intent
                matcher = re.match(r'\d: (\w+):', line)
                if matcher != None:
                    device = matcher.group(1)
                    continue
                matcher = re.match(r'\s+inet (\S+)/', line)
                if matcher != None:
                    ip = matcher.group(1)
                    info = '{} {}'.format(device, ip)
                    rc.append(info)
                    print(info)
                    continue
            base.BaseTool.setResult(rc)
        elif cmd == 'static':
            # configure a static address in /etc/network/interfaces
            ip = None
            iface = None
            gateway = None
            if len(argv) == 1:
                ip = argv[0]
            elif len(argv) == 2:
                ip = argv[0]
                iface = argv[1]
            elif len(argv) == 3:
                ip = argv[0]
                iface = argv[1]
                gateway = argv[2]
            if ip == None:
                ip = '10.10.10.100'
            else:
                ip = self.expandIp(ip)
            iface = self.installNetworkFindInterface(iface)
            gateway = self.expandIp(gateway, ip)
            config = self.getTarget('/etc/network') + os.sep + 'interfaces'
            self._textTool.readCurrent(config, True)
            # select the existing region of the interface for replacement
            self._textTool.findRegion(r'^(auto|iface) ' + iface, True, r'^(auto|iface)', False, None, True)
            # netmask heuristic by address class: 192.* -> /24, 172.* -> /16, else /8
            count = 3 if ip.startswith('192') else (2 if ip.startswith('172') else 1)
            netmask = ('255.' * count + '0.' * (4 - count))[0:-1]
            text = '''auto {}
iface {} inet static
 address {}
 netmask {}
 gateway {}
 dns-nameservers {}
 '''.format(iface, iface, ip, netmask, gateway, gateway)
            self._textTool.replaceRegion(text)
            self._textTool.writeCurrent()
        elif cmd == 'virt-manager':
            # create, autostart and start a NAT network for libvirt
            if len(argv) == 0:
                self.usage('missing network name')
            else:
                netName = argv[0]
                argv = argv[1:]
                fn = tempfile.gettempdir() + os.sep + netName + '.xml'
                base.StringUtils.toFile(fn, '''<network>
 <name>{}</name>
 <ip address='10.10.10.1' netmask='255.0.0.0'>
 <dhcp>
 <range start='10.10.10.20' end='10.10.10.99' />
 </dhcp>
 </ip>
</network>
'''.format(netName))
                self._processTool.executeInput(['virsh', 'net-define', fn], False, '')
                self._processTool.executeInput(['virsh', 'net-autostart', netName], False, '')
                self._processTool.executeInput(['virsh', 'net-start', netName], False, '')
                self._processTool.executeInput(['virsh', 'list'], True, '')
                self._logger.log('''=== for all guests:
virsh GUEST_NAME

<interface type='network'>
 <source network='{}'/>
 <model type='virtio'/> <-- This line is optional.
</interface>'''.format(netName))
        elif cmd == 'nameserver':
            # NOTE(review): the docstring advertises 'name-server' but the code
            # tests 'nameserver' - confirm which spelling is the contract
            if len(argv) > 0:
                ip = argv[0]
            else:
                ip = '9.9.9.9'
            # prefer the addresses found in /etc/network/interfaces over the default
            fn = self.getSource('/etc/network') + os.sep + 'interfaces'
            self._textTool.readCurrent(fn, True)
            (ix, matcher) = self._textTool.currentFind2(r'\s+gateway\s+(\S+)')
            if ix != None:
                ip = matcher.group(1)
            (ix, matcher) = self._textTool.currentFind2(r'\s+dns-nameservers\s+(\S+)')
            if ix != None:
                ip = matcher.group(1)
            fn = self.getTarget('/etc/systemd') + os.sep + 'resolved.conf'
            if self._textTool.readCurrent(fn, True):
                if not self._textTool.currentReplace(r'^\s*DNS=\s*', 'DNS=' + ip,
                        r'^\s*#\s*DNS\s*='):
                    self.usage('no DNS entry (comment too) found in ' + fn)
                else:
                    self._textTool.writeCurrent()
        else:
            self.usage('unknown sub command: ' + cmd)
+
+ def readGroups(self, full):
+ '''Reads the passwd file.
+ @param full: the filename to read
+ @result: a dictionary with (name : GroupInfo) pairs
+ '''
+ self._textTool.readCurrent(full, True)
+ groups = dict()
+ lastComments = []
+ for line in self._textTool._lines:
+ parts = line.split(':')
+ if len(parts) < 2:
+ continue
+ groups[parts[0]] = GroupInfo(parts[0], parts[2], line)
+
+ for line in self._textTool._lines:
+ parts = line.split(':')
+ if not (len(line) > 0 and line[0] == '#'):
+ if len(lastComments) > 0:
+ groups[parts[0]]._comments = lastComments
+ lastComments = []
+ else:
+ parts2 = line[1:].split(':')
+ if parts2[0] in groups:
+ groups[parts[0]]._comments = [line]
+ return groups
+
+ def readPasswd(self, full):
+ '''Reads the passwd file.
+ @param full: the filename to read
+ @result: a dictionary with (name : UserInfo) pairs
+ '''
+ self._textTool.readCurrent(full, True)
+ users = dict()
+ lastComments = []
+ for line in self._textTool._lines:
+ parts = line.split(':')
+ if len(parts) < 2:
+ continue
+ users[parts[0]] = UserInfo(parts[0], parts[2], line)
+
+ for line in self._textTool._lines:
+ parts = line.split(':')
+ if not (len(line) > 0 and line[0] == '#'):
+ if len(lastComments) > 0:
+ users[parts[0]]._comments = lastComments
+ lastComments = []
+ else:
+ parts2 = line[1:].split(':')
+ if parts2[0] in users:
+ users[parts[0]]._comments = [line]
+ return users
+
+ def readShadow(self, full):
+ '''Reads the shadow file.
+ @param full: the filename to read
+ @result: a pair of dictionaries, first is (name : line), the second (name: comments)
+ '''
+ self._textTool.readCurrent(full, True)
+ linesByName = dict()
+ commentsByName = dict()
+ lastComments = []
+ for line in self._textTool._lines:
+ parts = line.split(':')
+ if len(parts) < 2:
+ lastComments.append(line)
+ else:
+ if len(lastComments) > 0:
+ commentsByName[parts[0]] = '\n'.join(lastComments) + '\n'
+ lastComments = []
+ else:
+ commentsByName[parts[0]] = ''
+ linesByName[parts[0]] = line
+ return (linesByName, commentsByName)
+
+ def reconfigure(self, argv):
+ '''Change configuration files given from the internal configuration.
+ @param argv: arguments, e.g. ['php', '7.1']
+ '''
+ if len(argv) < 1:
+ self.usage('missing <what>')
+ else:
+ what = argv[0]
+ argv = argv[1:]
+ if what == 'local-bin':
+ self._textTool.currentSimpleInsert('/etc/sysctl.conf', '^vm.swappiness', 'vm.swappiness = 20')
+ elif what == 'letsencrypt':
+ self.reconfigureLetsencrypt(argv)
+ elif what == 'php':
+ self.reconfigurePhp(argv)
+ else:
+ self.usage('unknown <what>: ' + what)
+
    def reconfigureLetsencrypt(self, argv):
        '''Switches between the letsencrypt certificate and the private certificate.
        Comments the ssl_certificate* lines of the nginx site configuration in
        or out: mode 'on' activates the letsencrypt entries (lines containing
        'live/latest') and comments out the private ones; 'off' does the opposite.
        @param argv: program arguments, e.g. ['on', 'abc.infeos.de']
            option --all: apply the mode to all enabled sites
        '''
        argv, opts = self.splitArgsAndOpts(argv)
        all = False
        for opt in opts:
            if opt == '--all':
                all = True
            else:
                self.usage('unknown option ' + opt)
        if len(argv) < 1:
            self.usage('missing arguments')
        elif argv[0] != 'on' and argv[0] != 'off':
            self.usage('wrong mode: use "on" or "off" not ' + argv[0])
        elif len(argv) == 1 and all:
            # --all: recurse once per enabled site (except 'default')
            mode = argv[0]
            subdir = self.getTarget('/etc/nginx/sites-enabled', '')
            nodes = os.listdir(subdir)
            for node in nodes:
                if node != 'default':
                    self.reconfigureLetsencrypt([mode, node])
        else:
            mode = argv[0]
            domain = argv[1]
            domainRaw = domain
            fn = self.getTarget('/etc/nginx/sites-enabled', domain)
            if not os.path.exists(fn):
                # retry with the 'www.' prefix toggled
                if domain.startswith('www.'):
                    domainRaw = domain[4:]
                    fn = self.getTarget('/etc/nginx/sites-enabled', domainRaw)
                else:
                    fn = self.getTarget('/etc/nginx/sites-enabled', 'www.' + domain)
                if not os.path.exists(fn):
                    self.usage('unknown domain: ' + domain)
                    fn = None
            if fn != None:
                self._textTool.readCurrent(fn, True)
                regComment = re.compile(r'^\s*#')
                changedLets = 0
                changedPrivate = 0
                for ix in range(len(self._textTool._lines)):
                    line = self._textTool._lines[ix]
                    if line.find('ssl_certificate') >= 0:
                        hasComment = regComment.match(line) != None
                        # lines referencing 'live/latest' belong to letsencrypt
                        isLetsencrypt = self._textTool._lines[ix].find('live/latest') >= 0
                        if mode == 'on':
                            # activate letsencrypt lines, deactivate private ones
                            if hasComment and isLetsencrypt:
                                self._textTool._lines[ix] = line.replace('#', '')
                                changedLets += 1
                            elif not hasComment and not isLetsencrypt:
                                changedPrivate += 1
                                self._textTool._lines[ix] = '#' + line
                        # mode 'off': activate private lines, deactivate letsencrypt ones
                        elif hasComment and not isLetsencrypt:
                            changedPrivate += 1
                            self._textTool._lines[ix] = line.replace('#', '')
                        elif not hasComment and isLetsencrypt:
                            changedLets += 1
                            self._textTool._lines[ix] = '#' + line
                if changedLets + changedPrivate > 0:
                    self._logger.log('changed lines: letsencrypt: {} private: {}'.format(changedLets, changedPrivate), 2)
                    line = self._textTool.writeCurrent(fn, True)
                else:
                    self._logger.log('nothing to change', 2)
+
+ def reconfigureLetsencryptAdapt(self):
+ '''Adapts the link to the latest certificate.
+ '''
+ baseDir = self.getTarget('/etc/letsencrypt', 'live')
+ nodes = os.listdir(baseDir)
+ # search the youngest directory
+ youngest = 0
+ name = None
+ for node in nodes:
+ full = baseDir + os.sep + node
+ if os.path.isdir(full):
+ current = os.path.getmtime(full)
+ if current > youngest:
+ youngest = current
+ name = node
+ if name == None:
+ self._logger.error('no subdir in ' + baseDir)
+ else:
+ latest = baseDir + os.sep + 'latest'
+ if not os.path.islink(latest):
+ self._logger.error('not a link: ' + latest)
+ else:
+ link = os.readlink(latest)
+ if link == name:
+ self._logger.log('link is correct: ' + link)
+ else:
+ self._logger.log('exchanging link from {} to {}'.format(link, name))
+ os.unlink(latest)
+ os.symlink(name, latest)
+
    def reconfigurePhp(self, argv):
        '''Replace configuration data given by the internal configuration.
        Patches php.ini of every SAPI (fpm, cli, ...) of one PHP version and
        appends a default xdebug setup when none is present.
        @param argv: the argument vector, e.g. ['7.1']
        '''
        # NOTE(review): reconfigure() already strips the 'php' keyword before
        # delegating here, so this additional shift seems to discard the
        # version argument and the newest installed version is always
        # selected — confirm against the callers
        argv = argv[1:]
        variables = self._textTool.findVariables('php.', self._configuration)
        if len(argv) == 0:
            # no version given: take the highest installed one
            versions = os.listdir('/etc/php')
            versions.sort()
            version = versions[-1]
        else:
            version = argv[0]
            argv = argv[1:]
            if not re.match(r'\d+\.\d+', version):
                self.usage('invalid version: ' + version)
        fnConfig = '/etc/php/{}/fpm/php.ini'.format(version)
        if not os.path.exists(fnConfig):
            self.usage('missing {}: is version {} installed?'.format(fnConfig, version))
        else:
            # one subdirectory per SAPI, each with its own php.ini
            nodes = os.listdir('/etc/php/{}'.format(version))
            for node in nodes:
                fnConfig = '/etc/php/{}/{}/php.ini'.format(version, node)
                if os.path.exists(fnConfig):
                    missingDebug = False
                    configuration = base.StringUtils.fromFile(fnConfig).split('\n')
                    if not base.StringUtils.arrayContains(configuration, 'xdebug.remote_enabled'):
                        # append a default xdebug block (content written to php.ini)
                        missingDebug = True
                        configuration += '''
; Ist normalerweise in xdebug-spezifischer Konfiguration, z.B. mods.d/20-xdebug
;zend_extension="/usr/lib/php/20160303/xdebug.so"
xdebug.remote_port=9000
xdebug.remote_enable=Off
xdebug.remote_handler=dbgp
xdebug.remote_host=127.0.0.1
;xdebug.remote_connect_back=On
;xdebug.remote_log=/var/log/xdebug.log
xdebug.remote_autostart=1
'''.split('\n')
                    content = '\n'.join(self._textTool.adaptVariables(variables, configuration))
                    if missingDebug or self._textTool._hits > 0:
                        # write only when something changed; keep a backup first
                        self.createBackup(fnConfig)
                        base.StringUtils.toFile(fnConfig, content)
                        if self._verboseLevel >= 2:
                            self._logger.log('{}: {} variable(s) changed{}'.format(
                                fnConfig, self._textTool._hits,
                                '' if not missingDebug else '\nxdebug setup added'))
+
+ def remove(self, argv):
+ '''Removes some entities.
+ @param argv: program arguments, e.g. ['web-app', 'www.huber.de']
+ '''
+ if len(argv) < 1:
+ self.usage('missing argument(s)')
+ else:
+ mode = argv[0]
+ argv = argv[1:]
+ if mode == 'webapp':
+ if len(argv) < 1:
+ self.usage('missing argument(s)')
+ else:
+ domain = argv[0]
+ argv = argv[1:]
+ self.removeWebApp(domain, argv[1] if len(argv) > 0 else None)
+ else:
+ self.usage('unknown entity: ' + mode)
+
    def removeWebApp(self, domain, rootPasswd):
        '''Removes a web application (described in /etc/pyrshell/webapps.d/<domain>.conf).
        The document root is backed up and deleted, the database is dumped to a
        gzipped SQL file and then dropped, the configuration file is removed.
        @param domain: the id of the application
        @param rootPasswd: None or the mysql root password used for dropping the db
        '''
        fnConfiguration = '/etc/pyrshell/webapps.d/{}.conf'.format(domain)
        if not os.path.exists(fnConfiguration):
            self.usage('missing ' + fnConfiguration)
        else:
            config = base.JavaConfig.JavaConfig(fnConfiguration, self._logger)
            db = config.getString('db')
            user = config.getString('user')
            code = config.getString('password')
            directory = config.getString('directory')
            if db == None or user == None or code == None or directory == None:
                self.usage('missing needed entries in ' + fnConfiguration)
            elif not os.path.isdir(directory):
                self.usage('not a directory: ' + directory)
            else:
                # back up the document root before deleting it
                self.createBackup(directory)
                self._logger.log('removing directory tree {}...'.format(directory), 1)
                shutil.rmtree(directory, True)
                # dump the database before dropping it
                fn = '{}.{}.sql'.format(domain, time.strftime('%Y.%m.%d.%H_%M_%S'))
                # NOTE(review): the '>' redirection argument only works if
                # execute() honors the 'shell' mode — confirm
                self._processTool.execute(['mysqldump', '--default-character-set=utf8mb4', '--single-transaction', '-u', user, '-p' + code, db, '>' + fn], True, 'shell')
                self._processTool.execute(['gzip', fn], True)
                if rootPasswd != None:
                    # dropping usually needs more rights than the application user has
                    user = 'root'
                    code = rootPasswd
                self._processTool.execute(['mysqladmin', '-u', user, '-p' + code, 'drop', db], True)
                self.createBackup(fnConfiguration)
                self.ensureFileDoesNotExist(fnConfiguration)
+
+ def restore(self, argv):
+ '''Fetch data from backup and installs them.
+ @param argv: the program arguments, e.g. ['clone']
+ '''
+ if len(argv) < 1:
+ self.usage('missing <what>')
+ else:
+ what = argv[0]
+ argv = argv[1:]
+ if what == 'clone':
+ self.restoreClone(argv)
+ elif what == 'etc':
+ dirToMerge = self.getTarget('/', 'etc2')
+ self.restoreMergeUsersAndGroups(dirToMerge)
+ self.restorePyRShell()
+ if os.path.isdir('/etc/nginx'):
+ self.restoreNginx()
+ elif what == 'pyrshell':
+ self.restorePyRShell()
+ elif what == 'nginx':
+ self.restoreNginx()
+ else:
+ self.usage('unknown <what>: ' + what)
+
+ def restoreClone(self, argv):
+ '''Clones an archive into a "sibling directory" of a target.
+ Note: a sibling directory of a target has the same parent as the target itself.
+ @param argv: the program arguments, e.g. ['usr=local=bin.tgz', '/usr/local/bin'
+ '''
+ if len(argv) < 2:
+ self.usage('missing arguments')
+ else:
+ archive = argv[0]
+ target = argv[1]
+ target2 = target + '2'
+ if not os.path.isdir(target):
+ self.usage('<target> is not a directory: ' + target)
+ elif os.path.exists(target2):
+ self.usage('clone already exists: ' + target2)
+ else:
+ parent = os.path.dirname(target)
+ tempDir = parent + os.sep + 'tmp.{}'.format(int(time.time()))
+ self.ensureDirectory(tempDir)
+ if archive.endswith('.zip'):
+ argv = ['/usr/bin/unzip', '-q', archive, '-d', tempDir]
+ self._processHelper.execute(argv, True)
+ elif archive.endswith('.tgz'):
+ argv = ['/bin/tar', 'xzf', archive, '--directory=' + tempDir]
+ self._processHelper.execute(argv, True)
+ nodes = os.listdir(tempDir)
+ if len(nodes) == 1 and os.path.isdir(tempDir + os.sep + nodes[0]):
+ os.rename(tempDir + os.sep + nodes[0], target2)
+ self.ensureFileDoesNotExist(tempDir)
+ else:
+ os.rename(tempDir, target2)
+ self._logger.log('created: {} (from {}'.format(target2, archive))
+
+ def restoreCopyFile(self, source, target):
+ '''Copies one file from a source to a target.
+ @param source: the source filename
+ @param target: the target filename
+ '''
+ self.nearBackup(target, True)
+ if not os.path.exists(source):
+ self._logger.error('missing source: ' + source)
+ else:
+ self._logger.log('copying {} -> {}'.format(source, target))
+ self.ensureDirectory(os.path.dirname(target))
+ shutil.copy2(source, target)
+
+ def restoreDirectoryByTar(self, archive, target, opts=None, tempDir=False, subdir=None, clearTarget=True):
+ '''Restores a directory from a tar archive.
+ @param archive: archive name
+ @param target: target directory
+ @param opt: None or an array of options like '-exclude=<pattern>'
+ @param tempDir: True: usage of a temporary directory
+ @param subdir: None or a subdirectory in the TAR archive
+ @param clearTarget: True: the target directory will be cleared before extracting
+ '''
+ if not os.path.exists(archive):
+ self._logger.error('missing tar archive {}'.format(archive))
+ elif not os.path.exists(target):
+ self._logger.error('missing target dir: ' + target)
+ elif not os.path.isdir(target):
+ self._logger.error('not a directory: ' + target)
+ else:
+ start = time.time()
+ tempBase = self.ensureDirectory('/tmp/restoretool')
+ if clearTarget:
+ self.clearDirectory(target)
+ trg = target if not tempBase else '/tmp/restoretool' + os.sep + 'trg'
+ if tempDir:
+ base.BaseTool.BasicStatics.ensureDirectory(trg, self._logger)
+ argv = ['/bin/tar', 'xzf', archive, '--directory=' + trg]
+ if subdir != None:
+ argv.append('./' + subdir)
+ if opts != None:
+ argv += opts
+ self._processTool.execute(argv, self._verboseLevel >= 3)
+ # os.chdir(oldDir)
+ if tempDir:
+ if subdir != None:
+ trg += os.sep + subdir
+ self._processTool.execute(['/usr/bin/rsync', '-a', trg + '/', target], self._verboseLevel >= 3)
+ self.logFile(archive, '%f: %s %t restored in %r', start)
+
+ def restoreDirectoryByZip(self, archive, target, opts):
+ '''Restores a directory from a zip archive.
+ @param archive: archive name
+ @param target: target directory
+ @param opt: None or an array of options like '-exclude=<pattern>'
+ '''
+ if not os.path.exists(archive):
+ self._logger.error('missing zip archive {}'.format(archive))
+ elif not os.path.exists(target):
+ self._logger.error('missing target dir: ' + target)
+ elif not os.path.isdir(target):
+ self._logger.error('not a directory: ' + target)
+ else:
+ start = time.time()
+ self.clearDirectory(target)
+ argv = ['/usr/bin/unzip', '-q', archive, '-d', target]
+ self._processHelper.execute(argv, True)
+ # os.chdir(oldDir)
+ self.logFile(archive, '%f: %s %t restored in %r', start)
+
+ def restoreFromBackup(self, mode):
+ '''Does the backup process controlled by configuration files.
+ @param mode: 'dayly', 'weekly' ...
+ '''
+ logFile = self._configuration.getString('log.file')
+ if logFile != None and (not hasattr(self._logger, '_logfile') or os.path.abspath(logFile) != os.path.abspath(self._logger._logfile)):
+ logger2 = base.Logger.Logger(logFile, self._logger._verbose >= 0)
+ logger2.transferErrors(self._logger)
+ self._logger = logger2
+ logFile2 = self._configuration.getString('log.mirror')
+ if logFile2 != None:
+ logger3 = base.Logger.Logger(logFile, False)
+ self._logger.setMirror(logger3)
+ self._mode = mode
+ configuration = self._backupConfiguration
+ self._baseTargetDir = configuration.getString('target.path')
+ self._targetDir = self._baseTargetDir
+ try:
+ if configuration.getString('job.' + mode) == None:
+ self._logger.error('missing job.{:s} in {:s} Wrong mode?'.format(mode, configuration._filename))
+ elif self._targetDir == None:
+ self._logger.error('missing "{:s}" in {:s}'.format(self._targetDir, configuration._filename))
+ elif not os.path.isdir(self._targetDir):
+ self._logger.error('{:s} is not a directory. See "target.path" in '.format(self._targetDir, configuration._filename))
+ else:
+ relPath = self.relativeRestorePath(mode)
+ self._targetDir += relPath
+ if self.ensureDirectory(self._targetDir) != None:
+ if mode == 'dayly' or mode == 'weekly' or mode == 'monthly':
+ linkTarget = os.path.dirname(self._targetDir) + os.sep + 'current'
+ if os.path.exists(linkTarget):
+ os.unlink(linkTarget)
+ os.symlink(os.path.basename(self._targetDir), linkTarget)
+ statement = configuration.getString('job.' + mode);
+ if statement == None:
+ self._logger.error('missing job.' + mode + ' in ' + configuration._filename);
+ else:
+ self.doJob(configuration, statement)
+ except Exception as exc:
+ self._logger.error('backup aborted with exception: ' + str(exc))
+ traceback.print_exc()
+ subject = None
+ if self._logger._errors > 0:
+ subject = 'Restore {:s} failed on {:s}'.format(mode, configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
+ elif configuration.getString('send.always', 'False').startswith('T'):
+ subject = 'Restore {:s} finished on {:s}'.format(mode, configuration.getString('location', base.BaseTool.BasicStatics.hostname(True)))
+ if subject != None:
+ recipient = configuration.getString('admin.email')
+ now = datetime.datetime.now()
+ body = now.strftime('%Y.%m.%d %H:%M:%S') + ' Restore {:s} finished with {:d} error(s)\n\n'.format(mode, self._logger._errors)
+ body += '\n'.join(self._logger._firstErrors)
+ sender = configuration.getString('smtp.sender'),
+ host = configuration.getString('smtp.host')
+ port = configuration.getString('smtp.port')
+ user = configuration.getString('smtp.user')
+ code = configuration.getString('smtp.code')
+ value = configuration.getString('smtp.tls')
+ withTls = value != None and value.lower().startswith('t')
+ if recipient and sender and host and port and user and code:
+ net.EMail.sendStatusEmail(recipient, subject, body, sender, host, port, user, code, withTls, self._logger)
+
    def restoreMergeUsersAndGroups(self, dirMerge):
        '''Merges two passwd (and shadow, group) files into one.
        Priority has the file /etc/passwd (/etc/shadow, /etc/group):
        a user/group existing in both files is taken from the first source,
        entries only found in dirMerge are appended.
        @param dirMerge: the directory containing the 2nd set of files to merge
        '''
        dirSource = self.getSource('/', 'etc')
        users = self.readPasswd(dirSource + os.sep + 'passwd')
        # NOTE(review): assumes readPasswd()/readGroups() leave the raw file
        # content in self._textTool._lines as a side effect — confirm
        contentPasswd = '\n'.join(self._textTool._lines) + '\n'
        contentShadow = base.StringUtils.fromFile(dirSource + os.sep + 'shadow') + '\n'
        groups = self.readGroups(dirSource + os.sep + 'group')
        contentGroups = '\n'.join(self._textTool._lines) + '\n'
        users2 = self.readPasswd(dirMerge + os.sep + 'passwd')
        (linesByName, commentsByName) = self.readShadow(dirMerge + os.sep + 'shadow')
        groups2 = self.readGroups(dirMerge + os.sep + 'group')
        keys2 = users2.keys()
        oldKeys = users.keys()
        addendumPasswd = ''
        addendumShadow = ''
        addendumGroups = ''
        keys2 = sorted(keys2)
        for user in keys2:
            if user not in oldKeys:
                # the user only exists in the merge directory: append it
                addendumPasswd += users2[user].asText().strip() + '\n'
                if user in commentsByName.keys():
                    addendumShadow += commentsByName[user].strip() + '\n'
                if user in linesByName.keys():
                    addendumShadow += linesByName[user].strip() + '\n'
        keys2 = groups2.keys()
        oldKeys = groups.keys()
        for group in keys2:
            if group not in oldKeys:
                addendumGroups += groups2[group].asText()
        dirTarget = self.getTarget('/', 'etc')
        if dirTarget == '/etc':
            # only back up when writing the real system files
            self.createBackup(dirTarget + os.sep + 'passwd', 'etc')
            self.createBackup(dirTarget + os.sep + 'shadow', 'etc')
            self.createBackup(dirTarget + os.sep + 'group', 'etc')
        base.StringUtils.toFile(dirTarget + os.sep + 'passwd', (contentPasswd + addendumPasswd).replace('\n\n', '\n'))
        base.StringUtils.toFile(dirTarget + os.sep + 'shadow', (contentShadow + addendumShadow).replace('\n\n', '\n'))
        base.StringUtils.toFile(dirTarget + os.sep + 'group', (contentGroups + addendumGroups).replace('\n\n', '\n'))
+
    def restoreMysql(self, name, db, user, password, target):
        '''Writes a gzipped SQL file <target>/<name>.sql.gz from a database.
        NOTE(review): despite its name the method pipes the stdout of
        /usr/bin/mysql into a compressed dump file (and the error filter
        mentions mysqldump) — confirm the intended direction with the callers.
        @param name: the base name of the output file (<name>.sql.gz)
        @param db: database name
        @param user: database user
        @param password: database password
        @param target: the directory of the output file
        '''
        start = time.time()
        oldFilter = self._logger._errorFilter
        # suppress the well-known password warning written to stderr
        self._logger.setErrorFilter('mysqldump: [Warning] Using a password', True)
        argv = ['/usr/bin/mysql', '-u', user, '-p' + password, '--single-transaction', db]
        outputFile = target + os.sep + name + '.sql.gz'
        errorFile = tempfile.gettempdir() + os.sep + 'backuptool.err.txt'
        with gzip.open(outputFile, 'wb') as fpGZip, open(errorFile, 'w') as fpError:
            proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=fpError.fileno())
            # stream stdout line by line into the gzip file
            while True:
                line = proc.stdout.readline()
                if line == b'':
                    break
                fpGZip.write(line)
        self.errorFileToLog(errorFile)
        self._logger.setErrorFilter(oldFilter)
        self.logFile(outputFile, '%f: %s %t created in %r', start)
+
+ def restoreNginx(self):
+ '''Restores the /etc/nginx-directory
+ '''
+ source = self.getSource('/', 'etc2/nginx')
+ target = self.getTarget('/', 'etc/nginx')
+ if not os.path.isdir(source):
+ self.usage('missing shadow directory {}'.format(source))
+ else:
+ self.ensureDirectory(target)
+ base.FileHelper.copyDirectory(source + os.sep + 'sites-available', target + os.sep + 'sites-available', 'update', self._logger, self._verboseLevel)
+ base.FileHelper.copyDirectory(source + os.sep + 'sites-enabled', target + os.sep + 'sites-enabled', 'update', self._logger, self._verboseLevel)
+ fn = source + os.sep + 'snippets/letsencrypt.conf'
+ if os.path.exists(fn):
+ self.restoreCopyFile(fn, target + os.sep + 'snippets/letsencrypt.conf')
+ self.restoreCopyFile(source + os.sep + 'nginx.conf', target + os.sep + 'nginx.conf')
+ source = self.getSource('/', 'etc2/ssl')
+ target = self.getTarget('/', 'etc/ssl')
+ if not os.path.isdir(source):
+ self.usage('missing shadow directory {}'.format(source))
+ else:
+ self.ensureDirectory(target)
+ base.FileHelper.copyDirectory(source + os.sep + 'certs', target + os.sep + 'certs', 'update', self._logger, self._verboseLevel)
+ base.FileHelper.copyDirectory(source + os.sep + 'private', target + os.sep + 'private', 'update', self._logger, self._verboseLevel)
+ if os.path.isdir('/etc/letsencrypt/live'):
+ if not os.path.exists('/etc/letsencrypt/live/latest'):
+ self._logger('=== missing /etc/letsencrypt/live/latest')
+
+ def restorePyRShell(self):
+ '''Restores the /etc/pyrshell-directory
+ '''
+ source = self.getSource('/', 'etc2')
+ target = self.getTarget('/etc', 'pyrshell')
+ if not os.path.isdir(source):
+ self.usage('missing shadow directory {}'.format(source))
+ else:
+ self.ensureDirectory(target)
+ base.FileHelper.copyDirectory(source + os.sep + 'pyrshell', target, 'clear', self._logger, self._verboseLevel)
+
+ def storage(self, argv):
+ '''Executes the storage command.
+ @param argv: the arguments, e.g. ['ftp-service', '10225']
+ '''
+ if len(argv) < 1:
+ self.usage('missing <what>')
+ else:
+ what = argv[0]
+ argv = argv[1:]
+ if what == 'autofs':
+ self.storageAutoFs(argv)
+ elif what == 'disks':
+ self.storageDisks(argv)
+ elif what == 'ftp-service':
+ self.storageFtpService(argv)
+ elif what == 'nfs-share':
+ self.storageNFSShare(argv)
+ elif what == 'sftp-service':
+ self.storageSFtpService(argv)
+ elif what == 'lvm':
+ self.storageLvm(argv)
+ else:
+ self.usage('unknown <what>: ' + what)
+
+ def storageDisks(self, argv):
+ '''Informs about disks.
+ @param argv: the program arguments, e.g. ['sd[bc]']
+ '''
+ pattern = None if len(argv) == 0 else argv[0]
+ lines = self.storageDiskList(pattern)
+ self._output = lines
+ base.BaseTool.setResult(lines)
+ base.BaseTool.setLatestTool(self)
+ print('\n'.join(lines))
+
+ def storageDiskList(self, pattern):
+ '''Finds disk info matching a pattern.
+ @param pattern: None or a regular expression. Only matching lines will be displayed
+ @return: an array of info lines, e.g. ['caribou-home', 'UUID="950924c0-a0cd-4bae-8b80-13ba5d3e7cbd"', 'TYPE="ext4"']
+ '''
+ def extract(prefix, parts):
+ rc = ''
+ for part in parts:
+ if part.startswith(prefix):
+ rc = ' ' + part
+ parts.remove(part)
+ break
+ return rc
+
+ regExpr = None if pattern == None else re.compile(pattern)
+ lines = self._processHelper.executeInputOutput(['/sbin/blkid'], None, False)
+ rc = []
+ for line in lines:
+ if regExpr == None or regExpr.search(line):
+ parts = line.split(' ')
+ info = parts[0].replace('/dev/', '').replace('mapper/', '')
+ parts = parts[1:]
+ info += extract('UUID=', parts)
+ info += extract('TYPE=', parts)
+ info += ' ' + ' '.join(parts)
+ rc.append(info)
+ return rc
+
    def storageAutoFs(self, argv):
        '''Installs and configures automatic mounting (autofs).
        @param argv: program arguments, e.g. ['bigtoy', '/', 'nfs:10101:/media/data/www']
            <server> <mount-base> <source1> [<source2> ... ]
            each source: <type>|<addr>|<mountpoint>|<options>
        '''
        (argv, options) = self.splitArgsAndOpts(argv)
        autoSelect = None
        for opt in options:
            if opt.startswith('--auto-select='):
                autoSelect = opt[14:]
            else:
                self.usage('unknown option: ' + opt)
        if self._isRoot:
            self._processTool.execute(['/usr/bin/apt-get', '-y', 'install', 'autofs'], True)
        self._textTool.readCurrent(self.getTarget('/etc', 'auto.master'), False)
        if len(argv) < 3:
            self.usage('too few arguments')
        else:
            name = argv[0]
            if re.match(r'^[-.\w]+$', name) == None:
                self.usage('wrong characters in <server>: only letters+digits+underline+dot allowed')
            else:
                fnConfig = self.getTarget('/etc', 'auto.') + name
                mountBase = argv[1]
                if mountBase == '/':
                    # autofs notation for direct maps
                    mountBase = '/-'
                argv = argv[2:]
                no = 0
                for source in argv:
                    no += 1
                    parts = source.split('|')
                    if len(parts) != 4:
                        self.usage('missing "|" (3 times) in:' + source)
                    else:
                        (srcType, addr, mountPoint, option) = parts
                        if no == 1:
                            # first source: register the map file in auto.master
                            # NOTE(review): fnConfig is used verbatim as regex;
                            # its dots match any character — confirm intended
                            self._textTool.currentReplace(r'{}\s+{}'.format(mountBase, fnConfig), mountBase + '\t' + fnConfig)
                            self._textTool.writeCurrent()
                            self._textTool.readCurrent(fnConfig, False)
                        if srcType == 'nfs':
                            # /home/wworker/jail/www -fstype=nfs4,rw,retry=0 localhost:/var/www
                            if option == '':
                                option = 'rw'
                            if addr.find(':/') < 0:
                                self.usage('wrong syntax of addr (ip:directory expected): ' + addr)
                            self._textTool.currentReplace(r'{}\s+\S+\s+{}'.format(mountPoint, addr),
                                '{}\t-fstype=nfs4,{},retry=0\t{}'.format(mountPoint, option, addr))
                        elif srcType == 'disk':
                            if option == '':
                                option = 'rw'
                            fsType = 'ext4'
                            uuid = addr
                            if autoSelect != None:
                                # pick the device by matching the blkid output
                                list = self.storageDiskList(autoSelect)
                                if len(list) == 0:
                                    self.usage('autoselection: nothing matches ' + autoSelect)
                                elif len(list) > 1:
                                    self.usage('autoselection: more than one match:\n' + '\n'.join(list))
                                else:
                                    infos = list[0].split(' ')
                                    if len(infos) > 1 and infos[1].startswith('UUID="'):
                                        # strip 'UUID="' and the trailing quote
                                        uuid = infos[1][6:-1]
                                    else:
                                        self.usage('autoselection: missing UUID: ' + list[0])
                                    if len(infos) > 2 and infos[2].startswith('TYPE="'):
                                        fsType = infos[2][6:-1]
                            # UUID -fstype=ext4,rw,gid=1000,umask=002 :/dev/disk
                            self._textTool.currentReplace(r'{}\s+\S+'.format(uuid),
                                '{}\t-fstype={},{},umask=002\t:{}'.format(uuid, fsType, option, mountPoint))
                        else:
                            self.usage('unknown source type: {} in {}'.format(srcType, source))
                self._textTool.writeCurrent()
                if self._isRoot:
                    self._processHelper.execute(['/bin/systemctl', 'reload', 'autofs'], True)
+
+ def storageFtpService(self, argv):
+ '''Installs the ftp server proftp.
+ Arguments: [servername [port [hostname]]
+ @param argv: the program arguments, e.g. ['Backup for Wordpress', '58773', 'sky.infeos.de']
+ '''
+ hostname = servername = None
+ port = 22
+ if len(argv) > 0:
+ servername = argv[0]
+ elif len(argv) > 1:
+ port = self.integerArgument(argv[1], 22)
+ else:
+ hostname = argv[2]
+ if hostname == None:
+ hostname = base.BaseTool.BasicStatics.hostname(True)
+ if servername == None:
+ servername = hostname
+ packets = 'proftpd-basic'.split(' ')
+ self._processTool.execute(['/usr/bin/apt-get', '-y', 'install'] + packets, True)
+ text = '''<VirtualHost {}>
+ ServerName "{}"
+ MaxClients 20
+ MaxLoginAttempts 1
+ Port {}
+ # DeferWelcome prevents proftpd from displaying the servername
+ # until a client has authenticated.
+ DeferWelcome on
+
+ # Limit normal user logins, because we only want to allow
+ # guest logins.
+ <Limit LOGIN>
+ DenyGroup !www-data
+ </Limit>
+</VirtualHost>
+'''.format(hostname, servername, port)
+ fn = self.getTarget('/etc/proftpd/proftpd.conf')
+ self._textTool.readCurrent(fn, True)
+ firstLine = text.split('\n')[0]
+ self._textTool.findRegion(firstLine, True, '<Virtual', True)
+ self._textTool.replaceRegion(text, r'</Limit>')
+
+ def storageLvm(self, argv):
+ '''Installs an Locical Volume.
+ Steps:
+ create the LVM
+ create the mountpoint
+ create the automount interface for mounting
+ @param argv: the program arguments, e.g. ['VGSafe', 'LVBackup' '32G', '/media/safe/backup']
+ '''
+ (argv, options) = self.splitArgsAndOpts(argv)
+ for opt in options:
+ if True:
+ pass
+ else:
+ self.usage('unknown option: ' + opt)
+ if len(argv) < 4:
+ self.usage('too few arguments')
+ else:
+ volGroup = argv[0]
+ logVolume = argv[1]
+ size = argv[2]
+ mountPoint = argv[3]
+ device = '/dev/{}/{}'.format(volGroup, logVolume)
+ if not os.path.exists('/dev/' + volGroup):
+ self.usage('volume group not found: ' + volGroup)
+ elif re.match('^[-\w._+]+$', logVolume):
+ self.usage('illegal characters in logical volume: ' + logVolume)
+ elif re.match(r'^\d+[gGtTmM$', size) == None:
+ self.usage('illegal size: {} example: 100G'.format(size))
+ elif os.path.exists(device):
+ self.usage('logical volume already exists: ' + logVolume)
+ else:
+ self.ensureDirectory(mountPoint)
+ self._processHelper.execute(['lvcreate', '--name=' + logVolume, '--size=' + size, volGroup], True)
+ key = 'vg.' + volGroup
+ fn = '/etc/auto.' + key
+ base.FileHelper.ensureFileExists(fn, '', self._logger)
+ appl.TextTool.TextTool.replaceOrInsert(['^' + device, '{}\t-fstype=ext2\t:{}'.format(device, mountPoint), '--create-if-missing', '--max-depth=0'])
+ master = '/etc/auto.master'
+ if os.path.exists(master):
+ self._logger.error('missing ' + master)
+ else:
+ appl.TextTool.TextTool.replaceOrInsert(['.*' + fn, '/-\t' + fn, '--create-if-missing', '--max-depth=0'])
+ self._processHelper.execute(['systemctl', 'reload', 'autofs'], True)
+
    def storageSFtpService(self, argv):
        '''Installs a SecureFileTransferProtocol service.
        Steps:
            create the user if it does not exist
            add user to group www-data and user www-data to group <user>
            create the jail for chroot
            create the mount points inside the jail: remember the nodes are unique
            configure and inform the ssh daemon
            define the mount provider (must be chroot proof: we use nfs)
            define the autofs mounts
        @param argv: the program arguments, e.g. ['jonny', '/var/www', '/srv/www', '--force']
        '''
        (argv, options) = self.splitArgsAndOpts(argv)
        force = False
        for opt in options:
            if opt == '-f' or opt == '--force':
                force = True
            else:
                self.usage('unknown option: ' + opt)
        if len(argv) < 2:
            self.usage('too few arguments')
        else:
            user = argv[0]
            if re.match(r'^[\w]+$', user) == None:
                self.usage('invalid characters in username: ' + user)
            name = 'sftp_' + user
            if not force and ('auto.' + name) in base.BaseTool.BasicStatics.findFiles('/etc', 'auto.*', False):
                self.usage('the autofs name {} already exists. Use --force to allow that'.format(name))
            directories = argv[1:]
            # we need unique nodes (for mount directories inside the jail):
            nodes = dict()
            for item in directories:
                node = os.path.basename(item)
                if not os.path.isdir(item):
                    self.usage('not a directory: ' + item)
                node2 = node
                no = 0
                while node2 in nodes.values():
                    # append a counter until the node name is unique
                    no += 1
                    node2 = node + str(no)
                nodes[item] = node2
            # create the user and group if it does not exist:
            base.BaseTool.BasicStatics.ensureUserAndGroup(user, user, self._logger)
            # add user to group www-data and user www-data to group <user>:
            self._processHelper.execute(['usermod', '-a', '-G', 'www-data', user], True)
            self._processHelper.execute(['usermod', '-a', '-G', user, 'www-data'], True)
            # create the jail for chroot:
            jailDir = '/home/jails/' + user
            self.ensureDirectory(jailDir, 0o755)
            self.ensureSymbolicLink(jailDir, '/home/{}/jail'.format(user), True)
            # create the mount points inside the jail: remember: the nodes are unique
            for item in directories:
                self.ensureDirectory(jailDir + os.sep + nodes[item], 0o775)
            # configure and inform the ssh daemon:
            config = self.getTarget('/etc/ssh', 'sshd_config')
            self._textTool.readCurrent(config, True)
            self._textTool.findRegion(r'^match User ' + user, True, r'^match', False)
            self._textTool.replaceRegion('''match User {}
    ChrootDirectory {}
    ForceCommand internal-sftp
    AllowTcpForwarding no
'''.format(user, jailDir))
            self._textTool.writeCurrent()
            self._processHelper.execute(['/bin/systemctl', 'reload', 'sshd'], True)
            # define the mount provider (must be chroot proof: we use nfs)
            argv2 = []
            argv3 = [name, '/']
            for item in directories:
                argv2.append('{}:localhost:rw'.format(item))
                argv3.append('nfs|localhost:{}|{}/{}|rw'.format(item, jailDir, nodes[item]))
            self.storageNFSShare(argv2)
            # define the autofs mounts
            self.storageAutoFs(argv3)
            self._logger.log('= do not forget to set password for ' + user, 1)
+
+ def installLetsencrypt(self):
+ '''Prepare the system to make letsencrypt certificates.
+ '''
+ packets = 'python3-acme python3-certbot python3-mock python3-openssl python3-pkg-resources python3-pyparsing python3-zope.interface'.split(' ')
+ self._processTool.execute(['/usr/bin/apt-get', '-y', 'install'] + packets, True)
+ base.BaseTool.BasicStatics.ensureDirectory('/var/www/letsencrypt/.well-known/acme-challenge', self._logger)
+ base.StringUtils.toFile('/var/www/letsencrypt/.well-known//acme-challenge/hi.txt', 'Hi')
+ if os.path.isdir('/usr/share/nginx/html') and not os.path.islink('/usr/share/nginx/html/.well-known'):
+ self._logger.log('creating link /usr/share/nginx/html/.well-known => /var/www/letsencrypt/.well-known', 1)
+ os.symlink('/var/www/letsencrypt/.well-known', '/usr/share/nginx/html/.well-known')
+
+ def storageNFSShare(self, argv):
+ '''Initializes a NFS share on the server side.
+ @param argv: the arguments, e.g. ['/media/tmp:10.10.10.104']
+ '''
+ if len(argv) < 1:
+ self.usage('missing arguments')
+ else:
+ if not base.BaseTool.unitTestIsActive():
+ self._processTool.execute(['apt-get', '-y', 'install', 'nfs-kernel-server'], True)
+ self._textTool.readCurrent('/etc/exports', False)
+ for arg in argv:
+ parts = arg.split(':')
+ if len(parts) != 3:
+ self._logger.error('wrong syntax of addr: use <path>:<ip>:<option> ' + arg)
+ else:
+ (path, ip, option) = parts
+ if not os.path.isdir(path):
+ self._logger.error('not a direcory: ' + path)
+ if re.match(r'^ro|rw$', option) == None:
+ self._logger.error('unknown option: ' + option)
+ option = 'rw'
+ self._textTool.currentReplace(r'^{}\s+{}'.format(path, ip),
+ '{}\t{}({},async,no_root_squash,subtree_check)'.format(path, ip, option))
+ self._textTool.writeCurrent(None)
+ self._processTool.execute(['/usr/sbin/exportfs', '-r'], True)
+ self._processTool.execute(['/usr/sbin/exportfs', '-a'], True)
+ self._processTool.execute(['/usr/sbin/exportfs', '-v'], True)
+
+ def synchronize(self, source, target, deleteToo):
+ '''Synchronizes a directory from another.
+ Copies the newer or not existing files from the source to the target, recursive
+ @param source: the source directory
+ @param target: the target directory
+ @param deleteToo: files in target but not existing in source will be deleted
+ '''
+
+ def logError(arg):
+ self._logger.error('error while removing directory {:s}: {:s}'.format(fullTarget, arg))
+
+ sourceNodes = os.listdir(source)
+ targetNodes = os.listdir(target)
+ dirs = []
+ for node in sourceNodes:
+ fullSource = source + os.sep + node
+ fullTarget = target + os.sep + node
+ if not base.FileHelper.distinctPaths(fullSource, fullTarget):
+ self._logger('nested directories: {} / {} [{} / {}]'.format(
+ fullSource, fullTarget, os.path.realpath(fullSource), os.path.realpath(fullTarget)))
+ continue
+ doCopy = False
+ infoSource = os.lstat(fullSource)
+ if stat.S_ISDIR(infoSource.st_mode):
+ dirs.append(node)
+ else:
+ try:
+ infoTarget = os.lstat(fullTarget)
+ doCopy = infoTarget.st_mtime > infoSource.st_mtime
+ except OSError:
+ doCopy = True
+ if doCopy:
+ self._logger.log('copying {:s}'.format(fullSource), 3)
+ shutil.copy(fullSource, fullTarget)
+ if deleteToo:
+ if node in targetNodes:
+ targetNodes.remove(node)
+ if deleteToo:
+ for node in targetNodes:
+ fullTarget = target + os.sep + node
+ self._logger.log('deleting {:s}'.format(fullTarget), 3)
+ if os.path.isdir(fullTarget):
+ shutil.rmtree(fullTarget)
+ else:
+ os.unlink(fullTarget)
+ for node in dirs:
+ trgDir = target + os.sep + node
+ if os.path.exists(trgDir):
+ if not os.path.isdir(trgDir):
+ os.unlink(trgDir)
+ os.mkdir(trgDir)
+ else:
+ os.mkdir(trgDir)
+ self.synchronize(source + os.sep + node, trgDir, deleteToo)
+
    def systemInfo(self, target):
        '''Assembles interesting information about the current linux system
        into a gzipped report <target>/system.info.gz: manually installed
        packages, block devices, mounts, disk statistics, RAID and (as root)
        LVM state.
        @param target: the target directory of the report file
        '''
        start = time.time()
        # equivalent shell pipeline:
        # zcat /var/log/apt/history.log.*.gz | cat - /var/log/apt/history.log | grep -Po '^Commandline: apt-get install (?!.*--reinstall)\K.*' > $fnManuallyInstalled
        path = '/var/log/apt'
        nodes = os.listdir(path)
        packets = []
        # collect package names from the rotated (gzipped) apt history files:
        for node in nodes:
            if node.startswith('history.log') and node.endswith('.gz'):
                full = path + os.sep + node
                with gzip.open(full, 'r') as fp:
                    for line in fp:
                        if line.startswith(b'Commandline: apt-get install'):
                            # strip the 29 char command prefix, split into package names
                            # NOTE(review): the chained replace() calls look like they
                            # should collapse multiple blanks — the literals may have
                            # been whitespace-mangled; verify against the repository
                            packets += line[29:].decode().strip().replace(' ', ' ').replace(' ', ' ').split(' ')
        # ... and from the current (uncompressed) history file:
        with open('/var/log/apt/history.log', 'r') as fp:
            for line in fp:
                if line.startswith('Commandline: apt-get install'):
                    packets += line[29:].strip().replace(' ', ' ').replace(' ', ' ').split(' ')

        # keep only mount entries whose source looks like a device/path (contains '/'):
        mounts = []
        with open('/proc/mounts', 'r') as fp:
            for line in fp:
                if line.split(' ')[0].find('/') >= 0:
                    mounts.append(line)

        lsblkInfo = subprocess.check_output('/bin/lsblk')
        with open('/proc/diskstats', 'r') as fp:
            diskStats = fp.read()
        with open('/proc/mdstat', 'r') as fp:
            mdstat = fp.read()
        if self._beingRoot:
            # the LVM tools need root permissions:
            pvInfoShort = subprocess.check_output('/sbin/pvs')
            pvInfo = subprocess.check_output('/sbin/pvdisplay')
            vgInfoShort = subprocess.check_output('/sbin/vgs')
            vgInfo = subprocess.check_output('/sbin/vgdisplay')
            lvInfoShort = subprocess.check_output('/sbin/lvs')
            lvInfo = subprocess.check_output('/sbin/lvdisplay')
        fnOutput = target + os.sep + 'system.info.gz'
        with gzip.open(fnOutput, 'wb') as fp:
            fp.write(b'=== manually installed packets:\n')
            for packet in packets:
                if packet != '':
                    fp.write('apt-get install -y {:s}\n'.format(packet).encode())
            fp.write(b'\n=== lsblk:\n' + lsblkInfo)
            fp.write(b'\n=== mounts:\n' + ''.join(mounts).encode())
            fp.write(b'\n=== diskstats:\n' + diskStats.encode())
            fp.write(b'\n=== mdstat:\n' + mdstat.encode())
            if self._beingRoot:
                fp.write(b'\n=== pvs:\n' + pvInfoShort)
                fp.write(b'\n=== vgs:\n' + vgInfoShort)
                fp.write(b'\n=== lvs:\n' + lvInfoShort)
                fp.write(b'\n=== pvdisplay:\n' + pvInfo)
                fp.write(b'\n=== vgdisplay:\n' + vgInfo)
                fp.write(b'\n=== lvdisplay:\n' + lvInfo)
        # log name, size, timestamp and runtime of the created report:
        self.logFile(fnOutput, '%f: %s %t created in %r', start)
+
+ def updateApt(self, force=False):
+ '''Tests whether the last "apt update" command is younger than one day.
+ If not the command is executed.
+ @param force: True: the marker file will be removed: apt-get update is executed always
+ '''
+ doUpdate = False
+ fileMarker = self._configDir + '/last.apt.update.mrk'
+ if force:
+ doUpdate = True
+ elif not os.path.exists(fileMarker):
+ doUpdate = True
+ else:
+ status = os.lstat(fileMarker)
+ doUpdate = status.st_mtime + 86400 < time.time()
+ if doUpdate:
+ if not base.BaseTool.unitTestIsActive():
+ self._processTool.execute(['apt-get', 'update'], True)
+ # change file date:
+ base.StringUtils.toFile(fileMarker, '')
+ os.chmod(fileMarker, 0o666)
+
def usage():
    '''Returns an info about usage.
    Fixes over the previous revision: stray trailing quotes in four example
    lines removed, several typos corrected (restoration, whole, these, GByte).
    @return: the usage text as string
    '''
    return """usage: [<global_opts>] <mode>
 some actions for restoration of servers
GLOBAL_OPTS
GLOBAL_MODES
<mode>:
 btrfs <what>
  <what>:
  create-fs <device> <label> [<mount-path>] [--image=<image>:<size>] [--force]
   creates a btrfs filesystem on <device> and autofs mounting
   <device>: the device on which the btrfs filesystem is created
   <label>: the label (name) of the btrfs filesystem
   <mount-path>: the mountpoint Default: /media/<label>
   <path>: the full filename of the image
   <size>: the size of the loop device image: <count><unit>, e.g. 32G for 32 GByte or 2T for 2 TByte
   --force: works too if the filesystem seems already to exist
  create-snapshot <label-root> <label-subvol> [<label-snapshot>] [--mode=<mode>] [--read-only | -r]
   <label-root>: the mountpoint of the whole btrfs device (must be mounted)
   <label-subvol>: the label of the subvolume which is to copy
   <label-snapshot>: the label of the new snapshot: needed if missing --mode
   <mode>: dayly | weekly | monthly | now: the label of the snapshot depends of the given mode
    e.g. 'dayly' appends '.sun' to the subvol label if the current day is Sunday
   --read-only or -r: the snapshot will be not be writable
  create-subvol <label-root> <label-subvol> [<mount-path>] [--size=<size>] [--force] [--options=<mount-opts>] [--move-files]
   <label-root>: the mountpoint of the whole btrfs device (must be mounted)
   <label-subvol>: the label of the new subvolume, e.g. "backup" (should not exist)
   <mount-path>: the relative path of the subvolume. Default: <label-subvol> under /media/<label-root>
   <size>: the size of the logical volume: <count><unit>, e.g. 32G for 32 GByte or 2T for 2 TByte
   --force: works if seems the subvolume already exists
   --move-files: if the mount directory contains dirs/files they will be moved to the new subvolume
   <mount-opts>: delimited with ',': compress=zlib compress=lzo nodatacow
 init <what>
  <what>:
  apache <opts>
   installs the Apache webserver
   <opt>:
   --ports=<port-http>,<port-https>
    port for HTTP and HTTPS, e.g. --ports=81,444
  cms
   installs packages relevant for content management systems: imagemagick redis-server...
  dirs
   creates the standard directories (if needed)
  etc
   initializes /etc
  grub [--dry]
   extends /boot/grub/grub.cfg
   --dry: show infos, do not change grub.cfg
  letsencrypt
   prepares the system for creation of letsencrypt certificates
  linuxserver
   installs boot daemon, swapiness setting, useful packages
  local-bin
   initializes /usr/local/bin
  mariadb
   installs the database management system mariadb
  nginx <opts>
   installs the webserver NGINX
   <opt>:
   --well-known=<dir>
    <dir> is the directory containing a .well-known directory (used for letsencrypt)
  php <version>
   installs packages relevant for PHP
 net <what>
  <what>:
  info
   displays relevant infos
  static [ip [interface [gateway]]]
   defines an interface with a static ip
   ip: examples: 10.10.10.100 or 100
   interface: eth0 or ens18 Default: the first device given by the system
   gateway: Default: the above ip with end number 1
  virt-manager netname
   defines a "host to guest" network in a virtmanager environment
   netname: identifier of the internal network
  nameserver [ip]
   defines the DNS nameserver
 reconfigure <what>
  <what>:
  letsencrypt <mode> { <domain> | --all }
   switches the letsencrypt certificate and private certificate
   <mode>: 'on': letsencrypt certificate will be switched on. 'off': the private certificate will be switched on
   <domain>: specifies the filename in /etc/nginx/sites-enabled. With or without 'www.'
   --all: switch all domains in /etc/nginx/sites-enabled
  php [<version>]
   adapts the php ini files of the given version
   if no version is given the highest installed version will be taken
 remove <what>
  <what>:
 restore <what>
  <what>:
  clone <archive-prefix> <target>
   search archive (zip/tar), extracts it as sibling of the target
   e.g. "local=bin.tgz" becomes /usr/local/bin2
  etc
   merge passwd, shadow and group, includes restore of 'pyrshell', 'nginx'
  pyrshell
   recovers the /etc/pyrshell directory
  nginx
   recovers the /etc/nginx
 storage <what>
  <what>:
  autofs server mount-base source1 [source2 ... ] <opts>
   installs and configures automatic mounting
   server: defines the name of the configuration, e.g. the NFS server
   mount-base: '/' or a common mount point
   source: type|extern-address|mount-point|option
    option: rw: read write ro: read only
    examples:
    nfs|10.10.10.1:/media/data/www|/media/www|ro
    disk|2e8b6f9b-6fc2-4463-8c58-d812104baef9|/media/usb|rw
   <opt>:
   --auto-select=<pattern>
    for disks: a regular expression. Must be unique. See <what>="disk" below
  disks [<pattern>]
   informs about disks, e.g. UUID
   <pattern>: a regular expression for any info, e.g. disk type, disk name...
  ftp-service [port]
   installs proftp
   port: the serviced port
  nfs-share <addr1> [<addr2>...]
   Creates /etc/exports. Import: see storage autofs
   <addrX>: <directory>:<client-ip>:<option>
   <option>: rw: read write ro: readonly
   example: /media/www:10.10.10.104:rw
  sftp-service <user> <directory1> [<directory2> ...]
   installs an SFTP service with exclusive access to some directories (chroot)
   <user>: this user can login
   <directoryX>: these directories are available via SFTP
  lvm <vol-group> <log-volume> <size> <mountpoint>
   <vol-group>: volume group (must exist)
   <log-volume>: logical volume (will be created)
   <size>: the size of the logical volume: <count><unit>, e.g. 32G for 32 GByte or 2T for 2 TByte
Examples:
restoretool -q restore etc
restoretool -v3 init php 7.3
restoretool -v3 init nginx --well-known=/var/www/letsencrypt
restoretool -v3 init grub --dry
restoretool -v3 init linuxserver
restoretool -v3 net info
restoretool -v3 net static 100 ens18
restoretool -r -v3 reconfigure php 7.1
restoretool -v3 storage autofs bigtoy / "nfs|10.10.10.1:/media/data/www|/media/www|rw"
restoretool -v3 storage autofs disk --auto-select=sda1
restoretool -v3 storage ftp-service 58725
restoretool -v3 storage nfs-share "/media/www:10.10.104:rw" "/media/www/cloud:10.10.103:ro"
restoretool -v3 storage sftp-service coder /var/www /home/ws/php --force
restoretool -v3 storage disks "sd[bc]"
restoretool -v3 storage lvm sixpack 100G /media/sixpack/cl/cloud.infeos.de
restoretool -v3 storage btrfs sixpack cloud cl/cloud --size=32G
restoretool -v3 btrfs create-fs /dev/sda4 data
restoretool -v3 btrfs create-fs /dev/loop22 fs.unittest --image=/tmp/unittest.img:150M --force
restoretool -v3 btrfs create-subvol raid databases /var/lib/mysql --options=nodatacow --move-files
restoretool -v3 btrfs create-snapshot fs.system @home --mode=dayly --read-only
restoretool -v3 restore clone usr=local=bin.tgz /usr/local/bin
restoretool -v3 restore etc
restoretool -v3 reconfigure letsencrypt on www.example.de
restoretool -v3 reconfigure letsencrypt off www.example.de
restoretool -v3 reconfigure letsencrypt off --all
"""
+
def createLetsEncryptConf(root):
    '''Builds the NGINX location block needed for letsencrypt (ACME) challenges.
    @param root: the document root containing the .well-known directory
    @return: the NGINX configuration snippet as string
    '''
    template = '''
location ^~ /.well-known/acme-challenge/ {
    default_type "text/plain";
    root $ROOT;
}
# Hide /acme-challenge subdirectory and return 404 on all requests.
# It is somewhat more secure than letting Nginx return 403.
# Ending slash is important!
location = /.well-known/acme-challenge/ {
    return 404;
}
'''
    return template.replace('$ROOT', root)
+
+ def example(self):
+ '''Creates a example configuration file.
+ '''
+ example = '''# restoretool example configuration
+log.file=/var/log/local/.log
+backup.directory=/media/backup
+url.download=https://public.hamatoma.de
+
+php.memory_limit = 2048M
+php.upload_max_filesize = 512M
+php.max_file_uploads = 100
+php.post_max_size = 512M
+php.max_execution_time = 600
+php.max_input_time = 600
+'''
+ self.storeExample(example)
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/restoretool', 'script', '-', 'data.csv']
    '''
    appInfo = base.BaseTool.ApplicationInfo('restoretool', 'appl/RestoreTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = RestoreTool(options)
    (cmd, argv) = tool.handleStandardCommands(argv)
    rc = None
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'net':
        tool.network(argv)
    elif cmd in ('init', 'reconfigure'):
        # both commands may install packages: refresh the apt cache first
        tool.updateApt()
        rc = tool.init(argv) if cmd == 'init' else tool.reconfigure(argv)
    elif cmd == 'remove':
        rc = tool.remove(argv)
    elif cmd == 'restore':
        rc = tool.restore(argv)
    elif cmd == 'btrfs':
        rc = tool.btrFs(argv)
    elif cmd == 'storage':
        rc = tool.storage(argv)
    else:
        tool.usage("unknown command: " + cmd)
    if rc is not None:
        # a subcommand returned an error message: show usage with that message
        tool.usage(rc)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+svgtool: Scalable Vector Graphics tool
+
+@author: hm
+'''
+import os.path
+import sys
+import re
+import time
+import math
+import functools
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.StringUtils
+
+gSvgToolPeriod = 4
+
class Column:
    '''Implements a series of data, e.g. one dimension of a data collection.
    '''

    def __init__(self, title, parent):
        '''Constructor.
        @param title: the column's title; if it contains 'GByte', 'MByte' or
            'KByte' the raw values are divided by the respective factor on access
        @param parent: the array containing the column
        '''
        self._parent = parent
        self._title = title
        # divisor applied to the raw values when they are read (see getValue()):
        self._factor = 1
        if title.find('GByte') >= 0:
            self._factor = 1024 * 1024 * 1024
        elif title.find('MByte') >= 0:
            self._factor = 1024 * 1024
        elif title.find('KByte') >= 0:
            self._factor = 1024
        # extrema start "inverted" so the first real value always wins:
        self._min = 1E100
        self._max = -1E100
        self._average = 0.0
        self._reducedRange = None
        self._values = []
        self._desc = False
        self._asc = False
        # data type of the values ('float', 'date', ...), set by the first add()
        self._dataType = None
        self._offset = 0

    def add(self, value):
        '''Parses a value and appends it to the column.
        @param value: the value as string
        @raise ValueError: the value cannot be parsed or its type conflicts
            with the type of the previously added values
        '''
        [value, dataType] = base.StringUtils.toFloatAndType(value.strip())
        # ints are treated as floats: avoids pseudo type conflicts
        if dataType == 'int':
            dataType = 'float'
        if dataType == 'undef':
            raise ValueError(value)
        if self._dataType is None:
            self._dataType = dataType
        elif dataType != self._dataType:
            raise ValueError('mixed data types: {} / {}'.format(dataType, self._dataType))
        self._values.append(value)

    def average(self):
        '''Returns the average of the values.
        @return: the average, divided by _factor
        '''
        return self._average / self._factor

    def findMinMax(self, spreadRange, spreadFactor, maxAverageQuotient=40):
        '''Finds the minimum and the maximum of the data.
        spreadRange is given as % value. The extrema will be set in this way
        that only points inside this range are displayed.
        Example: data = [-5, 1, 2, 7, 99], spreadRange is 60%.
        the data inside the range are [1, 2, 7]. _max = 7, _min=1
        @param spreadRange: a % value: only data in this range will be displayed
        @param spreadFactor: @pre: greater or equal 1
            if abs(extremum-endOfRange) / range <= spreadFactor: the range is expanded to the extremum
            Example: data [0.5, 1, 2, 7, 99] max=7 min=1 range=7-1=6
            abs(0.5-7)/6=1.099 1.099<1.1 => _min=0.5
            abs(99-1)/6=16 16>1.1 => _max=99
        @param maxAverageQuotient: if the sum is positive and max / average
            exceeds this quotient the full value range is used (no clipping)
        '''
        if spreadRange < 100 and len(self._values) > 0:
            # if spreadRange is 100: _min and _max are already set
            minValues = []
            maxItems = len(self._values) * (100.0 - spreadRange) / 100
            # round up. +1: we want the extremum outside of the excluded range: plus one item
            countMax = int(maxItems + 0.5) + 1
            countMin = int(maxItems) + 1
            maxValues = []
            # 'total' instead of 'sum': do not shadow the builtin
            total = 0.0
            for val in self._values:
                if type(val) == str:
                    val = float(val)
                total += val
                # keep the countMin smallest values, sorted ascending:
                if len(minValues) < countMin:
                    minValues.append(val)
                    minValues.sort()
                elif val < minValues[-1]:
                    minValues[-1] = val
                    minValues.sort()
                # keep the countMax largest values, sorted ascending:
                if len(maxValues) < countMax:
                    maxValues.append(val)
                    maxValues.sort()
                elif val > maxValues[0]:
                    maxValues[0] = val
                    maxValues.sort()
            # get the maximum of the found values:
            self._min = minValues[-1]
            # get the minimum of the found values:
            self._max = maxValues[0]
            distance = self._max - self._min
            self._average = total / len(self._values)
            # expand the range to the true extrema if they are close enough:
            if maxValues[-1] - self._min <= distance * spreadFactor:
                self._max = maxValues[-1]
            if self._max - minValues[0] <= distance * spreadFactor:
                self._min = minValues[0]
            if total > 0 and self._max / self._average > maxAverageQuotient:
                # outliers dominate: fall back to the full range
                self._min = minValues[0]
                self._max = maxValues[-1]

    def extremum(self, minimumNotMaximum):
        '''Returns the minimum or the maximum of the column.
        @param minimumNotMaximum: True: returns the minimum otherwise: the maximum
        @return the minimum or the maximum of the column divided by _factor
        '''
        if minimumNotMaximum:
            return self._min / self._factor
        else:
            return self._max / self._factor

    def getRange(self):
        '''Returns the difference between maximum and minimum of the column.
        @return the difference between maximum and minimum divided by _factor
        '''
        return (self._max - self._min) / self._factor

    def normalize(self, offset):
        '''Scales the values to the average + variance.
        @pre: _values is not empty and contains numbers only
        @param offset: stored in _offset
        '''
        sumValues = functools.reduce(lambda rc, item: rc + item, self._values)
        standardDeviation = math.sqrt(functools.reduce(lambda rc, item: rc + item * item, self._values)) / len(self._values)
        average = sumValues / len(self._values)
        self._reducedRange = average + max(standardDeviation, average)
        self._offset = offset

    def getValue(self, index):
        '''Gets the index-th value of the column.
        @param index: index of _values[]
        @return the index-th value, divided by _factor
        '''
        rc = self._values[index]
        if type(rc) != float:
            rc = base.StringUtils.toFloat(rc)
        return rc / self._factor

    def toString(self, index):
        '''Returns the index-th value formatted as string.
        @param index: index of _values[]
        @return the formatted value
        '''
        value = self.getValue(index)
        rc = base.StringUtils.toString(value, self._dataType)
        return rc
+
class AxisScale:
    '''Implements the x or y axis of a graph.
    Derives the number of scale markers and the step between them from the
    value range of the related column.
    '''

    def __init__(self, column, maxScales):
        '''Constructor.
        @param column: the column info related to the scale
        @param maxScales: maxScales / 2 < scale-count <= maxScales. scale-count is the number of markers on the scale
        '''
        self._column = column
        if column._max == -1E+100:
            # extrema not yet calculated: derive them from the raw values
            column._max = functools.reduce(lambda rc, item: item if item > rc else rc, column._values, -1E+100)
            column._min = functools.reduce(lambda rc, item: item if item < rc else rc, column._values, +1E+100)
        self._scaleSize = column._reducedRange if column._reducedRange != None else column.getRange()
        # scientific notation, e.g. "2.5e+01": the first char is the leading digit
        rangeScale = "{:e}".format(self._scaleSize)
        if self._scaleSize == 0:
            # degenerated (constant) column: a single marker
            self._countScales = 1
            self._lastScale = 0
            self._scaleStep = 1
        elif column._dataType == 'date':
            self._countScales = self._scaleSize
            self._lastScale = 0
        else:
            digit = rangeScale[0]
            if digit == '1' or digit == '2' or digit == '3':
                # small leading digit: take two significant digits,
                # e.g. "2.5e+01" -> countScales 25, lastScale 25.0
                lastScale = "{:.1e}".format(column.getRange())
                self._countScales = int(lastScale[0] + lastScale[2])
                self._lastScale = float(lastScale)
            else:
                # round the range to one significant digit
                self._lastScale = float("{:.0e}".format(column.getRange()))
                self._countScales = int(digit)
            if self._countScales == 0:
                self._countScales = maxScales
            elif self._countScales < 0:
                self._countScales = - self._countScales
            # bring the marker count into (maxScales/2, maxScales]:
            while self._countScales * 2 <= maxScales:
                self._countScales *= 2
            while self._countScales > maxScales:
                self._countScales //= 2
            self._scaleStep = self._lastScale / self._countScales

    def indexData(self, index, length):
        '''Returns the data of a marker with a given index.
        @param index: the index of the marker (< _countScales)
        @param length: the length of the axis (width for x and height for y)
        @return: [posMarker, label]
        '''
        if self._countScales == 0 or self._lastScale == 0 or self._scaleSize == 0:
            posMarker = 0
            label = ''
        else:
            posMarker = int(index * length * self._lastScale / self._scaleSize / self._countScales)
            value = self._column.extremum(True) + index * self._scaleStep
            dataType = self._column._dataType
            label = "{}".format(base.StringUtils.toString(value, dataType))
            if dataType == 'datetime':
                # show the date part only when it changes between markers:
                if index == 0:
                    self._firstDate = label.split(' ')[0]
                else:
                    current = label.split(' ')
                    if current[0] == self._firstDate:
                        label = current[1]
                    else:
                        self._firstDate = current[0]
            elif dataType == 'float' or dataType == 'int':
                if label == '0.000':
                    label = "{:.2g}".format(value)
                else:
                    # strip trailing zeros and a trailing decimal point:
                    while label.endswith('0'):
                        label = label[:-1]
                    if label.endswith('.'):
                        label = label[:-1]
                    # keep the label short enough for the axis:
                    if len(label) > 6:
                        label = "{:.2g}".format(value)
        return [posMarker, label]
+
+class SvgTool (base.BaseTool.BaseTool):
+ '''Creates SVG graphics.
+ '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'svgtool.conf')
        # the parsed CSV data: one Column instance per CSV column
        self._columns = []
        # matches a numeric value, e.g. " -1.5e+3 " (decimal point or comma)
        self._rexprNo = re.compile(r'^\s*[+-]?\d+([.,]\d+([eE][+-]?\d+)?)?\s*$')
        self._color = 'black'
        self._strokeWidth = 1
        # the generated output lines (HTML and SVG)
        self._output = []
        self._fontSize = 7
        # line colors, used round robin for the y columns:
        self._colors = ['black', 'red', 'green', 'magenta', 'orange', 'brown']
+
+ def convertToMovingAverage(self, data, span = 5):
+ '''Converts an array of values inplace into an array of values with moving average.
+ @param data: IN/OUT: the array of values
+ @param span: the number of values which is used to calculate the average
+ '''
+ window = []
+ sum = 0
+ spanHalf = int(span / 2)
+ spanHalf2 = span - spanHalf
+ for ix in range(span):
+ window.append(data[ix])
+ sum += data[ix]
+ if ix >= spanHalf:
+ data[ix-spanHalf] = sum / len(window)
+ for ix in range(spanHalf2, len(data) - spanHalf):
+ sum -= window[0]
+ window = window[1:]
+ window.append(data[ix + spanHalf])
+ sum += window[-1]
+ data[ix] = sum / span
+ for ix in range(len(data) - spanHalf, len(data)):
+ sum -= window[0]
+ window = window[1:]
+ data[ix] = sum / len(window)
+
+ def diagram(self, argv, usage):
+ '''Creates a SVG diagram.
+ @param argv: arguments
+ @return: None: OK otherwise: error message
+ '''
+ rc = None
+ source = argv[0]
+ target = argv[1]
+ argv = argv[2:]
+ if not os.path.exists(source):
+ rc = "input file {} does not exist".format(source)
+ else:
+ fp = None
+ if target != '-':
+ fp = open(target, "w")
+ self.readCsv(source)
+ width = self._configuration.getInt('width', 1000)
+ height = self._configuration.getInt('height', 500)
+ if width < len(self._columns[0]._values):
+ self.shrinkData(width)
+ axisAreaWidth = self._configuration.getInt('axis.area.width', 15)
+ spreadRange = 90
+ spreadFactor = 1.1
+ maxAverageQuotient = 1.0
+ title = 'Diagram'
+ movingAverage = None
+ for arg in argv:
+ if arg.startswith('--width'):
+ width = self.integerOption(arg)
+ elif arg.startswith('--height'):
+ height = self.integerOption(arg)
+ elif arg.startswith('--axis-area-width'):
+ axisAreaWidth = self.integerOption(arg)
+ elif arg.startswith('--spread-range'):
+ spreadRange = self.integerOption(arg)
+ if spreadRange < 50 or spreadRange > 100:
+ self.usage('invalid value (allowed: 50..100): ' + arg)
+ elif arg.startswith('--moving-average'):
+ movingAverage = self.integerOption(arg, 5)
+ elif arg.startswith('--spread-factor'):
+ spreadFactor = self.floatArgument(arg)
+ elif arg.startswith('--max-average-quotient'):
+ maxAverageQuotient = self.integerOption(arg)
+ if maxAverageQuotient < 1:
+ self.usage('invalid value (allowed: >= 1): ' + arg)
+ elif arg.startswith('--title='):
+ title = arg[8:]
+ else:
+ self.usage('unknown options: ' + arg)
+ self._logger.log('start ' + title)
+ self.htmlStart(title)
+ self.svgStart(width, height)
+ self.xAxis(width, height, axisAreaWidth, 0)
+ for ix in range(len(self._columns) - 1):
+ self._color = self._colors[ix % len(self._colors)]
+ aProperty = 'stroke-dasharray="{},{}'.format(5*(ix + 1), 3)
+ for ix2 in range(ix + 1):
+ aProperty += ',1,1'
+ aProperty += '"'
+ if movingAverage != None:
+ self.convertToMovingAverage(self._columns[ix + 1]._values, movingAverage)
+ self._columns[ix + 1].findMinMax(spreadRange, spreadFactor, maxAverageQuotient)
+ self.polyline(width, height, axisAreaWidth, 0, ix+1, aProperty)
+ self.yAxis(width, height, axisAreaWidth, ix + 1, self._color)
+ self.svgEnd()
+ self.htmlLegend();
+ self.htmlEnd()
+ if fp == None:
+ for line in self._output:
+ print(line)
+ else:
+ for line in self._output:
+ fp.write(line + '\n')
+ fp.close()
+ self._logger.log('end ' + title)
+ return rc
+
    def example(self):
        '''Creates an example configuration file and an example data file (/tmp/sinus.csv).
        '''
        example = '''# svgtool example configuration
log.file=/var/log/local/svgtool.log
width=1000
height=500
axis.area.width=15
'''
        self.storeExample(example)
        global gSvgToolPeriod
        name = '/tmp/sinus.csv'
        content = 'x;sin(x);cos(x);tan(x)\n'
        # one x value per drawable pixel (width minus axis area):
        maxX = 500-15
        for ix in range(maxX):
            # the x range spans gSvgToolPeriod multiples of pi:
            x = ix * gSvgToolPeriod * 3.141592 / maxX
            # tan(x) is clipped to [-1, 1] to keep the diagram readable:
            content += '{};{};{};{}\n'.format(x, math.sin(x), math.cos(x), min(1, max(-1, math.tan(x))))
        base.StringUtils.toFile(name, content)
        self._logger.log('created: ' + name)
+
    def firstLine(self, line):
        '''Evaluates the first line.
        Searches the separator and the titles (if they exist).
        @param line: the first line to inspect
        '''
        cTab = line.count('\t')
        cComma = line.count(',')
        self._columns = []
        cSemicolon = line.count(';')
        # the most frequent separator candidate wins; the extra
        # "cSemicolon == cComma - 1" case presumably prefers ';' when ',' also
        # occurs as decimal separator inside the values — TODO confirm
        if cTab >= cComma and cTab >= cSemicolon:
            self._separator = '\t'
        elif cSemicolon >= cComma and cSemicolon >= cTab or cSemicolon > 0 and cSemicolon == cComma - 1:
            self._separator = ';'
        else:
            self._separator = ','
        titles = line.split(self._separator)
        isNumeric = True
        for title in titles:
            self._columns.append(Column(title, self))
            if self._rexprNo.match(title) == None:
                isNumeric = False
        if isNumeric:
            # there is no header: the first line already contains data;
            # store it as values and use generated titles "col1", "col2"...
            self.numericLine(line, 1)
            for ix in range(len(titles)):
                self._columns[ix]._title = "col{:d}".format(ix + 1)
+
+ def htmlEnd(self):
+ self._output.append('</body>\n</html>')
+
    def htmlLegend(self):
        '''Writes the legend of the diagram as HTML table:
        one row for the x column and one row per y column with
        average, minimum and maximum.
        '''
        xCol = self._columns[0]
        # table header (German labels: Titel=title, Schnitt=average, Werte=values):
        self._output.append('<table border="1"><thead><tr><td>Titel</td><td>Schnitt</td><td>Minimum</td><td>Maximum</td><td></td></tr></thead>\n');
        # the x column row: no average, but the number of values:
        self._output.append('<tbody>\n<tr style="color: blue"><td>{}:</td><td>{}</td><td>{}</td><td>{}</td><td>{} Werte</td></tr>\n'
            .format(xCol._title, '', base.StringUtils.toString(xCol.extremum(True), xCol._dataType, 2),
            base.StringUtils.toString(xCol.extremum(False), xCol._dataType, 2), len(xCol._values)))

        # one row per y column, colored like the related polyline:
        for ix in range(len(self._columns) - 1):
            yCol = self._columns[ix + 1]
            self._output.append('<tr style="color: {}"><td>{}:</td><td>{:.6f}</td><td>{:.6f}</td><td>{:.6f}</td><td></td></tr>\n'
                .format(self._colors[ix % len(self._colors)], yCol._title, yCol.average(), yCol.extremum(True),
                yCol.extremum(False)))
        self._output.append('</tbody>\n</table>\n')
+
+ def htmlStart(self, title):
+ '''Starts a HTML script.
+ '''
+ self._output.append('<html>\n<body>\n<h1>{}</h1>\n'.format(title))
+
+ def numericLine(self, line, lineNo):
+ '''Evaluates a "numeric" line (a list of values)
+ Searches the separator and the titles (if they exists)
+ @param line: the line to inspect
+ @param lineNo: the line number
+ '''
+ values = line.split(self._separator)
+ if len(values) != len(self._columns):
+ self._logger.error('wrong column number in line {}: {} instead of {}'.format(lineNo, len(values), len(self._columns)))
+ for ix in range(len(values)):
+ if ix < len(self._columns):
+ self._columns[ix].add(base.StringUtils.toString(values[ix], self._columns[ix]._dataType))
+
    def polyline(self, width, height, axisAreaWidth, indexX, indexY, properties = None):
        '''Converts the CSV data into a polyline.
        @param width: the length of the x dimension
        @param height: the length of the y dimension
        @param axisAreaWidth: the width of the axis area (x and y)
        @param indexX: the column index of the x data
        @param indexY: the column index of the y data
        @param properties: None or additional SVG properties for polyline, e.g. 'stroke-dasharray="5,5"'
        '''
        self._output.append('<polyline style="fill:none;stroke:{};stroke-width:{}"{}'.format(self._color, self._strokeWidth, ' ' + properties if properties != None else ''))
        line = 'points="'
        xCol = self._columns[indexX]
        yCol = self._columns[indexY]
        # lower bound avoids division by zero for constant columns:
        vWidth = max(1E-10, xCol.getRange())
        vHeight = max(1E-10, yCol.getRange())
        # the vertical pixel space left for the graph:
        vUsable = (height - axisAreaWidth)
        for ix in range(len(xCol._values)):
            # scale x linearly into [axisAreaWidth, width]:
            x = axisAreaWidth + int((xCol.getValue(ix) - xCol.extremum(True)) * (width - axisAreaWidth) / vWidth)
            yRange = yCol.extremum(False) - yCol.extremum(True)
            if yCol.getValue(ix) != None:
                # bring y into 0..max
                y = (yCol.getValue(ix) - yCol.extremum(True))
                aY0 = y
                # normalize into 0..1:
                if yRange != 0.0:
                    y = y / yRange
                if yCol._reducedRange != None and yCol._reducedRange != 0:
                    # NOTE(review): y is already normalized to 0..1 here; dividing
                    # by _reducedRange again looks suspicious — confirm intention
                    y /= yCol._reducedRange
                # SVG y coordinates grow downwards: mirror the value
                yPixel = int(vUsable - y * vUsable)
                line += "{:g},{:g} ".format(x, yPixel)
        self._output.append(line + '" />')
+
+ def putCsv(self, target):
+ '''Puts the internal columns into a CSV file
+ @param target: the full name of the result file
+ '''
+ with open(target, "w") as fp:
+ line = ''
+ for col in self._columns:
+ line += col._title + ';'
+ fp.write(line[0:-1] + "\n")
+ for ix in range(len(self._columns[0]._values)):
+ line = ''
+ for col in self._columns:
+ line += col.toString(ix) + ';'
+ fp.write(line[0:-1] + "\n")
+
    def readCsv(self, source):
        '''Reads a CSV file with the diagram data.
        The first line defines the separator and the titles (see firstLine()),
        the remaining lines are value lines.
        @param source: the filename, e.g. 'diagram/data1.csv'
        '''
        with open(source, "r") as fp:
            lineNo = 0
            for line in fp:
                line = line.strip()
                lineNo += 1
                if lineNo == 1:
                    self.firstLine(line)
                else:
                    self.numericLine(line, lineNo)
        # Calculate the extrema of columns which do not have them yet:
        count = len(self._columns) - 1
        for ix in range(count, -1, -1):
            column = self._columns[ix]
            if column._max == -1E+100:
                column._max = functools.reduce(lambda rc, item: base.StringUtils.toFloat(item) if base.StringUtils.toFloat(item) > rc else rc, column._values, -1E+100)
                column._min = functools.reduce(lambda rc, item: base.StringUtils.toFloat(item) if base.StringUtils.toFloat(item) < rc else rc, column._values, +1E+100)
        self.returnToZero()
+
    def returnToZero(self):
        '''Finds gaps in x values and sets behind every gap a "return to zero" line.
        example:
        x;y;z
        1;99;77
        2;100;70
        20;90;60
        There is a gap between line 2 and 3. Minimum gap length is 1 (between line 1 and 2)
        We insert "3;0;0" and "19;0;0"
        Result:
        x;y;z
        1;99;77
        2;100;70
        3;0;0
        19;0;0
        20;90;60
        '''
        columnX = self._columns[0]
        # find the minimal distance between x values:
        self._minGap = +1E+100
        [last, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(0))
        for ix in range(len(columnX._values) - 1):
            [current, dummy] = base.StringUtils.toFloatAndType(columnX._values[1+ix])
            if current - last < self._minGap:
                self._minGap = current - last
        # NOTE(review): 'last' is never updated inside the loop above, so the
        # distances are measured against the FIRST value only — confirm intention
        if self._minGap < 5*60:
            # lower bound: 5 minutes (x is presumably a unix timestamp — verify)
            self._minGap = 5*60
        # walk backwards so the insert positions stay valid:
        [last, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(-1))
        for ix in range(len(columnX._values) - 1, 1, -1):
            [current, dummy] = base.StringUtils.toFloatAndType(columnX.getValue(ix-1))
            if last - current > self._minGap:
                # insert two x points marking the gap borders (inserting twice at
                # the same index yields ascending order after the shift):
                columnX._values.insert(ix, last - self._minGap)
                columnX._values.insert(ix, current + self._minGap)
                for col in range(len(self._columns)):
                    if col > 0:
                        # y columns get the "return to zero" values:
                        self._columns[col]._values.insert(ix, 0)
                        self._columns[col]._values.insert(ix, 0)
            last = current
        # NOTE(review): unconditional write of the corrected data — looks like
        # leftover debug output; confirm whether it should stay
        self.putCsv('/tmp/corrected.csv')
+
+ def simpleLine(self, x1, y1, x2, y2, properties = None, color = None):
+ line = '<line x1="{}" y1="{}" x2="{}" y2="{}" stroke="{}" stroke-width="{}" {}/>'.format(
+ x1, y1, x2, y2, color if color != None else self._color, self._strokeWidth, properties if properties != None else '')
+ self._output.append(line)
+
+ def simpleText(self, x, y, text):
+ self._output.append('<text x="{}" y="{}" fill="{}" font-size="{}">{}</text>'.format(x, y, self._color, self._fontSize, text))
+
def shrinkData(self, count):
    '''Returns an array of columns with count elements per column.
    Input is self._columns.
    @pre: the first column contains the x data.
    @post: the x values (first column) of the result are equidistant.
    @post: the local extrema (minimum and maximum) will be saved
    @param count: the number of items of each column of the result
    @return: the array of the converted columns
    '''
    xValues = self._columns[0]._values
    rc = []
    if count <= 0 or len(xValues) <= count:
        # nothing to shrink: a shallow copy of the columns is enough
        rc = self._columns[:]
    else:
        # build the equidistant x column
        xOut = Column(self._columns[0]._title, rc)
        rc.append(xOut)
        step = (xValues[-1] - xValues[0]) / (count - 1)
        x = xValues[0]
        for ix in range(count):
            xOut._values.append(x)
            x += step

        # convert every y column
        for ixCol in range(len(self._columns) - 1):
            yCol = self._columns[1+ixCol]
            yValues = yCol._values
            yOut = Column(yCol._title, rc)
            rc.append(yOut)
            ixLastSrc = -1
            yMiddle = 0
            for ixTrg in range(count):
                xTrg = xOut._values[ixTrg]
                ixLastSrc += 1
                # NOTE(review): no bounds check -- ixLastSrc may run past the
                # end of yValues/xValues for unevenly distributed x values; confirm.
                yMin = yValues[ixLastSrc]
                if ixTrg == 0:
                    yOut._values.append(yMin)
                elif ixTrg == count - 1:
                    yOut._values.append(yValues[-1])
                else:
                    # collect the local extrema of all source points belonging
                    # to the current target x position
                    yMax = yMin
                    while xValues[ixLastSrc] <= xTrg:
                        if yValues[ixLastSrc] < yMin:
                            yMin = yValues[ixLastSrc]
                        elif yValues[ixLastSrc] > yMax:
                            yMax = yValues[ixLastSrc]
                        ixLastSrc += 1
                    # 4 cases: max: min: line up: line down:
                    # yHigh: a i i u d
                    # a a i u d
                    # yLow: u d
                    # xLow xHigh
                    if yOut._values[ixTrg - 1] > yMax:
                        # y[ixTrg-1] is line down or max:
                        # NOTE(review): yValues is indexed with target indices
                        # (ixTrg - 2 / ixTrg - 1) although it is a source array --
                        # mixing the two index spaces looks suspicious; confirm.
                        yOut._values.append(yMin if ixTrg <= 1 or yValues[ixTrg - 2] > yValues[ixTrg - 1] else yMiddle)
                    else:
                        # y[ixTrg-1] is line up or min
                        yOut._values.append(yMax if ixTrg <= 1 or yValues[ixTrg - 2] < yValues[ixTrg - 1] else yMiddle)
                    # NOTE(review): (yMax - yMin) / 2 is half of the range; a value
                    # "in the middle" would be (yMax + yMin) / 2 -- confirm.
                    yMiddle = (yMax - yMin) / 2
    return rc
+
def svgEnd(self):
    '''Appends the closing tag of the SVG block to the output.'''
    self._output.append('</svg>\n')
+
def svgStart(self, width, height):
    '''Starts the SVG block.
    @param width: the width of the SVG area
    @param height: the height of the SVG area
    '''
    openingTag = '<svg height="{}" width="{}">\n'.format(height, width)
    self._output.append(openingTag)
+
def xAxis(self, width, height, axisAreaWidth, indexX):
    '''Creates the x axis: base line, scale marks, labels and vertical grid lines.
    @param width: the length of the x dimension
    @param height: the length of the y dimension
    @param axisAreaWidth: the width of the axis area (x and y)
    @param indexX: the column index of the x values
    '''
    # draw the axis in blue; the previous color is restored at the end
    color = self._color
    self._color = 'blue'
    # the horizontal base line of the axis
    self.simpleLine(axisAreaWidth, height - axisAreaWidth, width, height - axisAreaWidth)
    xCol = self._columns[indexX]
    # roughly one scale mark per 50 pixel, but at most 20
    axis = AxisScale(xCol, min((width - axisAreaWidth) / 50, 20))
    y1 = height - axisAreaWidth - self._strokeWidth * 3
    y2 = height - axisAreaWidth + self._strokeWidth * 3
    for ix in range(int(axis._countScales)):
        [pos, label] = axis.indexData(ix, width - axisAreaWidth)
        x = axisAreaWidth + pos
        # the scale mark and its label below the axis
        self.simpleLine(x, y1, x, y2)
        self.simpleText(x - 10, y2 + axisAreaWidth / 2, label)
        if ix > 0:
            # a pale dashed vertical grid line over the drawing area
            self.simpleLine(x, y1 - 5, x, 0, 'stroke-opacity="0.1" stroke-dasharray="5,5"', 'rgb(3,3,3)')
    self._color = color
+
def yAxis(self, width, height, axisAreaWidth, indexY, color):
    '''Creates the y axis: base line, scale marks, labels and horizontal grid lines.
    (docstring fixed: it formerly claimed to create the x axis)
    @param width: the length of the x dimension
    @param height: the length of the y dimension
    @param axisAreaWidth: the width of the axis area (x and y)
    @param indexY: the column index of the y values
    @param color: the color used for this axis
    '''
    # draw the axis in the given color; the previous color is restored at the end
    color2 = self._color
    self._color = color
    # the vertical base line of the axis
    self.simpleLine(axisAreaWidth, 0, axisAreaWidth, height - axisAreaWidth)
    yCol = self._columns[indexY]
    # roughly one scale mark per 50 pixel, but at most 20
    axis = AxisScale(yCol, min((height - axisAreaWidth) / 50, 20))
    x1 = axisAreaWidth - self._strokeWidth * 3
    x2 = axisAreaWidth + self._strokeWidth * 3
    for ix in range(int(axis._countScales)):
        [pos, label] = axis.indexData(ix, height - axisAreaWidth)
        y = height - axisAreaWidth - pos
        self.simpleLine(x1, y, x2, y)
        # labels of additional y columns are shifted right by 30 pixel each --
        # presumably one label column per data column; confirm.
        self.simpleText(1+(indexY-1)*30, y, label)
        if indexY == 1 and ix > 0:
            # horizontal grid lines only for the first y axis
            self.simpleLine(x2 + 5, y, width, y, 'stroke-opacity="0.1" stroke-dasharray="5,5"', 'rgb(3,3,3)')
    self._color = color2
+
def usage():
    '''Returns an info about the usage of the svgtool.
    @return: the usage message (plain text)
    '''
    # fixed typos in the user visible help text: "neigbours" -> "neighbours",
    # "symetic" -> "symmetric", "windows width" -> "window width"
    return """svgtool [<opts>] <command>
 Builds Scalable Vector Graphics embedded in HTML.
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 x-y-diagram <input-file> <output-file> <opts>
 <output-file>
 '-': output will be put to the stdout otherwise: the HTML will be put to this file
 <opt>:
 --width=<width>
 the width of the drawing area in pixel. Default: 1000
 --height=<height>
 the height of the drawing area in pixel. Default: 500
 --axis-area-width=<width>
 the width of the area containing the axis and the related labels (for x and y axis). Default: 15
 --max-average-quotient=<value>
 if max/avg(values) < maxAvgQuotient: no clipping is done. Default: 5
 --moving-average=<window-length>
 prepare data with "moving average": for each value a "window" (values and neighbours, symmetric left
 and right) is used to build the average: this average is used instead of the value
 default window width: 5
 --spread-range=<value>
 a % value: only data in this range will be displayed. Default: 90
 --spread-factor
 if abs(extremum-endOfRange) / range <= spreadFactor: the range is expanded to the extremum
 Example: data [0.5, 1, 2, 7, 99] max=7 min=1 range=7-1=6
 abs(0.5-7)/6=1.099 1.099<1.1 => _min=0.5
 abs(99-1)/6=16 16>1.1 => _max=99
 --title=<title>
 Default: Diagram
example:
 svgtool -v2 x-y-diagram /tmp/sinus.csv /tmp/sinus.html --width=1920 --height=1024 "--title=Trigonometric functions from [0, 4*pi]"
"""
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/svgtool', 'run']
    '''
    if len(argv) > 2 and argv[1] == 'example':
        # "svgtool example <period>" presets the period of the example data
        global gSvgToolPeriod
        try:
            gSvgToolPeriod = int(argv[2])
        except ValueError:
            pass
    appInfo = base.BaseTool.ApplicationInfo('svgtool', 'appl/SvgTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = SvgTool(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    # fixed idiom: compare with None using identity (PEP 8)
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'x-y-diagram':
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            rc = tool.diagram(argv, usage)
    else:
        tool.usage('unknown command: ' + cmd)
    if rc is not None:
        # a subcommand returned an error message
        tool.usage(rc)
    tool._finish()
+
# Script entry point: delegate to main() with the raw argument vector.
if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import sys
+import os.path
+import fnmatch
+import re
+import time
+import tempfile
+import shutil
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.LinuxUtils
+import base.FileHelper
+import base.PythonToTypeScript
+import base.CryptoEngine
+import base.StringUtils
+
# Module level exchange variables: the last result and the last tool instance.
# Presumably used by embedding scripts or tests -- confirm against the callers.
textToolResult = None
textToolTool = None
+
class ReplaceRegionInfo:
    '''Parameter container for region oriented replacement (replace-region).'''
    def __init__(self):
        '''Constructor.'''
        # marker of the region start (None: not set) -- exact semantics are
        # defined by the consumer; TODO confirm at the usage site
        self._regionStart = None
        # marker of the region end (None: not set)
        self._regionEnd = None
        # the text the region is replaced with
        self._replacement = None
        # True: the start marker line belongs to the region
        self._startIncluded = True
        # False: the end marker line belongs to the region
        self._endExcluded = False
+
class ReplaceInfo:
    '''Parameter container for line replacements (see the replace commands).'''
    def __init__(self, pattern, line, anchor, above, maxCount):
        '''Constructor.
        @param pattern: the search pattern identifying the lines to replace
        @param line: the replacement line
        @param anchor: pattern marking the insert position if the search pattern is not found
        @param above: presumably True means insertion above the anchor -- confirm at the usage site
        @param maxCount: the maximal number of replacements per file
        '''
        self._pattern = pattern
        self._line = line
        self._anchor = anchor
        self._above = above
        self._maxCountPerFile = maxCount
        # replacements done in the currently processed file
        self._countPerFile = 0
        # region boundaries; -1 means "no region found yet"
        self._regionStart = -1
        self._regionEnd = 0
+
+class TextTool (base.BaseTool.BaseTool):
+
def __init__(self, globalOptions):
    '''Constructor.
    @param globalOptions: the basic configurations
    '''
    base.BaseTool.BaseTool.__init__(self, globalOptions, 'texttool.conf')
    self._out = []                      # collected output lines
    self._euid = os.geteuid()           # effective user id for permission tests
    self._egid = os.getegid()           # effective group id for permission tests
    self._list = False                  # option: print only the filenames
    # NOTE(review): grep() and grepOneFile() use _onlyMatching; this
    # attribute looks unused -- confirm
    self._onlyMatch = False
    self._format = None                 # option: output format with $N group references
    self._reverse = False               # option: print lines NOT matching
    self._excluded = None               # list of (dir-pattern, node-pattern) to skip
    self._missing = False               # option: print files without a hit
    self._dirMustBeWritable = False     # skip directories without write permission
    self._fileMustBeWritable = False    # skip files without write permission
    self._target = None                 # target filename pattern, see buildTargetname()
    self._hits = 0                      # number of hits/changes of the last action
    self._countPerLine = None
    self._table = None
    self._patternReference = r'[$]\d+'  # regexp for group references in formats
    self._references = []
    self._lines = []                    # content of the current file, see readCurrent()
    self._currentChanged = False        # True: _lines differs from the file content
    self._replaceInfo = None
    self._fileCount = 0                 # number of processed files, see handleOneDir()
    # fixed: was 0x7ffffff (one 'f' short); 0x7fffffff is the "unlimited"
    # sentinel used elsewhere in this class (see currentSetLine)
    self._maxFileCount = 0x7fffffff
    self._noName = False                # option: suppress the filename in the output
    self._replaceRegionInfo = None
+
+
def adaptConfiguration(self, argv):
    '''Replaces values of given variables in a configuration file.
    (docstring fixed: it formerly documented the parameters of adaptVariables())
    usage: <configuration-file> { <variables-file> | prefix=<prefix> }
    The configuration file is backed up before it is rewritten.
    @param argv: the program arguments, e.g. ['/etc/php.ini', 'prefix=php.']
    '''
    if len(argv) == 0:
        self.usage('missing <configuration-file>')
    else:
        configFile = argv[0]
        argv = argv[1:]
        if not os.path.exists(configFile):
            self.usage('missing ' + configFile)
        elif len(argv) == 0:
            self.usage('missing <variables-files> or prefix=<prefix>')
        else:
            configuration = base.StringUtils.fromFile(configFile)
            arg1 = argv[0]
            argv = argv[1:]
            if arg1.startswith('prefix='):
                # take the variables from the internal configuration
                prefix = arg1[7:]
                variables = self.findVariables(prefix)
            else:
                # take the variables from a file
                variablesFile = arg1
                if not os.path.exists(variablesFile):
                    # NOTE(review): usage() is expected to abort; otherwise
                    # 'variables' would be unbound below -- confirm
                    self.usage('missing ' + variablesFile)
                else:
                    variables = base.StringUtils.fromFile(variablesFile).split('\n')
            self._out = self.adaptVariables(variables, configuration.split('\n'))
            if self._hits > 0:
                # only rewrite the file if at least one variable changed
                self._logger.log('{} variable(s) changed in {}'.format(self._hits, configFile), 1)
                self.createBackup(configFile, base.FileHelper.pathToNode(os.path.dirname(configFile)))
                base.StringUtils.toFile(configFile, '\n'.join(self._out))
+
def adaptVariables(self, variables, configuration, logSummary = False):
    '''Replaces values of given variables in a configuration.
    @post: self._hits: the number of changed variables

    @param variables: an array of strings with variable definitions
        e.g. ['abc=123', 'command.log = "/var/log/command.log"']
    @param configuration: the configuration content (as array of text) with comments
        e.g. ["#configuration:", "abc=4", "command.log='/var/log/dummy.log'"]
    @param logSummary: True: the number of hits will be logged
    @return: the changed text, e.g. ["#configuration:", "abc=123", 'command.log = "/var/log/command.log"']
    '''
    rc = []
    # map: variable name -> full definition line
    variables2 = dict()
    self._hits = 0
    for line in variables:
        # maxsplit normalized to 1 (was 2): only the part before the first
        # '=' is used as key anyway
        parts = line.split('=', 1)
        if len(parts) <= 1:
            # no assignment (e.g. a comment): ignore
            continue
        key = parts[0].strip()
        variables2[key] = line
    self._hasChanged = False
    for line in configuration:
        parts = line.split('=', 1)
        if len(parts) <= 1:
            # not an assignment: keep unchanged
            rc.append(line)
            continue
        key = parts[0].strip()
        # fixed idiom: 'key not in variables2' instead of 'not key in keys'
        if key not in variables2:
            rc.append(line)
        else:
            value = parts[1].strip()
            value2 = variables2[key].split('=', 1)[1].strip()
            if value == value2:
                self._logger.log('already defined: {}: {}'.format(key, value), 3)
                rc.append(line)
            else:
                self._hits += 1
                self._logger.log('{}: {} => {}'.format(key, value, value2), 2)
                rc.append(variables2[key])
                self._hasChanged = True
    if self._hits > 0 and self._verboseLevel > 1 and logSummary:
        self._logger.log('{} variables changed'.format(self._hits))
    return rc
+
def buildExamples(self):
    '''Builds the files used for the examples in the usage message.
    '''
    def build(fn, content):
        # helper: write <content> to the file <fn> with a log entry
        self._logger.log('creating ' + fn, 1)
        base.StringUtils.toFile(fn, content)

    baseDir= '/usr/share/pyrshell/examples/'
    configDir = self.ensureDirectory(baseDir + 'config')
    dataDir = self.ensureDirectory(baseDir + 'data')
    dataDir2 = self.ensureDirectory(baseDir + 'data/subdir')
    safeDir = self.ensureDirectory(baseDir + 'safe')
    build(configDir + os.sep + 'php_minimal.conf', '''memory_limit = 2048M
upload_max_filesize = 512M
max_file_uploads = 100
post_max_size = 512M
max_execution_time = 600
max_input_time = 600
''')
    # NOTE(review): php.ini is copied FROM the safe directory INTO the data
    # directory, but the log message names the data directory as source and
    # the copy fails if safe/php.ini does not exist -- confirm the intention.
    fn = safeDir + os.sep + 'php.ini'
    self._logger.log('copying ' + dataDir + os.sep + 'php.ini', 1)
    shutil.copy2(fn, dataDir)
    build(dataDir + os.sep + 'sum.txt', '''water: 7.32 EUR
cheese: 13.08 EUR
total: 20.40 EUR
''')
    build(dataDir + os.sep + 'ip.out.txt', '''1: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN group default qlen 1000
 link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
 inet 127.0.0.1/8 scope host lo
 valid_lft forever preferred_lft forever
 inet6 ::1/128 scope host
 valid_lft forever preferred_lft forever
2: enp2s0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc fq_codel state UP group default qlen 1000
 link/ether 8c:16:45:92:9a:c6 brd ff:ff:ff:ff:ff:ff
 inet 192.168.2.50/24 brd 192.168.2.255 scope global enp2s0
 valid_lft forever preferred_lft forever
 inet6 fe80::8e16:45ff:fe92:9ac6/64 scope link
 valid_lft forever preferred_lft forever
3: wlp4s0: <BROADCAST,MULTICAST> mtu 1500 qdisc noop state DOWN group default qlen 1000
 link/ether 50:5b:c2:e9:38:01 brd ff:ff:ff:ff:ff:ff
''')
    build(dataDir + os.sep + 'today.sh', '''#! /bin/bash
DATE=31.2.2100
echo $DATE
''')
    build(dataDir + os.sep + 'first.addr', '''jonny@gmx.de
Jonny.Hamburger@web.de
info@whow.com
''')
    build(dataDir + os.sep + 'second.addr', '''info@whow.com
jonny@gmx.de
info@bla.com
''')
    build(dataDir + os.sep + 'test.addr', '''info@whow.com
jonny@gmx.de
info@bla.com
''')
    build(dataDir + os.sep + 'shop.addr', '''jonny@gmx.de
info@bla.com
''')
    build(dataDir2 + os.sep + 'third.addr', '''jonny@gmx.de
info@bla.com
''')
    build(configDir + os.sep + 'german.txt', '''No\tNr
First Name\tVorname
Last Name\tNachname
''')
    build(dataDir + os.sep + 'addr.csv', '''No;First Name;Last Name
1;Henry;Miller
2;Berta;Eastern
''')
+
def buildTargetname(self, nameInput):
    '''Builds the target filename from the pattern in self._target and the input filename.
    Supported placeholders: !full! !path! !node! !fn! !ext!
    @param nameInput: the input filename
    @return: the target filename
    '''
    if self._target == '!full!':
        # shortcut: the backup name is simply the input plus '~'
        return nameInput + '~'
    parts = base.FileHelper.splitFilename(nameInput)
    replacements = (
        ('!full!', nameInput),
        ('!path!', parts['path']),
        ('!node!', parts['node']),
        ('!fn!', parts['fn']),
        ('!ext!', parts['ext']),
    )
    name = self._target
    for placeholder, value in replacements:
        name = name.replace(placeholder, value)
    return name
+
def currentFind(self, regExpr, ixStart=0, ixEnd=None):
    '''Searches a regular expression in the current file.
    @param regExpr: a regular expression (re.RegExpr)
    @param ixStart: the index of the first line to inspect (None: from the start)
    @param ixEnd: None or the successor index of the last line to inspect (_lines[ixEnd] will not be inspected)
    @return: None: not found otherwise: the index of the first hit
    '''
    rc = None
    # fixed idiom: compare with None using identity (PEP 8)
    if ixStart is None:
        ixStart = 0
    if ixEnd is None:
        ixEnd = len(self._lines)
    for ix in range(ixStart, ixEnd):
        if regExpr.search(self._lines[ix]):
            rc = ix
            break
    return rc
+
def currentFind2(self, regExpr, ixStart=0, ixEnd=None):
    '''Searches a regular expression in the current file and returns the line index and the matcher.
    @param regExpr: a string or a regular expression (re.RegExpr)
    @param ixStart: the index of the first line to inspect
    @param ixEnd: None or the successor index of the last line to inspect (_lines[ixEnd] will not be inspected)
    @return: tuple (index, matcher): the line index and the matcher. (None, None): not found
    '''
    if type(regExpr) == str:
        regExpr = self.regExprCompile(regExpr, 'currentFind2')
    rc = (None, None)
    if ixEnd is None:
        ixEnd = len(self._lines)
    for ix in range(ixStart, ixEnd):
        matcher = regExpr.search(self._lines[ix])
        # fixed: the condition formerly searched a second time instead of
        # testing the already computed matcher
        if matcher is not None:
            rc = (ix, matcher)
            break
    return rc
+
def currentInsertAnchored(self, line, anchor, below=True, start=None, end=None):
    '''Inserts a line in front of or below a given anchor.
    If the anchor is not found the line is added at the end.

    @param line: the line to insert
    @param anchor: a regular expression to search: text or regexpr object
    @param below: True the insert position is below the anchor line
    @param start: None or the first index to inspect
    @param end: None or the successor index to inspect (_lines[end] is not inspected)
    @return: True: success (anchor found)
    '''
    found = False
    # fixed idiom: compare with None using identity (PEP 8)
    if start is None:
        start = 0
    if end is None:
        end = len(self._lines)
    if anchor is not None:
        if type(anchor) == str:
            anchor = self.regExprCompile(anchor, 'currentInsertAnchored')
        ixFound = None
        for ix in range(start, end):
            if anchor.search(self._lines[ix]):
                ixFound = ix + 1 if below else ix
                break
        if ixFound is not None:
            self._lines.insert(ixFound, line)
            found = True
    if not found:
        # fallback: append at the end of the inspected range
        self._lines.insert(end, line)
    self._currentChanged = True
    return found
+
def currentReplace(self, regExpr, line, anchor=None, below=False, start=None, end=None):
    '''Replaces a line containing given regular expression or inserts it.
    If not found:
        if an anchor is given: the anchor is searched.
            If found, insertion is done.
            If not found: nothing is done
        if no anchor is given:
            line is inserted at the end
    @param regExpr: a pattern string or an re.regexpr object describing the key content of the line to replace
    @param line: the line to replace (or insert)
    @param anchor: None or a regular expression (string or re.RegExpr) describing an insert point (if not found)
    @param below: True: the insert point is below the anchor
    @param start: None or the first index to inspect
    @param end: None or the successor index to inspect (_lines[end] is not inspected)
    @return: True: success
        False: anchor != None: anchor not found, nothing replaced
    '''
    if type(regExpr) == str:
        regExpr = re.compile(regExpr)
    found = False
    # fixed idiom: compare with None using identity (PEP 8)
    if start is None:
        start = 0
    if end is None:
        end = len(self._lines)
    for ix in range(start, end):
        current = self._lines[ix]
        if regExpr.search(current) is not None:
            found = True
            if self._lines[ix] != line:
                self._currentChanged = True
                self._logger.log('replaced: {} -> {}'.format(self._lines[ix], line), 3)
            else:
                # fixed: the format call formerly passed a superfluous argument
                self._logger.log('unchanged: {}'.format(self._lines[ix]), 4)
            self._lines[ix] = line
            break
    if not found and anchor is not None:
        if type(anchor) == str:
            anchor = re.compile(anchor)
        for ix in range(len(self._lines)):
            current = self._lines[ix]
            if anchor.search(current) is not None:
                found = True
                ixNew = ix if below else ix + 1
                self._logger.log('insert into [{}: {}'.format(ixNew, line), 3)
                self._lines.insert(ixNew, line)
                self._currentChanged = True
                break
    if anchor is None and not found:
        # no key and no anchor: append at the end of the inspected range
        self._lines.insert(end, line)
        self._currentChanged = True
        found = True
    return found
+
def currentSimpleInsert(self, full, key, line, anchor, below=True, mustExist=False):
    '''Replaces a line identified by a key or inserts it at a given anchor.
    If neither key nor anchor is found the line is appended at the end.

    @param full: the filename
    @param key: the key for the line to replace
    @param line: the line to replace/insert
    @param anchor: a regular expression to mark the insert position
    @param below: True the insert position is below the anchor line
    @param mustExist: True: the routine will be aborted if the file does not exist
    @return: True: success (file could be read)
    '''
    done = False
    if self.readCurrent(full, mustExist):
        replaced = self.currentSetLine(key, line)
        if not replaced:
            self.currentInsertAnchored(line, anchor, below)
        self.writeCurrent(full)
        done = True
    return done
+
def currentSetLine(self, key, value, count=1, start=None, end=None):
    '''Replaces the content of line(s) identified by a key.
    (docstring fixed: the second parameter is named value, not line)
    @param key: a regular expression (string or re.RegExpr) to search
    @param value: the new content of each hit line
    @param count: the key is searched count times in the given range [start, end[ (None: unlimited)
    @param start: None or the first index to inspect
    @param end: None or the successor index to inspect (_lines[end] is not inspected)
    @return: True: success (key found at least once)
    '''
    found = False
    regKey = self.regExprCompile(key, 'currentSetLine') if type(key) == str else key
    firstIx = start
    if count == None:
        # None means "no limit"
        count = 0x7fffffff
    for current in range(count):
        ix = self.currentFind(regKey, firstIx, end)
        if ix == None:
            break
        if self._lines[ix] != value:
            self._currentChanged = True
        self._lines[ix] = value
        # continue the search behind the hit
        firstIx = ix + 1
        found = True
    return found
+
def example(self):
    '''Writes an example configuration for the texttool.'''
    content = '''# texttool example configuration
log.file=/var/log/local/texttool.log
php.upload_max_filesize=624M
php.max_file_uploads=102
php.post_max_size=448M
php.max_execution_time=900
php.max_input_time=630
'''
    self.storeExample(content)
+
def extractExcluded(self, opt):
    '''Splits the "excluded" option value into (directory-pattern, node-pattern) pairs.
    The first character of the value is the separator of the patterns.
    @param opt: the option value, e.g. ';*.txt;text*'
    @return: a list of tuples (directory-pattern, filename-pattern)
    '''
    excluded = []
    # the separator may not be a wildcard meta character or '/'
    # fixed: '/' was listed in the error message but missing in the test
    if re.match(r'[*?\[\]/]', opt[0]):
        self.usage('wrong first char (meta character: ?*[]/) in: ' + opt)
    patterns = opt[1:].split(opt[0])
    for pattern in patterns:
        excluded.append((os.path.dirname(pattern), os.path.basename(pattern)))
    return excluded
+
def findRegion(self, regExprFirst, firstIncluding, regExprLast = None, lastIncluding = True, regExprBlock = None, onlyOneFirst = True):
    '''Finds a sequence of lines with given first and last line.
    @param regExprFirst: a regular expression (as string or re.RegExpr) to define the first line
    @param firstIncluding: True: the first line is part of the region False: the line below the first line starts the region
    @param regExprLast: a regular expression (as string or re.RegExpr) to define the last line
    @param lastIncluding: True: the last line is part of the region False: the line above the last line ends the region
    @param regExprBlock: None or a regular expression (as string or re.RegExpr) to define the block between start and last
        regExprLast and regExprBlock are exclusive: only one should be not None
    @param onlyOneFirst: False: if the regExprFirst is found it will tested not anymore
        True: after finding the regExprFirst the line is tested again: if found it belongs to the block
    @return: a tuple (ixStart, ixEnd) or (-1, x) if not found
    '''
    if type(regExprFirst) == str:
        regExprFirst = re.compile(regExprFirst)
    if type(regExprLast) == str:
        regExprLast = re.compile(regExprLast)
    if type(regExprBlock) == str:
        regExprBlock = re.compile(regExprBlock)
    self._regionStart = -1
    self._regionEnd = 0
    for ix in range(len(self._lines)):
        line = self._lines[ix]
        if self._regionStart < 0 and regExprFirst.search(line):
            self._regionStart = ix if firstIncluding else ix + 1
        elif self._regionStart >= 0:
            if not onlyOneFirst and regExprFirst.search(line):
                continue
            if regExprLast is not None and regExprLast.search(line):
                self._regionEnd = ix + 1 if lastIncluding else ix
                break
            elif regExprBlock is not None and regExprBlock.search(line) is None:
                # the block pattern does not match anymore: the region ends here
                self._regionEnd = ix + 1
                break
    if self._regionEnd <= 0:
        self._regionEnd = len(self._lines)
    # fixed: the log crashed with AttributeError when regExprLast was None
    # (block mode); the pattern for the log is taken from whatever is given
    lastPattern = regExprLast.pattern if regExprLast is not None else (
        regExprBlock.pattern if regExprBlock is not None else '<none>')
    self._logger.log('region {}..{}: [{},{}['.format(regExprFirst.pattern, lastPattern,
        self._regionStart, self._regionEnd), 3)
    return self._regionStart, self._regionEnd
+
def findVariables(self, prefix, configuration = None):
    '''Returns an array of variables defined in a configuration.
    Example: the configuration contains 'php.max_memory=2M'
    The prefix is 'php.', the result is [... 'max_memory=2M' ...]
    @param prefix: the common prefix to filter the definitions in the configuration
    @param configuration: None: the internal configuration is taken otherwise: the configuration to inspect
    @return: an array (may be empty) with the specified variables
    '''
    variables = []
    if configuration is None:
        configuration = self._configuration
    # mask the regexp meta characters of the prefix with a backslash
    # (a two step replacement avoids the backslash handling pitfalls in
    # the replacement string of re.sub)
    pattern = re.sub(r'([\[\]{}().+*])', r'~!slash!~\1', prefix)
    pattern = pattern.replace('~!slash!~', '\\')
    keys = configuration.getKeys('^' + pattern)
    for key in keys:
        if key.startswith(prefix):
            # fixed: the value must come from the inspected configuration,
            # not unconditionally from self._configuration
            line = '{}={}'.format(key[len(prefix):], configuration.getString(key))
            variables.append(line)
    return variables
+
def grep(self, argv):
    '''Searches a regular expression in files matching a wildcard expression.
    @param argv: the arguments, e.g. ['jonny', '*.txt', '--list']
    '''
    (argv, options) = self.splitArgsAndOpts(argv)
    pattern = argv[0]
    filePattern = argv[1]
    argv = argv[2:]
    excluded = None
    # reset the output related state for this run
    self._list = False
    # NOTE(review): the constructor initializes _onlyMatch, but this method and
    # grepOneFile() use _onlyMatching -- the constructor attribute looks dead; confirm.
    self._onlyMatching = False
    self._format = None
    self._out = []
    self._withLineNo = False
    maxDepth = 999
    ignoreCase = False
    # (dead string literal below, probably a leftover note)
    '''
    -i or --ignore-case
    '''
    for opt in options:
        if opt.startswith('--excluded='):
            excluded = self.extractExcluded(opt[11:])
        elif opt.startswith('--format='):
            self._format = opt[9:]
            if self._format == '':
                self.usage('format may not be empty')
        elif opt == '-i' or opt == '--ignore-case':
            ignoreCase = True
        elif opt == '--line-no':
            self._withLineNo = True
        elif opt == '--list' or opt == '-l':
            self._list = True
        elif opt.startswith('--max-depth='):
            maxDepth = self.integerOption(opt)
        elif opt == '--missing':
            self._missing = True
        elif opt == '--no-name':
            self._noName = True
        elif opt == '-o' or opt == '--only-matching':
            self._onlyMatching = True
        elif opt == '-v' or opt == '--reverse':
            self._reverse = True
        else:
            self.usage('unknown option: ' + opt)
    self._regSearch = self.regExprCompile(pattern, 'pattern', not ignoreCase)
    if self._format != None:
        # collect the group references ($1 ...) of the format, highest number first
        self._formatGroups = re.findall(r'\$([0-9]+)', self._format)
        self._formatGroups = list(map(lambda x: int(x), self._formatGroups))
        self._formatGroups.sort(reverse=True)
    self.traverse(filePattern, 'grep', maxDepth, excluded)
    msg = '\n'.join(self._out)
    base.BaseTool.setResult(msg)
    if msg != '':
        print(msg)
    # signal "nothing found" with exit code 10 (unless suppressed)
    if base.BaseTool.result() == '' and not self._exitWith0:
        sys.exit(10)
+
def grepOneFile(self, full):
    '''Searches the pattern (self._regSearch) in a given file.
    The option flags (_reverse, _missing, _list, _onlyMatching, _format,
    _noName, _withLineNo) control what is appended to self._out.
    @param full: the path and node
    '''
    self._logger.log('inspecting {} ...'.format(full), 2)
    with open(full, "rb") as fp:
        lineNo = 0
        for rawline in fp:
            lineNo += 1
            # binary read + explicit decoding tolerates mixed encodings
            line = base.FileHelper.fromBytes(rawline.rstrip())
            matcher = self._regSearch.search(line)
            if matcher == None:
                # no hit: only relevant in "reverse" mode
                if self._reverse:
                    if self._noName:
                        self._out.append(line)
                    elif self._withLineNo:
                        self._out.append('{}-{}: {}'.format(full, lineNo, line))
                    else:
                        self._out.append('{}: {}'.format(full, line))
            elif self._missing:
                # NOTE(review): with --missing the filename is appended on the
                # FIRST hit, i.e. files CONTAINING the pattern are printed --
                # that looks inverted relative to the option name; confirm.
                self._out.append(full)
                break
            elif self._list:
                # only the filename, one hit is enough
                self._out.append(full)
                break
            elif self._onlyMatching:
                if self._noName:
                    self._out.append(line)
                elif self._withLineNo:
                    self._out.append('{}-{}: {}'.format(full, lineNo, matcher.group(0)))
                else:
                    self._out.append('{}: {}'.format(full, matcher.group(0)))
            elif self._format != None:
                # expand the placeholders and the $N group references
                theFormat = self._format.replace('!full!', full).replace('!node!', os.path.basename(full)).replace('!no!', str(lineNo))
                for groupNo in self._formatGroups:
                    if groupNo <= matcher.lastindex:
                        theFormat = theFormat.replace('$' + str(groupNo), matcher.group(groupNo))
                self._out.append(theFormat)
            elif not self._missing and not self._reverse:
                # the default output: filename and hit line
                if self._noName:
                    self._out.append(line)
                elif self._withLineNo:
                    self._out.append('{}-{}: {}'.format(full, lineNo, line))
                else:
                    self._out.append('{}: {}'.format(full, line))
+
def handleOneDir(self, directory, pattern, action, depth, maxDepth=999):
    '''Searches for files matching <pattern> and processes them depending on <action>.
    @param directory: directory to process
    @param pattern: only files matching this will be processed
    @param action: the action, e.g. 'replace-or-insert'
    @param depth: the current subdirectory nesting level
    @param maxDepth: maximum subdirectory nesting level. < 0: not limited
    @return: True: success False: stop the processing
    '''
    rc = True
    if depth == 0:
        # start of the recursion: reset the file counter
        self._fileCount = 0
    self._logger.log('processing ' + directory + '/ ...', 2)
    if not os.path.isdir(directory):
        self.usage('not a directory: ' + directory)
    elif maxDepth < 0 or depth <= maxDepth:
        for node in os.listdir(directory):
            full = directory + os.sep + node
            try:
                info = os.stat(full)
            except FileNotFoundError:
                # e.g. a dangling symbolic link
                self._logger.log('no permission: ' + directory + os.sep + node, 2)
                continue
            except OSError as exc2:
                self._logger.log('os error on {}: {}'.format(directory + os.sep + node, str(exc2)), 2)
                continue
            if os.path.isdir(full):
                if maxDepth < 0 or depth < maxDepth:
                    # fixed: the write check formerly tested isReadable()
                    if self._dirMustBeWritable and not base.LinuxUtils.isWritable(info, self._euid, self._egid):
                        self._logger.log('no dir write permission: ' + directory + os.sep + node, 2)
                    elif self.notExcluded(directory, node):
                        accepted = (base.LinuxUtils.isExecutable(info, self._euid, self._egid)
                            and base.LinuxUtils.isReadable(info, self._euid, self._egid))
                        if accepted:
                            rc = self.handleOneDir(full, pattern, action, depth + 1, maxDepth)
                        else:
                            # fixed: this was logged unconditionally, even after
                            # a successful recursion
                            self._logger.log('no permission: ' + directory + os.sep + node, 2)
            elif fnmatch.fnmatch(node, pattern) and self.notExcluded(directory, node):
                if not self._isRoot and self._fileMustBeWritable and not base.LinuxUtils.isWritable(info, self._euid, self._egid):
                    self._logger.log('no write permission: ' + directory + os.sep + node, 2)
                    continue
                accepted = base.LinuxUtils.isReadable(info, self._euid, self._egid)
                if not accepted:
                    self._logger.log('no permission: ' + directory + os.sep + node, 2)
                else:
                    rc = self.handleOneFile(full, action)
                if self._fileCount >= self._maxFileCount:
                    self._logger.log('maximal file count reached: ' + str(self._fileCount), 3)
                    rc = False
                    break
    return rc
+
def handleOneFile(self, filename, action):
    '''Processes one file depending on <action>.
    @param filename: the full name of the file to process
    @param action: the action, e.g. 'replace-or-insert'
    @return: True: success False: stop the processing
    @raise Exception: if the action is unknown
    '''
    # dispatch table: action -> name of the handler method
    handlers = {
        'replace-or-insert': 'replaceOrInsertOneFile',
        'replace': 'replaceOneFile',
        'replace-region': 'replaceRegionOneFile',
        'grep': 'grepOneFile',
    }
    if action not in handlers:
        raise Exception('handleOneFile(): unknown action ' + action)
    return getattr(self, handlers[action])(filename)
+
def notExcluded(self, path, node):
    '''Tests whether a file or a directory is NOT marked as "excluded".
    @param path: the path without node
    @param node: the file's node
    @return True: the file is not excluded
    '''
    accepted = True
    if self._excluded != None:
        for dirPattern, nodePattern in self._excluded:
            isHit = (dirPattern != '' and fnmatch.fnmatch(path, dirPattern)
                or fnmatch.fnmatch(node, nodePattern))
            if isHit:
                accepted = False
                break
    if not accepted:
        self._logger.log('excluded: ' + path + os.sep + node, 2)
    return accepted
+
def random(self, argv):
    '''Prints a random number.
    syntax: random { [ <maxExcluded> ] | <min> <maxExcluded> } <opts>
    @param argv: the program arguments, e.g. ['100', '1000', '--seed=ThisIsMyPassword']
    '''
    argv, options = self.splitArgsAndOpts(argv)
    if len(argv) == 0:
        minValue, maxValue = '0', '2147483648'
    elif len(argv) == 1:
        minValue, maxValue = '0', argv[0]
    elif len(argv) == 2:
        minValue, maxValue = argv[0], argv[1]
    else:
        # fixed: more than 2 arguments formerly crashed with a NameError
        self.usage('too many arguments')
        minValue, maxValue = '0', '2147483648'
    if base.StringUtils.asInt(minValue) == None:
        # fixed: self._usage did not exist (self.usage everywhere else) and
        # the message formerly showed argv[1] instead of the minimum
        self.usage('minimum is not an integer: ' + minValue)
    elif base.StringUtils.asInt(maxValue) == None:
        self.usage('maximum is not an integer: ' + maxValue)
    random = base.CryptoEngine.CryptoEngine(self._logger)
    statusFile = None
    seedString = None
    clearSeconds = 60
    for opt in options:
        if opt.startswith('--seed='):
            seedString = opt[7:]
        elif opt.startswith('--status-file='):
            statusFile = opt[14:]
        elif opt.startswith('--clear-status-after='):
            clearSeconds = self.integerOption(opt, 60)
        else:
            self.usage('unknown option: ' + opt)
    if seedString == None:
        seedString = str(time.time())
    if statusFile == None:
        # derive a host specific status file name in the temp directory
        seedName = hex(random.hash('Hi' + random.saveSeed()))[2:]
        statusFile = '{}{}texttool.{}.seed'.format(tempfile.gettempdir(), os.sep, seedName)
    content = ''
    if clearSeconds != 0 and os.path.exists(statusFile):
        # reuse the stored seed only if it is fresh enough
        statInfo = os.stat(statusFile)
        if time.time() - statInfo.st_mtime <= clearSeconds:
            content = base.StringUtils.fromFile(statusFile)
    if content != '':
        random.restoreSeed(content)
    # NOTE(review): setSeedFromString() is called even after restoreSeed();
    # whether it overrides the restored state depends on CryptoEngine -- confirm.
    random.setSeedFromString(seedString)
    base.BaseTool.setResult(str(random.nextInt(int(maxValue), int(minValue))))
+
+ def readCurrent(self, full, mustExist=True):
+ '''Reads a given file into _lines[].
+ @param full: the filename
+ @return: True: success
+ '''
+ self._currentChanged = False
+ rc = True
+ self._lines = []
+ self._currentFile = full
+ if not os.path.exists(full):
+ if mustExist:
+ self._logger.error('missing ' + full)
+ rc = False
+ else:
+ self._logger.log('reading {}...'.format(full), 3)
+ with open(full, 'rb') as fp:
+ self._table = []
+ self._lines = []
+ for rawLine in fp:
+ line = base.FileHelper.fromBytes(rawLine.rstrip(b'\n'))
+ self._lines.append(line)
+ return rc
+
+ def removeRegion(self, first = None, last = None):
+ '''Removes the region.
+ @param first: None or the index of the first line None: self._regionFirst is taken
+ @param last: None or the index of the last line None: self._regionEnd is taken
+ '''
+ if first == None:
+ first = self._regionStart
+ if last == None:
+ last = self._regionEnd
+ if first < 0:
+ first = 0
+ if last >= len(self._lines):
+ last = len(self._lines) - 1
+ if first <= last:
+ self._logger.log('removing lines [{}..{}[...'.format(first, last))
+ self._lines = self._lines[0:first] + self._lines[last:]
+
+ def replace(self, argv):
+ '''Search a regular expression in files matching a wildcard expression.
+ @param argv: the arguments, e.g. ['grep', 'jonny', *.txt']
+ '''
+ (argv, options) = self.splitArgsAndOpts(argv)
+ self._table = None
+ self._regSearch = None
+ self._replacement = None
+ self._patternSearch = None
+ excluded = None
+ maxDepth = 1024
+ if len(argv) < 2:
+ self.usage('too few arguments')
+ elif argv[0].startswith('@'):
+ table = argv[0][1:]
+ self.readTable(table)
+ argv = argv[1:]
+ else:
+ if len(argv) < 3:
+ self.usage('too few arguments')
+ else:
+ self._patternSearch = argv[0]
+ self._regSearch = self.regExprCompile(self._patternSearch, 'pattern')
+ self._replacement = argv[1]
+ argv = argv[2:]
+ filePattern = argv[0]
+ argv = argv[1:]
+ if len(argv) == 0:
+ self._target = '!full!'
+ else:
+ self._target = argv[0]
+ argv = argv[1:]
+ for opt in options:
+ if opt.startswith('--excluded='):
+ excluded = self.extractExcluded(opt[11:])
+ elif opt.startswith('--prefix-back-reference=') or opt.startswith('-p'):
+ cc = opt[24:] if opt[1] == '=' else opt[1:]
+ if cc == '':
+ self.usage('empty prefix is forbidden: ' + opt)
+ else:
+ self._patternReference = r'[{}]\d+'.format(cc)
+ elif opt.startswith('-p'):
+ self._regReference = re.compile(r'[{}]\d+'.format(opt[24:]))
+ elif opt.startswith('--count='):
+ self._countPerLine = self.integerOption(opt)
+ elif opt.startswith('--max-depth'):
+ maxDepth = self.integerOption(opt, 1024)
+ else:
+ self.usage('unknown option: ' + opt)
+ dirMustBeWritable = self._target.find('!full!') >= 0 or self._target.find('!path!') >= 0
+ fileMustBeWritable = self._target == '!full!'
+ if self._replacement != None:
+ references = re.findall(self._patternReference, self._replacement)
+ if len(references) > 0:
+ self._references = list(set(map(lambda x: int(x[1:]), references)))
+ self._references.sort(reverse=True)
+ if len(self._references) == 0 or self._patternSearch.count('(') < self._references[0]: # )
+ self.usage('missing "(" for groups in "{}". Highest back reference {} in "{}" has no group'.format(
+ self._patternSearch, 0 if len(self._references) == 0 else self._references[0], self._replacement))
+ if filePattern.startswith('='):
+ for line in filePattern[1:].split('\n'):
+ self._out.append(self.replaceLine(line))
+ else:
+ self.traverse(filePattern, 'replace', maxDepth, excluded, dirMustBeWritable, fileMustBeWritable)
+ msg = '\n'.join(self._out)
+ base.BaseTool.setResult(msg)
+ self._logger.log('hits: {}'.format(self._hits), 1)
+ if msg != '' and self._verboseLevel > 0:
+ print(base.BaseTool.result())
+ if msg == '' and not self._exitWith0:
+ sys.exit(10)
+
+ def replaceLine(self, line):
+ '''Handle replacement for one line.
+ @param line: the input line to inspect
+ @param fpOut: the filehandle of the target file
+ @return: the (possible changed) line
+ '''
+ outline = line
+ count = 0
+ pos = 0
+ if self._regSearch != None:
+ again = True
+ while again:
+ again = False
+ matcher = self._regSearch.search(outline, pos)
+ if matcher != None:
+ self._hits += 1
+ replacement = self._replacement
+ for group in self._references:
+ try:
+ pattern = '${}'.format(group)
+ except IndexError:
+ self.usage('too few "(" for groups in search pattern.')
+ repl = matcher.group(group)
+ replacement = replacement.replace(pattern, repl)
+ outline = outline[0:matcher.start()] + replacement + outline[matcher.end():]
+ pos = matcher.end()
+ count += 1
+ again = self._countPerLine == None or count < self._countPerLine
+ else:
+ for (key, replacement) in self._table:
+ pos = 0
+ count = 0
+ again = True
+ while again:
+ again = False
+ ix = outline.find(key, pos)
+ if ix < 0:
+ again = False
+ else:
+ self._hits += 1
+ outline = outline[0:ix] + replacement + outline[ix + len(key):]
+ pos = ix + len(replacement)
+ count += 1
+ again = self._countPerLine == None or count < self._countPerLine
+ return outline
+
+ def replaceOrInsert(self, argv):
+ '''Replaces or inserts a given line into files.
+ syntax: replace-or-insert <line-pattern> <line> <file-pattern> [<opts>]
+ @param argv: the command arguments, e.g. [r'count\s*=', 'count=1', '/etc/pyrshell/dirs.d/*.conf', r'--below-anchor=\[special\]']
+ --count-files=<count>
+ '''
+ argv, options = self.splitArgsAndOpts(argv)
+ if len(argv) < 3:
+ self.usage('missing arguments')
+ else:
+ pattern = self.regExprCompile(argv[0], '<line-pattern>')
+ line = argv[1]
+ filePattern = argv[2]
+ anchor = None
+ above = False
+ isOK = True
+ maxDepth = 999
+ excluded = None
+ argv = argv[2:]
+ self._maxFileCount = 0x7ffffff
+ createIfMissing = False
+ for opt in options:
+ if opt.startswith('--above-anchor='):
+ anchor = self.regExprCompile(opt[15:], 'above anchor')
+ above = True
+ elif opt.startswith('--excluded='):
+ excluded = self.extractExcluded(opt[11:])
+ elif opt.startswith('--below-anchor='):
+ anchor = self.regExprCompile(opt[15:], 'below anchor')
+ elif opt.startswith('--file-count'):
+ self._maxFileCount = self.integerOption(opt)
+ if self._maxFileCount < 0:
+ isOK = False
+ break
+ elif opt.startswith('--max-depth'):
+ maxDepth = self.integerOption(opt)
+ if maxDepth < 0:
+ isOK = False
+ break
+ elif opt == '--create-if-missing':
+ createIfMissing = True
+ else:
+ self.usage('unknown option: ' + opt)
+ self._replaceInfo = ReplaceInfo(pattern, line, anchor, above, None)
+ if isOK:
+ if not base.FileHelper.hasWildcards(filePattern):
+ self._maxDepth = 0
+ if createIfMissing:
+ base.FileHelper.ensureFileExists(filePattern, '', self._logger)
+ self.traverse(filePattern, 'replace-or-insert', maxDepth, excluded, False, True)
+
+ def replaceOrInsertOneFile(self, filename):
+ '''Handles one file for the command 'replace-or-insert'.
+ @param filename: the file to process
+ @return: True: success False: stop processing
+ '''
+ rc = True
+ if self.currentSimpleInsert(filename, self._replaceInfo._pattern, self._replaceInfo._line,
+ self._replaceInfo._anchor, not self._replaceInfo._above):
+ rc = True
+ self._fileCount += 1
+ return rc
+
    def replaceOneFile(self, full):
        '''Handles one file for the command 'replace'.
        Copies the file line by line into the target file, replacing hits on the way.
        In table mode the file is processed as bytes, otherwise as text.
        @param full: the filename to process
        @return True: success False: stop processing
        '''
        rc = True
        # the target name is derived from self._target (may be a temporary name for in-place replace)
        target = self.buildTargetname(full)
        changed = False
        # table mode works on raw bytes (the table keys are bytes, see readTable())
        asBinary = self._table != None
        with open(full, 'rb') as fp, open(target, 'wb'if asBinary else 'w') as fpOut:
            self._logger.log('inspecting {} ...'.format(full), 3)
            lineNo = 0
            for rawLine in fp:
                if asBinary:
                    line = rawLine
                else:
                    line = base.FileHelper.fromBytes(rawLine)
                line = line.rstrip(b'\n' if asBinary else '\n')
                lineNo += 1
                out = self.replaceLine(line)
                if line != out:
                    changed = True
                fpOut.write(out + (b'\n' if asBinary else '\n'))
        if not changed:
            self._logger.log('leaving unchanged: ' + full, 4)
        else:
            # NOTE(review): self._hits is cumulative over all files, not per file — verify
            self._logger.log('replaced {} hit(s) in {}'.format(self._hits, full), 4)
        if self._target == '!full!':
            # in-place replacement: the temporary target replaces the source file
            os.unlink(full)
            os.rename(target, full)
        self._fileCount += 1
        return rc
+
+ def replaceRegion(self, replacement, regExprAnchor = None):
+ '''Replaces the last found region with the given text.
+ @param replacement: the region to replace
+ @param regExprAnchor: None or a regular expression (text or re.RegExpr) for the line to insert if no region has been found
+ '''
+ lines = replacement.split('\n')
+ if self._regionStart < 0:
+ ix = None
+ if type(regExprAnchor) == str:
+ regExprAnchor = re.compile(regExprAnchor)
+ ix = self.currentFind(regExprAnchor)
+ if ix == None:
+ self._lines += lines
+ else:
+ self._lines = self._lines[0:ix] + lines + self._lines[ix+1:]
+ self._currentChanged = True
+ elif replacement != '\n'.join(self._lines[self._regionStart:self._regionEnd]):
+ self._currentChanged = True
+ self._lines = self._lines[0:self._regionStart] + lines + self._lines[self._regionEnd:]
+ self._logger.log('replacing with {} line(s)'.format(replacement.count('\n') + 1), 3)
+ else:
+ self._logger.log('replacement is equal to region', 3)
+
+ def replaceRegionOneFile(self, full):
+ '''Handles one file for the command 'replace'.
+ @param full: the filename to process
+ @return True: success False: stop processing
+ '''
+ rc = True
+ info = self._regionReplaceInfo
+ self.readCurrent(full, True)
+ self.findRegion(info._regionStart, info._startIncluded, info._regionEnd, info._endExcluded, None, True)
+ self.replaceRegion(info._replacement)
+ self.writeCurrent(full)
+ self._fileCount += 1
+ return rc
+
+ def replaceRegionCmd(self, argv):
+ '''Replaces a region with a replacement.
+ syntax: replace-region <start> <end> <replacement> <file-pattern> [<opts>]
+ @param argv: the command arguments, e.g. ['<body>', '</body>', '<p>site locked</p>', 'index.php', '--start-excluded', ]
+ '''
+ argv, options = self.splitArgsAndOpts(argv)
+ if len(argv) < 4:
+ self.usage('missing arguments')
+ else:
+ self._regionReplaceInfo = info = ReplaceRegionInfo()
+ info._regionStart = self.regExprCompile(argv[0], '<pattern-start>')
+ info._regionEnd = self.regExprCompile(argv[1], '<pattern-start>')
+ info._replacement = argv[2]
+ filePattern = argv[3]
+ excluded = None
+ maxDepth = None
+ for opt in options:
+ if opt == '--start-excluded':
+ info._startIncluded = False
+ elif opt == '--end-included':
+ info._endExcluded = True
+ elif opt.startswith('--max-depth='):
+ maxDepth = self.integerOption(opt)
+ elif opt.startswith('--excluded='):
+ excluded = self.extractExcluded(opt[11:])
+ else:
+ self.usage('unknown option: ' + opt)
+ self.traverse(filePattern, 'replace-region', maxDepth, excluded, True, True)
+
+ def readTable(self, full):
+ '''Reads the file with the key/replacement strings.
+ @param full: the filename
+ '''
+ with open(full, 'rb') as fp:
+ self._table = []
+ for line in fp:
+ line = line.rstrip(b'\n')
+ pair = line.split(b'\t', 1)
+ self._table.append(pair)
+
+ def writeCurrent(self, full=None, forceWrite=False):
+ '''Reads a given file into _lines[].
+ @param full: the filename
+ '''
+ if forceWrite or self._currentChanged:
+ if full == None:
+ full = self._currentFile
+ with open(full, 'w') as fp:
+ self._table = []
+ for line in self._lines:
+ fp.write(line + '\n')
+ self._logger.log('writing {}...'.format(full), 3)
+
+ def traverse(self, fullPattern, action, maxDepth, excluded=None, dirMustBeWritable=False, fileMustBeWritable=False):
+ '''Traverses the directory tree and do the given action.
+ @param fullPattern: only files matching this will be processed.
+ If a path is given that is the start directory. Otherwise the current directory is used
+ @param action: the action, e.g. 'replace-or-insert'
+ @param depth: the current subdirectory nesting level
+ @param maxDepth: maximum subdirectory nesting level
+ @param excluded: None or an array of patterns for file exclusion
+ @param dirMustBeWritable: True the directory may be changed
+ @param fileMustBeWritable: True the file may be changed
+ '''
+ self._excluded = excluded
+ self._dirMustBeWritable = dirMustBeWritable
+ self._fileMustBeWritable = fileMustBeWritable
+ startDir = os.path.dirname(fullPattern)
+ if maxDepth == None:
+ maxDepth = -1
+ self.handleOneDir(startDir if startDir != '' else '.', os.path.basename(fullPattern), action, 0, maxDepth)
+
def usage():
    '''Returns the usage message of the texttool application.
    @return: a multi-line string describing all commands, options and examples
    '''
    return r"""usage: texttool [<global_opts>] <command> [<opts>] [<params>]
 Text manipulation tool
<input>: file to manipulate
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 adapt[-configuration] <configuration-file> <variable-file>
 Sets specified variables in a configuration file to given values.
 The variables are defined in a separate file.
 <configuration-file>
 the file to change
 <variable-file>
 the file containing the values to change. One variable per line, e.g. 'verbose=True'
 adapt[-configuration] <configuration-file> prefix=<prefix>
 Sets specified variables in a configuration file to given values.
 The variables are defined in the texttool configuration file.
 <configuration-file>
 the file to change
 <prefix>
 the prefix to filter the keys in the internal configuration file, e.g. 'php.'
 build-examples
 Populates the example directory: Than the below examples can be used unmodified.
 e(xecute) <statements> [<input>]
 Executes the statements on the file <input>.
 If <input> is missed the statements must contain a "read <file>" statement.
 If <input> is '-' the input is read from stdin.
 grep <pattern> <file-pattern> [<opts>]
 Search the <pattern> (a regular expression) in all files matching <file-pattern>
 <file-pattern>
 file name pattern, with wilcards *, ? [chars] and [!not chars]
 <opts>:
 --excluded=<pattern>
 shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
 --format=<pattern>
 prints the format: Wildcards: $1 first group (delimited by a parentesis pair). First from left
 placeholders: !full! (path and node), !node! (node only) '!no! (line number), e.g. "!full!-!no!: $1 $3"
 -i or --ignore-case
 the search is case insensitive
 --line-no
 the line number is displayed (behind the filename)
 -l --list
 prints the filename only
 --max-depth=<depth>
 the maximum nesting level of the directory searched in
 --missing
 lists the full filenames not containing <pattern>
 --no-name
 the filename (and linenumber) is not displayed on top of the line
 -o or --only-matching
 prints the matching string only
 -v or --reverse
 prints lines not matching the given regular expression
 exit code is 10 if no hit is found
 python-to-c <python-source> <c-target>
 Converts python source code into C like syntax (C++, C, TypeStript. This is a 80% tool: many corrections must be done manually.
 <python-source>: the file with the python source code
 <c-target>: the file with the C like syntax
 replace {} <pattern> <replacement> | @<table> {} <input> [<target-pattern>] [<opts>]
 Search the <pattern> (a regular expression) in all files matching <file-pattern> and replace it by a given string
 <pattern>
 a regular expression
 <replacement>
 may contain $<n> for back references ($1 replaces group 1...)
 @<table>
 a text file containing the strings and replacements separated by a TAB, one pair per line, e.g. "@table.txt"
 <input>
 '='<string>: the input is given as string, e.g. "=Needle"
 file name pattern, with wilcards *, ? [chars] and [!not chars].
 If the file name pattern starts with '=', take '[=]' for distinction
 <target-pattern>
 if given the name of the target file, can contain macros !full!, !path!, !node!, !fn!, !ext!
 e.g. "!path!/!fn!.replaced!ext!"
 example: !full!: /tmp/abc.def !path!: /tmp/ !node!: abc.def !fn!: abc !ext!: .def
 default: !full! (replaces the source file)
 <opts>:
 --excluded=<pattern>
 shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
 --max-depth
 0: only the directory given by <file-pattern> will be processed.
 otherwise: the maximal nesting level of subdirectories to be processed
 exit code is 10 if no hit is found
 replace-or-insert <line-pattern> <line> <file-pattern> [<opts>]
 Search for the regular expression <line-pattern> in files.
 If found the place is replaced by <line>.
 Else the <anchor> is searched. If found the line is insert
 <line-pattern>:
 a regular expression defining the line. Should contain content from <line>
 <line>:
 precondition: this line is in the content of the files
 <file-pattern>:
 a file pattern with wildcards '*' and '?'. All files matching this pattern will be processed. In all subdirectories too
 <opts>:
 --above-anchor=<anchor>
 if <line-pattern> is not found the line is inserted above the first line containing this regular expression
 --below-anchor=<anchor>
 if <line-pattern> is not found the line is inserted below the first line containing this regular expression
 --create-if-missing
 if <file-pattern> does not exists and the name does not contain wildcards it will be created
 --excluded=<pattern>
 shell wildcard pattern for excluded files and directories, e.g. ":.git:./tmp/*.bak"
 --file-count=<count>
 only <count> files will be processed
 --max-depth
 0: only the directory given by <file-pattern> will be processed.
 otherwise: the maximal nesting level of subdirectories to be processed
 replace-region <pattern-start> <pattern-end> <replacement> <file-pattern> [<opts>]
 Replace a region (some sequential lines) in a file
 <pattern-start>
 a regular expression describing the start of the region (this line is part of the region)
 if not found nothing is replaced
 <pattern-end>
 a regular expression describing the end of the region (this line is not part of the region)
 if this pattern is not found the file end is the region end
 <replacement>
 the replacement text: will be inserted instead of the old region
 <file-pattern>
 this files will be inspected
 <opts>:
 --start-excluded
 <pattern-start> is not part of the region
 --end-included
 <pattern-end> is part of the region
 --max-depth=<max-level>
 maximum nesting level
 --excluded=<pattern>
 file name pattern to exclude, e.g.':*.txt:*.bak'
 random { [ <maxExcluded> ] | <min> <maxExcluded> } <opts>
 Prints a pseudo random number between 0 or <min> (inclusive) and <maxExcluded> or 2**31 (excluded).
 If <seed> is not given a time related seed is taken. Otherwise the rand generator starts with the given seed.
 <opts>:
 --seed=<seed>
 the rand generator starts with the given seed.
 --status-file=<path>
 the status of the random generator is stored in this file. Default: /tmp/texttool.<seed>.seed or /tmp/texttool.default.seed
 script <script> [<input>]
 Executes the statements in the file <script> on the file <input>
 If <input> is missed the statements must contain a "read <file>" statement.
 If <input> is '-' the input is read from stdin.
 if <script> is '-' the script is read from stdin.
Example:
texttool build-examples
texttool -v3 adapt /usr/share/pyrshell/examples/data/php.ini /usr/share/pyrshell/examples/config/php_minimal.conf
texttool adapt-configuration /usr/share/pyrshell/examples/data/php.ini prefix=php.
texttool execute 'fb "[.]";p "host: ";p20' /etc/hostfile
texttool --log=/var/log/general.log script build_summary.txt calculation.csv
texttool grep '\bopen\b.*rb' '/usr/share/pyrshell/base/*.php' '--excluded=:*file*:*text*'
texttool grep 'total:\s+([0-9.]+)' /usr/share/pyrshell/examples/data/sum.txt --line-no '--format=Sum: $1'
texttool grep StringUtils '/usr/share/pyrshell/*.py' --missing --list
ip | texttool grep "^(\d: \w+)|(\s*inet [0-9.]+" --only-matching
texttool grep 'total:' /usr/share/pyrshell/data/sum.txt --reverse
texttool grep -i --no-name -v jonny /usr/share/pyrshell/examples/data/first.addr
texttool python-to-c /usr/share/pyrshell/base/ThreadLogger.py /tmp/stringutils.ts
texttool replace 'DATE=\S+' 'DATE=1.7.2019' /usr/share/pyrshell/examples/today.sh
texttool replace jonny@gmx.de mr.universe@gmx.de '/usr/share/pyrshell/examples/data/*.sql' '!path!universe_!node!.out' '--excluded=:shop.sql:test*.sql --max-depth=0'
texttool replace @table.changes /db/old.sql /db/new.sql
texttool replace hot cool '=should be hot'
texttool replace @table.txt "*.txt" "!full!.new" --excluded=.git;*draft*
texttool replace-or-insert '^directory\s*=' "directory=/etc" /usr/share/pyrshell/examples/inf.de.conf '--below-anchor=^\[files\]$' --create-if missing
texttool replace-region '<body>' '</body>' '<p>no access!</p>' index.html --start-excluded --max-depth=0
texttool replace-region '<h[1-9]>' '</p>' '<p>no access!</p>' index.html --end-included
"""
+
def main(argv):
    '''The main routine of the texttool application.
    @param argv: the program arguments, e.g. ['/usr/local/bin/texttool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('texttool', 'appl/TextTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = TextTool(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'adapt-configuration' or cmd == 'adapt':
        rc = tool.adaptConfiguration(argv)
    elif cmd == 'build-examples':
        # fix: the dead "or cmd == 'adapt'" alternative has been removed
        # ('adapt' is already consumed by the branch above)
        rc = tool.buildExamples()
    elif cmd == 'execute' or cmd == 'e':
        # rc = tool.execute(argv)
        tool._logger.log('not implemented: ' + cmd)
    elif cmd == 'grep':
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            # fix: grep was called even after the usage error (missing else)
            rc = tool.grep(argv)
    elif cmd == 'python-to-c':
        if len(argv) < 2:
            tool.usage('missing arguments')
        else:
            converter = base.PythonToTypeScript.PythonToTypeScript(tool, TextTool(options))
            tool.readCurrent(argv[0], True)
            converter.convert()
            converter._typeScript.writeCurrent(argv[1], True)
    elif cmd == 'random':
        tool.random(argv)
    elif cmd == 'replace':
        rc = tool.replace(argv)
    elif cmd == 'replace-or-insert':
        rc = tool.replaceOrInsert(argv)
    elif cmd == 'replace-region':
        rc = tool.replaceRegionCmd(argv)
    elif cmd == 'script':
        rc = tool.script(argv)
    elif cmd == 'manual':
        rc = tool.manual(argv)
    else:
        tool.usage("unknown command: " + cmd)
    base.BaseTool.setLatestTool(tool)
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
# script entry point: delegate to main() with the program arguments
if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 07.05.2019
+
+@author: hm
+'''
+import sys
+import os.path
+import time
+import re
+import datetime
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.JavaConfig
+import net.HttpClient
+import base.BaseTool
+import net.EMail
+
class UrlChecker (base.BaseTool.BaseTool):
    '''Periodically checks whether a set of URLs is reachable by HTTP(S)
    and notifies the administrator(s) by email when the status changes.
    '''

    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the global program options (e.g. verbose level)
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'urlchecker.conf', False, 'urls.d')
        self._client = net.HttpClient.HttpClient(self._verboseLevel, self._logger)
        # the round robin list of URL configurations (JavaConfig instances)
        self._urlInfo = []
        self._indexUrlInfo = 0
        self._data = None
        # per URL: None (online) or the datetime when the offline state was detected
        self._urlError = []
        self._rexprEmail = re.compile(r'^[-.\w]+@[-.\w]+?\.[a-zA-Z]+$')

    def check(self):
        '''Checks the next URL of the round robin list.
        @return: [url, indication] url: the URL which has been checked
            indication: None: site online otherwise: the offline indication message
        '''
        currentIndex = self._indexUrlInfo
        current = self._urlInfo[currentIndex]
        # advance the round robin pointer
        self._indexUrlInfo = (currentIndex + 1) % len(self._urlInfo)
        url = current.getString('url')
        self._data = None
        indication = None
        data = self._client.getContent(url)
        if data is None:
            indication = 'not reachable'
        # verify that all configured markers (needle.1, needle.2, ...) occur in the content
        no = 1
        while indication is None and no > 0:
            needle = current.getString('needle.{}'.format(no))
            if needle is None:
                no = 0
            else:
                if data is not None and not data.decode('utf-8').find(needle) >= 0:
                    indication = 'missing marker in content: ' + needle
                else:
                    no += 1
        # verify that all configured regular expressions (regexpr.1, ...) match the content
        no = 1
        while indication is None and no > 0:
            needle = current.getString('regexpr.{}'.format(no))
            if needle is None:
                no = 0
            else:
                if data is not None and re.match(needle, data.decode('utf-8')) is None:
                    indication = 'missing regular expression in content: ' + needle
                else:
                    no += 1
        return [url, indication]

    def close(self):
        '''Frees the resources.
        '''
        if self._client is not None:
            # fix: close() was called with a superfluous argument (self)
            self._client.close()
            self._client = None

    def configFromNginx(self, argv):
        '''Builds configuration files from an NGINX configuration.
        @param argv: arguments, e.g. ['/etc/nginx/sites-enabled', 'bigtoy', '/opt/new-config']
        '''
        if len(argv) < 2:
            self.usage('missing arguments')
        else:
            srcDir = argv[0]
            trgDir = argv[2] if len(argv) > 2 else self._additionalConfigDir
            if not os.path.isdir(srcDir):
                self.usage('not a directory: ' + srcDir)
            elif not os.path.isdir(trgDir):
                # fix: the module level function usage() (which takes no message) was called here
                self.usage('not a directory: ' + trgDir)
            else:
                remoteHost = argv[1]
                rexprHost = re.compile(r'^\s*server_name\s+([-.\w]+)')
                for node in os.listdir(srcDir):
                    full = srcDir + os.sep + node
                    if os.path.isdir(full):
                        continue
                    lines = base.BaseTool.BasicStatics.grep('server_name', full)
                    # robustness: skip files without a server_name entry (avoids IndexError)
                    if not lines:
                        continue
                    matcher = rexprHost.match(lines[0])
                    if matcher is not None:
                        domain = matcher.group(1)
                        self.createConfig(domain, remoteHost, trgDir)

    def createConfig(self, domain, remoteHost, trgDir):
        '''Creates one URL checker configuration file.
        @param domain: the domain of the website
        @param remoteHost: this server hosts the website
        @param trgDir: the target directory for the configuration file
        '''
        full = trgDir + os.sep + domain + '-' + remoteHost + '.conf'
        url = self._client.getRealUrl('http://{}'.format(domain))
        content = self._client.getContent(url, 1)
        with open(full, "w") as fp:
            fp.write('# created by urlchecker\n')
            fp.write('server={}\n'.format(remoteHost))
            fp.write('domain={}\n'.format(domain))
            fp.write('url={}\n'.format(url))
            fp.write('#needle.1=<body>\n')
            fp.write('#rexpr.1=[a-z]+\n')
            if content != None:
                # NOTE(review): replaces the two-character sequence '\n' in the content,
                # not real newlines — verify that this is intended
                fp.write('content={}\n'.format(content.decode('utf-8').replace('\\n', '\n')))

    def example(self):
        '''Creates an example configuration file.
        '''
        text = '''# webdashserver example configuration
log.file=/var/log/local/urlchecker.log
# used as sender info in the email
service.host={}
# all URL checks will be done in interval (in seconds)
checker.interval=3600
# == Email data for error report: ==
send.always=False
# Receiver of the error messages: may be a blank separated list of email addresses
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.neutral@gmx.de
smtp.user=hm.neutral@gmx.de
smtp.code=sEcReT
smtp.tls=True
'''.format(base.BaseTool.BasicStatics.hostname(True))
        self.storeExample(text)

    def isEmail(self, email):
        '''Tests whether a string is an email address.
        @param email: string to test
        @return True: email is correct
        '''
        rc = self._rexprEmail.match(email) is not None
        return rc

    def readConfigs(self):
        '''Reads all configuration files and builds the round robin list of URLs.
        The URLs are ordered in a way that the hosts alternate as much as possible.
        '''
        def sumLengths(array):
            # returns the total number of entries in a list of lists
            rc = 0
            for ix in range(len(array)):
                rc += len(array[ix])
            return rc
        path = self._configuration.getString('configuration.path', '/etc/pyrshell/urls.d')
        if not os.path.exists(path):
            self._usage('configuration path not found: ' + path)
        else:
            files = os.listdir(path)
            hosts = []
            arrays = []
            # the host is encoded in the filename: <domain>-<host>.conf
            rexprFile = re.compile(r'.*-(\w+)\.conf$')
            for aFile in files:
                if aFile.endswith('.conf'):
                    matcher = rexprFile.match(aFile)
                    host = 'default' if matcher is None else matcher.group(1)
                    try:
                        ix = hosts.index(host)
                    except ValueError:
                        # a new host: create its (empty) URL list
                        ix = len(hosts)
                        hosts.append(host)
                        arrays.append([])
                    full = path + os.sep + aFile
                    config = base.JavaConfig.JavaConfig(full, self._logger)
                    if config.getString('url') is None:
                        # fix: the filename argument was missing in the format() call (IndexError)
                        self._logger.error('{}: missing entry url=<url>. File ignored'.format(full))
                    else:
                        arrays[ix].append(config)
            totalCount = sumLengths(arrays)
            if totalCount == 0:
                self.usage('no valid configuration found: no URL specified')
            else:
                countHosts = len(hosts)
                while sumLengths(arrays) > 0:
                    # take the first entry of each host to alternate the hosts
                    for ix in range(countHosts):
                        if len(arrays[ix]) > 0:
                            self._urlInfo.append(arrays[ix][0])
                            del arrays[ix][0]
                            self._urlError.append(None)
                    # distribute the rest of the entries per host
                    for ix in range(countHosts):
                        for ix2 in range(totalCount // countHosts - 1):
                            if len(arrays[ix]) > 0:
                                self._urlInfo.append(arrays[ix][0])
                                del arrays[ix][0]
                                self._urlError.append(None)
                # fix: the original logged the stale loop variable ix for every entry
                for ix, info in enumerate(self._urlInfo):
                    self._logger.debug('{}: {} {}'.format(ix, info.getString('server', '?'), info.getString('url')))

    def sendStatusEmail(self, subject, message):
        '''Sends a status email to the configured administrator(s).
        @param subject: the subject of the email (prefixed with the service host)
        @param message: the message to send
        '''
        text = message
        myHost = self._configuration.getString('service.host')
        email = net.EMail.EMail(myHost + ': ' + subject, text)
        receipients = self._configuration.getString('admin.email').split(' ')
        host = self._configuration.getString('smtp.host')
        port = self._configuration.getInt('smtp.port', 0)
        sender = self._configuration.getString('smtp.sender')
        user = self._configuration.getString('smtp.user')
        code = self._configuration.getString('smtp.code')
        withTls = self._configuration.getString('smtp.tls') == 'True'
        if not (self.isEmail(receipients[0]) and host and port > 0 and port < 0xffff and self.isEmail(sender) and code):
            self._logger.error('wrong smtp data: receiver: {} host: {} port: {} user: {} sender: {}'.format(receipients[0], host, port, user, sender))
        else:
            email.setSmtpLogin(host, port, user, code, withTls)
            # additional receivers become CC
            cc = None if len(receipients) < 2 else receipients[1:]
            email.sendTo(sender, receipients[0], cc)
            self._logger.debug('email sent to ' + receipients[0])

    def sendEmailStatusChange(self, url, indication, timeOffline = None):
        '''Sends an email about a changed online/offline status.
        @param url: the error status of this url has been changed
        @param indication: None: the status has changed to online
            Otherwise: the indication of the offline state
        @param timeOffline: None: the status has changed to offline
            Otherwise: the status has changed to online. The offline status has started at this time
        '''
        if timeOffline is None:
            subject = 'website offline ' + url
            message = 'Indication: ' + indication
        else:
            subject = 'website again online ' + url
            # fix: datetime has no format() method; strftime() is the correct call
            message = 'The website {} was offline since {}'.format(url, timeOffline.strftime('%Y.%m.%d %H:%M'))
        self.sendStatusEmail(subject, message)

    def sendEmailOnline(self, url, timeOffline):
        '''Sends an email because the website is online again.
        @param url: the error status of this url has been changed
        @param timeOffline: the offline status has started at this time
        '''
        subject = 'website again online ' + url
        message = 'The website {} is online again.\nOffline: {} - {}'.format(
            url, timeOffline.strftime('%Y.%m.%d %H:%M'), datetime.datetime.now().strftime('%Y.%m.%d %H:%M'))
        self.sendStatusEmail(subject, message)

    def sendEmailOffline(self, url, indication):
        '''Sends an email because the website went offline.
        @param url: the error status of this url has been changed
        @param indication: the indication of the offline state
        '''
        subject = 'website offline ' + url
        message = 'Indication: ' + indication
        self.sendStatusEmail(subject, message)

    def service(self):
        """Runs the check loop: checks one URL per round and sleeps one time slice.
        Sends a notification email whenever the online/offline state of a URL changes.
        """
        interval = self._configuration.getInt('checker.interval', 3600)
        # one time slice per URL: all URLs are checked once per interval
        # (renamed from 'slice', which shadows the builtin)
        sliceSeconds = interval // len(self._urlInfo)
        self._logger.debug("service: domains: {} interval: {} sec slice: {} sec".format(len(self._urlInfo), interval, sliceSeconds))
        while True:
            currentIndex = self._indexUrlInfo
            offlineStart = self._urlError[currentIndex]
            [url, indication] = self.check()
            if indication is None:
                if offlineStart is not None:
                    self.sendEmailOnline(url, offlineStart)
                    # fix: without resetting the error state the "online again" email
                    # would be repeated on every round
                    self._urlError[currentIndex] = None
            elif offlineStart is None:
                if indication == 'not reachable':
                    self.sendEmailOffline(url, indication)
                else:
                    self.sendStatusEmail('website content changed: ' + url, indication)
                self._urlError[currentIndex] = datetime.datetime.now()
            # fix: the original slept the whole interval after each single URL check,
            # stretching a full round to interval * count seconds
            time.sleep(sliceSeconds)

    def testEmail(self):
        '''Sends a test email to verify the SMTP configuration.
        '''
        self.sendStatusEmail("test email", "Success")
+
def usage():
    '''Returns the usage message of the urlchecker tool.
    '''
    text = """usage: urlchecker [<global_opts>] <command>
 Checks whether a amount of URLs are reachable by http(s).
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 test
  send a test email
 service or daemon
  starts the monitoring daemon
 config-from-nginx <nginx-dir> [<target-dir>]
  create configuration files from all sites defined in a nginx configuration, e.g. /etc/nginx/sites-enabled
  <target-dir>the created files will be stored there. Default: /etc/pyrshell/urls.d
  Note: <nginx-dir> should be imported from the observed website
 example
  Creates an example configuration
 service
  Starts the TCP server
Examples:
urlchecker -q install
urlchecker -v3 service
"""
    return text
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/urlchecker', 'service']
    '''
    serviceInfo = base.BaseTool.ServiceInfo('urlchecker', 'www-data', 'www-data',
        'A monitor observing websites', 'CHECKER', None)
    appInfo = base.BaseTool.ApplicationInfo('urlchecker', 'appl/UrlChecker.py', usage, serviceInfo)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = UrlChecker(options)
    (cmd, argv) = tool.handleStandardCommands(argv)
    rc = None
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    # fix: the chain was broken here ("if" instead of "elif"): with cmd == None
    # the trailing else crashed on 'unknown command: ' + None, with cmd == ''
    # usage() was called twice
    elif cmd == 'service' or cmd == 'daemon':
        tool.readConfigs()
        rc = tool.service()
    elif cmd == 'test':
        rc = tool.testEmail()
    elif cmd == 'config-from-nginx':
        rc = tool.configFromNginx(argv)
    elif cmd == 'reload':
        # rc = tool.service()
        pass
    else:
        tool.usage('unknown command: ' + cmd)
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
if __name__ == '__main__':
    # fix: debugging leftover main(['dummy', 'daemon']) replaced by the real
    # command line, consistent with the sibling tools
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import time
+import sys
+import os
+import re
+import datetime
+import random
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.StringUtils
+import base.FileHelper
+import base.BaseTool
+import net.TcpClient
+import base.LinuxUtils
+
class TaskInfo:
    '''Bookkeeping for one periodically executed client task.'''

    def __init__(self, name, interval, parent):
        '''Constructor.
        @param name: the task's name
        @param interval: the repeating interval, will be multiplied by the time slice
        @param parent: the owning tool, used for logging
        '''
        self._name = name
        self._parent = parent
        if not 0 < interval <= 3600:
            self._parent._logger.error('invalid interval in {}: {}'.format(name, interval))
        self._interval = interval
        # start at a random phase so not all tasks fire in the same slice
        self._current = random.randint(0, interval)

    def next(self):
        '''Tests whether the task should be triggered.
        @return True: the task should be triggered
        '''
        self._current += 1
        shouldRun = self._current >= self._interval
        if shouldRun:
            self._current = 0
        self._parent._logger.log('{}: current/interval: {}/{} rc: {}'.format(self._name, self._current, self._interval, 'T' if shouldRun else 'F'), 4)
        return shouldRun
+
class WebDashClient (base.BaseTool.BaseTool):
    '''A TCP client collecting local system data (filesystems, stress, users,
    clouds) and sending it to the webdash server.
    '''

    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'webdashclient.conf')
        self._connection = None
        self._lastStress = None
        self._loopNo = 0
        # msec since epoch
        self._timeLastStress = None
        self._hostName = self._configuration.getString('host.name', '?')
        if self._hostName == '?':
            self._logger.error('missing host.name in configuration: ' + self._configuration._filename)
        tasks = self._configuration.getString('tasks').strip().split(':')
        # syntax of one task: <name>-<interval>, e.g. "filesystems-5"
        rexprTasks = re.compile(r'^(filesystems?|stress(?:es)?|users?|clouds?)-(\d+)$')
        self._tasks = []
        for task in tasks:
            matcher = rexprTasks.match(task)
            if matcher is None:
                self._logger.error('unknown task or syntax error (<name>-<interval>): ' + task)
            else:
                info = TaskInfo(matcher.group(1), int(matcher.group(2)), self)
                self._logger.log('adding task {} ({} sec)...'.format(info._name, info._interval), 2)
                self._tasks.append(info)

    def close(self):
        '''Frees the resources.
        '''
        self.disconnect()

    def connect(self):
        '''Connects to the server, retrying every 5 seconds until success.
        @return: None: success otherwise: error message
        '''
        rc = None
        host = self._configuration.getString('server.host', '127.0.0.2')
        port = self._configuration.getInt('server.port', 58200)
        self._logger.log('connecting to {}:{}'.format(host, port), 2)
        again = True
        start = time.time()
        while again:
            again = False
            try:
                self._connection = net.TcpClient.TcpClient(host, port, self._logger)
            except Exception:
                # fix: was a bare "except:" which also swallowed KeyboardInterrupt,
                # making the retry loop impossible to abort
                again = True
                time.sleep(5)
        self._logger.log('connection found after {:.1f} second(s)'.format(time.time() - start), 2)
        return rc

    def createConfigExample(self):
        '''Creates a example configuration file.
        '''
        self.ensureDirectory(self._configDir)
        filename = self._configDir + os.sep + 'webdashclient.conf'
        # to avoid errors a dummy could be created earlier:
        if base.StringUtils.hasContent(filename, '# dummy created'):
            filename = self._configDir + os.sep + 'WebDashClient.example'
        base.StringUtils.toFile(filename, '''# WebDashClient example configuration
log.file=/var/log/local/WebDashClient.log
server.port=58201
server.host=127.0.0.1
io.pattern=^(sda|sdb)$
net.pattern=^(eth0|wlan0)$
#host.name=caribou
# time slice in seconds
slice=60
# possible tasks: filesystems processes users clouds
tasks=filesystems-5:stress-2
#fs.exclude=/media/dummy:/media/dummy2
''')
        self._logger.log('created: ' + filename)

    def disconnect(self):
        '''Frees the resources.
        '''
        if self._connection is not None:
            self._connection.close()
            self._logger.log('disconnected', 2)
            self._connection = None

    def example(self):
        '''Creates an example configuration.
        '''
        text = '''# WebDashClient example configuration
log.file=/var/log/local/WebDashClient.log
server.port=58201
server.host=127.0.0.1
#host.name=caribou
io.pattern=^sd[ab]$
net.pattern=^ens
#cloud.basedir=/media/clouds
#cloud.excluded=/:dummy
# time slice in seconds: the base unit of the task handling intervals
slice=60
# possible tasks: filesystems processes users clouds
tasks=filesystems-5:stress-2
#fs.excluded=/media/dummy:/media/dummy2
'''
        self.storeExample(text)

    def task(self, task, inLoop=False):
        '''Executes one task.
        @param task: name of the task to execute
        @param inLoop: True: the task is executed multiple times
        @return: None: success otherwise: error message
        '''
        rc = None
        if task == 'filesystems':
            self.taskFileSystem()
        elif task == 'stress':
            if not inLoop:
                # stress values are deltas: outside the loop no previous probe
                # exists, so take one and wait a second
                self.taskStress()
                time.sleep(1)
            self.taskStress()
        elif task == 'users':
            self.taskUsers()
        elif task == 'processes':
            self.taskProcesses()
        elif task == 'clouds':
            self.taskClouds()
        else:
            rc = "unknown task: " + task
        return rc

    def taskFileSystem(self):
        '''Finds out the data of the filesystems and sends it.
        @return: None: success otherwise: error message
        '''
        self._logger.log('taskFileSystem()...', 4)
        rc = None
        infoList = base.LinuxUtils.diskFree()
        excludes = ':' + self._configuration.getString('fs.excluded', '') + ':'
        self._logger.log('taskFileSystem(): found: {} items excluded: {}'.format(len(infoList), excludes), 2)
        for info in infoList:
            # info: [path, stat.f_blocks*blocksize, stat.f_bfree*blocksize, stat.f_favail*blocksize]
            ix = excludes.find(':' + info[0] + ':')
            if ix < 0:
                total = int(info[1])
                free = int(info[3])
                used = total - int(info[2])
                args = [self._hostName, info[0], str(total), str(used), str(free)]
                self._logger.log('taskFileSystem(): ' + '\n'.join(args), 3)
                answer = self._connection.sendAndReceive('filesystems', args)
                self._logger.log('answer: ' + answer, 3)
            else:
                self._logger.log('taskFileSystem(): excluded: ' + info[0], 3)
        return rc

    def cloudStatistic(self, path):
        '''Detects the number of files/dirs, the youngest and the largest files of the cloud.
        @param path: the full path of the directory containing the "data" directory
        @return an array [files, dirs, size, trashsize, the statistic info as text]
        '''
        info = self._processHelper.executeInput(['hmdu', path, 'data', 'files_trashbin'], False)
        parts = info.split(' ')
        files = int(parts[2])
        dirs = int(parts[4])
        # the sizes come formatted with '.' as thousands separator
        size = int(parts[5].replace('.', ''))
        # fix: replace('.') was missing the replacement argument (TypeError at runtime)
        trashSize = int(parts[7].replace('.', ''))
        info = 'Dateien: {} / {} Platzbedarf: {} / {} Verzeichnisse: {} / {}'.format(files, parts[4], size,
            trashSize, parts[10], parts[12])
        return [files, dirs, size, trashSize, info]

    def cloudTrash(self, path):
        '''Detects the used spaces of all trashes of the cloud.
        @param path: the full path of the directory containing the "data" directory
        @return [size, dirs, files]
        '''
        rc = None
        path += os.sep + 'data'
        if not os.path.isdir(path):
            self._logger.error('is not a directory: ' + path)
        else:
            # one trash bin per user: data/<user>/files_trashbin
            users = os.listdir(path)
            for user in users:
                directory = path + os.sep + user + os.sep + 'files_trashbin'
                if os.path.isdir(directory):
                    rc = base.FileHelper.directoryInfo(directory, None, None, -1, rc)
        if rc is None:
            rc = [0, 0, 0]
        else:
            rc = [rc._fileSizes, rc._dirCount, rc._fileCount]
        return rc

    def cloudLogs(self, path, maxCount=10):
        '''Returns the last lines from the logfile.
        @param path: the full path of the directory containing the "data" directory
        @param maxCount: the maximum number of returned lines
        @return the found logfiles, string encoded: '\n' => '\\n'', '\t' => '\\t''
        '''
        lines = ''
        path += os.sep + 'data' + os.sep + 'nextcloud.log'
        if not os.path.exists(path):
            self._logger.error('missing logfile: ' + path)
        else:
            lines = base.FileHelper.tail(path, maxCount, True)
            if lines is not None:
                # NOTE(review): joined with '.' although the docstring promises
                # escaped newlines — confirm the intended separator
                lines = base.StringUtils.escChars('.'.join(lines))
            else:
                lines = ''
        return lines

    def cloudUsers(self, path):
        '''Detects the usernames of the cloud.
        @param path: the full path of the directory containing the "data" directory
        @return a list of '|' separated users
        '''
        users = ''
        path += os.sep + 'data'
        if not os.path.isdir(path):
            self._logger.error('is not a directory: ' + path)
        else:
            # every user has a directory data/<user>; skip nextcloud's internals
            nodes = os.listdir(path)
            for node in nodes:
                if node != 'files_external' and os.path.isdir(path + os.sep + node) and not node.startswith('appdata_'):
                    users += '|' + node
        return users[1:]

    def run(self):
        '''Executes the configured tasks in an endless loop.
        @return: None: success otherwise: error message
        '''
        sliceSeconds = self._configuration.getInt('slice', 60)
        if sliceSeconds < 5:
            self._logger.error('wrong slice in configuration, changed to 5: ' + str(sliceSeconds))
            sliceSeconds = 5
        self._logger.log('starting endless task loop with slice {} and {} tasks'.format(sliceSeconds, len(self._tasks)), 1)
        while True:
            isConnected = False
            for task in self._tasks:
                if task.next():
                    # connect lazily: only when at least one task is due
                    if not isConnected:
                        rc = self.connect()
                        if rc is not None:
                            self._logger.error(rc)
                            break
                        isConnected = True
                    self.task(task._name, True)
            if isConnected:
                self.disconnect()
            self._logger.log('sleeping {} sec...'.format(sliceSeconds), 4)
            time.sleep(sliceSeconds)
            self._loopNo += 1

    def taskClouds(self):
        '''Finds out the data of the clouds and sends it.
        @return: None: success otherwise: error message
        '''
        rc = None
        baseDir = self._configuration.getString('cloud.basedir')
        excluded = self._configuration.getString('cloud.excluded', '').split(':')
        cloudDirs = os.listdir(baseDir)
        for node in cloudDirs:
            if node in excluded:
                pass
            elif node in cloudDirs:
                # NOTE(review): this condition is always true (node iterates cloudDirs),
                # so the trailing elif below can never run — confirm the intended test
                used = 0
                free = 0
                total = 0
                info = '-'
                # NOTE(review): looks like this should include node
                # (baseDir + os.sep + node) — confirm against cloudStatistic()
                pathData = baseDir + os.sep + 'data'
                [trashSize, dirs, files] = [-1, 0, 0]
                if self._loopNo % 1 == 0:
                    # NOTE(review): "% 1" is always 0 (statistic runs every loop) —
                    # confirm the intended modulus
                    [files, dirs, used, trashSize, info] = base.StringUtils.escChars(self.cloudStatistic(pathData))
                logs = ''
                if self._loopNo % 3 == 0:
                    logs = self.cloudLogs(pathData)[0:12000]
                users = ''
                if self._loopNo % 3 == 0:
                    users = self.cloudUsers(pathData)[0:4000]
                args = [self._hostName, pathData, str(total), str(used), str(free), str(trashSize), str(files), str(dirs), users, info, logs]
                self._logger.log('taskClouds(): ' + '\t'.join(base.StringUtils.limitItemLength(args, 40)), 3)
                answer = 'WAIT'
                while answer == 'WAIT':
                    # the server answers WAIT when too many connections are open
                    answer = self._connection.sendAndReceive('clouds', args)
                    self._logger.log('answer: ' + str(answer)[0:80], 3)
                    time.sleep(10)
            elif node not in excluded:
                self._logger.log('taskClouds(): excluded: {} [{}]'.format(info[0], ','.join(cloudDirs)[0:80]), 3)
        return rc

    def taskProcesses(self):
        '''Finds out the data about the processes and memory and sends it.
        Not yet implemented.
        @return: None: success otherwise: error message
        '''
        rc = None
        return rc

    def taskUsers(self):
        '''Finds out the data about the users and sends it.
        Not yet implemented.
        @return: None: success otherwise: error message
        '''
        rc = None
        return rc

    def taskStress(self):
        '''Finds out the stress data (io, net, load, memory) and sends it.
        @return: None: success otherwise: error message
        '''
        rc = None
        # '%s': seconds since the epoch (GNU strftime extension, works on Linux)
        now = datetime.datetime.now().strftime('%s')
        patternIo = self._configuration.getString('io.pattern', '^~$')
        patternNet = self._configuration.getString('net.pattern', '^~$')
        info = base.LinuxUtils.stress(patternIo, patternNet)
        if self._lastStress is None:
            # deltas need a predecessor: the first probe is only stored
            self._logger.log('first stress data ignored', 1)
            self._timeLastStress = now
            self._lastStress = info
        else:
            args = [self._configuration.getString('host.name'),
                str(int(now) - int(self._timeLastStress)),
                str(int(info[0]) - int(self._lastStress[0])), str(int(info[1]) - int(self._lastStress[1])),
                str(int(info[2]) - int(self._lastStress[2])), str(int(info[3]) - int(self._lastStress[3])),
                "{:.1f}".format(info[4]), str(info[5]), str(info[6])]
            self._lastStress = info
            # fix: the timestamp of the last probe was never updated, so the
            # reported time delta grew forever while the io/net deltas did not
            self._timeLastStress = now
            self._logger.log('taskStress() host time rdio wrio rdnet wrnet load memav swapav: ' + '\n'.join(args), 3)
            answer = self._connection.sendAndReceive('stress', args)
            self._logger.log('answer: ' + answer, 3)
        return rc
+
def usage():
    '''Returns the usage message of the webdashclient tool.
    '''
    text = """usage: WebDashClient [<global_opts>] <command> [<args>]
 Starts a TCP server to collect data from other hosts to store in the database
GLOBAL_OPTS
GLOBAL_MODES
<command>:
{}
 example
  Creates an example configuration
 run
  Executes the configured tasks
 task <task>
  Execute the <task>: filesystems users processes

Examples:
webdashclient -q install
webdashclient -v3 task filesystems
 """
    return text
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/wdcollect', 'run']
    '''
    serviceInfo = base.BaseTool.ServiceInfo('webdashclient', 'root', 'www-data',
        'A TCP client assembling data and sending it to the webdash server', 'WEBDASHCLIENT', None)
    appInfo = base.BaseTool.ApplicationInfo('webdashclient', 'appl/WebDashClient.py', usage, serviceInfo)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = WebDashClient(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd is None:
        # already handled by handleStandardCommands()
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd in ('run', 'daemon'):
        rc = tool.run()
    elif cmd == 'task':
        # default task when none is given on the command line
        taskNames = 'filesystems' if len(argv) < 1 else ':'.join(argv)
        rc = tool.connect()
        if rc is None:
            rc = tool.task(taskNames)
    else:
        tool.usage('unknown command: ' + cmd)
    tool.close()
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
if __name__ == '__main__':
    # started directly (not imported): run with the real command line arguments
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
import time
import sys
import os.path

import MySQLdb
#import base.BaseTool

sys.path.insert(0, '/usr/share/pyrshell')
#import base.Logger
#import base.MemoryLogger
import base.BaseTool
# fix: base.StringUtils is used in WebDashStorageHandler.fulfill() but was
# never imported (it only worked when another module imported it first)
import base.StringUtils
import net.TcpServer
import net.TcpTaskHandler
+
class WebDashStorageHandler (net.TcpTaskHandler.TcpTaskHandler):
    '''Stores the monitoring data received over TCP into a MySQL database.
    Each server thread gets its own database connection: self._db[threadId].
    '''

    def __init__(self, configuration, logger, verboseLevel = 0):
        '''Constructor.
        @param configuration: the configuration file
        @param logger: the logger
        @param verboseLevel: the verbosity of the logging
        '''
        net.TcpTaskHandler.TcpTaskHandler.__init__(self, logger)
        self._configuration = configuration
        # each thread has its own db connection: self._db[threadId] = connection()
        self._db = dict()
        self._verboseLevel = verboseLevel

    def connectDB(self):
        '''Opens a connection to the MySQL database named in the configuration.
        @return None: missing configuration entries. Otherwise: the open connection
        '''
        rc = None
        host = self._configuration.getString('mysql.host', 'localhost')
        db = self._configuration.getString('mysql.db')
        user = self._configuration.getString('mysql.user')
        code = self._configuration.getString('mysql.code')
        if db is None or user is None or code is None:
            self._logger.error("missing mysql.db/mysql.user etc. Cannot connect database.")
        else:
            rc = MySQLdb.connect(host=host, # your host
                user=user, # username
                passwd=code, # password
                db=db) # name of the database
        return rc

    def close(self):
        '''Frees the resources.
        '''
        net.TcpTaskHandler.TcpTaskHandler.close(self)

    def completeSql(self, sql, params):
        '''Returns the sql statement with expanded placeholders.
        Only used for logging: the parameters are truncated to 80 chars and
        spliced in as plain text (no quoting).
        @param sql: the sql statement with placeholders '%s' for the parameters
        @param params: an array with the (positional) parameters
        @return the sql with expanded placeholders
        '''
        for item in params:
            sql = sql.replace('%s', item[0:80], 1)
        sql = sql.replace('\n', ' ')
        return sql.replace('  ', ' ')

    def example(self):
        '''Creates an example configuration.
        '''
        text = '''# webdashserver example configuration
log.file=/var/log/local/webdashserver.log
service.port=58201
service.host=127.0.0.1
mysql.user=webdash
mysql.code=Secret.Phrase
mysql.db=appwebdash
# in seconds. all older records (of table stresses) will be deleted
max.age.stresses=86400
# Email data for error report:
# Receiver of the error messages: may be a blank separated list of email addresses
send.always=False
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.neutral@gmx.de
smtp.user=hm.neutral@gmx.de
smtp.code=sEcReT
smtp.tls=True
'''
        self.storeExample(text)

    def fulfill(self, args, serverThread):
        '''Fulfills a task like "filesystems".
        @param args: the command arguments. args[0] contains the command
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        @return: True: request is handled False: request is unknown, next chain member should be asked
        '''
        rc = True
        threadId = serverThread._id
        if args[0] == 'threadstart':
            self._db[threadId] = self.connectDB()
        elif args[0] == 'threadend':
            self._db[threadId].close()
            del self._db[threadId]
            self._logger.log('=== thread {}: db closed. open connections ({}): {}'.format(threadId, len(self._db), base.StringUtils.join(' ', self._db.keys())), 2)
        else:
            # close connections of threads which probably died without a
            # 'threadend' (their id is far below the current one)
            keys = list(self._db.keys())
            for key in keys:
                if int(key) < int(threadId) - 10:
                    self._db[key].close()
                    del self._db[key]
                    self._logger.log('connection lately closed: {}'.format(key), 1)
            if len(self._db) >= 3:
                # tell the client to retry later
                self._logger.log("too many connection, blocking...")
                serverThread.send('WAIT')
                time.sleep(10)
            else:
                db = self._db[threadId]
                if args[0] == 'filesystems':
                    self.storeFilesystemData(db, args[1], args[2], args[3], args[4], args[5])
                    serverThread.send('OK')
                elif args[0] == 'clouds':
                    self.storeCloudData(db, args[1:])
                    serverThread.send('OK')
                elif args[0] == 'stress':
                    # server, deltaTime, ioRead, ioWrite, netRead, netWrite, load, memoryAvailable, swapAvailable
                    self.storeStressData(db, args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9])
                    serverThread.send('OK')
                else:
                    rc = False
        return rc

    def storeCloudData(self, db, args):
        """Stores the data of a cloud into the database.
        @param db: database handle
        @param args: the data delivered from the client
        """
        # args = [self._hostName, path, str(total), str(used), str(free), str(trashSize), str(files), str(dirs), users, info, logs]
        if len(args) == 9:
            [server, name, total, used, free, trashSize, trashFiles, trashDirs, users] = args
            logs = ''
            info = ''
        elif len(args) == 10:
            # fix: was a second "if": with 9 arguments the else branch crashed
            # unpacking 11 values from 9 arguments
            [server, name, total, used, free, trashSize, trashFiles, trashDirs, users, info] = args
            logs = ''
        else:
            [server, name, total, used, free, trashSize, trashFiles, trashDirs, users, info, logs] = args
        info = base.StringUtils.unescChars(info)
        name = os.path.basename(name)
        if name == '':
            name = 'root'
        # the '$' is a placeholder for the optional columns built below
        sql = '''UPDATE clouds
SET
 cloud_total=%s,
 cloud_used=%s,
 cloud_free=%s,$
 changed=NOW()
WHERE
 cloud_name=%s
 and cloud_host = (SELECT host_id FROM hosts WHERE host_name=%s)'''
        with db.cursor() as cursor:
            params = [total, used, free]
            sql2 = ''
            if int(trashSize) >= 0 and int(trashDirs) > 0:
                sql2 = 'cloud_trash=%s,cloud_trashfiles=%s,cloud_trashdirs=%s,'
                # fix: the parameters must be appended only when their
                # placeholders are added, otherwise params and '%s' misalign
                params.append(trashSize)
                params.append(trashFiles)
                params.append(trashDirs)
            if len(logs) > 0:
                sql2 += 'cloud_log=%s,'
                params.append(logs)
            if len(users) > 0:
                sql2 += 'cloud_users=%s,'
                params.append(users)
            if len(info) > 0:
                sql2 += 'cloud_info=%s,'
                params.append(info)
            sql = sql.replace('$', sql2)
            params.append(name)
            params.append(server)
            count = cursor.execute(sql, params)
            if count == 0:
                # no row updated: the cloud is not yet known, insert it
                sql = '''INSERT INTO clouds
(cloud_name, cloud_total, cloud_used, cloud_free, cloud_trash,cloud_trashfiles,cloud_trashdirs,cloud_log,cloud_users,cloud_info,changed, changedby, created, createdby, cloud_host)
VALUES
(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), 'crawler', NOW(), 'crawler', (SELECT host_id FROM hosts where host_name=%s));
'''
                params = [name, total, used, free, trashSize, trashFiles, trashDirs, logs, users, info, server]
                rc = cursor.execute(sql, params)
                if rc is None:
                    self._logger.log('storecloudData() [{}] {}'.format(count, self.completeSql(sql, params)), 3)
                else:
                    self._logger.log('storecloudData() {}'.format(count, self.completeSql(sql, params)), 3)
            else:
                self._logger.log('storecloudData(): {} records changed: {}'.format(self.completeSql(sql, params), count), 3)
        db.commit()

    def storeFilesystemData(self, db, server, fsName, total, used, free):
        """Stores the data of a filesystem into the database.
        @param db: database handle
        @param server: the sending server
        @param fsName: the name of the filesystem
        @param total: the total space of the filesystem in bytes
        @param used: the used space of the filesystem in bytes
        @param free: the available space of the filesystem
        """
        sql = '''UPDATE filesystems
SET
 filesystem_total=%s,
 filesystem_used=%s,
 filesystem_free=%s,
 changed=NOW()
WHERE
 filesystem_mount=%s
 and filesystem_host = (SELECT host_id FROM hosts WHERE host_name=%s)'''
        with db.cursor() as cursor:
            params = [total, used, free, fsName, server]
            count = cursor.execute(sql, params)
            if count == 0:
                # no row updated: the filesystem is not yet known, insert it
                sql = '''INSERT INTO filesystems
(filesystem_name, filesystem_total, filesystem_used, filesystem_free, changed, changedby, created, createdby, filesystem_host)
VALUES
(%s, %s, %s, %s, NOW(), 'crawler', NOW(), 'crawler', (SELECT host_id FROM hosts where host_name=%s));
'''
                if fsName == '/':
                    name = 'root'
                else:
                    name = fsName.replace('/', '_')[1:]
                params = [name, total, used, free, server]
                rc = cursor.execute(sql, params)
                if rc is None:
                    self._logger.log('storeFilesystemData() [{}] {}'.format(count, self.completeSql(sql, params)), 3)
                else:
                    self._logger.log('storeFilesystemData() {}'.format(count, self.completeSql(sql, params)), 3)
            else:
                self._logger.log('storeFilesystemData(): {} records changed: {}'.format(self.completeSql(sql, params), count), 3)
        db.commit()

    def storeStressData(self, db, server, deltaTime, ioRead, ioWrite, netRead, netWrite, load, memoryAvailable, swapAvailable):
        """Stores one stress sample into the database.
        @param db: database handle
        @param server: the sending server
        @param deltaTime: the byte data are sampled since this time in (msec)
        @param ioRead: the read amount in bytes (disk io)
        @param ioWrite: the write amount in bytes (disk io)
        @param netRead: the read amount in bytes (network io)
        @param netWrite: the write amount in bytes (network io)
        @param load: the 1 minute load
        @param memoryAvailable: the available ram memory
        @param swapAvailable: the available swap memory
        """
        with db.cursor() as cursor:
            maxAge = self._configuration.getInt('max.age.stresses', 0)
            if maxAge > 0:
                # keep the table small: drop samples older than maxAge seconds
                sql = 'delete from stresses where changed < timestampadd(second, -{}, NOW());'.format(maxAge)
                cursor.execute(sql)
            sql = '''INSERT INTO stresses
(stress_time, stress_readio, stress_writeio, stress_readnet, stress_writenet, stress_load, stress_memoryavailable, stress_swapavailable, created, createdby, stress_host)
VALUES
(%s, %s, %s, %s, %s, %s, %s, %s, NOW(), 'webdash', (SELECT host_id FROM hosts where host_name=%s));
'''
            params = (deltaTime,
                ioRead,
                ioWrite,
                netRead,
                netWrite,
                load,
                memoryAvailable,
                swapAvailable,
                server
                )
            recId = cursor.execute(sql, params)
            if recId is not None:
                self._logger.log('storeFilesystemData(): {} {}'.format(recId, self.completeSql(sql, params)), 3)
        db.commit()
+
class WebDashServer (base.BaseTool.BaseTool):
    '''The command line tool starting the webdash storage service.'''

    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'webdashserver.conf', True)

    def service(self):
        """Starts the TCP server and waits for incoming tasks (blocks).
        """
        listenPort = self._configuration.getInt('service.port', 58201)
        listenHost = self._configuration.getString('service.host', '127.0.0.1')
        storageHandler = WebDashStorageHandler(self._configuration, self._logger, self._verboseLevel)
        storageHandler.setVerboseLevel(self._verboseLevel)
        tcpServer = net.TcpServer.TcpServer(listenPort, self._logger, storageHandler, listenHost, self._verboseLevel)
        self._logger.log("listening on {}:{}...".format(listenHost, listenPort), 1)
        tcpServer.listen()
        storageHandler.close()
+
def usage():
    '''Returns the usage message of the webdashserver tool.
    '''
    text = """usage: webdashserver [<global_opts>] <command>
 Starts a TCP server to collect data from other hosts to store in the database
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 service or daemon
  Starts the TCP server
Examples:
webdashserver -q install
webdashserver -v3 service
"""
    return text
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/wdserver', 'service']
    '''
    serviceInfo = base.BaseTool.ServiceInfo('webdashservice', 'webdash', 'webdash',
        'A TCP server storing data into a database', 'WEBDASH', None)
    appInfo = base.BaseTool.ApplicationInfo('webdashserver', 'appl/WebDashServer.py', usage, serviceInfo)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = WebDashServer(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd is None or cmd == 'reload':
        # None: already handled; 'reload' is accepted but currently a no-op
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd in ('service', 'daemon'):
        rc = tool.service()
    else:
        tool.usage('unknown command: ' + cmd)
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
if __name__ == '__main__':
    # started directly (not imported): run with the real command line arguments
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Webserver: Scalable Vector Graphics tool
+
+@author: hm
+'''
+import os.path
+import sys
+import http.server
+import socketserver
+
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+import base.ProcessHelper
+
class RequestHandler (http.server.SimpleHTTPRequestHandler):
    '''HTTP request handler which routes the request logging to the
    tool's logger (attached to the server object).
    '''

    def do_GET(self):
        '''Handles a GET request by delegating to the base implementation.
        '''
        http.server.SimpleHTTPRequestHandler.do_GET(self)

    def log_message(self, format, *restArgs):
        '''Customizes logging of the requests.
        @param format: the format of the log message (ignored: the three
            standard arguments are formatted directly)
        '''
        requestLine, status, size = restArgs[0], restArgs[1], restArgs[2]
        self.server._logger.log('"{}" {} {}'.format(requestLine, status, size), 2)
+
class Webserver (base.BaseTool.BaseTool):
    '''A simple HTTP file server used for transporting infos and files.
    (fix: the docstring was a copy-paste leftover from the wiki translator)
    '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'pywebserver.conf')
        self._processTool = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger)
        # True: delete user/db without confirmation: use only in unittests!
        self._forceDeleting = False

    def example(self):
        '''Creates a example configuration.
        '''
        example = '''# Webserver example configuration
log.file=/var/log/local/pywebserver.log
port=20180
document.root=/var/www/pywebserver
'''
        self.storeExample(example)

    def run(self):
        '''Runs the webserver service (blocks until interrupted).
        '''
        port = self._configuration.getInt('port', 20180)
        root = self._configuration.getString('document.root', '/var/www/pywebserver')
        if not os.path.isdir(root):
            self.usage('missing document root: ' + root)
        os.chdir(root)
        self._logger.log('document root: ' + os.path.realpath(os.curdir), 1)
        self._logger.log('listening to port {}...'.format(port), 1)
        httpd = socketserver.TCPServer(("", port), RequestHandler)
        # make logger/verbosity reachable from the request handler (via self.server)
        httpd._verboseLevel = self._verboseLevel
        httpd._logger = self._logger
        try:
            httpd.serve_forever()
        finally:
            # fix: server_close() was unreachable when serve_forever() was
            # interrupted (e.g. KeyboardInterrupt): the socket leaked
            httpd.server_close()
+
def usage():
    '''Returns the usage message of the webserver tool.
    '''
    text = """webserver [<global-opts>] <command>
 A simple webserver used for transporting infos and files.
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 daemon
  start the service
example:
 webserver -v3 daemon
"""
    return text
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/pywebserver', 'run']
    '''
    serviceInfo = base.BaseTool.ServiceInfo('pywebserver', 'pywebserver', 'pywebserver',
        'A HTTP webserver', 'WEBSERVER', None)
    appInfo = base.BaseTool.ApplicationInfo('pywebserver', 'appl/Webserver.py', usage, serviceInfo)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = Webserver(options)
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd is None:
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'daemon' or cmd == 'run':
        # fix: the result was discarded although rc is checked below
        # (consistent with the sibling tools' main routines)
        rc = tool.run()
    else:
        tool.usage("unknown command: " + cmd)
    if rc is not None:
        tool.usage(rc)
    tool._finish()
+
if __name__ == '__main__':
    # started directly (not imported): run with the real command line arguments
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+wikitool: Scalable Vector Graphics tool
+
+@author: hm
+'''
+import os.path
+import sys
+import re
+import time
+import math
+
+sys.path.insert(0, '/usr/share/pyrshell')
+import base.BaseTool
+
class WikiStatus:
    '''Mutable translator state shared between the lines of one document.'''

    def __init__(self):
        '''Constructor: all regular expressions are compiled lazily by the caller.'''
        # the closing tag of an open code section, or None outside of code
        self._openScript = None
        # True: the translator is currently inside a table
        self._inTable = False
        self._regTable = None
        self._regHeadline = None
        self._regUnorderedList = None
        self._regUnorderedSubList = None
+
+class WikiTool (base.BaseTool.BaseTool):
+ '''Translates from one wiki syntax into another.
+ '''
def __init__(self, globalOptions):
    '''Constructor.
    @param globalOptions: the basic configurations
    '''
    # delegate to BaseTool which reads 'wikitool.conf' and sets up the logger
    base.BaseTool.BaseTool.__init__(self, globalOptions, 'wikitool.conf')
+
def example(self):
    '''Writes an example configuration file.
    '''
    content = '''# wikitool example configuration
log.file=/var/log/local/wikitool.log
'''
    self.storeExample(content)
+
def gitlabToMediaWiki(self, argv):
    '''Translates from gitlab markdown into media wiki syntax.
    @param argv: arguments: <source-file> <target-file or '-'> [...]
    @return rc: None or error message
    '''
    rc = None
    source = argv[0]
    target = argv[1]
    argv = argv[2:]
    status = WikiStatus()
    if not os.path.exists(source):
        rc = "input file {} does not exist".format(source)
    else:
        status._regHeadline = re.compile(r'^(#+)\s*(.*)')
        status._regUnorderedList = re.compile(r'^ ?[-+*]\s*(.*)')
        status._regUnorderedSubList = re.compile(r'^ ?[-+*]?\s*(.*)')
        status._regTable = re.compile(r'^\|')
        status._openScript = None
        status._inTable = False
        out = ''
        with open(source, 'rb') as fpInput:
            self._logger.log('open success: ' + source, 3)
            try:
                for rawLine in fpInput:
                    line = rawLine.decode('UTF-8')
                    out = self.handleLine(line, status, out)
            except Exception as exc:
                self._logger.error('gitlabToMediaWiki: ' + str(exc))
        if status._inTable:
            # remove last "|-\n":
            out = out[0:-3]
            out += "|}\n"
        if target == '-':
            print(out)
        else:
            self._logger.log('writing: {} bytes'.format(len(out)), 3)
            # fix: fpOut was unbound when open() raised, so the trailing
            # "if fpOut != None" crashed with NameError; the file handle is
            # now closed in a finally block
            fpOut = None
            try:
                fpOut = open(target, 'wb')
                fpOut.write(out.encode('UTF-8'))
            except OSError as exc3:
                self._logger.error('Exception ' + str(exc3))
            except Exception as exc4:
                self._logger.error('Exception ' + str(exc4))
            finally:
                if fpOut is not None:
                    fpOut.close()
            if fpOut is not None:
                os.chmod(target, 0o666)
    return rc
+
+ def handleLine(self, line, status, out):
+ '''Translate a line.
+ @param line: the line to translate
+ @param status: IN/OUT: the translator state
+ @param out: IN: the generated text
+ @result: the translated line
+ '''
+ ready = False
+ if status._openScript != None:
+ if line.startswith('```'):
+ out += status._openScript + "\n"
+ status._openScript = None
+ ready = True
+ else:
+ out += line
+ ready = True
+ elif line.startswith('```'):
+ language = line.strip()[3:]
+ if language == '':
+ out += '<pre>'
+ status._openScript = '</pre>'
+ else:
+ status._openScript = '</syntaxhighlight>'
+ out += '<syntaxhighlight lang="{}" "line=\'line\'>\n'.format(language)
+ ready = True
+ if not ready:
+ matcher = status._regTable.match(line)
+ if matcher != None:
+ cols = line.strip().split('|')
+ cols = cols[1:-1]
+ if not status._inTable:
+ out += "{|\n"
+ for col in cols:
+ out += '! ' + self.lineMarkupGitToMedia(col.strip()) + "\n"
+ status._inTable = True
+ else:
+ for col in cols:
+ out += '| ' + self.lineMarkupGitToMedia(col.strip()) + "\n"
+ out += '|-' + "\n"
+ ready = True
+ if not ready:
+ if status._inTable:
+ # remove last "|-\n":
+ out = out[0:-3]
+ out += "|}\n"
+ status._inTable = False
+ matcher = status._regHeadline.match(line)
+ if matcher != None:
+ countText = matcher.group(1)
+ prefix = '=' * len(countText)
+ out += '{} {} {}\n'.format(prefix, self.lineMarkupGitToMedia(matcher.group(2)), prefix)
+ ready = True
+ if not ready:
+ matcher = status._regUnorderedList.match(line)
+ if matcher != None:
+ out += '* {}\n'.format(self.lineMarkupGitToMedia(matcher.group(1)))
+ ready = True
+ if not ready:
+ matcher = status._regUnorderedSubList.match(line)
+ if matcher != None:
+ out += '** {}\n'.format(self.lineMarkupGitToMedia(matcher.group(1)))
+ else:
+ out += self.lineMarkupGitToMedia(line)
+ return out
+
+ def lineMarkupGitToMedia(self, line):
+ '''Transforms the markup inside the line (fat attributes ...)
+ @param line: line to transform
+ @return: the transformed line
+ '''
+ line = re.sub(r'\b__(.+?)__\b', r"'''\1'''", line)
+ line = re.sub(r'\*\*(.+?)\*\*', r"'''\1'''", line)
+ line = re.sub(r'\b([*_])(.+?)\1\b', r"''\2''", line)
+ line = re.sub(r'\[(.*?)\]\((.*?)\)', r'[[\2|\1]]', line)
+ return line
+
def usage():
    '''Returns an info about usage.
    @return: the usage message of the wikitool application
    '''
    # fix: removed a leftover '{}' format placeholder line and a stray trailing quote
    return """wikitool [<opts>] <command>
 translate from one wiki dialect to another
GLOBAL_OPTS
GLOBAL_MODES
<command>:
 gitlab-to-mediawiki <input> <output>
example:
 wikitool -v2 gitlab-to-mediawiki /tmp/gitlab.txt /tmp/mediawiki.txt
"""
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/wikitool', 'run']
    '''
    appInfo = base.BaseTool.ApplicationInfo('wikitool', 'appl/WikiTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = WikiTool(options)
    # fix: removed "tool._verboseLevel = 4": a debugging leftover which ignored
    # the verbosity given by the -v<level> option
    rc = None
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd == None:
        # a standard command (example/install/uninstall) has already been executed
        pass
    elif cmd == '':
        tool.usage('missing subcommand')
    elif cmd == 'gitlab-to-mediawiki':
        if len(argv) < 2:
            tool.usage('too few arguments')
        else:
            rc = tool.gitlabToMediaWiki(argv)
    else:
        tool.usage('unknown command: ' + cmd)
    if rc != None:
        tool.usage(rc)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+
+import sys
+import os.path
+
+sys.path.insert(0,'/usr/share/pyrshell')
+
+import base.Zipper
+import base.BaseTool
+
+
class ZipTool (base.BaseTool.BaseTool):
    '''Command line tool for creating, listing and extracting zip archives.
    '''
    def __init__(self, globalOptions):
        '''Constructor.
        @param globalOptions: the basic configurations
        '''
        base.BaseTool.BaseTool.__init__(self, globalOptions, 'ziptool.conf')

    def readArgFile(self, name):
        '''Reads a file containing the arguments, one argument per line.
        Empty lines and lines starting with '#' are ignored.
        @param name: the file's name
        @return: an array of arguments
        '''
        argv = []
        with open(name) as fp:
            for line in fp:
                line = line.strip()
                if not line.startswith('#') and line != '':
                    argv.append(line)
        return argv

    def zip(self, cmd, argv):
        '''Executes one archive subcommand.
        @param cmd: 'create', 'extract' or 'info'
        @param argv: the archive name followed by command specific arguments/options
        '''
        (argv, options) = self.splitArgsAndOpts(argv)
        argv2 = None
        for opt in options:
            if opt.startswith('-a') or opt.startswith('--argv=') or opt.startswith('--argument-file='):
                if len(options) > 1:
                    print('arguments behind {:s} will be ignored: {:s}'.format(opt, ' '.join(options[1:])))
                if opt.startswith('-a'):
                    argFile = opt[2:]
                else:
                    argFile = opt[opt.find('=') + 1:]
                if not os.path.exists(argFile):
                    # fix: corrected the typo "optument" in the error message
                    self.usage('argument file does not exist: ' + argFile)
                else:
                    argv2 = self.readArgFile(argFile)
                break
        if argv2 != None:
            argv += argv2
        if len(argv) < 2:
            self.usage('missing arguments')
        archive = argv[0]
        argv = argv[1:]
        rc = None
        # open mode per subcommand; the Zipper method has the same name as the command,
        # which allows a single construction + getattr() dispatch instead of three
        # almost identical branches
        modes = {'create': 'w', 'extract': 'r', 'info': 'r'}
        if cmd not in modes:
            rc = 'unknown subcommand: ' + cmd
        else:
            tool = base.Zipper.Zipper(archive, modes[cmd], self._logger)
            if tool._zip != None:
                rc = getattr(tool, cmd)(argv)
        if rc != None:
            self.usage(rc)

    def example(self):
        '''Writes an example configuration file.
        @return: the example configuration text
        '''
        example = '''# ziptool example configuration
log.file=/var/log/local/ziptool.log
'''
        self.storeExample(example)
        return example
+
def usage():
    '''Returns an info about usage.
    @return: the usage message of the ziptool application
    '''
    # fixes: closed the unbalanced quotes around "*.mp[34]" (twice), repaired the
    # bracket in the create syntax line and corrected the second example: the
    # implemented option is --argument-file=, not --args=
    return """usage: ziptool [<global_opts>] <command> [<opts>] [<params>]
 Zip archive management
<command>:
 create <archive> [<opts>] <input1> [[<opts>] <input2> ...]
 Create a zip archive. Note: <opts> are relevant for all following <inputX>
 <inputX>: a file or a directory
 <opt>:
 --shortest-path
 The path name of the stored files is maximal shorted:
 <input> may be /abc/def. The file /abc/def/x/a.txt has a stored name x/a.txt
 --store-node-only
 The path of the stored files is shorted to the last node of the base:
 <input> may be /abc/def. The file /abc/def/x/a.txt has a stored name def/x/a.txt
 --ignored-files=<reg_expr>
 The matching files will not be stored
 --ignored-dirs=<reg_expr>
 The matching files will not be stored
 --already-compressed-dirs=<reg_expr>
 The matching directories will not be compressed
 info <archive> <opts> [<pattern>]
 <pattern>:
 Only files matching the pattern are displayed.
 If no pattern option (-r or -w) is given, the pattern is interpreted as substring of the full name
 <opt>:
 -1 or --name-only
 Displays the full filename only
 -r or --reg-expression
 The pattern is interpreted as regular expression
 -w or --wildcard(s)
 The pattern is treated as a shell pattern:
 '*': any string '?': one character '['<chars_or_ranges>']', e.g. "*.mp[34]"
 -s or --substring
 The pattern is treated as a substring. This is the default
 -p or --path
 The pattern describes the path
 -n or --node
 the pattern describes the node
 -f or --full
 The pattern describes the full name (path and node)
 extract <archive> <opts> [<pattern1> [ <opts> <pattern2> ... ]]
 <opt>:
 -o or --overwrite
 Existing files will be overwritten. Default
 -u or --update
 Existing files will be overwritten if the stored file is younger
 -f or --freshen
 Replace existing files, do not create ones
 -t or --not-overwrite
 Ignore existing files
 -r or --reg-expression
 The pattern is interpreted as regular expression
 -w or --wildcard(s)
 The pattern is treated as a shell pattern:
 '*': any string '?': one character '['<chars_or_ranges>']', e.g. "*.mp[34]"
 -s or --substring
 The pattern is treated as a substring. This is the default
 -p or --path
 The pattern describes the path
 -n or --node
 the pattern describes the node
 -f or --full
 The pattern describes the full name (path and node)
 backup <archive> [<opts>] <directory>
 <directory>
 the directory to store
 <opt>:

 restore <archive> [opts] [<target-directory>]
 <opt>:
 --delete
 Each file which is in target directory but not in archive will be deleted
Example:
ziptool create --store-node-only /tmp/test.zip /home/adam /home/berta
zt --argument-file=/opt/backup/backup.args.txt
"""
+
def main(argv):
    '''The main routine.
    @param argv: the program arguments, e.g. ['/usr/local/bin/ziptool', 'create', '/tmp/x.zip', '/etc']
    '''
    # fix: corrected the docstring (it was copied from texttool) and removed the
    # unused variable rc
    appInfo = base.BaseTool.ApplicationInfo('ziptool', 'appl/ZipTool.py', usage)
    (options, argv) = base.BaseTool.getGlobalOptions(argv, appInfo)
    tool = ZipTool(options)
    (cmd, argv) = tool.handleStandardCommands(argv)
    if cmd != None:
        tool.zip(cmd, argv)
    tool._finish()

if __name__ == '__main__':
    main(sys.argv)
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
class BaseLogger:
    '''Base class of the loggers.
    The derived class must implement the method log(message).
    '''
    def __init__(self, verboseLevel):
        '''Constructor.
        @param verboseLevel: logging is done only if minLevel < verboseLevel. minLevel is a parameter of log()
        '''
        self._verboseLevel = verboseLevel
        self._logDebug = True
        self._logInfo = True
        self._errors = 0
        # at most this count of error messages is stored in _firstErrors
        self._maxErrors = 20
        self._firstErrors = []
        # None or a string/compiled regexp: matching error messages are suppressed
        self._errorFilter = None
        # None or another logger which receives every message too
        self._mirrorLogger = None
        self._inUse = False

    def debug(self, message):
        '''Logs a debugging message.
        @param message: the message to log
        @return: True
        '''
        if self._mirrorLogger is not None:
            self._mirrorLogger.debug(message)
        if self._logDebug:
            self._inUse = True
            self.log(message)
            self._inUse = False
        return True

    def error(self, message):
        '''Logs an error message unless it is suppressed by the error filter.
        @param message: the error message to log
        @return: False (allows "return self.error(...)" in boolean methods)
        '''
        if self._mirrorLogger is not None:
            self._mirrorLogger.error(message)
        filtered = self._errorFilter is not None
        if filtered:
            if isinstance(self._errorFilter, str):
                filtered = message.find(self._errorFilter) >= 0
            else:
                filtered = self._errorFilter.search(message) is not None
        if not filtered:
            self._inUse = True
            self.log('+++ ' + message)
            self._errors += 1
            if self._errors < self._maxErrors:
                self._firstErrors.append(message)
            self._inUse = False
        return False

    def info(self, message):
        '''Logs an info message.
        @param message: the message to log
        @return: True
        '''
        if self._mirrorLogger is not None:
            self._mirrorLogger.info(message)
        if self._logInfo:
            self._inUse = True
            self.log(message)
            self._inUse = False
        return True

    def setMirror(self, logger):
        '''Sets a "mirror" logger: all messages are logged to the mirror too.
        An already existing mirror is chained behind the new one.
        @param logger: the mirror logger
        '''
        if self._mirrorLogger is not None:
            # fix: was logger.setLogger(...) which does not exist in this class
            # hierarchy and raised AttributeError when a mirror was already set
            logger.setMirror(self._mirrorLogger)
        self._mirrorLogger = logger

    def setErrorFilter(self, excluded, mirrorsToo = True):
        '''Sets the error filter: if the pattern matches the error is ignored (not logged).
        @param excluded: string: a substring of the ignored error
            re.RegExpression: a compiled regular expression of the ignored errors
        @param mirrorsToo: True: the filter is used for the mirror loggers too
        '''
        self._errorFilter = excluded
        if mirrorsToo and self._mirrorLogger is not None:
            self._mirrorLogger.setErrorFilter(excluded)

    def transferErrors(self, logger):
        '''Transfers the errors from another logger into this instance.
        @param logger: the source of the errors to transfer
        '''
        self._errors += logger._errors
        self._firstErrors += logger._firstErrors
+
# library module: nothing to do when executed directly
if __name__ == '__main__':
    pass
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import tempfile
+import re
+import sys
+import os.path
+import subprocess
+import fnmatch
+import datetime
+import time
+import platform
+import shutil
+import grp
+import pwd
+import posix
+
+sys.path.insert(0, '/usr/share/pyrshell')
+
+import base.StringUtils
+import base.JavaConfig
+import base.Logger
+import base.MemoryLogger
+import base.ProcessHelper
+import base.FileHelper
+
# Module-level registry: the most recently created tool and a global result value.
baseToolLatestTool = None
baseToolResult = None

def latestTool():
    '''Returns the most recently constructed BaseTool instance (or None).'''
    # fix: removed the redundant "global" statement (only needed for assignments)
    return baseToolLatestTool

def result():
    '''Returns the globally stored result value (or None).'''
    return baseToolResult

def setLatestTool(tool):
    '''Registers the given tool as the most recently created tool instance.
    @param tool: the BaseTool instance to register
    '''
    global baseToolLatestTool
    baseToolLatestTool = tool

def setResult(result):
    '''Stores a global result value.
    @param result: the value to store
    '''
    global baseToolResult
    baseToolResult = result

# state shared with globOnError() for error reporting of shutil services:
glob_logger = None
glob_errors = 0
# True while unit tests run (changes the exit behavior)
baseToolUnitTestActive = False

def unitTestIsActive():
    '''Returns whether the unit test mode is active.'''
    return baseToolUnitTestActive
+
class ServiceInfo:
    '''Value object describing the default values of a systemd service.
    '''
    def __init__(self, starter, user, group, description, prefixEnvVar, additionalEnvVariables):
        '''Stores all data needed for generating the systemd service unit.
        @param starter: name of the script which is started from the service
        @param user: None or: the process is started as this user
        @param group: None or: the process is started as this group
        @param description: this info is shown by the "systemctl status" command
        @param prefixEnvVar: prefix of the environment variables, e.g. 'WEBDASH'
        @param additionalEnvVariables: None or the definition of non standard
            environment variables (one "NAME=VALUE" per line)
        '''
        self._description = description
        self._starter = starter
        self._group = group
        self._user = user
        self._additionalEnvVariables = additionalEnvVariables
        self._prefixEnvVar = prefixEnvVar
+
class ApplicationInfo:
    '''Stores the default values of an application.
    '''
    def __init__(self, application, module, usage, serviceInfo = None):
        '''Constructor.
        @param application: name of the application
        @param module: name of the module relative to base, e.g. 'appl/FtpTool.py'
        @param usage: a function which shows the usage message and exits the process
        @param serviceInfo: None or the info about the systemd service
        '''
        # fix: the former code assigned all four members here and then assigned
        # them a second time in anotherInit(); delegating once is enough
        self.anotherInit(application, module, usage, serviceInfo)

    def anotherInit(self, application, module, usage, serviceInfo = None):
        '''Basic initialization (also usable for re-initialization).
        @param application: name of the application
        @param module: name of the module relative to base, e.g. 'appl/FtpTool.py'
        @param usage: a function which shows the usage message and exits the process
        @param serviceInfo: None or the info about the systemd service
        '''
        self._serviceInfo = serviceInfo
        self._usage = usage
        self._applicationName = application
        self._module = module
+
def globOnError(function, path, exceptionInfo):
    '''Error callback for shutil services like rmtree(): counts and logs errors.
    @param function: the "owner" of the exception, platform dependent
    @param path: the full filename raising the error
    @param exceptionInfo: @see sys.exc_info()
    '''
    # fix: without the "global" statement the increment raised UnboundLocalError
    global glob_errors
    glob_errors += 1
    if glob_logger != None:
        (aType, value, traceback) = exceptionInfo
        # fix: '{s}' is not a valid positional placeholder, '{}' must be used
        glob_logger.error('cannot remove {}: [{} {}]'.format(path, str(aType), value))
+
class GlobalOptions:
    '''Container for the options shared by all pyrshell applications.'''
    def __init__(self, appInfo, verboseLevel=None):
        '''Constructor.
        @param appInfo: an ApplicationInfo instance describing the application
        @param verboseLevel: None (means 1) or the verbosity level
        '''
        self._verboseLevel = verboseLevel if verboseLevel != None else 1
        # in unit test mode the program exits with code 0 even on errors
        self._exitWith0 = unitTestIsActive()
        # True: the runtime is reported in BaseTool._finish()
        self._runtime = False
        # the log files given by program options; the first one is the primary logger
        self._logFiles = []
        self._configDir = '/etc/pyrshell'
        # None: the tool specific default configuration file will be used
        self._configFile = None
        # None or a string/regexp: matching error messages are suppressed
        self._errorFilter = None
        self._count = None
        self._appInfo = appInfo
        # replacement directories used by unit tests only:
        self._testSourceDir = None
        self._testTargetDir = None
        # the hostname, possibly overridden by a <PREFIX>_HOST environment variable
        self._host = None
+
+class BaseTool:
+ '''Base class of some other tools.
+ '''
+
+ def __init__(self, globalOptions, defaultConfigFile, useThreads=False, additionalConfigDir=None):
+ '''Constructor.
+ @param globalOptions: an instance of GlobalOptions
+ @param defaultConfigFile: name of the configuration file if not set in globalOptions
+ @param useThreads: True: a threadsafe logger will be used
+ @param additionalConfigDir: None or a directory containing many config files. Full path or a node (in /etc/pyrshell)
+ '''
+ setLatestTool(self)
+ self._start = time.process_time()
+ self._startReal = time.time()
+ self._userId = posix.getuid()
+ self._isRoot = self._userId == 0
+ self._globalOptions = globalOptions
+ self._logger = None
+ if globalOptions._appInfo._serviceInfo != None:
+ self._initFromServiceInfo(globalOptions._appInfo._serviceInfo)
+ self._verboseLevel = globalOptions._verboseLevel
+ self.installLoggers(globalOptions)
+ self._exitWith0 = globalOptions._exitWith0
+ self._configDir = globalOptions._configDir
+ version = base.StringUtils.grepInFile('/etc/lsb-release', r'DISTRIB_RELEASE=([0-9.]+)', None, 1)
+ self._ubuntuVersion = None if len(version) == 0 else version[0]
+ version = base.StringUtils.fromFile('/etc/debian_version')
+ self._debianVersion = None if len(version) == None else version.split('/')[0]
+ if self._debianVersion != None:
+ if self._debianVersion.startswith('9.'):
+ self._debianVersion = 'stretch/stable'
+ elif self._debianVersion.startswith('8.'):
+ self._debianVersion = 'jessie/stable'
+ elif self._debianVersion.startswith('7.'):
+ self._debianVersion = 'whezzy/stable'
+ elif self._debianVersion.startswith('10.'):
+ self._debianVersion = 'buster/stable'
+ if additionalConfigDir != None and additionalConfigDir.find(os.sep) < 0:
+ self._additionalConfigDir = self._configDir + os.sep + additionalConfigDir
+ else:
+ self._additionalConfigDir = additionalConfigDir
+ if globalOptions._configFile == None:
+ globalOptions._configFile = defaultConfigFile
+ loggerName = None if len(globalOptions._logFiles) == 0 else globalOptions._logFiles[0]
+ if loggerName == None:
+ logger = base.MemoryLogger.MemoryLogger(self._verboseLevel)
+ elif loggerName == '-':
+ logger = base.MemoryLogger.MemoryLogger(self._verboseLevel)
+ elif useThreads:
+ logger = base.ThreadLogger.ThreadLogger(loggerName, self._verboseLevel)
+ else:
+ logger = base.Logger.Logger(loggerName, self._verboseLevel)
+ if loggerName == None:
+ self._logger = logger
+ else:
+ self.ensureDirectory(self._configDir)
+ fn = self._configDir + os.sep + globalOptions._configFile
+ if not os.path.exists(fn):
+ self.example()
+ self._configuration = base.JavaConfig.JavaConfig(fn, logger)
+ name = self._configuration.getString('log.file', tempfile.gettempdir() + os.sep + 'pyrshell.log')
+ base.FileHelper.ensureFileExists(name, '')
+ try:
+ os.chmod(name, 0o666)
+ except OSError as exc:
+ pass
+ logger2 = base.Logger.Logger(name, globalOptions._verboseLevel)
+ logger2.transferErrors(logger)
+ self._logger = logger2
+ self._beingRoot = os.geteuid() == 0
+ self._verboseLevel = globalOptions._verboseLevel
+ if globalOptions._errorFilter != None:
+ self._logger.setErrorFilter(globalOptions._errorFilter, True)
+ if self._additionalConfigDir != None:
+ base.FileHelper.ensureDirectory(self._additionalConfigDir, self._logger)
+ base.FileHelper.ensureDirectory('/var/log/local')
+ if hasattr(globalOptions._appInfo, '_usage'):
+ self._usage = globalOptions._appInfo._usage
+ self._processHelper = base.ProcessHelper.ProcessHelper(self._verboseLevel, self._logger)
+
+ def _finish(self):
+ '''Does the final work.
+ '''
+ end = time.process_time()
+ endReal = time.time()
+ if self._globalOptions._runtime:
+ self._logger.log('realtime: {:.3f} sec'.format(
+ endReal - self._startReal) + ' runtime: {:.3f} sec'.format(end - self._start))
+
+ def _initFromServiceInfo(self, serviceInfo):
+ '''Transfers the service info into the global options.
+ @param serviceInfo: an instance of ServiceInfo
+ '''
+ prefix = serviceInfo._prefixEnvVar
+ envVar = prefix + '_CONFIG'
+ if envVar in os.environ:
+ self._globalOptions._configDir = os.environ[envVar]
+ envVar = prefix + '_HOST'
+ if envVar in os.environ:
+ self._globalOptions._host = os.environ[envVar]
+ else:
+ self._globalOptions._host = BasicStatics.hostname(False)
+ envVar = prefix + '_APPL'
+ if envVar in os.environ:
+ self._globalOptions._appInfo._applicationNameName = os.environ[envVar]
+
+ def clearDirectory(self, directory):
+ '''Deletes all files and directories from a given directories.
+ '''
+ glob_errors = 0
+ glob_logger = self._logger
+ if not os.path.isdir(directory):
+ self._logger.error('missing directory: ' + directory)
+ else:
+ try:
+ for node in os.listdir(directory):
+ if node != '.' and node != '..':
+ full = directory + os.sep + node
+ if os.path.isdir(full):
+ shutil.rmtree(full, True, globOnError)
+ else:
+ try:
+ os.unlink(full)
+ except OSError as exc:
+ self._logger.error('cannot delete {}: {}'.format(full, str(exc)))
+ errors = glob_errors
+ if errors > 0:
+ self._logger.error('clearDirectory(): {d} errors'.format(errors))
+ finally:
+ glob_logger = None
+
    def createBackup(self, full, suffix = ''):
        '''Creates a backup file in /var/tmp.
        The backup name encodes the original node, its path and a timestamp.
        Directories are archived with tar, plain files are copied.
        @param full: the file or directory to save
        @param suffix: an additional suffix appended to the backup name
        '''
        node = os.path.basename(full)
        # NOTE(review): "node" is part of the computed suffix AND of the target
        # prefix below, so it appears twice in the backup name — verify this is intended
        suffix = node + '.' + base.FileHelper.pathToNode(os.path.dirname(full)) + datetime.datetime.now().strftime('-%Y.%m.%d_%H_%M_%S') + suffix
        trg = '/var/tmp/' + node + suffix
        if os.path.isdir(full):
            # archive the directory content ('.') with "full" as working directory
            self._processHelper.execute(['/bin/tar', 'czf', trg + '.tgz', '.'], True, '!shell', None, full)
        else:
            shutil.copy(full, trg)
        self._logger.log('backup of {} created: {}'.format(full, trg), 2)
+
+ def ensureDirectory(self, path, mode = 0o777):
+ '''Ensures that a directory exists.
+ @param path: the full name of the directory
+ @param mode: the rights, e.g. 0o777 for all access for everyone
+ @return: None: cannot create directory
+ otherwise: path
+ '''
+ rc = base.FileHelper.ensureDirectory(path, self._logger, mode)
+ return rc
+
+ def ensureFileDoesNotExist(self, filename, pattern=None):
+ '''Ensures that a file does not exist.
+ @param filename: the file to delete if it exists.
+ @param pattern: None or a regular expression for nodes to delete, e.g. r'.*\.z\d\d$'
+ Note: re.match() is used to find. Do not forget '.*' at the top
+ '''
+ base.FileHelper.ensureFileDoesNotExist(filename, self._logger)
+ if pattern != None:
+ regExpr = self.regExprCompile(pattern, 'file pattern')
+ path = os.path.dirname(filename)
+ if path != '':
+ nodes = os.listdir(path)
+ for node in nodes:
+ if regExpr.match(node):
+ try:
+ full = path + os.sep + node
+ os.unlink(full)
+ except OSError as exp:
+ self._logger.error('cannot delete {:s}: {:s}'.format(full, str(exp)))
+
+ def ensureSymbolicLink(self, source, target, createTarget=True):
+ '''Ensures that a directory exists.
+ @param source: the full name of the link source, e.g. '../sibling'
+ @param target: full name of the file of type 'link'
+ @param createTarget: creates the target if it does not exist
+ @return: True: the link exists
+ '''
+ rc = base.FileHelper.ensureSymbolicLink(source, target, createTarget, self._logger)
+ return rc
+
+ def debianName(self):
+ return None if self._debianVersion == None else self._debianVersion.split('/')[0]
+
+ def errorFileToLog(self, filename, intro=None):
+ '''Dumps an error file into the log as errors.
+ @param filename: full path of file
+ @param intro: None or a preceding message
+ '''
+ if os.path.exists(filename):
+ with open(filename, 'r') as fp:
+ for line in fp:
+ if line != '':
+ if intro != None:
+ line = intro + ' ' + line
+ intro = None
+ self._logger.error(line)
+
+ def floatArgument(self, arg, defaultValue = None):
+ '''Gets an float argument. If an error is detected usage is called
+ @param arg: the argument, e.g. 'factor=1.23'
+ @return the number behind the first '='
+ '''
+ rc = defaultValue
+ index = arg.find('=')
+ if index < 0:
+ if defaultValue == None:
+ self.usage('missing float in ' + arg)
+ else:
+ try:
+ rc = float(arg[index+1:])
+ except ValueError:
+ self.usage('not a float behind "=" in ' + arg)
+ return rc
+
+ def fullPathToName(self, path):
+ '''Converts a path name into a node name: slashes are changed to '='
+ @param path: full path
+ @return: a node name (separator replaced by '=')
+ '''
+ rc = path.replace(os.sep, '=').lstrip('=')
+ return rc
+
+ def getFilenameOrCopy(self, directory, node):
+ '''Get a configuration file, a copy for unittests if needed:
+ If _testTargetDir is defined, the file is copied from directory to _testTargetDir and the filename with _testTargetDir dir is returned.
+ Otherwise the filename with directory is returned
+ @param directory: the directory used outside of unittests
+ @return the full filename in directory or in _targetDir
+ '''
+ if self._globalOptions._testTargetDir == None:
+ rc = directory + os.sep + node
+ else:
+ source = directory + os.sep + node
+ rc = self._globalOptions._testTargetDir + os.sep + node
+ if os.path.exists(source):
+ self._logger.log('copying {} -> {}'.format(source, rc), 2)
+ shutil.copy2(source, rc)
+ else:
+ self._logger.log('missing {}'.format(source), 3)
+ return rc
+
+ def getSource(self, directory, node = None):
+ '''Returns the source directory specified by directory.
+ For unit tests the result is taken from the global options (_testSourceDir)
+ @param directory: the directory's name
+ @param node: None or the filename without path
+ @return: the directory name (node == None) or the filename (directory + os.sep + node)
+ '''
+ if self._globalOptions._testSourceDir == None:
+ rc = directory
+ else:
+ rc = self._globalOptions._testSourceDir
+ if node != None:
+ if node.endswith(os.sep):
+ rc += node
+ else:
+ rc += os.sep + node
+ if rc.startswith('//'):
+ rc = rc[1:]
+ return rc
+
+ def getTarget(self, directory, node = None):
+ '''Returns the source directory specified by directory.
+ For unit tests the result is taken from the global options (_testTargetDir)
+ @param directory: the directory's name
+ @param node: None or the filename without path
+ @return: the directory name (node == None) or the filename (directory + os.sep + node)
+ '''
+ if self._globalOptions._testTargetDir == None:
+ rc = directory
+ else:
+ rc = self._globalOptions._testTargetDir
+ if node != None:
+ if node.endswith(os.sep):
+ rc += node
+ else:
+ rc += os.sep + node
+ return rc
+
+ @staticmethod
+ def handleCommonCommands(appInfo, argv, options):
+ '''Handles the non specific commands of the tool, e.g. 'install'.
+ @param appInfo: the description of the application
+ @param argv: the program arguments starting with the command, e.g. ['install']
+ @param options: the global options (extracted from the program arguments)
+ '''
+ rc = False
+ if len(argv) > 0:
+ cmd = argv[0]
+ if cmd == 'install':
+ tool = InstallTool(appInfo, options)
+ tool.install(appInfo._applicationNameName, appInfo._module, options)
+ if appInfo._serviceInfo != None:
+ info = appInfo._serviceInfo
+ tool.installAsService(argv[1:], info._user, info._group, info._prefixEnvVar, info._description)
+ rc = True
+ elif cmd == 'uninstall':
+ tool = InstallTool(appInfo, options)
+ if appInfo._serviceInfo != None:
+ tool.uninstallService(argv[1:])
+ tool.uninstall(appInfo._applicationNameName, argv[1:])
+ rc = True
+ return rc
+
+ def handleStandardCommands(self, argv):
+ '''Executes the common commands when the instance is initialized.
+ At this time the only common command is 'example'.
+ @param argv: the program arguments, e.g. ['info', '--short']
+ @return: a tuple (cmd, argv) cmd is the first argument, argv the rest of argv.
+ cmd==None: a standard command has been found
+ cmd=='': no subcommand has been found
+ '''
+ if len(argv) == 0:
+ cmd = ''
+ else:
+ cmd = argv[0]
+ argv = argv[1:]
+ options = self._globalOptions
+ appInfo = options._appInfo
+ if cmd == 'example':
+ self.example()
+ cmd = None
+ elif cmd == 'install':
+ tool = InstallTool(appInfo, options)
+ tool.install(appInfo._applicationName, appInfo._module, options)
+ if appInfo._serviceInfo != None:
+ info = appInfo._serviceInfo
+ tool.installAsService(argv, info._user, info._group, info._prefixEnvVar, info._description)
+ cmd = None
+ elif cmd == 'uninstall':
+ tool = InstallTool(appInfo, options)
+ if appInfo._serviceInfo != None:
+ tool.uninstallService(argv[1:])
+ tool.uninstall(appInfo._applicationName, argv[1:])
+ cmd = None
+ return (cmd, argv)
+
+ def installLoggers(self, options):
+ '''Initializes the loggers described in the options._logFiles: multiple loggers are mirrored
+ @param options: the program options
+ '''
+ BasicStatics.ensureDirectory('/var/log/local')
+ baseLogger = None
+ logger = None
+ for name in options._logFiles:
+ if name == '' or name == '-':
+ logger = base.MemoryLogger.MemoryLogger(options._verboseLevel)
+ else:
+ logger = base.Logger.Logger(name, options._verboseLevel)
+ if baseLogger == None:
+ baseLogger = logger
+ else:
+ # only the base logger should cry:
+ logger._verbose = False
+ baseLogger.setMirror(logger)
+ if logger == None:
+ logger = base.MemoryLogger.MemoryLogger(options._verboseLevel)
+ self._logger = logger
+
+ def integerOption(self, arg, defaultValue = None):
+ '''Gets an integer argument. If an error is detected usage is called
+ @param arg: the argument, e.g. 'count=123'
+ @return the number behind the first '='
+ '''
+ rc = defaultValue
+ index = arg.find('=')
+ if index < 0:
+ ix = 0
+ while ix < len(arg) and arg[ix] < '0' and arg[ix] > '9':
+ ix += 1
+ if ix < len(arg):
+ index = ix - 1
+ if index < 0:
+ if defaultValue == None:
+ self.usage('missing integer in ' + arg)
+ else:
+ try:
+ rc = int(arg[index+1:])
+ except ValueError:
+ self.usage('not an integer {} in {}'.format(arg[index+1:], arg))
+ return rc
+
+ def integerArgument(self, arg, defaultValue = None):
+ '''Gets an integer argument. If an error is detected usage is called
+ @param arg: the argument, e.g. 'count=123'
+ @return the number behind the first '='
+ '''
+ rc = defaultValue
+ try:
+ rc = int(arg)
+ except ValueError:
+ self.usage('not an integer {}'.format(arg))
+ return rc
+
+ def nearBackup(self, filename, moveNotCopy = False):
+ '''Makes a backup of a file in the same parent directory.
+ @param filename: the source file
+ @param moveNotCopy: True: the source will be renamed False: the file will be copyied
+ '''
+ if os.path.exists(filename):
+ target = filename + '.{}'.format(int(time.time()))
+ self._logger.log('creating backup file: ' + target, 2)
+ try:
+ if moveNotCopy:
+ os.rename(filename, target)
+ else:
+ shutil.copy2(filename, target)
+ except OSError as exc:
+ self._error('cannot create backup file {}: {}'.format(target, str(exc)))
+
+ def logFile(self, filename, messagePattern, start):
+ '''Writes metadata file size and runtime into the log.
+ @param filename: file to log
+ @param messagePattern: log message with makros %f (filename) %s (filesize) %t (filetime) and %r (runtime)
+ @param start: None or the start of the operation (for calculating runtime)
+ '''
+ stat = os.stat(filename)
+ size = base.StringUtils.formatSize(stat.st_size)
+ fdate = datetime.datetime.fromtimestamp(stat.st_mtime)
+ dateString = fdate.strftime("%Y.%m.%d %H:%M:%S")
+ runtime = '?' if start == None else '{:d} sec'.format(int(time.time() - start))
+ msg = messagePattern.replace('%f', filename).replace('%s', size).replace('%t', dateString).replace('%r', runtime)
+ self._logger.log(msg)
+ return msg
+
+ def publicIp(self):
+ '''Tries to detect the public IP of the host.
+ @return: None: nothing found othewise: the best known public IP
+ '''
+ self._processHelper.executeInputOutput(['ip', 'addr', 'show'], None)
+ lines = self._processHelper._rawOutput.decode().split('\n')
+ device = None
+ lastIpType = '0-none'
+ lastDevType = '0-none'
+ for line in lines:
+ matcher = re.match(r'\d: (\w+):', line)
+ if matcher != None:
+ device = matcher.group(1)
+ if device.startswith('e'):
+ devType = '9-ethernet'
+ elif device.startswith('w'):
+ devType = '7-wlan'
+ elif device == 'lo':
+ devType = '1-loopback'
+ else:
+ devType = '3-unknown'
+ continue
+ matcher = re.match(r'\s+inet (\S+)/', line)
+ if matcher != None:
+ ip = matcher.group(1)
+ if ip.startswith('127.'):
+ ipType = '1-localhost'
+ elif len(ip) > 6 and (ip.startswith('169.254.') or ip.startswith('192.168.') or ip.startswith('10.')
+ or (ip >= '172.16.' and ip <= '172.32.' and ip[6] == '.')):
+ ipType = '3-private'
+ else:
+ ipType = '9-public'
+ self._logger.log('device: {} ip: {} devType: {} ipType: {}'.format(device, ip, devType, ipType), 3)
+ if (devType > lastDevType) or (ipType > lastIpType):
+ lastIp = ip
+ lastIpType = ipType
+ lastDevType = devType
+ rc = lastIp if ipType > '1-localhost' else None
+ self._logger.log('public ip: {}'.format(str(rc)), 2)
+ return rc
+
    def popd(self, directory):
        '''Returns to a directory previously saved by pushd().
        Changes the current directory (if needed and possible); an error is
        logged when the change did not succeed. Nothing is returned.
        @param directory: None or '': nothing is done
            otherwise: the directory to change to
        '''
        if directory != None and directory != '':
            os.chdir(directory)
            # verify the change really happened (chdir to a symlinked or
            # vanished path may end up elsewhere):
            if os.path.realpath(os.curdir) != os.path.realpath(directory):
                self._logger.error('cannot change to directory ' + directory)
+
+ def pushd(self, directory):
+ '''Changes the current direcory (if needed and possible).
+ @param directory: None or the new current directory
+ @return None: directory = None
+ '': changing directory failed
+ otherwise: the current directory (before changing)
+ '''
+ if directory == None:
+ rc = None
+ else:
+ rc = os.curdir
+ os.chdir(directory)
+ if os.path.realpath(os.curdir) != os.path.realpath(directory):
+ os.chdir(rc)
+ self._logger.error('cannot change to directory ' + directory)
+ rc = ''
+ return rc
+
+ def regExprCompile(self, pattern, location, isCaseSensitive = False):
+ '''Compiles a regular expression.
+ @param pattern: a regular expression.
+ @param isCaseSensitive: true: the case is relevant
+ @return: the re.RegExpr instance
+ '''
+ rc = base.StringUtils.regExprCompile(pattern, location, self._logger, isCaseSensitive)
+ if rc == None:
+ msg = self._logger._firstErrors[-1] if len(self._logger._firstErrors) > 0 else 'wrong regular expression in ' + location
+ self.usage(msg)
+ return rc
+
+ def tableOfContent(self, archive):
+ '''Returns the table of content of a tar archive.
+ @param archive: the tar archive to inspect
+ @returns: the list of files stored into the archive
+ '''
+ lines = self._processHelper.executeInputOutput(['/bin/tar', 'tzf', archive], None, True)
+ return lines
+
    def saveDirectoryByTar(self, name, source, target, opts):
        '''Saves a directory into a (gzip compressed) tar archive.
        NOTE(review): the method changes the current working directory to
        <source> and does NOT change back (the restore code is commented out).
        Callers must not rely on the current directory afterwards -- confirm
        whether this is intended.
        @param name: archive name without extension
        @param source: source directory
        @param target: target directory (of the tar archive)
        @param opts: None or an array of options like '-exclude=<pattern>'
        '''
        if not os.path.isdir(source):
            self._logger.error('tar: unknown source {:s} for {:s}'.format(source, name))
        elif not os.path.isdir(target):
            self._logger.error('unknown target: ' + target)
        else:
            start = time.time()
            # oldDir = os.getcwd()
            # tar is invoked with the relative path '.' below, so the archive
            # does not contain absolute path names:
            os.chdir(source)
            archive = target + os.sep + name + '.tgz'
            self.ensureFileDoesNotExist(archive)
            argv = ['/bin/tar', 'czf', archive]
            if opts != None:
                argv += opts
            argv.append('.')
            self._processHelper.execute(argv, True)
            # os.chdir(oldDir)
            self.logFile(archive, '%f: %s %t created in %r', start)
+
+ def saveDirectoryByZip(self, name, source, target, opts, interval = None):
+ '''Saves a directory into a zip archive.
+ @param name: will be used for the node name of the archive
+ @param source: the directory to save
+ @param target: the archive will be stored here
+ @param opts: None or an array of options. inclusions, exclusions...
+ @param interval: None or number of days. Only files younger than this will be stored
+ '''
+ if not os.path.isdir(source):
+ self._logger.error('zip: unknown source {:s} for {:s}'.format(source, name))
+ elif not os.path.isdir(target):
+ self._logger.error('unknown target: ' + target)
+ else:
+ start = time.time()
+ oldDir = os.getcwd()
+ os.chdir(source)
+ archive = target + os.sep + name + ('.zip' if interval == None else '.latest.zip')
+ self.ensureFileDoesNotExist(archive, name + r'\.z\d\d$')
+ no = 1
+ while True:
+ no += 1
+ fn = archive.replace('.zip', 'z{:2d}'.format(no))
+ if os.path.exists(fn):
+ self.ensureFileDoesNotExist(fn)
+ else:
+ break
+ dontCompress = '.zip:.tgz:.gzip:.gz:.bz2:.lz:.lzma:.xz:.7z:.deb:.rpm:.cab:.rar:.jar:.war:.ear' + \
+ '.jpg:.png:.gif:.tif:.tiff:.jpeg:.gzip:.mp3:.mpeg:.mp4:.m4p:.flv:.f4a:.f4b:.f4v:.vob:.ogv:.avi:.mov' + \
+ ':.odt:.ott:.ods:.odg:.otg:.odp:.odb:.doc:.docx:.xls:.xlsx:.xltx:.xlw'
+ argv = ['/usr/bin/zip', '-r5qyo', '-n', dontCompress]
+ volSize = self._configuration.getString('zip.volume.size')
+ if volSize != None:
+ if re.match(r'\d+[kmg]$', volSize) == None:
+ self._logger('wrong value in zip.volume.size')
+ else:
+ argv.append('-s')
+ argv.append(volSize)
+ if interval != None:
+ aDate = datetime.date.fromtimestamp(time.time() - interval * 86400).strftime('%Y-%m-%d')
+ argv.append('-t')
+ argv.append(aDate)
+ # should be exist at least one young file:
+ argv.append(archive)
+ argv.append('.')
+ if interval != None:
+ fnDummy = tempfile.gettempdir() + os.sep + '.saveDirectoryByZip.{:.0f}.mrk'.format(time.time())
+ base.StringUtils.toFile(fnDummy, '')
+ argv.append(fnDummy)
+ if opts != None:
+ argv += opts
+ self._processHelper.execute(argv, True)
+ os.chdir(oldDir)
+ if interval != None:
+ os.unlink(fnDummy)
+ self.logFile(archive, '%f: %s %t created in %r', start)
+
+ def splitArgsAndOpts(self, argv):
+ '''Splits the argument vector into arguments and options (starting with '-').
+ @param argv: the arguments, e.g. ['random', '--seed=Wow']
+ @return: a tuple (argv, options)
+ '''
+ argv2 = []
+ options = []
+ for arg in argv:
+ if arg.startswith('-'):
+ options.append(arg)
+ else:
+ argv2.append(arg)
+ return (argv2, options)
+
+ def storeExample(self, text, filename=None, path=None):
+ '''Stores the text in an configiguration file (if that does not exist) or in an example file.
+ @param text: the example file content
+ @param filename: None: the application name will be used. Otherwise: the name of the configuration file
+ '''
+ if filename == None:
+ filename = self._globalOptions._appInfo._applicationName + '.conf'
+ if path == None:
+ path = self._configDir
+ full = path + os.sep + filename
+ if os.path.exists(full):
+ parts = filename.split('.')
+ if len(parts) == 1:
+ filename = filename + '.example'
+ else:
+ filename = '.'.join(parts[0:-1]) + '.example'
+ full = path + os.sep + filename
+ self._logger.log('creating ' + full)
+ base.StringUtils.toFile(full, text)
+
    def usage(self, message):
        '''Shows a usage message and terminates the program (unless --exit-with-0 is set).
        @param message: the reason of the usage call (an error message)
        '''
        self._logger.error(message)
        msg = self._usage()
        # NOTE(review): if _usage() returns None and the verbose level is > 0,
        # print(msg) below prints the literal 'None' -- confirm intended
        if msg != None:
            # expand the placeholder for the global options help text:
            msg = msg.replace('GLOBAL_OPTS', BasicStatics.usageGlobals(self._configuration._filename))
            appInfo = self._globalOptions._appInfo
            installOpts = defaultUser = defaultGroup = environLogfile = ''
            if appInfo._serviceInfo != None:
                serviceInfo = appInfo._serviceInfo
                defaultUser = serviceInfo._user
                defaultGroup = serviceInfo._group
                environLogfile = None
                # prefer an already defined environment variable ending with '_LOG':
                for name in os.environ:
                    if name.endswith('_LOG'):
                        environLogfile = name
                        break
                if environLogfile == None:
                    environLogfile = appInfo._applicationName.upper() + '_LOG'
            msg = msg.replace('GLOBAL_MODES', BasicStatics.usageInstallUninstall(appInfo._applicationName,
                defaultUser, defaultGroup, installOpts, environLogfile)).rstrip()
        if self._verboseLevel > 0:
            print(msg)
            print('+++ ' + message)
        if not self._exitWith0:
            sys.exit(1)
+
class BasicStatics:
    '''A collection of static helper functions: argument parsing, file and
    directory handling, user/group management, logging helpers and fragments
    of usage messages.
    '''

    @staticmethod
    def argsToDictionary(argv, usage):
        '''Tests the arguments and put them into a dictionary.
        Recognized: --user= --group= --application= --host= --no-auto-start
        -l<file> --log=<file>
        @param argv: the program arguments to inspect
        @param usage: a callable taking an error message; called on invalid arguments
        @return: a dictionary of the recognized values; 'autostart' is always set
        '''
        rc = dict()
        rc['autostart'] = True
        while len(argv) > 0:
            if argv[0].startswith('--user='):
                user = argv[0][7:]
                if not re.match(r'^[\w-]+$', user):
                    usage('invalid characters in <user>:' + user)
                else:
                    rc['user'] = user
            elif argv[0].startswith('--group='):
                group = argv[0][8:]
                if not re.match(r'^[\w-]+$', group):
                    usage('invalid characters in <group>:' + group)
                else:
                    rc['group'] = group
            elif argv[0].startswith('--application='):
                application = argv[0][14:]
                if not re.match(r'^[\w-]+$', application):
                    usage('invalid characters in <application>:' + application)
                else:
                    rc['application'] = application
            elif argv[0].startswith('--host='):
                host = argv[0][7:]
                if not re.match(r'^[\w-]+$', host):
                    usage('invalid characters in <host>:' + host)
                else:
                    rc['host'] = host
            elif argv[0] == '--no-auto-start':
                rc['autostart'] = False
            elif argv[0].startswith('-l'):
                rc['logfile'] = argv[0][2:]
            elif argv[0].startswith('--log='):
                rc['logfile'] = argv[0][6:]
            else:
                usage('unknown option: ' + argv[0])
            argv = argv[1:]
        return rc

    @staticmethod
    def ensureDirectory(path, logger=None, rights=0o777):
        '''Ensures that a directory exists.
        @param path: the full name of the directory
        @param rights: the access rights.
        @param logger: None or the logger
        @return: None: cannot create directory
            otherwise: path
        '''
        if not os.path.isdir(path):
            if os.path.exists(path):
                BasicStatics.error('{:s} is not a directory but a file'.format(path), logger)
            # NOTE(review): when path exists as a plain file, makedirs() below
            # will raise an uncaught OSError -- consider returning early above
            os.makedirs(path, rights)
            if os.path.isdir(path):
                BasicStatics.log('created: ' + path, logger)
            else:
                path = None
        return path

    @staticmethod
    def ensureFileDoesNotExist(filename, pattern=None, logger = None):
        '''Ensures that a file does not exist.
        @param filename: the file to delete if it exists.
            If pattern is defined this name is used to detect the directory for pattern matching
        @param pattern: None or a regular expression for nodes to delete, e.g. r'.*\.z\d\d$'
            Note: re.match() is used to find. Do not forget '.*' at the top
        @param logger: None or the logger
        '''
        if os.path.exists(filename):
            try:
                os.unlink(filename)
                BasicStatics.log('removed: ' + filename, logger)
            except OSError as exp:
                BasicStatics.error('cannot delete {:s}: {:s}'.format(filename, str(exp)), logger)
        if pattern != None:
            # NOTE(review): regExpr is None if the pattern is invalid; the
            # match() call below would then raise -- confirm callers pass
            # valid patterns only
            regExpr = base.StringUtils.regExprCompile(pattern, 'file pattern', logger)
            path = os.path.dirname(filename)
            # pattern cleanup is only done when filename carries a directory part:
            if path != '':
                nodes = os.listdir(path)
                for node in nodes:
                    if regExpr.match(node):
                        try:
                            full = path + os.sep + node
                            os.unlink(full)
                        except OSError as exp:
                            BasicStatics.error('cannot delete {:s}: {:s}'.format(full, str(exp)), logger)

    @staticmethod
    def ensureUserAndGroup(user, group, logger, userId = None, groupId = None):
        '''Tests whether a given user and group exists. If not they will be created.
        @param user: None or the username
        @param group: None or the group name
        @param logger: None: errors will be displayed via print()
        @param userId: None or the user id (only used while creating)
        @param groupId: None or the group id (only used while creating)
        '''
        if user != None:
            if BasicStatics.userExists(user):
                BasicStatics.log('user exists: ' + user, logger)
            else:
                # create a user without login shell:
                subprocess.call(['/usr/sbin/useradd', '-s', '/bin/false', user])
                if not BasicStatics.userExists(user):
                    BasicStatics.error('cannot create user ' + user, logger)
                else:
                    BasicStatics.log('user created: ' + user, logger)
        if group != None:
            if BasicStatics.groupExists(group):
                BasicStatics.log('group exists: ' + group, logger)
            else:
                # create group if not exists:
                subprocess.call(['/usr/sbin/groupadd', '-f', group])
                if BasicStatics.groupExists(group):
                    BasicStatics.log ('group created: ' + group, logger)
                else:
                    BasicStatics.error('cannot create group: ' + group, logger)

    @staticmethod
    def error(msg, logger):
        '''Logs an error.
        @param msg: error message
        @param logger: None: print will be used. Otherwise: the logger
        '''
        if logger != None:
            logger.error(msg)
        else:
            print('+++ ' + msg)

    @staticmethod
    def findFiles(path, pattern, useFullName=True):
        '''Finds all files of a directory given by a pattern.
        @param path: the directory to search
        @param pattern: a pattern with unix shell wildcards: *: any string ?: one char [<ranges>]
        @param useFullName: True: the result contain the path too False: only the nodes are returned
        @return: an array with the found names
        '''
        rc = []
        for node in os.listdir(path):
            if fnmatch.fnmatch(node, pattern):
                if useFullName:
                    rc.append(path + os.sep + node)
                else:
                    rc.append(node)
        return rc

    @staticmethod
    def firstFile(path, pattern, useFullName=True):
        '''Finds the first file matching a given pattern.
        @param path: the directory to search
        @param pattern: a pattern with unix shell wildcards: *: any string ?: one char [<ranges>]
        @param useFullName: True: the result contain the path too False: only the nodes are returned
        @return: None or the found name
        '''
        rc = None
        for node in os.listdir(path):
            if fnmatch.fnmatch(node, pattern):
                if useFullName:
                    rc = path + os.sep + node
                else:
                    rc = node
                break
        return rc

    @staticmethod
    def grep(pattern, filename):
        '''Finds all lines matching a given pattern.
        NOTE(review): the docstring of the original claimed regular expression
        support for patterns starting with '/', but the code below only handles
        shell wildcards and plain substring search -- confirm the contract.
        @param pattern: the string to search; '*' and '?' are treated as shell wildcards
        @param filename: the file to search
        @return: an array of the found lines. May be empty.
        '''
        rc = []
        hasWildcards = pattern.find('*') >= 0 or pattern.find('?') >= 0
        if hasWildcards:
            # surround with '*' so the pattern may match anywhere in the line:
            pattern = ('*' + pattern + '*').replace('**', '*').replace('**', '*')
        with open(filename, 'rb') as fp:
            for line in fp:
                again = False
                try:
                    line = line.decode('UTF-8')
                except UnicodeDecodeError:
                    again = True
                if again:
                    # latin1 never fails: every byte maps to a character
                    line = line.decode('latin1')
                if hasWildcards and fnmatch.fnmatch(line, pattern) or not hasWildcards and line.find(pattern) >= 0:
                    rc.append(line)
        return rc

    @staticmethod
    def groupExists(name):
        '''Tests whether a given group exists.
        @param name: the group's name
        @return: True: the group exists
        '''
        try:
            info = grp.getgrnam(name)
        except KeyError:
            info = None
        return info != None

    @staticmethod
    def hostname(fullQualified=False):
        '''Returns the local hostname (read from /etc/hostname).
        @param fullQualified: True: the hostname with domain will be returned
        @return: the hostname
        '''
        rc = base.StringUtils.fromFile('/etc/hostname').strip()
        if not fullQualified:
            ix = rc.find('.')
            if ix > 0:
                rc = rc[0:ix]
        return rc

    @staticmethod
    def log(message, logger=None):
        '''Logs an message.
        @param message: the message to show
        @param logger: None or the logger
        '''
        if logger != None:
            logger.log(message)
        else:
            print(message)

    @staticmethod
    def usageGlobals(defaultConfigFile = ''):
        '''Returns the help text describing the global options.
        @param defaultConfigFile: inserted as the default of the --job-file option
        @return: a multiline help text
        '''
        return '''<global_opt>:
 -0 or --exit-with-0
 exit code is 0 not depending on any condition
 -c<dir> or --configuration-directory=<dir>
 Configuration directory: contains the configuration files. Default: '/etc/pyrshell'
 -j<file> or --job-file=<file>
 The main configuration file inside the configuration directory. Default: "{}"
 -l<file> or --log=<file>
 The logfile. If empty no logfile will be created.
 -q or --quiet
 No logs to stdout
 -r or --runtime
 Writes the runtime to the logger
 --test-source-dir=<path>
 Used in unit tests
 --test-target-dir=<path>
 Used in unit tests
 -v<level> or --verbose-level=<level>
 Verbose level: 1: summary 2: details start 3: fine Default: 1'''.format(
            defaultConfigFile.replace('\n', ' '))

    @staticmethod
    def usageInstallUninstall(defaultApplication, defaultUser = None, defaultGroup = None, installOpts = None, environLogfile = None):
        '''Returns the help text for install/uninstall commands.
        @param defaultApplication: inserted as default application name
        @param defaultUser: None/'': the non-service variant is returned
            otherwise: the service variant (with user/group options) is returned
        @param defaultGroup: inserted as default group (service variant only)
        @param installOpts: currently unused
        @param environLogfile: name of the logfile environment variable (service variant only)
        @return: a multiline help text
        '''
        if defaultUser == None or defaultUser == '':
            rc = '''<global-command>:
 example
 creates an example configuration
 install <opts>
 installs the tool as command
 <opts>:
 --application=<application>
 the name of the application. Default: {}
 -l<file> or --log=<log>
 file for logging output (of the daemon).
 Default: /var/log/local/<application>.log
 uninstall
 uninstalls the application
 <opts>:
 --purge
 remove configuration files too
 --application=<application>
 the name of the application. Default: '{}'
'''.format(defaultApplication, defaultApplication)
        else:
            rc = '''<global-command>
install <opts>
 installs the daemon as a systemd service
 <opts>:
 --application=<application>
 the name of the application. Default: {}
 --user=<user>
 the daemon runs as this user. Default: <application> or '{}'
 --group=<group>
 the daemon runs under this group. Default: <application> or '{}'
 --no-auto-start
 the service does not start at boot time (systemctl disable <application>)
 -l<file> or --log=<log>
 file for logging output (of the daemon).
 Default: os.environ['{}'] or /var/log/local/<application>.log
 uninstall <opts>
 --purge
 remove configuration files too
 --application=<application>
 the name of the application. Default: '{}'
 --hold-user
 the user will not be deleted.
 Note: the user will be deleted only if its name contains the application
 --hold-group
 the user will not be deleted
 Note: the group will be deleted only if its name contains the application'''.format(
                defaultApplication, defaultUser, defaultGroup, environLogfile, defaultApplication)
        return rc

    @staticmethod
    def userExists(name):
        '''Tests whether a given user exists.
        @param name: the user's name
        @return: True: the user exists
        '''
        try:
            info = pwd.getpwnam(name)
        except KeyError:
            info = None
        return info != None
+
class InstallTool:
    '''Implements an installer: creates systemd service files, start scripts,
    environment files and example configurations, and removes them again.
    '''
    def __init__(self, appInfo, globalOptions):
        '''Constructor:
        @param appInfo: the basic infos
        @param globalOptions: the basic initialization values
        '''
        self._appInfo = appInfo
        # NOTE(review): _usage stays undefined if globalOptions has no _appInfo
        if hasattr(globalOptions, '_appInfo'):
            self._usage = globalOptions._appInfo._usage
        self._program = os.path.abspath(sys.argv[0])
        self._systemDPath = '/etc/systemd/system/'
        self._configPath = globalOptions._configDir
        self._configFile = globalOptions._configFile if globalOptions._configFile != None else appInfo._applicationName + '.conf'
        self._logger = base.MemoryLogger.MemoryLogger(1)

    def createSystemDScript(self, serviceName, starter, user, group, description):
        '''Creates the file controlling a systemd service.
        @param serviceName: used for syslog and environment file
        @param starter: name of the starter script without path, e.g. 'pymonitor'
        @param user: the service is started with this user
        @param group: the service is started with this group
        @param description: this string is shown when the status is requested
        '''
        systemdFile = '{}{}.service'.format(self._systemDPath, self._appInfo._applicationName)
        script = '''[Unit]
Description={}.
After=syslog.target
[Service]
Type=simple
User={}
Group={}
WorkingDirectory=/etc/pyrshell
EnvironmentFile=-/etc/pyrshell/{}.env
ExecStart=/usr/local/bin/{} daemon
ExecReload=/usr/local/bin/{} reload
SyslogIdentifier={}
StandardOutput=syslog
StandardError=syslog
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target
'''.format(description, user, group, serviceName, starter, starter, serviceName)
        with open(systemdFile, "w") as fp:
            fp.write(script)
        print('systemd script created: ' + systemdFile)

    def ensureEnvironmentFile(self, envVarPrefix, configPath, logfile, envVariables=None, logger=None):
        '''Tests whether the environment file exists. If not it will be created.
        @param envVarPrefix: a prefix of all variable names, e.g. 'MONITOR'
        @param configPath: the path of the directory containing the configuration file, e.g. '/etc/pyrshell'
        @param logfile: the log file, e.g. '/var/log/local/pymonitor.log'
        @param envVariables: None or additional entries of the environment file, e.g. 'MONITOR_HOST=localhost\\n'
        @param logger: None or the logger
        '''
        envFile = self._configPath + os.sep + self._appInfo._applicationName + '.env'
        if os.path.exists(envFile):
            BasicStatics.log('environment file exists: ' + envFile, logger)
        else:
            # PYTHONPATH must contain the installation directory of the application:
            path = ':'.join(sys.path) + ':' + os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
            with open(envFile, "w") as fp:
                fp.write('PYTHONPATH={}\n'.format(path))
                fp.write('{}_CONFIG={}\n'.format(envVarPrefix, configPath))
                fp.write('{}_APPL={}\n'.format(envVarPrefix, self._appInfo._applicationName))
                fp.write('{}_LOGFILE={}\n'.format(envVarPrefix, logfile))
                if envVariables != None:
                    fp.write(envVariables)
            if not os.path.exists(envFile):
                BasicStatics.error('cannot create environment file ' + envFile, logger)
            else:
                # fix: the success message was reported via error()
                BasicStatics.log('environment file created: ' + envFile, logger)

    def ensureStartScript(self):
        '''Creates the start script /usr/local/bin/<application>: a small bash
        wrapper setting PYTHONPATH and calling the real program.
        An existing symbolic link is removed first.
        '''
        fn = '/usr/local/bin/' + self._appInfo._applicationName
        if os.path.islink(fn):
            os.unlink(fn)
            if os.path.exists(fn):
                self._logger.error('cannot remove link: ' + fn)
            else:
                self._logger.log('link removed: ' + fn)
        if not os.path.exists(fn):
            prog = os.path.abspath(sys.argv[0])
            baseDir = os.path.dirname(os.path.dirname(prog))
            with open(fn, "w") as fp:
                fp.write('#! /bin/bash\n')
                fp.write('export PYTHONPATH={:s}:$PYTHONPATH\n'.format(baseDir))
                fp.write(prog + ' $*\n')
            self._logger.log('created: ' + fn)
            os.chmod(fn, 0o755)

    def evalOption(self, option):
        '''Evaluates a non standard option.
        Should be overwritten in derived classes.
        @param option: the option to inspect
        @return: True: option recognized False: option not recognized
        '''
        return False

    def install(self, scriptName, module, options):
        '''Installs a given Python module as external linux script (symbolic link).
        @param scriptName: name of the script in /usr/local/bin, e.g. 'ftptool'
        @param module: name of the module relative to base, e.g. 'appl/FtpTool.py'
        @param options: program options (currently unused)
        '''
        if os.geteuid() != 0:
            self.usage('you must be root!')
        else:
            src = '/usr/share/pyrshell/' + module
            trg = '/usr/local/bin/' + scriptName
            if not os.path.exists(src):
                self.usage('missing module ' + src)
            if os.path.exists(trg):
                # keep the old script under a unique name (timestamp suffix):
                trg2 = trg + datetime.datetime.now().strftime('%s')
                print(trg + ' already exists. Renameing to ' + trg2)
                os.rename(trg, trg2)
            os.symlink(src, trg)
            print(src + ' installed into ' + trg)
            # BasicStatics.ensureConfiguration(options, self._configFile, 'install', self._usage, self._example)

    def installAsService(self, argv, defaultUser, defaultGroup, prefixEnvVar, description):
        '''Installs the monitor as a systemd service.
        @param argv: program arguments, e.g. ['--user=service']
        @param defaultUser: the service is started under this user
        @param defaultGroup: the service is started under this group
        @param prefixEnvVar: the prefix of the environment variables, e.g. 'MONITOR'
        @param description: a string shown by the 'systemctl status <service>' command
        '''
        if os.geteuid() != 0:
            self.usage('you must be root!')
        user = None
        group = None
        application = None
        logfile = None
        autoStart = True
        starter = None
        while len(argv) > 0:
            if argv[0].startswith('--user='):
                user = argv[0][7:]
                if not re.match(r'^[\w-]+$', user):
                    self.usage('invalid characters in <user>:' + user)
            elif argv[0].startswith('--group='):
                group = argv[0][8:]
                if not re.match(r'^[\w-]+$', group):
                    self.usage('invalid characters in <group>:' + group)
            elif argv[0].startswith('--application='):
                application = argv[0][14:]
                if not re.match(r'^[\w-]+$', application):
                    self.usage('invalid characters in <application>:' + application)
            elif argv[0].startswith('--starter='):
                starter = argv[0][10:]
                # fix: the starter (not the application) must be validated here
                if not re.match(r'^[\w-]+$', starter):
                    self.usage('invalid characters in <starter>:' + starter)
            elif argv[0] == '--no-auto-start':
                autoStart = False
            elif argv[0].startswith('-l'):
                logfile = argv[0][2:]
            elif argv[0].startswith('--log='):
                # fix: '--log=' is 6 characters long (the value started at index 10 before)
                logfile = argv[0][6:]
            elif self.evalOption(argv[0]):
                pass
            else:
                self.usage('unknown option: ' + argv[0])
            argv = argv[1:]
        if application != None and len(application) > 0:
            # fix: 'usage' was an undefined name; the method of this instance is meant
            self._appInfo = ApplicationInfo(application, None, self.usage)
        if starter == None:
            starter = self._appInfo._applicationName
        if user == None or len(user) == 0:
            user = defaultUser if defaultUser != None else self._appInfo._applicationName
        if group == None or len(group) == 0:
            group = defaultGroup if defaultGroup != None else self._appInfo._applicationName
        if logfile == None:
            logfile = '/var/log/local/' + self._appInfo._applicationName + '.log'
        # fix: createSystemDScript() expects the service name (a string),
        # not the ApplicationInfo instance
        self.createSystemDScript(self._appInfo._applicationName, starter, user, group, description)
        BasicStatics.ensureUserAndGroup(user, group, self._logger)
        # envVarPrefix, configPath, logfile, envVariables=None
        self.ensureEnvironmentFile(prefixEnvVar, self._configPath, logfile)
        self.ensureStartScript()
        if autoStart:
            subprocess.call(['/bin/systemctl', 'enable', self._appInfo._applicationName])
        print('+++ check configuration in {}/{} and start with:\nsystemctl start {}'.format(self._configPath, self._configFile, starter))

    def uninstall(self, scriptName, argv):
        '''Uninstalls the installed command (and optionally its configuration).
        @param scriptName: name of the script in /usr/local/bin, e.g. 'ftptool'
        @param argv: program arguments, e.g. ['--purge']
        '''
        if os.geteuid() != 0:
            self.usage('you must be root!')
        else:
            trg = '/usr/local/bin/' + scriptName
            purge = False
            for arg in argv:
                if arg == '--purge':
                    purge = True
                else:
                    self.usage('unknown option: ' + arg)
            BasicStatics.ensureFileDoesNotExist(trg)
            if purge:
                fn = self._configPath + os.sep + self._configFile
                BasicStatics.ensureFileDoesNotExist(fn)

    def uninstallService(self, argv):
        '''Uninstalls the systemd service.
        @param argv: program arguments
        '''
        if os.geteuid() != 0:
            self.usage('you must be root!')
        deleteUser = True
        deleteGroup = True
        purge = False
        application = self._appInfo._applicationName
        while len(argv) > 0:
            if argv[0] == '--purge':
                purge = True
            elif argv[0] == '--hold-user':
                deleteUser = False
            elif argv[0] == '--hold-group':
                deleteGroup = False
            elif argv[0].startswith('--application='):
                application = argv[0][14:]
            else:
                self.usage('unknown option: ' + argv[0])
            argv = argv[1:]
        subprocess.call(['/bin/systemctl', 'disable', application])
        systemdFile = '/etc/systemd/system/{}.service'.format(application)
        ini = None
        if os.path.exists(systemdFile):
            # read user/group from the service file before deleting it:
            ini = base.JavaConfig.JavaConfig(systemdFile, self._logger, True)
            os.unlink(systemdFile)
            if os.path.exists(systemdFile):
                print('+++ cannot delete ' + systemdFile)
            else:
                print('removed: ' + systemdFile)
        user = ini.getString('User', application) if ini != None else application
        if deleteUser and BasicStatics.userExists(user):
            subprocess.call(['/usr/sbin/userdel', user])
            # fix: the deleted user must be re-checked, not the application name
            if BasicStatics.userExists(user):
                print('+++ cannot delete user ' + user)
            else:
                print('user deleted: ' + user)
        group = ini.getString('Group', application) if ini != None else application
        if deleteGroup and BasicStatics.groupExists(group):
            subprocess.call(['/usr/sbin/groupdel', group])
            if BasicStatics.groupExists(group):
                print('+++ cannot delete group ' + group)
            else:
                print('group deleted: ' + group)
        fn = '/usr/local/bin/' + application
        if os.path.exists(fn):
            os.unlink(fn)
            if os.path.exists(fn):
                print('+++ cannot remove ' + fn)
            else:
                print('removed: ' + fn)

    def usage(self, message):
        '''Display a simplified usage message.
        @param message: the error message
        '''
        self._logger.error(message)

    def writeExampleConfiguration(self):
        '''Writes an example configuration file.
        An existing configuration is never overwritten: in this case the name
        <application>.example.conf is used instead.
        '''
        configFile = self._configPath + os.sep + self._configFile
        if os.path.exists(configFile):
            configFile = self._configPath + os.sep + self._appInfo._applicationName + '.example.conf'
        if type(self._example) == str:
            example = self._example
            name = None
        else:
            # _example is a triple: [example content, second filename, second content]
            [example, name, content] = self._example
        base.StringUtils.toFile(configFile, example)
        print("example configuration written: " + configFile)
        if name != None:
            base.StringUtils.toFile(name, content)
            print("example configuration written: " + name)
+
def getGlobalOptions(argv, appInfo, additionalOptions=None):
    '''Evaluates the global options.
    @param argv: the program arguments
    @param appInfo: None or the application info
    @param additionalOptions: None or an array of allowed options, e.g. ['m', 'n']
    @return: a tuple (globalOptions, argv)
    '''
    def usage(message, options):
        '''Prints the usage message of the current application (unless -q is given).'''
        tool = BaseTool(options, options._appInfo._applicationName + '.conf')
        if len(argv) < 1 or argv[0] != '-q':
            print(tool._usage())
        else:
            options._verboseLevel = 0
        print('+++ ', message)

    argv = argv[1:]
    rc = GlobalOptions(appInfo)
    verboseLevel = 1
    if additionalOptions == None:
        additionalOptions = []
    while len(argv) > 0 and argv[0].startswith('-'):
        arg = argv[0]
        if arg.startswith('-l'):
            rc._logFiles.append(arg[2:])
        elif arg.startswith('--log='):
            rc._logFiles.append(arg[6:])
        elif arg.startswith('-c'):
            rc._configDir = arg[2:]
        elif arg.startswith('--configuration-directory='):
            rc._configDir = arg[26:]
        elif arg.startswith('-f'):
            # fix: '-f' is the short form of '--configuration-file=' and must
            # set the configuration file, not the configuration directory
            rc._configFile = arg[2:]
        elif arg.startswith('--configuration-file='):
            # fix: the option prefix is 21 characters long (the former [11:]
            # kept part of the option name in the value)
            rc._configFile = arg[21:]
        elif arg == '-0' or arg == '--exit-with-0':
            rc._exitWith0 = True
        elif arg == '-q' or arg == '--quiet':
            verboseLevel = 0
        elif arg == '-r' or arg == '--runtime':
            rc._runtime = True
        elif arg.startswith('--test-source-dir='):
            rc._testSourceDir = arg[18:]
        elif arg.startswith('--test-target-dir='):
            rc._testTargetDir = arg[18:]
        elif arg.startswith('-v') or arg.startswith('--verbose-level='):
            try:
                start = 2 if arg[1] == 'v' else 16
                verboseLevel = int(arg[start:])
            except ValueError:
                appInfo._usage('not an integer in ' + arg)
        else:
            usage('unknown global option ' + arg, rc)
        argv = argv[1:]
    if verboseLevel not in [0, 1, 2, 3, 4, 5]:
        usage('illegal verbose level: ' + str(verboseLevel), rc)
        verboseLevel = 0
    rc._verboseLevel = verboseLevel
    if len(rc._logFiles) == 0:
        rc._logFiles.append('/var/log/local/{}.log'.format(appInfo._applicationName))
    version = platform.python_version()
    if not version.startswith('3'):
        # fix: usage() requires the options as second argument
        usage('Wrong python version. We need version 3 and have ' + version, rc)
    return (rc, argv)
+
if __name__ == '__main__':
    # this module only provides classes and helper functions; nothing to do
    # when executed directly
    pass
--- /dev/null
+'''
+Created on 05.11.2018
+
+@author: hm
+'''
+import random
+import base64
+import math
+import time
+
class CryptoEngine:
    '''Implements a Pseudo Random Generator with the KISS algorithm.
    We want an algorithm which can be implemented in any programming language, e.g. in JavaScript or Java.
    JavaScript (at this moment) only contains floating point calculation.
    Java knows only signed integers or floating point numbers.
    Therefore we use IEEE 754 (64 bit floating point).
    '''

    def __init__(self, logger):
        '''Constructor.
        @param logger: the logger
        '''
        # counts the encode/decode calls: used to vary the padding replacement
        self._counter = 0
        # characters outside the base64 charset: used to mask trailing '=' padding
        self._base64Trailer = '!#$%&()*'
        self._uBoundBase64Tail = '*'
        # seed of the KISS generator, kept as IEEE 754 doubles for portability
        self._x = 372194.0
        # @cond _y != 0
        self._y = 339219.0
        # @cond z | c != 0
        self._z = 470811222.0
        self._c = 1.0
        self._logger = logger

    def bytesToString(self, aBytes):
        '''Converts a byte array into a string without encoding.
        @param aBytes: byte array to convert
        @return: the string
        @raise UnicodeDecodeError: aBytes contains non ASCII bytes
        '''
        # fix: formerly a dummy value was assigned before re-raising the
        # exception; now the exception simply propagates
        return aBytes.decode('ascii')

    def decode(self, string, charSet):
        '''Decodes a string encoded by encode().
        Format of the string: version salt encrypted
        '0' (version string)
        4 characters salt
        rest: the encrypted string
        @param string: string to decode
        @param charSet: the character set of the string and the result, e.g. 'word'
        @return: the decoded string (clear text)
        '''
        self._counter += 1
        aSet = self.getCharSet(charSet)
        aSize = len(aSet)
        rc = ''
        if string.startswith('0'):
            prefix = string[1:5]
            string = string[5:]
            aHash = self.hash(prefix)
            self.setSeed(aHash, 0x20111958, 0x4711, 1)
        length = len(string)
        for ix in range(length):
            ix3 = aSet.find(string[ix])
            ix2 = (aSize + ix3 - self.nextInt(aSize - 1)) % aSize
            rc += aSet[ix2]
        return rc

    def decodeBinary(self, string):
        '''Decodes a string encrypted by encodeBinary().
        @param string: string to decode
        @return: the decoded string (clear text)
        '''
        aSet = self.getCharSet('base64')
        aSize = len(aSet)
        rc = ''
        if string.startswith('0'):
            prefix = string[1:5]
            string = string[5:]
            aHash = self.hash(prefix)
            self.setSeed(aHash, 0x20111958, 0x4711, 1)
        aLen = len(string)
        buffer = ''
        # replace the trailing '=' "randomly" with a char outside the character set.
        # fix: str is immutable, so the replacement must build a new string
        # (the former item assignment raised a TypeError)
        if aLen > 0 and string[aLen - 1] == '=':
            string = string[0:aLen - 1] + self._base64Trailer[self._counter * 7 % len(self._base64Trailer)]
        if aLen > 1 and string[aLen - 2] == '=':
            string = string[0:aLen - 2] + self._base64Trailer[self._counter * 13 % len(self._base64Trailer)] + string[aLen - 1:]
        for ix in range(aLen):
            ix3 = aSet.find(string[ix])
            ix2 = (aSize + ix3 - self.nextInt(aSize - 1)) % aSize
            buffer += aSet[ix2]
        binBuffer = self.stringToBytes(buffer + '\n')
        try:
            binBuffer2 = base64.decodebytes(binBuffer)
        except Exception as exc:
            # best effort repair: strip up to two trailing characters on wrong padding
            if str(exc) == 'Incorrect padding':
                try:
                    binBuffer = binBuffer[0:-1]
                    binBuffer2 = base64.decodebytes(binBuffer)
                except Exception:
                    binBuffer = binBuffer[0:-1]
                    binBuffer2 = base64.decodebytes(binBuffer)
        # the plain text was terminated with '\n' before encryption: cut there
        ix = binBuffer2.find(b'\n')
        if ix >= 0:
            binBuffer2 = binBuffer2[0:ix]
        rc = self.bytesToString(binBuffer2)
        return rc

    def encode(self, string, charSet):
        '''Encodes a string with a randomly generated salt.
        Format of the string: version salt encoded
        '0' (version string)
        4 characters salt
        rest: the encoded string
        @param string: string to encode
        @param charSet: the character set of the string and the result, e.g. 'word'
        @return: the encrypted string
        '''
        self._counter += 1
        self.setSeedRandomly()
        rc = self.nextString(4, charSet)
        aSet = self.getCharSet(charSet)
        aSize = len(aSet)
        aHash = self.hash(rc)
        self.setSeed(aHash, 0x20111958, 0x4711, 1)
        length = len(string)
        for ix in range(length):
            ix3 = aSet.find(string[ix])
            ix2 = (ix3 + self.nextInt(aSize - 1)) % aSize
            rc += aSet[ix2]
        return '0' + rc

    def encodeBinary(self, string):
        '''Encrypts a string with a randomly generated salt.
        The string can be based on any char set. It will be base64 encoded before encryption.
        Format of the result: version salt encrypted
        '0' (version string)
        4 characters salt
        rest: the encrypted string
        @param string: the string or bytes to encrypt
        @return: the encoded string
        '''
        self.setSeedRandomly()
        if type(string) == str:
            string = self.stringToBytes(string)
        # convert it to an ascii usable string; the '\n' marks the end of the
        # clear text for decodeBinary()
        string += b'\n'
        buffer = base64.encodebytes(string)
        string = self.bytesToString(buffer).rstrip()
        rc = self.nextString(4, 'base64')
        aSet = self.getCharSet('base64')
        aSize = len(aSet)
        aHash = self.hash(rc)
        self.setSeed(aHash, 0x20111958, 0x4711, 1)
        length = len(string)
        for ix in range(length):
            ix3 = aSet.find(string[ix])
            ix2 = (ix3 + self.nextInt(aSize - 1)) % aSize
            rc += aSet[ix2]
        return '0' + rc

    def getCharSet(self, name):
        '''Returns a string with all characters of the charset given by name.
        @param name: the name of the charset
        @return: '': unknown charset (an error is logged)
            otherwise: the charset as string
        '''
        if name == 'dec':
            rc = '0123456789'
        elif name == 'hex':
            rc = '0123456789abcdef'
        elif name == 'upper':
            rc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
        elif name == 'lower':
            rc = 'abcdefghijklmnopqrstuvwxyz'
        elif name == 'alfa':
            rc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
        elif name == 'word':
            rc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_'
        elif name == 'ascii94':
            rc = r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~'''
        elif name == 'ascii95':
            rc = r''' !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~'''
        elif name == 'ascii':
            rc = r''' !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''' + chr(127)
        elif name == 'base64':
            rc = r'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
        else:
            self._logger.error('unknown character set: ' + name)
            rc = ''

        return rc

    def getCharSetNames(self):
        '''Returns the list of the known charset names.
        @return the list of the known charset names
        '''
        rc = [
            'dec',
            'hex',
            'upper',
            'lower',
            'alfa',
            'word',
            'ascii94',
            'ascii95',
            'ascii',
            'base64']
        return rc

    def hash(self, string):
        '''Converts a string into an integer.
        @param string: the string to convert
        @return: the hash value, 0 <= rc <= 0x7fffffff
        '''
        rc = len(string)
        count = rc
        for ix in range(count):
            rc = (rc * (ix + 1) + (ord(string[ix]) << (ix % 4 * 7))) & 0x7fffffff
        return rc

    def nextChar(self, charSet='ascii'):
        '''Returns a pseudo random character.
        @param charSet: the result is a character from this string
        @return: a pseudo random character
        '''
        aSet = self.getCharSet(charSet)
        # NOTE(review): nextInt(0, len-1) passes maxValue=0, minValue=len-1;
        # nextInt() swaps them, so the effective range is [0, len-2] - confirm
        ix = self.nextInt(0, len(aSet) - 1)
        rc = aSet[ix]
        return rc

    def nextInt(self, maxValue=0x7fffffff, minValue = 0):
        '''Returns a pseudo random 31 bit integer.
        @param maxValue: the maximal return value (inclusive)
        @param minValue: the minimal return value (inclusive)
        @return: a number from [minValue..maxValue]
        '''
        if maxValue == minValue:
            rc = minValue
        else:
            if minValue > maxValue:
                temp = maxValue
                maxValue = minValue
                minValue = temp
            rc = self.nextSeed()
            # NOTE(review): the modulo makes maxValue itself unreachable although the
            # docstring says inclusive - kept unchanged for compatibility with
            # already encoded data
            rc = rc % (maxValue - minValue) + minValue
        return rc

    def nextString(self, length, charSet):
        '''Returns a pseudo random string.
        @param length: the length of the result
        @param charSet: all characters of the result are from this string
        @return: a pseudo random string with the given charset and length
        '''
        aSet = self.getCharSet(charSet)
        aSize = len(aSet)
        rc = ''
        aRandom = None
        for ix in range(length):
            # one 32 bit seed feeds 4 characters (8 bits each)
            if ix % 4 == 0:
                aRandom = self.nextSeed()
            else:
                aRandom >>= 8
            rc += aSet[aRandom % aSize]
        return rc

    def nextSeed(self):
        '''Sets the next seed and returns a 32 bit random value.
        @return: a pseudo random number with 0 <= rc <= 0xffffffff
        '''
        # linear congruential generator (LCG):
        self._x = math.fmod(69069.0 * self._x + 473219.0, 4294967296)
        # Xorshift
        #self._y ^= int(self._y) << 13
        self._y = math.fmod(int(self._y) ^ int(self._y) << 13, 4294967296)
        #self._y ^= self._y >> 17
        self._y = math.fmod(int(self._y) ^ int(self._y) >> 17, 4294967296)
        #self._y ^= self._y << 5
        self._y = math.fmod(int(self._y) ^ int(self._y) << 5, 4294967296)
        # multiply with carry:
        t = 698769069.0 * self._z + self._c
        #self._c = math.fmod(t >> 32, 2)
        self._c = math.fmod(int(t) >> 32, 2)
        self._z = math.fmod(t, 4294967296)
        return int(math.fmod(self._x + self._y + self._z, 4294967296))

    def oneTimePad(self, user, data):
        '''Builds a one time pad.
        @param user: the user id
        @param data: None or additional data: allowed char set: word
        @return: '': data contains invalid characters
            otherwise: the encoded pad (char set: word)
        '''
        if data is not None and self.testCharSet(data, 'word') >= 0:
            rc = ''
        else:
            # fix: data may be None - treat it as an empty suffix
            # (the former concatenation str + None raised a TypeError)
            padData = '{:08x}{:04x}'.format(int(round(time.time())), user) + ('' if data is None else data)
            rc = self.encode(padData, 'word')
        return rc

    def restoreSeed(self, seed):
        '''Restores the seed from a string built by saveSeed().
        @param seed: the seed as string, e.g. '1.0:2.0:3.0:1.0'
        '''
        parts = seed.split(':')
        self.setSeed(float(parts[0]), float(parts[1]), float(parts[2]), float(parts[3]))

    def saveSeed(self):
        '''Returns the current seed as string.
        @return the seed as string, restorable with restoreSeed()
        '''
        rc = '{}:{}:{}:{}'.format(repr(self._x), repr(self._y), repr(self._z), repr(self._c))
        return rc

    def setSeed(self, x, y, z, c):
        '''Sets the parameter of the KISS algorithm.
        @param x: seed of the linear congruential generator
        @param y: seed of the Xorshift generator (must not be 0: replaced by a default)
        @param z: seed of the multiply-with-carry generator
        @param c: carry of the multiply-with-carry generator (z and c must not both be 0)
        '''
        self._x = math.fmod(x, 4294967296)
        self._y = 1234321.0 if y == 0 else math.fmod(y, 4294967296)
        if z == 0 and c == 0:
            c = 1.0
        self._c = math.fmod(c, 2)
        self._z = math.fmod(z, 4294967296)

    def setSeedFromString(self, seedString):
        '''Converts a string, e.g. a password, into a seed.
        @param seedString: the string value to convert
        '''
        if seedString == '':
            seedString = 'Big-Brother2.0IsWatching!You'
        while len(seedString) < 8:
            seedString += seedString
        x = self.hash(seedString[0:len(seedString) - 3])
        y = self.hash(seedString[1:8])
        z = self.hash(seedString[3:5])
        c = self.hash(seedString[1:])
        self.setSeed(x, y, z, c)

    def setSeedRandomly(self):
        '''Brings "true" random to the seed.
        '''
        utime = time.time()
        rand1 = int(math.fmod(1000*1000*utime, 1000000000.0))
        rand2 = int(math.fmod(utime*1000, 1000000000.0))
        self.setSeed(rand1, rand2, int(random.random() * 0x7fffffff), 1)

    def stringToBytes(self, string):
        '''Converts a string into a byte array without encoding.
        @param string: string to convert
        @return a bytes array
        '''
        rc = string.encode('ascii')
        return rc

    def testCharSet(self, string, charSet):
        '''Tests whether all chars of a string belong to a given charSet.
        @param string: string to test
        @param charSet: the char set to test
        @return: -1: success
            otherwise: the index of the first invalid char
        '''
        aSet = self.getCharSet(charSet)
        rc = -1
        for ix in range(len(string)):
            if aSet.find(string[ix]) < 0:
                rc = ix
                break
        return rc

    def unpackOneTimePad(self, pad, maxDiff = 60):
        '''Decodes a one time pad.
        @param pad: the encoded one time pad
        @param maxDiff: maximal difference (in seconds) between time of the pad and now
        @return: None: invalid pad
            otherwise: a tuple (time, user, data)
        '''
        padData = self.decode(pad, 'word')
        length = len(padData)
        if length < 12 or self.testCharSet(padData[0:12], 'hex') >= 0 or self.testCharSet(padData[12:], 'word') >= 0:
            rc = None
        else:
            padTime = int(padData[0:8], 16)
            now = time.time()
            if abs(now - padTime) >= maxDiff:
                rc = None
            else:
                user = int(padData[8:12], 16)
                data = None if len(padData) == 12 else padData[12:]
                rc = (padTime, user, data)
        return rc
+
# Module guard: this file is a library - nothing to execute when run directly.
if __name__ == '__main__':
    pass
--- /dev/null
+'''
+Created on 25.04.2018
+
+@author: hm
+'''
+
+import os
+import stat
+import datetime
+import time
+import shutil
+import re
+import tarfile
+import zipfile
+import tempfile
+
+import base.StringUtils
+import base.LinuxUtils
+
# precompiled pattern matching any shell style wildcard character: '*', '?', '[' or ']'
fileHelperRegExprWildcards = re.compile(r'[*?\[\]]')
+
class DirInfo:
    '''Stores the directory info collected by directoryInfo().
    '''
    def __init__(self, maxYoungest=5, maxLargest=5, maxOldest=5, maxSmallest=5, minSize=1, dirsOnly=False, filesOnly=False, trace=0):
        '''Constructor.
        @param maxYoungest: the maximal number of entries in self._youngest
        @param maxLargest: the maximal number of entries in self._largest
        @param maxOldest: the maximal number of entries in self._oldest
        @param maxSmallest: the maximal number of entries in self._smallest
        @param minSize: the minimum size of the entries in self._smallest
        @param dirsOnly: True: only directories will be processed
        @param filesOnly: True: only files (not dirs) will be processed
        @param trace: if > 0: after processing this amount of nodes a statistic is logged
        '''
        self._fileCount = 0
        self._fileSizes = 0
        self._dirCount = 0
        self._dirPattern = None
        self._filePattern = None
        self._ignoredDirs = 0
        self._ignoredFiles = 0
        # entries are strings '<sortkey>:<full path>'
        self._youngest = []
        self._largest = []
        self._smallest = []
        self._oldest = []
        self._maxYoungest = maxYoungest
        self._maxLargest = maxLargest
        # fix: formerly maxOldest was assigned to self._maxLargest (copy/paste error),
        # leaving self._maxOldest unset and clobbering self._maxLargest
        self._maxOldest = maxOldest
        self._maxSmallest = maxSmallest
        self._minSize = minSize
        self._timeYoungest = 0
        self._timeOldest = 0
        self._sizeLargest = 0
        # fix: was never initialized (only set lazily in directoryInfo())
        self._sizeSmallest = 0
        self._dirsOnly = dirsOnly
        self._filesOnly = filesOnly
        self._trace = trace
        self._nextTracePoint = trace
+
def _error(message, logger = None):
    '''Reports an error message, either to the logger or to stdout.
    @param message: the error message
    @param logger: None (print with '+++ ' prefix) or the logger
    '''
    if logger is None:
        print('+++ ' + message)
    else:
        logger.error(message)
+
def _log(message, logger = None):
    '''Reports a message, either to the logger or to stdout.
    @param message: the message to report
    @param logger: None (print to stdout) or the logger
    '''
    if logger is None:
        print(message)
    else:
        logger.log(message)
+
def clearDirectory(path, logger=None):
    '''Deletes (recursively) all files and subdirectories of a given path.
    Note: a missing or non-directory path is silently ignored.
    @param path: the directory to clear
    @param logger: None or an error reporter (errors are reported only if given)
    '''
    if not os.path.exists(path):
        return
    for entry in os.listdir(path):
        fullName = path + os.sep + entry
        if os.path.isdir(fullName):
            shutil.rmtree(fullName, True)
        else:
            os.unlink(fullName)
        # report a failed removal, but only when a logger was supplied
        if logger is not None and os.path.exists(fullName):
            _error('cannot remove: ' + fullName, logger)
+
def copyDirectory(source, target, option = None, logger=None, verboseLevel = 0):
    '''Copies all files (and dirs) from source to target directory.
    @param source: the base source directory
    @param target: the base target directory
    @param option: None, 'clear' or 'update'
        'clear': all files (and subdirs) of target will be deleted before copying
        'update': only younger or not existing files will be copied
        None: all files will be copied
    @param logger: None or the logger
    @param verboseLevel: >= 2: each copied node is logged
    '''
    if option == 'clear':
        if verboseLevel >= 2:
            _log('clearing ' + target, logger)
        clearDirectory(target, logger)
    for node in os.listdir(source):
        src = source + os.sep + node
        trg = target + os.sep + node
        if os.path.islink(src):
            if not option == 'update' or not os.path.exists(trg):
                ref = os.readlink(src)
                if verboseLevel >= 2:
                    # fix: the logger was not forwarded
                    _log('symlink: {} [{}]'.format(trg, ref), logger)
                try:
                    os.symlink(ref, trg)
                except OSError:
                    _error('cannot create a symlink: {} -> {}'.format(ref, trg), logger)
        elif os.path.isdir(src):
            if option != 'update' or not os.path.exists(trg):
                if verboseLevel >= 2:
                    # fix: the logger was not forwarded
                    _log('directory: {} -> {}'.format(src, trg), logger)
                shutil.copytree(src, trg, True)
            else:
                # fix: the verbose level was lost in the recursion
                copyDirectory(src, trg, option, logger, verboseLevel)
        else:
            if not os.path.exists(trg) or option == 'update' and os.path.getmtime(src) > os.path.getmtime(trg):
                try:
                    if verboseLevel >= 2:
                        # fix: the logger was not forwarded
                        _log('{} -> {}'.format(src, trg), logger)
                    shutil.copy2(src, trg)
                except OSError as exc:
                    # fix: the logger argument was missing: errors went to stdout only
                    _error('cannot copy {}: {}'.format(trg, str(exc)), logger)
+
def copyIfExists(source, target, option = None, logger=None):
    '''Copies a file to a target, but only when the source exists.
    @param source: the source file
    @param target: the target file or directory
    @param option: unused, kept for signature compatibility
    @param logger: None or the logger (the copy action is logged with level 2)
    '''
    if not os.path.exists(source):
        return
    if logger is not None:
        logger.log('copying {} => {} ...'.format(source, target), 2)
    shutil.copy2(source, target)
+
def directoryInfo(path, filePattern = None, dirPattern = None, maxDepth = -1, fileInfo = None,
        maxYoungest=5, maxLargest=5, maxOldest=5, maxSmallest=5, minSize=1, dirsOnly=False,
        filesOnly=False, trace=0):
    '''Returns the directory info of the given path.
    @param path: the full path of the directory to inspect
    @param filePattern: None or a regular expression (as text): only matching file names are inspected
    @param dirPattern: None or a regular expression (as text) for directory names
        NOTE(review): the recursion enters a directory only when this pattern does NOT
        match, i.e. it works as an exclusion pattern - confirm against callers
    @param maxDepth: maximal depth of recursion. < 0: unlimited 0: only the start directory
    @param fileInfo: None or a DirInfo instance which will be completed
    @param maxYoungest: the maximal number of entries in DirInfo._youngest[]
    @param maxLargest: the maximal number of entries in DirInfo._largest[]
    @param maxOldest: the maximal number of entries in DirInfo._oldest[]
    @param maxSmallest: the maximal number of entries in DirInfo._smallest[]
    @param minSize: the minimum size of the entries in DirInfo._smallest[]
    @param dirsOnly: only directories will be part of the result
    @param filesOnly: only files (not directories) will be part of the result
    @param trace: if > 0: a statistic is printed if this amount of nodes (files or dirs) is processed
    @return: a DirInfo instance
    '''
    def infoOneDir(path, depth, fileInfo):
        # processes one directory level and recurses into subdirectories
        def showStatistic(info):
            print('{}: dirs: {} files: {} ignored dirs: {} ignored files: {}'.format(path, info._dirCount, info._fileCount, info._ignoredDirs, info._ignoredFiles))
            info._nextTracePoint += info._trace
        if not isinstance(fileInfo, DirInfo):
            depth = 0
        fileInfo._dirCount += 1
        if fileInfo._trace > 0 and fileInfo._dirCount + fileInfo._fileCount + fileInfo._ignoredDirs + fileInfo._ignoredFiles > fileInfo._nextTracePoint:
            showStatistic(fileInfo)
        try:
            nodes = os.listdir(path)
        except PermissionError:
            fileInfo._ignoredDirs += 1
            return
        # fix: the modulo must be applied to the sum, not only to _ignoredFiles
        # (operator precedence: '%' binds stronger than '+')
        if fileInfo._trace > 0 and len(nodes) == 0 and (fileInfo._dirCount + fileInfo._fileCount + fileInfo._ignoredDirs + fileInfo._ignoredFiles) % fileInfo._trace == 0:
            showStatistic(fileInfo)
        for node in nodes:
            if fileInfo._trace > 0 and fileInfo._dirCount + fileInfo._fileCount + fileInfo._ignoredDirs + fileInfo._ignoredFiles > fileInfo._nextTracePoint:
                showStatistic(fileInfo)
            full = path + os.sep + node
            stats = os.lstat(full)
            isDir = stat.S_ISDIR(stats.st_mode)
            if isDir:
                if not fileInfo._filesOnly:
                    length = len(fileInfo._youngest)
                    if fileInfo._maxYoungest > 0 and (length < fileInfo._maxYoungest or stats.st_mtime > fileInfo._timeYoungest):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxYoungest:
                                del fileInfo._youngest[0]
                            fileInfo._youngest.append(str(stats.st_mtime) + ':' + path + os.sep + node)
                            fileInfo._youngest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._timeYoungest = float(fileInfo._youngest[0].split(':')[0])
                    length = len(fileInfo._oldest)
                    if fileInfo._maxOldest > 0 and (length < fileInfo._maxOldest or stats.st_mtime < fileInfo._timeOldest):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxOldest:
                                del fileInfo._oldest[-1]
                            fileInfo._oldest.insert(0, str(stats.st_mtime) + ':' + path + os.sep + node)
                            fileInfo._oldest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._timeOldest = float(fileInfo._oldest[-1].split(':')[0])
                if (fileInfo._dirPattern == None or fileInfo._dirPattern.match(node) == None) and (maxDepth == None or maxDepth < 0 or depth < maxDepth):
                    infoOneDir(path + os.sep + node, depth + 1, fileInfo)
                else:
                    fileInfo._ignoredDirs += 1
            else: # not isDir
                if fileInfo._dirsOnly:
                    fileInfo._ignoredFiles += 1
                    continue
                if (fileInfo._filePattern == None or fileInfo._filePattern.match(node) != None):
                    fileInfo._fileSizes += stats.st_size
                    fileInfo._fileCount += 1
                    length = len(fileInfo._largest)
                    if fileInfo._maxLargest > 0 and (length < fileInfo._maxLargest or stats.st_size > fileInfo._sizeLargest):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxLargest:
                                del fileInfo._largest[0]
                            fileInfo._largest.append(str(stats.st_size) + ':' + path + os.sep + node)
                            fileInfo._largest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._sizeLargest = float(fileInfo._largest[-1].split(':')[0])
                    length = len(fileInfo._smallest)
                    if fileInfo._maxSmallest > 0 and (stats.st_size >= fileInfo._minSize and (length < fileInfo._maxSmallest or stats.st_size > fileInfo._sizeSmallest)):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxSmallest:
                                del fileInfo._smallest[-1]
                            fileInfo._smallest.insert(0, str(stats.st_size) + ':' + path + os.sep + node)
                            fileInfo._smallest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._sizeSmallest = float(fileInfo._smallest[-1].split(':')[0])
                    length = len(fileInfo._youngest)
                    if fileInfo._maxYoungest > 0 and (length < fileInfo._maxYoungest or stats.st_mtime > fileInfo._timeYoungest):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxYoungest:
                                del fileInfo._youngest[0]
                            fileInfo._youngest.append(str(stats.st_mtime) + ':' + path + os.sep + node)
                            fileInfo._youngest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._timeYoungest = float(fileInfo._youngest[0].split(':')[0])
                    length = len(fileInfo._oldest)
                    if fileInfo._maxOldest > 0 and (length < fileInfo._maxOldest or stats.st_mtime < fileInfo._timeOldest):
                        if not base.LinuxUtils.isReadable(stats, euid, egid):
                            fileInfo._ignoredFiles += 1
                        else:
                            if length >= fileInfo._maxOldest:
                                del fileInfo._oldest[-1]
                            fileInfo._oldest.insert(0, str(stats.st_mtime) + ':' + path + os.sep + node)
                            fileInfo._oldest.sort(key=lambda x: float(x.split(':')[0]))
                            fileInfo._timeOldest = float(fileInfo._oldest[-1].split(':')[0])
                else:
                    fileInfo._ignoredFiles += 1
                    continue
    # end of infoOneDir()
    if fileInfo == None:
        fileInfo = DirInfo(maxYoungest, maxLargest, maxOldest, maxSmallest, minSize, dirsOnly, filesOnly, trace)
    if filePattern != None:
        fileInfo._filePattern = base.StringUtils.regExprCompile(filePattern, 'file pattern')
    if dirPattern != None:
        # fix: the compiled dirPattern was assigned to _filePattern (clobbering the
        # file pattern and leaving _dirPattern always None)
        fileInfo._dirPattern = base.StringUtils.regExprCompile(dirPattern, 'dir pattern')

    euid = os.geteuid()
    egid = os.getegid()
    fileInfo._maxYoungest = maxYoungest
    fileInfo._maxLargest = maxLargest
    fileInfo._maxOldest = maxOldest
    fileInfo._maxDepth = maxDepth
    infoOneDir(path, 0, fileInfo)
    return fileInfo
+
def distinctPaths(path1, path2):
    '''Tests whether two paths are not part of each other.
    @param path1: first path to test
    @param path2: 2nd path to test
    @return: True: path1 is not parent of path2 and path2 is not parent of path1
    '''
    dir1 = os.path.realpath(path1)
    dir2 = os.path.realpath(path2)
    if dir1 == dir2:
        return False
    # fix: compare whole path components, not raw string prefixes:
    # '/a/bc' is NOT inside '/a/b' although it starts with that string
    return not dir1.startswith(dir2 + os.sep) and not dir2.startswith(dir1 + os.sep)
+
def ensureDirectory(directory, logger = None, mode=0o777):
    '''Ensures that the given directory exists.
    @param directory: the complete name
    @param logger: None or the logger
    @param mode: the access rights of a newly created directory
    @return: None: could not create the directory
        otherwise: the directory's name
    '''
    if not os.path.isdir(directory):
        try:
            # a non-directory entry with this name blocks the creation: remove it
            os.lstat(directory)
            os.unlink(directory)
        except FileNotFoundError:
            pass
        _log('creating {}{} ...'.format(directory, os.sep), logger)
        try:
            os.makedirs(directory, mode)
        except OSError as exc:
            # fix: the logger was accidentally passed into format() instead of _error()
            _error('cannot create dir {}: {}'.format(directory, str(exc)), logger)
    if not os.path.isdir(directory):
        directory = None
    return directory
+
def ensureFileDoesNotExist(filename, logger = None):
    '''Ensures that a file does not exist.
    @param filename: the file (or directory) to delete if it exists
    @param logger: None or the logger
    '''
    try:
        # raises FileNotFoundError if nothing has to be done
        # (fix: the former unused 'info' binding was removed)
        os.lstat(filename)
        try:
            if os.path.isdir(filename):
                _log('removing {}{} ...'.format(filename, os.sep), logger)
                shutil.rmtree(filename, False)
            else:
                _log('removing {} ...'.format(filename), logger)
                os.unlink(filename)
        except OSError as exp:
            _error('cannot delete {:s}: {:s}'.format(filename, str(exp)), logger)
    except FileNotFoundError:
        pass
+
def ensureFileExists(filename, content = '', logger = None):
    '''Ensures that a file exists: if it is missing it is created with the given content.
    @param filename: the file to create if it does not exist
    @param content: this text will be stored for a newly created file
    @param logger: None or the logger
    '''
    try:
        if os.path.exists(filename):
            # an existing directory cannot be replaced by a file: only report it
            if os.path.isdir(filename):
                _log('is a directory: {}'.format(filename), logger)
        else:
            _log('creating {} ...'.format(filename), logger)
            base.StringUtils.toFile(filename, content)
    except OSError as exc:
        # fix: the logger was not forwarded; errors went to stdout only
        _error('problems with {}: {}'.format(filename, str(exc)), logger)
+
def ensureSymbolicLink(source, target, createTarget=True, logger = None):
    '''Ensures that a symbolic link exists.
    @param source: the full name of the link source, e.g. '../sibling'
    @param target: full name of the file of type 'link'
    @param createTarget: creates the target's parent directory if it does not exist
    @param logger: None or the logger
    @return: True: the link exists
    '''
    info = None
    try:
        info = os.lstat(target)
    except FileNotFoundError:
        pass
    if info != None:
        if os.path.islink(target):
            oldLink = os.readlink(target)
            # an existing link with a different source is replaced
            if oldLink != source:
                _log('changing link from {} to {}'.format(oldLink, source), logger)
                os.unlink(target)
        elif os.path.isdir(target):
            _error('target {} is already a directory (not a link)'.format(target), logger)
        else:
            _log('removing the non link file ' + target, logger)
            os.unlink(target)
    if not os.path.exists(target):
        baseDir = os.path.dirname(target)
        if not os.path.isdir(baseDir):
            if createTarget:
                ensureDirectory(baseDir, logger)
        hasParent = os.path.isdir(baseDir)
        if not hasParent:
            _error('parent of target is not a directory: ' + baseDir, logger)
        # fix: 'base.FileHelper' is never imported here (this module IS FileHelper):
        # call the module local joinPaths() directly
        absSource = joinPaths(source, target, logger) if source.startswith('..') else source
        if not os.path.exists(absSource):
            _error('missing source {} [= {}]'.format(source, absSource), logger)
        elif hasParent:
            _log('creating symbol link {} -> {}'.format(source, target), logger)
            os.symlink(source, target)
    rc = os.path.islink(target) and os.readlink(target) == source
    return rc
+
def fileClass(path):
    '''Returns the file class of the file.
    @param path: the full filename
    @return: a tuple (class, subclass): class: 'container', 'text', 'binary', 'unknown'
        subclass of 'container': 'dir', 'tar', 'tgz', 'zip'
        subclass of 'text': 'xml', 'shell'
    '''
    def isBinaryByte(bb):
        # control characters except TAB..CR (0x09..0x0d) count as binary
        rc = bb < 0x09 or bb > 0x0d and bb < 0x20
        return rc
    def isBinary(byteArray):
        found = 0
        rc = False
        for ix in range(len(byteArray)):
            bb = byteArray[ix]
            # fix: indexing bytes yields an int in Python 3: the former comparison
            # with b'\x00' (a bytes object) was always False
            if bb == 0:
                rc = True
                break
            elif isBinaryByte(bb):
                found += 1
                if found > 100 or found > len(byteArray) / 10:
                    rc = True
                    break
        return rc
    def isNullString(byteArray):
        '''Tests whether the byteArray is a text delimited with 0.
        @param byteArray: array to test
        @return True: only text and '\0' is part of byteArray
        '''
        ix = 0
        rc = True
        hasNull = False
        while ix < len(byteArray):
            if byteArray[ix] == 0:
                hasNull = True
            elif isBinaryByte(byteArray[ix]):
                rc = False
                break
            ix += 1
        return rc and hasNull
    def isNullNumber(byteArray):
        '''Tests whether the byteArray are digits delimited with 0.
        @param byteArray: array to test
        @return True: only decimal digits and '\0' is part of byteArray
        '''
        ix = 0
        rc = True
        hasNull = False
        while ix < len(byteArray):
            if byteArray[ix] == 0:
                hasNull = True
            elif not (byteArray[ix] >= 0x30 and byteArray[ix] <= 0x39):
                rc = False
                break
            ix += 1
        return rc and hasNull
    if os.path.isdir(path):
        (theClass, subClass) = ('container', 'dir')
    else:
        with open(path, 'rb') as fp:
            start = fp.read(4096)
            # NOTE(review): gzip and bzip2 magic are both classified as 'tar'
            # (presumably meaning a compressed tar archive) - confirm
            if start.startswith(b'\x1f\x8b\x08'):
                (theClass, subClass) = ('container', 'tar')
            elif start.startswith(b'BZ') and isBinary(start[8:80]):
                (theClass, subClass) = ('container', 'tar')
            elif start.startswith(b'PK') and isBinary(start[2:32]):
                (theClass, subClass) = ('container', 'zip')
            elif isNullString(start[0:100]) and isNullNumber(start[100:0x98]):
                # tar header: null padded name followed by octal numbers
                (theClass, subClass) = ('container', 'tar')
            elif (start[0:100].lower().find(b'<xml>') >= 0 or start[0:100].lower().find(b'<html') >= 0) and not isBinary(start):
                (theClass, subClass) = ('text', 'xml')
            elif len(start) > 5 and start.startswith(b'#!') and not isBinary(start):
                (theClass, subClass) = ('text', 'shell')
            elif isBinary(start):
                (theClass, subClass) = ('binary', 'binary')
            else:
                (theClass, subClass) = ('text', 'text')
    return (theClass, subClass)
+
def fileType(path):
    '''Determines the type of a filesystem node.
    @param path: the full filename
    @return: 'link', 'dir' or 'file'
    '''
    if os.path.islink(path):
        return 'link'
    return 'dir' if os.path.isdir(path) else 'file'
+
def fromBytes(line):
    '''Converts a line of type bytes into type str.
    Tries UTF-8 first, then Latin-1, finally ASCII with errors ignored.
    @param line: line to convert
    @return: the decoded string
    '''
    for encoding in ('utf-8', 'latin-1'):
        try:
            return line.decode(encoding)
        except UnicodeDecodeError:
            pass
    return line.decode('ascii', 'ignore')
+
def hasWildcards(filename):
    '''Tests whether a filename contains shell style wildcards.
    @param filename: filename to test
    @return: True: the filename contains at least one of '*', '?', '[' or ']'
    '''
    global fileHelperRegExprWildcards
    return fileHelperRegExprWildcards.search(filename) is not None
+
def joinPaths(relPath, start = None, logger = None):
    '''Joins a relative path and a start path to a non relative path.
    Example: joinPaths('../brother', '/parent/sister') is '/parent/brother'
    @param relPath: the relative path, e.g. '../sister'
    @param start: the start point for joining, e.g. 'family/sister'. If None: the current directory
    @param logger: for error logging
    @returns None on error, otherwise the non relative path, e.g. 'family/brother'
    '''
    if start is None:
        start = os.curdir
    relParts = relPath.split(os.sep)
    baseParts = start.split(os.sep)
    # the path must begin with at least one backstep
    if len(relParts) == 0 or relParts[0] != '..':
        if logger is not None:
            logger.error('not a relative path: ' + relPath)
        return None
    # consume one start component per leading '..'
    while len(relParts) > 0 and relParts[0] == '..':
        if len(baseParts) == 0:
            if logger is not None:
                logger.error('too many backsteps in relpath {} for start {}'.format(relPath, start))
            return None
        relParts.pop(0)
        baseParts.pop()
    result = os.sep.join(baseParts)
    if len(relParts) > 0:
        result = os.sep.join(relParts) if result == '' else result + os.sep + os.sep.join(relParts)
    return result
+
+def listFile(statInfo, full, orderDateSize = True, humanReadable = True):
+ '''Builds the info for one file (or directory)
+ @param statInfo: the info returned by os.(l)stat()
+ @param full: the filename
+ @param orderDateSize: True: order is date left of size False: order is size leftof date
+ @param humanReadable: True: better for reading (matching unit), e.g. "10.7 GByte" or "3 kByte"
+ '''
+ if stat.S_ISDIR(statInfo.st_mode):
+ size = '<dir>'
+ elif humanReadable:
+ size = "{:>8s}".format(base.StringUtils.formatSize(statInfo.st_size))
+ else:
+ size = '{:13.6f} MB'.format(statInfo.st_size / 1000000)
+ fdate = datetime.datetime.fromtimestamp(statInfo.st_mtime)
+ dateString = fdate.strftime("%Y.%m.%d %H:%M:%S")
+ if orderDateSize:
+ rc = '{:s} {:>12s} {:s}'.format(dateString, size, full)
+ else:
+ rc = '{:>12s} {:s} {:s}'.format(size, dateString, full)
+ return rc
+
def pathToNode(path):
    '''Converts a path into a name usable as a filename node.
    @param path: the path to convert
    @return: the path with path separators and ':' replaced by '_'
    '''
    return path.replace(os.sep, '_').replace(':', '_')
+
def splitFilename(full):
    '''Splits a filename into its parts.
    @param full: the filename with path
    @return: a dictionary with the keys 'full', 'path', 'node', 'fn', 'ext'
        example: { 'full': '/home/jonny.txt', 'path': '/home/', 'node' : 'jonny.txt', 'fn': 'jonny' , 'ext': '.txt' }
    '''
    parts = { 'full': full }
    sepIx = full.rfind(os.sep)
    if sepIx < 0:
        parts['path'] = ''
        node = full
    else:
        parts['path'] = full[0:sepIx + 1]
        node = full[sepIx + 1:]
    parts['node'] = node
    # start the search at 1 so a leading dot ('.hidden') is not an extension
    dotIx = node.rfind('.', 1)
    if dotIx < 0:
        parts['fn'], parts['ext'] = node, ''
    else:
        parts['fn'], parts['ext'] = node[0:dotIx], node[dotIx:]
    return parts
+
def setModified(path, timeUnix, date = None):
    '''Sets the file modification time.
    @precondition: exactly one of date and timeUnix must be None and the other not None
    @param path: the full path of the file to modify
    @param timeUnix: None or the time to set (unix timestamp since 1.1.1970)
    @param date: None or the datetime to set (datetime.datetime instance)
    @return: True: success False: precondition violated
    @raise OSError: setting the time failed
    '''
    if date != None:
        dateModified = time.mktime(date.timetuple())
    elif timeUnix == None:
        return False
    else:
        dateModified = timeUnix
    # fix: the former try block only stored str(exc) in a dead local and
    # re-raised: the exception now simply propagates
    os.utime(path, (dateModified, dateModified))
    return True
+
def tail(filename, maxLines = 1, withLineNumbers = False):
    '''Returns the last lines of a text file (like the "tail" command).
    @param filename: the file to inspect
    @param maxLines: the maximum count of returned lines (values < 1 are treated as 1)
    @param withLineNumbers: True: each returned line is prefixed with
        '<lineNo>: ' (1 based, counted from the start of the file)
    @return: a list of the last lines, each still ending with its newline
    '''
    import collections
    if maxLines < 1:
        maxLines = 1
    # a deque with maxlen keeps exactly the last maxLines entries (O(1) per line):
    lastLines = collections.deque(maxlen=maxLines)
    lineCount = 0
    with open(filename, "r") as fp:
        for line in fp:
            lineCount += 1
            lastLines.append(line)
    rc = list(lastLines)
    if withLineNumbers:
        # line number (1 based) of the first returned line:
        firstNo = lineCount - len(rc) + 1
        for ix in range(len(rc)):
            rc[ix] = '{}: {}'.format(firstNo + ix, rc[ix])
    return rc
+
def tempFile(node, subDir = None, logger = None):
    '''Returns the name of a file located in the temporary directory.
    @param node: the filename without path
    @param subDir: None or a subdirectory of the temp directory (created if missing)
    @param logger: error logger (currently unused)
    @return: the full path of the file in the temporary directory
    '''
    directory = tempfile.gettempdir()
    if subDir is not None:
        directory += os.sep + subDir
        os.makedirs(directory, 0o777, True)
    return directory + os.sep + node
+
def unpack(archive, target, logger = None, clear = False):
    '''Copies the content of an archive (tar, zip...) into a given directory.
    @param archive: name of the archive, the extension defines the type:
        '.tgz': tar (gzip compressed) '.zip': zip
    @param target: the directory which will be filled with the archive content.
        Will be created if needed
    @param logger: None or logger
    @param clear: True: the target directory is emptied before unpacking
    '''
    if not os.path.exists(target):
        os.makedirs(target, 0o777, True)
    elif not os.path.isdir(target):
        # fixed: the logger argument was missing in this call
        _error('target is not a directory: ' + target, logger)
        archive = None
    elif clear:
        clearDirectory(target, logger)
    if archive is None:
        pass
    elif archive.endswith('.tgz'):
        # fixed: context manager closes the archive even on errors
        with tarfile.open(archive, 'r:gz') as tar:
            tar.extractall(target)
    elif archive.endswith('.zip'):
        # fixed: context manager + renamed variable (former name shadowed builtin zip)
        with zipfile.ZipFile(archive, 'r') as zipFile:
            zipFile.extractall(target)
    else:
        _error('unknown file extend: ' + archive, logger)
+
if __name__ == '__main__':
    # simple self test: summarize /etc and show the end of /etc/fstab
    source = '/etc'
    stats = directoryInfo(source)
    print('{}: file(s): {} / {:.3f} MB dir(s): {} ignored (files/dirs): {} / {}'.format(
        source, stats._fileCount, stats._fileSizes / 1024 / 1024.0,
        stats._dirCount, stats._ignoredFiles, stats._ignoredDirs))
    lastLines = tail('/etc/fstab', 5, True)
    print('{}:\n{}'.format('/etc/fstab', ''.join(lastLines)))
--- /dev/null
+'''
+Created on 18.07.2018
+
+@author: hm
+'''
+import re
+import os.path
+
+
class JavaConfig(object):
    '''
    Handles a java style configuration file.
    Format:
    <variable> = <value>
    # comment
    '''

    def __init__(self, filename, logger, ignoreIniHeader = False):
        '''
        Constructor.
        @param filename: the filename with path
        @param logger: the logger (used for error messages)
        @param ignoreIniHeader: True: lines like '[<section>]' will be ignored
        '''
        self._ignoreIniHeader = ignoreIniHeader
        self._vars = dict()
        self._logger = logger
        self.readConfig(filename)

    def readConfig(self, filename):
        '''Reads the configuration file into the internal variable dictionary.
        Syntax errors are reported via the logger, they do not stop reading.
        @param filename: the configuration file to read
        '''
        self._filename = filename
        self._vars = dict()
        # fixed: raw string avoids invalid escape sequences in the pattern
        regExpr = re.compile(r'([\w.]+)\s*=\s*(.*)$')
        if not os.path.exists(filename):
            self._logger.error('missing ' + filename)
        else:
            with open(filename, "r") as fp:
                lineNo = 0
                for line in fp:
                    lineNo += 1
                    line = line.strip()
                    if line.startswith('#') or line == '':
                        continue
                    matcher = regExpr.match(line)
                    if matcher is not None:
                        self._vars[matcher.group(1)] = matcher.group(2)
                    elif self._ignoreIniHeader and line.startswith('['):
                        continue
                    else:
                        self._logger.error('{:s} line {:d}: unexpected syntax [expected: <var>=<value>]: {:s}'.format(filename, lineNo, line))

    def getString(self, variable, defaultValue=None):
        '''Returns the value of a given variable.
        @param variable: name of the variable
        @param defaultValue: if the variable does not exist this value is returned
        @return: defaultValue: variable not found otherwise: the value
        '''
        return self._vars[variable] if variable in self._vars else defaultValue

    def getInt(self, variable, defaultValue=None):
        '''Returns the value of a given variable as integer.
        @param variable: name of the variable
        @param defaultValue: if the variable does not exist this value is returned
        @return: defaultValue: variable not found or not an integer
            otherwise: the int value
        '''
        rc = defaultValue
        if variable in self._vars:
            value = self._vars[variable]
            try:
                rc = int(value)
            except ValueError:
                self._logger.error("{}: variable {} is not an integer: {}".format(self._filename, variable, value))
                rc = defaultValue
        return rc

    def getKeys(self, regExpr = None):
        r'''Returns an array of (filtered) keys.
        @param regExpr: None or a regular expression to filter keys. regExpr can be an object or a text
            example: re.compile(r'^\s*pattern.\d+$', re.I)
        @return: the array of sorted keys matching the regExpr
        '''
        if isinstance(regExpr, str):
            regExpr = re.compile(regExpr)
        rc = [key for key in self._vars.keys() if regExpr is None or regExpr.search(key)]
        rc.sort()
        return rc
--- /dev/null
+'''
+Created on 25.04.2018
+
+@author: hm
+'''
+
+import os
+import base.StringUtils
+import subprocess
+import re
+import stat
+
def diskFree(verboseLevel = 0, logger = None):
    '''Collects usage info about the mounted filesystems.
    Pseudo filesystems (proc, sysfs, tmpfs...) are ignored.
    @param verboseLevel: > 3: debug info is written via the logger
    @param logger: None or the logger for debug messages
    @return: a list of info entries:
        entry: [mountPath, totalBytes, freeBytes, availableBytes]
    '''
    if logger is not None and verboseLevel > 3:
        logger.log('taskFileSystem()...')

    rc = []
    ignoredDevs = ['udev', 'devpts', 'tmpfs', 'securityfs', 'pstore', 'cgroup', 'tracefs', 'mqueue', 'hugetlbfs', 'debugfs']
    with open('/proc/mounts', 'r') as f:
        for line in f:
            dev, path, fstype = line.split()[0:3]
            if logger is not None and verboseLevel > 3:
                logger.log(line)
            if (fstype == 'sysfs' or fstype == 'proc' or dev in ignoredDevs or path.startswith('/proc/')
                    or path.startswith('/sys/') or path.startswith('/run/') or path.startswith('/dev/loop')
                    or path.startswith('/snap/') or not os.path.isdir(path)):
                continue
            if logger is not None and verboseLevel > 3:
                logger.log(path + '...')
            try:
                # fixed: renamed local "stat" — it shadowed the module "stat"
                fsInfo = os.statvfs(path)
                blocksize = fsInfo.f_bsize
                rc.append([path, fsInfo.f_blocks * blocksize, fsInfo.f_bfree * blocksize, fsInfo.f_bavail * blocksize])
            except FileNotFoundError:
                # if mounted by autofs: the path can not be found
                pass
    return rc
+
def diskIo():
    '''Returns I/O statistics of all block devices.
    Data are accumulated since last boot. Sector size: 512 byte.
    @see https://www.kernel.org/doc/Documentation/iostats.txt
    @return: a list of entries
        [id, diskname, readSectors, writtenSectors, discardedSectors]
        (values as strings, taken from /proc/diskstats), e.g.
        [['8-0', 'sda', '6113900', '1535408', '0']]
    '''
    rc = []
    with open('/proc/diskstats', 'r') as fp:
        for line in fp:
            # field layout (1 based), see kernel documentation:
            # 1: major 2: minor 3: name 6: read sectors 10: written sectors
            # 17: discarded sectors (newer kernels only)
            parts = line.split()
            # fixed: guard against short lines and against kernels
            # which do not deliver the discard fields yet
            if len(parts) < 10:
                continue
            discards = parts[16] if len(parts) > 16 else '0'
            rc.append(['{}-{}'.format(parts[0], parts[1]), parts[2], parts[5], parts[9], discards])
    return rc
+
def isExecutable(statInfo, euid, egid):
    '''Tests whether a file (or directory) is executable for a given user.
    @param statInfo: the result of os.stat()
    @param euid: the effective UID of the current process (os.geteuid())
    @param egid: the effective GID of the current process (os.getegid())
    @return: True: the file is executable
    '''
    # NOTE(review): the "other" bit is included in the owner/group masks too
    # (same convention as isReadable()/isWritable()) — confirm this is intended
    if statInfo.st_uid == euid:
        bits = stat.S_IXUSR | stat.S_IXOTH
    elif statInfo.st_gid == egid:
        bits = stat.S_IXGRP | stat.S_IXOTH
    else:
        bits = stat.S_IXOTH
    return (statInfo.st_mode & bits) != 0
+
def isReadable(statInfo, euid, egid):
    '''Tests whether a file (or directory) is readable for a given user.
    @param statInfo: the result of os.stat()
    @param euid: the effective UID of the current process (os.geteuid())
    @param egid: the effective GID of the current process (os.getegid())
    @return: True: the file is readable
    '''
    # NOTE(review): the "other" bit is included in the owner/group masks too
    if statInfo.st_uid == euid:
        bits = stat.S_IRUSR | stat.S_IROTH
    elif statInfo.st_gid == egid:
        bits = stat.S_IRGRP | stat.S_IROTH
    else:
        bits = stat.S_IROTH
    return (statInfo.st_mode & bits) != 0
+
def isWritable(statInfo, euid, egid):
    '''Tests whether a file (or directory) is writable for a given user.
    @param statInfo: the result of os.stat()
    @param euid: the effective UID of the current process (os.geteuid())
    @param egid: the effective GID of the current process (os.getegid())
    @return: True: the file is writable
    '''
    # NOTE(review): the "other" bit is included in the owner/group masks too
    if statInfo.st_uid == euid:
        bits = stat.S_IWUSR | stat.S_IWOTH
    elif statInfo.st_gid == egid:
        bits = stat.S_IWGRP | stat.S_IWOTH
    else:
        bits = stat.S_IWOTH
    return (statInfo.st_mode & bits) != 0
+
def stress(patternDisks, patternInterface):
    '''Returns the load data of a server.
    Note: the byte data (ioReadBytes ... netWriteBytes) are summarized since boot time.
    @param patternDisks: a regular expression of the disk devices used for the result (sum is built), e.g. 'sd[ab]'
    @param patternInterface: a regular expression of the network interfaces used for the result (sum is built), e.g. 'eth0|wlan0'
    @return: [ioReadBytes, ioWriteBytes, netReadBytes, netWriteBytes, load1Minute, memoryAvailable, swapAvailable]
    '''
    readIO = 0
    writeIO = 0
    rexprDisks = base.StringUtils.regExprCompile(patternDisks, 'disk pattern')
    with open('/proc/diskstats', 'r') as fp:
        for line in fp:
            # field layout of /proc/diskstats (1 based):
            # 1......2.....3....4.............5...........6...........7.........8..............9............10...........11.........12.........13.....14.............15................16.............17...............18
            # mainid subid name readscomplete readsmerged readsectors readmsecs writescomplete writesmerged writesectors writesmsec inprogress iomsec weightediomsec discardscompleted discardsmerged discardssectors discardsmsec
            # 8 0 sda 101755 2990 6113900 37622 69827 44895 1535408 41169 0 85216 2732 0 0 0 0
            # 8 1 sda1 82 0 6368 22 0 0 0 0 0 76 0 0 0 0 0
            parts = line.split()
            if rexprDisks.match(parts[2]) != None:
                # field 6: read sectors, field 10: written sectors
                readIO += int(parts[5])
                writeIO += int(parts[9])
    # sector size is 512 byte:
    readIO *= 512
    writeIO *= 512
    readNet = 0
    writeNet = 0
    rexprNet = base.StringUtils.regExprCompile(patternInterface, 'interface pattern')
    with open('/proc/net/dev', 'r') as fp:
        for line in fp:
            # field layout of /proc/net/dev (1 based):
            # 1......2........3......4....5....6....7.....8..........9.........10.......11.....12....13...14...15....16......17
            # Inter-| Receive | Transmit
            # face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
            # lo: 33308 376 0 0 0 0 0 0 33308 376 0 0 0 0 0 0
            parts = line.split()
            # remove ':' from the first field (interface name):
            if rexprNet.match(parts[0][0:-1]) != None:
                readNet += int(parts[1])
                writeNet += int(parts[9])
    with open('/proc/loadavg', 'rb') as fp:
        # first token of /proc/loadavg: load average of the last minute
        loadMin1 = float(fp.read().decode().split()[0])
    # NOTE(review): assumes a fixed /proc/meminfo line order
    # (line 3 = MemAvailable, line 16 = SwapFree, 1 based) — kernel dependent, verify
    with open('/proc/meminfo', 'r') as fp:
        lines = fp.read().split('\n')
    freeRam = _getNumber(lines[2])
    freeSwap = _getNumber(lines[15])
    return [readIO, writeIO, readNet, writeNet, loadMin1, freeRam, freeSwap]
+
def users():
    '''Returns the users currently logged in (parsed output of /usr/bin/w).
    @return: None: parser error.
        otherwise: a list of tuples (USERNAME, IP, LOGINSTART, LOGINDURATION, CPUTIME)
    '''
    with subprocess.Popen('/usr/bin/w', stdout=subprocess.PIPE) as proc:
        data = proc.stdout.read().decode()
    # the first two lines of "w" are headers:
    lines = data.split('\n')[2:]
    rc = []
    for line in lines:
        if line == '':
            break
        # example outputs of "w":
        #hm pts/0 88.67.239.209 21:17 1:32 m 6:37 0.04 s w
        #hm pts/0 88.67.239.209 21:17 60s 0.00s 0.00s w
        parts = line.split()
        if len(parts) < 4:
            rc = None
            break
        # if column 5 contains ':' the duration was split into two tokens
        # ("1:32 m") and column 5 itself is the duration; otherwise column 6
        # NOTE(review): assumes at least 7 columns in the second case —
        # an IndexError is possible for unusual "w" output; verify
        rc.append((parts[0], parts[2], parts[3], parts[4], parts[5] if parts[5].find(':') > 0 else parts[6]))
    return rc
+
def load():
    '''Returns average loads (content of /proc/loadavg).
    @return: [LOAD_1_MINUTE, LOAD_5_MINUTE, LOAD_10_MINUTE, RUNNING_PROCESSES, PROCESSES]
        or None on a parser error
    '''
    with open('/proc/loadavg', 'rb') as fp:
        content = fp.read().decode()
    matcher = re.match(r'(\S+)\s+(\S+)\s+(\S+)\s+(\d+)/(\d+)', content)
    if matcher is None:
        return None
    loads = [float(matcher.group(ix)) for ix in range(1, 4)]
    return loads + [int(matcher.group(4)), int(matcher.group(5))]
+
+def _getNumber(line):
+ parts = line.split()
+ return int(parts[1])
+
def memoryInfo():
    '''Returns the memory usage (taken from /proc/meminfo).
    @return: [TOTAL_RAM, AVAILABLE_RAM, TOTAL_SWAP, FREE_SWAP, BUFFERS]
    '''
    with open('/proc/meminfo', 'rb') as fp:
        lines = fp.read().decode().split('\n')
    # NOTE(review): relies on the fixed line order of /proc/meminfo
    # (1: MemTotal 3: MemAvailable 4: Buffers 15: SwapTotal 16: SwapFree,
    # 1 based) — kernel dependent, verify
    wantedLines = (0, 2, 14, 15, 3)
    return [_getNumber(lines[ix]) for ix in wantedLines]
+
def mdadmInfo(filename = '/proc/mdstat'):
    '''Returns the info about the software raid systems.
    @param filename: the status file to parse (overridable for testing)
    @return: a list of arrays [name, type, members, blocks, status],
        e.g. [['md0', 'raid1', 'dm-12[0] dm-13[1]', 1234, 'OK'], ['md1', 'raid0', 'sda1[0] sdb1[1]', 1234, 'broken']]
        status: 'OK', 'recovery', 'broken'
    '''
    rc = []
    if os.path.exists(filename):
        with open(filename, 'r') as fp:
            # md2 : active raid1 sdc1[0] sdd1[1]
            # md1 : active raid1 hda14[0] sda11[2](F)
            rexprHeader = re.compile(r'^(\w+) : active (raid\d+) (.*)')
            # 1953378368 blocks super 1.2 [2/2] [UU]
            rexprBlocks = re.compile(r'^\s+(\d+) blocks.*\[([_U]+)\]')
            name = aType = members = None
            for line in fp:
                matcher = rexprHeader.match(line)
                if matcher:
                    name = matcher.group(1)
                    aType = matcher.group(2)
                    members = matcher.group(3)
                    continue
                matcher = rexprBlocks.match(line)
                # fixed: ignore a blocks line without a preceding header line
                # (name/aType/members would be undefined)
                if matcher and name is not None:
                    blocks = matcher.group(1)
                    status = matcher.group(2)
                    # '_' in the [UU] mask or a failed member '(F)' means broken:
                    broken = status.find('_') >= 0 or members is not None and members.find('(F)') > 0
                    rc.append([name, aType, members, int(blocks), 'broken' if broken else 'OK'])
                    continue
                # fixed: the former statement rc[len[rc - 1][4]] = 'recovery'
                # subscripted the builtin "len" and crashed with a TypeError
                if line.find('recovery') > 0 and rc:
                    rc[-1][4] = 'recovery'
    return rc
+
if __name__ == '__main__':
    # simple self test: print the info of the mounted filesystems
    for entry in diskFree():
        print(base.StringUtils.join(' ', entry))
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import datetime
+import os.path
+import base.BaseLogger
+
class Logger(base.BaseLogger.BaseLogger):
    '''A logger writing timestamped messages into a logfile
    (and to stdout when the verbose level allows it).
    '''
    def __init__(self, logfile, verboseLevel):
        '''Constructor.
        @param logfile: the file for logging
        @param verboseLevel: > 0: logging to stdout too
        '''
        base.BaseLogger.BaseLogger.__init__(self, verboseLevel)
        self._logfile = logfile
        # test accessibility of the logfile as early as possible:
        try:
            with open(self._logfile, 'a'):
                pass
        except OSError as exc:
            msg = '+++ cannot open logfile {}: {}'.format(self._logfile, str(exc))
            print(msg)
            self.error(msg)

    def log(self, message, minLevel = 0):
        '''Logs a message (prefixed with a timestamp).
        @param message: the message to log
        @param minLevel: the logging is done only if _verboseLevel >= minLevel
        @return: True: OK False: error on log file writing
        '''
        rc = False
        try:
            if not self._inUse and self._mirrorLogger is not None:
                self._mirrorLogger.log(message)
            now = datetime.datetime.now()
            message = now.strftime('%Y.%m.%d %H:%M:%S ') + message
            if self._verboseLevel >= minLevel:
                print(message)
            with open(self._logfile, 'a') as fp:
                rc = True
                fp.write(message + '\n')
        except Exception:
            # fixed: the former bare "except:" also swallowed KeyboardInterrupt
            # and SystemExit; logging stays best effort — a failure is
            # signalled by the return value only
            pass
        return rc
+
if __name__ == '__main__':
    # library module: nothing to do when started directly
    pass
--- /dev/null
+'''
+Created on 08.05.2018
+
+@author: hm
+'''
+import base.BaseLogger
+import re
+
class MemoryLogger(base.BaseLogger.BaseLogger):
    '''Implements a logger storing the logging messages in an internal array.
    Useful for unit tests: the messages can be inspected afterwards.
    '''
    def __init__(self, verboseLevel = 0):
        '''Constructor.
        @param verboseLevel: > 0: the messages will be printed (to stdout)
        '''
        base.BaseLogger.BaseLogger.__init__(self, verboseLevel)
        self._lines = []

    def contains(self, string):
        '''Tests whether the log contains a given string.
        @param string: string to search
        @return: True: the log contains the string
        '''
        for line in self._lines:
            if line.find(string) >= 0:
                return True
        return False

    def getMessages(self):
        '''Returns the internal messages as array.
        @return: array of messages
        '''
        return self._lines

    def log(self, message, minLevel = 0):
        '''Logs a message.
        @param message: the message to log
        @param minLevel: the logging is done only if _verboseLevel >= minLevel
        @return: True: OK
        '''
        if self._verboseLevel >= minLevel:
            print(message)
        self._lines.append(message)
        return True

    def matches(self, pattern, flags = 0):
        r'''Tests whether the log contains a given regular expression.
        @param pattern: reg expression to search, e.g. r'\d+'
        @param flags: flags of the method re.compile(), e.g. re.I (for ignore case)
        @return: True: at least one log line matches the pattern
        '''
        # fixed: the former version called base.StringUtils.regExprCompile()
        # but base.StringUtils is not imported by this module (latent NameError);
        # the already imported re module is used instead
        try:
            regExpr = re.compile(pattern, flags)
        except re.error:
            return False
        for line in self._lines:
            if regExpr.search(line):
                return True
        return False
+
if __name__ == '__main__':
    # library module: nothing to do when started directly
    pass
\ No newline at end of file
--- /dev/null
+#! /usr/bin/python3
+'''
+processhelper: starting external scripts/programs
+
+@author: hm
+'''
+import subprocess
+import tempfile
+import os
+import time
+import sys
+
+sys.path.insert(0, '/usr/share/pyrshell')
+
+import base.StringUtils
+
+
class ProcessHelper:
    '''Helper for starting external scripts/programs via subprocess.
    Collects decoded stdout lines in self._output, decoded stderr lines
    in self._error and the raw stdout bytes in self._rawOutput.
    '''

    def __init__(self, verboseLevel, logger):
        '''Constructor:
        @param verboseLevel: controls the amount of logging
        @param logger: display output
        '''
        self._logger = logger
        self._verboseLevel = verboseLevel
        # decoded stdout lines of the last call:
        self._output = None
        # raw (undecoded) stdout of the last call:
        self._rawOutput = None
        # decoded stderr lines of the last call:
        self._error = None

    def execute(self, argv, logOutput, mode='!shell', timeout=None, currentDirectory=None):
        '''Executes an external program.
        @param argv: a list of arguments, starting with the program name
        @param logOutput: True: the result of stdout is written to stdout via logger. Note: the raw output is available as self._output[]
        @param mode: 'shell': the command is executed through the shell
        @param timeout: None or the timeout of the external program
        @param currentDirectory: None or the working directory for the command
        @return: None (logOutput==False) or array of strings
        '''
        curDir = self.pushd(currentDirectory)
        if argv == None:
            self._logger.error('execute(): missing argv (is None)')
        elif curDir != '':
            # curDir == '' means: changing the working directory failed
            self._logger.log('executing: ' + ' '.join(argv), 3)
            shell = mode == 'shell'
            proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
            (out, err) = proc.communicate(None, timeout)
            self._output = []
            self._error = []
            if logOutput:
                for line in out.decode().split('\n'):
                    line2 = line.rstrip()
                    # NOTE(review): tests len(line) but appends line2 — a
                    # whitespace-only line longer than 1 char is appended as
                    # an empty string; confirm this is intended
                    if len(line) > 1:
                        self._output.append(line2)
                        self._logger.log(line2, 1)
            for line in err.decode().split('\n'):
                msg = line.rstrip()
                if msg != '':
                    self._error.append(msg)
                    self._logger.error(msg)
        self.popd(curDir)
        return None if not logOutput else self._output

    def executeCommunicate(self, process, inputString, logOutput, timeout):
        '''Handles the output of subprocess calls.
        @param process: the process to inspect
        @param inputString: None or an input string (passed to stdin)
        @param logOutput: True: output should be collected and logged
        @param timeout: the longest time a process should use
        '''
        if inputString == None:
            (out, err) = process.communicate(timeout=timeout)
        else:
            (out, err) = process.communicate(inputString.encode(), timeout)
        self._rawOutput = out
        if logOutput:
            for line in out.decode().split('\n'):
                if len(line) > 0:
                    self._output.append(line)
                    self._logger.log(line)
        # stderr is collected independently of logOutput:
        for line in err.decode().split('\n'):
            if line != '':
                self._error.append(line)
                self._logger.error(line)

    def executeInput(self, argv, logOutput, inputString=None, mode='!shell', timeout=None):
        '''Executes an external program with input from stdin.
        @param argv: a list of arguments, starting with the program name
        @param logOutput: True: the result of stdout is written to stdout via logger. Note: the raw output is available as self._output[]
        @param inputString: None or the input for the program as string
        @param mode: 'shell': the command is executed through the shell
        @param timeout: None or the timeout of the external program
        '''
        self._output = []
        self._error = []
        if inputString == None:
            inputString = ''
        self._logger.log('executing: ' + ' '.join(argv), 3)
        # NOTE(review): this branch can never be taken (mode is compared with
        # the literal 'not used and shell') — looks like deactivated code
        if mode == 'not used and shell':
            fn = tempfile.gettempdir() + '/dbtool.' + str(time.time())
            base.StringUtils.toFile(fn, inputString)
            command = argv[0] + " '" + "' '".join(argv[1:]) + "' < " + fn
            subprocess.run([command], check=True, shell=True)
            os.unlink(fn)
        else:
            try:
                proc = subprocess.Popen(argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=mode == 'shell')
                self.executeCommunicate(proc, inputString, logOutput, timeout)
            except OSError as exc:
                msg = str(exc)
                self._logger.error(msg)
                self._error = msg.split('\n')
            except Exception as exc2:
                msg = str(exc2)
                self._logger.error(msg)
                self._error = msg.split('\n')

    def executeInputOutput(self, argv, inputString=None, logOutput=False, mode='!shell', timeout=None):
        '''Executes an external program with input from stdin and returns the output.
        @param argv: a list of arguments, starting with the program name
        @param inputString: None or the input for the program as string
        @param logOutput: True: the output is also written via the logger
        @param mode: 'shell': the command is executed through the shell
        @param timeout: None or the timeout of the external program
        @return: a list of lines (program output to stdout)
        '''
        self.executeInput(argv, logOutput, inputString, mode, timeout)
        rc = self._output
        # fall back to the raw output if the decoded output is empty:
        if (rc == None or len(rc) == 0) and self._rawOutput != None and len(self._rawOutput) > 0:
            try:
                rc = self._rawOutput.decode('utf-8').split('\n')
            except UnicodeDecodeError as exc:
                self._logger.error('executeInputOutput(): {}\n[{}]\n"{}"'.format(str(exc), ','.join(argv), '' if inputString == None else inputString[0:80]))
                rc = base.StringUtils.minimizeArrayUtfError(self._rawOutput.split(b'\n'), self._logger if self._verboseLevel >= 2 else None)
        return rc

    def executeInChain(self, argv1, inputString, argv2, mode='shell', timeout=None):
        '''Executes 2 programs with input from stdin as chain ("argv1 | argv2") and returns the output.
        @param argv1: a list of arguments for the first program, starting with the program name
        @param inputString: None or the input for the first program as string
        @param argv2: a list of arguments for the second program, starting with the program name
        @param mode: 'shell': the pipe is built by the shell, using temporary files
        @param timeout: None or the timeout of the external program
        @return: a list of lines (program output to stdout)
        '''
        self._output = []
        self._error = []
        self._logger.log('executing: ' + ' '.join(argv1) + '|' + ' '.join(argv2), 3)
        rc = []
        if mode == 'shell':
            fnOut = tempfile.gettempdir() + '/dbtool.out.' + str(time.time())
            if inputString == None:
                inputPart = ''
            else:
                fnIn = tempfile.gettempdir() + '/dbtool.in.' + str(time.time())
                inputPart = "< '" + fnIn + "' "
                base.StringUtils.toFile(fnIn, inputString)
            command = argv1[0] + " '" + "' '".join(argv1[1:]) + "' " + inputPart + "| " + argv2[0] + " '" + "' '".join(argv2[1:]) + "' > " + fnOut
            try:
                subprocess.run([command], check=True, shell=True)
                data = base.StringUtils.fromFile(fnOut)
                rc = self._output = data.split('\n')
            except Exception as exc:
                self._logger.error(str(exc))
            if inputString != None:
                os.unlink(fnIn)
            os.unlink(fnOut)
        else:
            try:
                p1 = subprocess.Popen(argv1, stdout=subprocess.PIPE)
                p2 = subprocess.Popen(argv2, stdin=p1.stdout, stdout=subprocess.PIPE)
                # Allow p1 to receive a SIGPIPE if p2 exits.
                p1.stdout.close()
                self.executeCommunicate(p2, None, True, timeout)
                rc = self._output
            except Exception as exc:
                self._logger.error(str(exc))
        return rc

    def executeScript(self, script, node=None, logOutput=True, args=None, timeout=None):
        '''Writes a script into a temporary file and executes it.
        @param script: content of the script
        @param node: script name without path (optional)
        @param logOutput: True: the result of stdout is written to stdout via logger. Note: the raw output is available as self._output[]
        @param args: None or an array of additional arguments, e.g. ['-v', '--dump']
        @param timeout: None or the timeout of the external program
        @return: None (logOutput==False) or array of strings
        '''
        self._logger.log('executing {}...'.format('script' if node == None else node), 3)
        if node == None:
            node = 'processtool.script'
        fn = tempfile.gettempdir() + os.sep + node + str(time.time())
        base.StringUtils.toFile(fn, script)
        # make the script executable:
        os.chmod(fn, 0o777)
        argv = [fn]
        if args != None:
            argv += args
        rc = self.execute(argv, logOutput, 'shell', timeout)
        os.unlink(fn)
        return rc

    def popd(self, directory):
        '''Restores the current directory saved by a previous pushd() call.
        @param directory: None or '' (nothing to do) or the directory to change back to
        '''
        if directory != None and directory != '':
            os.chdir(directory)
            if os.path.realpath(os.curdir) != os.path.realpath(directory):
                self._logger.error('cannot change to directory ' + directory)

    def pushd(self, directory):
        '''Changes the current direcory (if needed and possible).
        @param directory: None or the new current directory
        @return None: directory = None
            '': changing directory failed
            otherwise: the current directory (before changing)
        '''
        if directory == None:
            rc = None
        else:
            rc = os.curdir
            os.chdir(directory)
            if os.path.realpath(os.curdir) != os.path.realpath(directory):
                os.chdir(rc)
                self._logger.error('cannot change to directory ' + directory)
                rc = ''
        return rc
+
--- /dev/null
+'''
+Created on 09.08.2019
+
+@author: hm
+'''
+import re
+import math
+
+import base.StringUtils
+
class PySymbol:
    '''Holds the info of one symbol: a class, class variable, parameter,
    local variable...
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: the symbol name
        '''
        self._name = name
        # the detected data type, None while unknown:
        self._dataType = None
        # True until a second assignment to the symbol is seen:
        self._isConst = True
+
class Symbols:
    '''Implements a symbol table: dictionaries for classes, class variables,
    local variables... Used to guess data types while converting python code.
    '''
    def __init__(self, parent):
        '''Constructor.
        @param parent: the owning converter; must offer find(), getLine(),
            put() and the flags _lockPut / _hasReturn
        '''
        self._classes = dict()
        self._classVariables = dict()
        self._localVariables = dict()
        self._parent = parent
        # "self.<name> <op>= <value>" but not "==" comparisons:
        self._regClassVariableAssignment = re.compile(r'self\.(\w+)\s*(/?[+-/*]?)=(?!=)\s*(\S+)')
        # "<name> <op>= <value>" but not "==" comparisons:
        self._regLocalVariableAssignment = re.compile(r'(\w+)\s*(/?[+-/*]?)=(?!=)\s*(\S+)')
        self._regReturn = re.compile(r'return\s+(\S+)')
        # NOTE(review): '[\w+]*' is a character class (word chars plus '+');
        # probably r'\w*' was meant — confirm
        self._regSymbol = re.compile(r'[_a-zA-Z][\w+]*')
        self._regNumber = re.compile(r'-?\d+')
        self._regBoolean = re.compile(r'\b(True|False)\b')
        # container literals "([{" or constructor calls like "dict(" / "MyClass(":
        self._regObject = re.compile(r'[({\[]|(dict|list|[A-Z]\w*)\s*\(')
        self._regObjectMethodCall = re.compile('^self\.(\w+)\.\w')

    def declareClassVariables(self):
        '''Puts the declaration of the class variables into the output.
        @precondition: parseClass() has been called (fills self._classVarNames)
        '''
        for name in self._classVarNames:
            # unknown types default to 'string':
            (aType, isConst) = self.getType('class', name, 'string')
            self._parent.put(1, 'private ' + name + ': ' + aType + ';')

    def findType(self, symbolType, line, defaultType = None):
        '''Tries to detect the type of a variable.
        @param symbolType: "local" or "class"
        @param line: the line to inspect
        @param defaultType: the return value if no type is detected
        @return: tuple (name, type):
            name: None or the symbol name, type: defaultType or the found data type
        '''
        aType = defaultType
        name = None
        # NOTE(review): no effect (name is already None) — looks like a
        # leftover debugger anchor; confirm and remove
        if line.find('rc = 0') >= 0:
            name = None
        regExpr = self._regLocalVariableAssignment if symbolType == 'local' else self._regClassVariableAssignment
        matcher = regExpr.match(line)
        if matcher != None:
            name = matcher.group(1)
            value = matcher.group(3)
            rc2 = self.typeOfValue(value)
            if rc2 != None:
                aType = rc2
        return (name, aType)

    def getType(self, symbolType, name, defaultType = None):
        '''Returns the data type of a variable.
        @param symbolType: "local" or "class"
        @param name: the variable name
        @param defaultType: the type returned for unknown variables/types
        @return: tuple (type, isConst): type: defaultType or the data type of the variable
            isConst: True: the symbol is a constant
        '''
        aType = defaultType
        isConst = False
        variables = self._localVariables if symbolType == 'local' else self._classVariables
        if name in variables.keys():
            aType = variables[name]._dataType
            if aType == None:
                aType = defaultType
            isConst = variables[name]._isConst
        return (aType, isConst)

    def isFirstAssignment(self, line):
        '''Tests whether the line contains an assignment to a not known variable.
        @param line: line to inspect
        @return: True: the line contains an assignment to a not known variable
        '''
        rc = False
        matcher = self._regLocalVariableAssignment.match(line)
        if matcher != None:
            # group(2) contains the first char of '+=', '-=' ...;
            # an empty group means a plain '=' (first assignment)
            rc = matcher.group(2) == ''
        return rc

    def isVariable(self, symbolType, name):
        '''Tests whether a name is a known variable.
        @param symbolType: 'local' or 'class'
        @param name: the variable to test
        @return: True: the name is registered in the respective dictionary
        '''
        dictionary = self._localVariables if symbolType == 'local' else self._classVariables
        return name in dictionary.keys()

    def parseClass(self, ixLine):
        '''Finds the properties of the class variables.
        Scans the class body up to the next 'class ' line and registers
        every assigned "self.<name>" in self._classVariables.
        @param ixLine: the start index of the class
        '''
        converter = self._parent
        # suppress output while pre-parsing:
        converter._lockPut = True
        lastLine = converter.find('class ', ixLine)
        ixFirst = ixLine + 1
        self._classVarNames = []
        while ixLine < lastLine:
            (ixLine, line, level) = converter.getLine(ixLine)
            (name, aType) = self.findType('class', line)
            if name == None:
                # a method call like "self.x.append(...)" implies an object:
                matcher = self._regObjectMethodCall.match(line)
                if matcher != None:
                    name, aType = matcher.group(1), 'object'
            if name != None and name not in self._classVarNames:
                self.setType(aType, name, self._classVariables)
                self._classVarNames.append(name)
            ixLine += 1
        converter._lockPut = False

    def parseMethod(self, ixLine):
        '''Finds the properties of the symbols of a method.
        @param ixLine: the start index of the method
        @return: ixEnd: the last line of the method
        '''
        converter = self._parent
        # suppress output while pre-parsing:
        converter._lockPut = True
        lastLine = converter.find('def ', ixLine)
        while ixLine <= lastLine:
            (ixLine, line, level) = converter.getLine(ixLine)
            matcher = self._regReturn.match(line)
            if matcher == None:
                (name, aType) = self.findType('local', line)
            else:
                converter._hasReturn = True
                # the pseudo symbol '$return' holds the return type:
                (name, aType) = ('$return', self.typeOfValue(matcher.group(1)))
            self.setType(aType, name, self._localVariables)
            ixLine += 1
        converter._lockPut = False
        return lastLine

    def setType(self, aType, name, variables):
        '''Sets the type of a variable if defined.
        @param aType: None: only the constant state is updated
            otherwise: the variable type will be set if it is undefined
        @param name: the variable name (None: the method does nothing)
        @param variables: the dictionary of the variables
        '''
        if name != None:
            if name not in variables.keys():
                variables[name] = PySymbol(name)
                variables[name]._dataType = aType
            else:
                if variables[name]._dataType == None:
                    variables[name]._dataType = aType
                elif aType != None and variables[name]._dataType != aType:
                    # conflicting types found: fall back to 'any'
                    variables[name]._dataType = 'any'
                # second assignment: is not a constant
                variables[name]._isConst = False

    def typeOfValue(self, value):
        '''Detects the type of a variable given by an assignment.
        @param value: the right hand side of the assignment
        @return: None or the detected variable type (attempt):
            'boolean', 'number', 'string', 'object' or a known variable's type
        '''
        aType = None
        if self._regBoolean.match(value) != None:
            aType = 'boolean'
        elif self._regNumber.match(value) != None:
            aType = 'number'
        elif value.startswith('"') or value.startswith("'"):
            aType = 'string'
        elif self._regObject.match(value):
            aType = 'object'
        else:
            # the value is another symbol: take over its type (if known):
            matcher = self._regSymbol.match(value)
            if matcher != None:
                name = matcher.group(0)
                kind = "class" if name.startswith('self.') else "local"
                (aType, isConst) = self.getType(kind, name)
        return aType
+
class PythonToTypeScript:
    '''Converts Python source code into TypeScript code, line by line and heuristically.
    Reads from pythonSource._lines and appends the result to typeScript._lines.
    NOTE(review): the original indentation of this class was mangled by the diff
    rendering; the nesting below is reconstructed from the statement semantics.
    '''
    def __init__(self, pythonSource, typeScript):
        '''Constructor.
        @param pythonSource: IN: a TextTool instance which contains the python source
        @param typeScript: OUT: the converted type script
        '''
        self._hasReturn = False
        # True: put() discards its input (used while pre-scanning symbols)
        self._lockPut = False
        self._symbols = Symbols(self)
        self._python = pythonSource
        self._logger = pythonSource._logger
        self._verboseLevel = 3
        self._typeScript = typeScript
        self._regReservedWords = re.compile(r'\b(True|False|None|self|__init__|print)\b')
        # Python keyword -> TypeScript replacement, applied by put()
        self._reservedWords = { 'True' : 'true', 'False' : 'false', 'None' : 'null', 'self' : 'this',
            '__init__' : 'constructor', 'print' : 'console.log' }
        self._indentItem = '\t'
        self._indentWidth = 1
        self._currentMethod = None
        self._baseClass = None
        self._regEndOfModule = re.compile(r'if\s+__name__\s*==\s*\S__main__\S\s*:\s*$')
        # NOTE(review): this local is never used (the attribute above is) -- probably dead code
        regEndOfModule = re.compile(r'if\s+__name__\s*==\s*.__main__.\s*:')

    def appendSemicolon(self):
        '''Appends a semicolon to the last relevant output line if meaningful.
        Lines containing '{' or already ending with ';' or '*/' are left untouched.
        '''
        if len(self._typeScript._lines) > 0:
            lines = self._typeScript._lines
            # search the last non comment line:
            ix = len(lines) - 1
            while ix > 0 and lines[ix].lstrip().startswith('//'):
                ix -= 1
            line = lines[ix]
            if line.find('{') < 0 and not line.endswith(';') and not line.endswith('*/'):
                self._typeScript._lines[ix] += ';'

    def kindOf(self, line):
        '''Detects the object type of the line.
        @param line: the stripped line to inspect (non strings are converted first)
        @return: "class", "method" or "statement"
        '''
        if type(line) != str:
            # e.g. line == None: str(None) falls into the 'statement' case
            line = str(line)
        if line.startswith('class '):
            rc = 'class'
        elif line.startswith('def '):
            rc = 'method'
        else:
            rc = 'statement'
        return rc

    def convert(self):
        '''Converts the whole python source to typescript code.
        '''
        self._indentItem = self.findIdentItem()
        ixLine = 0
        line = None
        while not self.endOfContent(ixLine):
            if line == None:
                (ixLine, line, level) = self.getLine(ixLine)
            ix = self.docComment(ixLine)
            if ix > ixLine:
                # a doc comment has been consumed: continue below it
                (ixLine, line, level) = self.getLine(ix)
            self._logger.log('{}: {}'.format(ixLine, line), 3)
            (ixLine, line, kind) = self.convertItem(ixLine, line)
        # NOTE(review): debug dump of the result -- hard coded path
        base.StringUtils.toFile('/tmp/out.ts', '\n'.join(self._typeScript._lines))

    def convertClass(self, ixLine, firstLine):
        '''Converts a class definition and its body.
        @param ixLine: the index of the first line of the class
        @param firstLine: the stripped 'class ...' line
        @return: the tuple (ixLine, line, kind) with the index and content of the next line below the class
        '''
        self._currentClass = firstLine.split()[1]
        baseClass = ''
        self._baseClass = None
        ix = firstLine.find('(')
        if ix > 0:
            # only the last component of a dotted base class name is kept
            self._baseClass = firstLine[ix + 1:firstLine.find(')')].split('.')[-1]
            baseClass = ' extends ' + self._baseClass
        ixEnd = self._currentClass.find('(')
        if ixEnd > 0:
            self._currentClass = self._currentClass[0:ixEnd].strip()
        else:
            # skip ':'
            self._currentClass = self._currentClass[0:-1].strip()
        kind = 'class'
        while not self.endOfContent(ixLine) and kind == 'class':
            level = self.getIndentLevel(ixLine)
            ixLine += 1
            ixLine = self.docComment(ixLine)
            self._symbols.parseClass(ixLine)
            self.put(level, 'export class ' + self._currentClass + baseClass + ' {')
            self._symbols.declareClassVariables()
            self._positionClass = len(self._typeScript._lines)
            kind = 'statement'
            lastIx = -1
            while not self.endOfContent(ixLine) and kind != 'class':
                (ixLine, line, kind) = self.convertMethod(ixLine, level)
                if ixLine == lastIx:
                    # no progress: avoid an endless loop
                    break
                lastIx = ixLine
            self.unputEmpty()
            self.put(level, '}')
            self.put(0, '')
        return (ixLine, line, kind)

    def convertItem(self, ixLine, line, ixEnd = None):
        '''Converts a code item (class, method or statement).
        @param ixLine: the index of the first line of the item
        @param line: the stripped line to inspect
        @param ixEnd: None or the last line to process
        @return: the tuple (ixLine, line, kind) with the index and content of the next line below the processed item
        '''
        ixStart = ixLine
        level = self.getIndentLevel(ixLine)
        while line.startswith('import'):
            parts = line.split()[1].split('.')
            if len(parts) == 2 and parts[0] != 'os':
                package = parts[0]
                module = parts[1]
                self.put(level, 'import { $M } from "../$P/$M";'.replace('$M', module).replace('$P', package))
            ixLine += 1
            (ixLine, line, level) = self.getLine(ixLine)
        kind = self.kindOf(line)
        if kind == 'class':
            (ixLine, line, kind) = self.convertClass(ixLine, line)
        elif kind == 'method':
            (ixLine, line, kind) = self.convertMethod(ixLine, level)
        elif kind == 'statement':
            (ixLine, line, kind) = self.convertStatement(ixLine, level, ixEnd)
        elif line.startswith("'''") or line.startswith('"""'):
            # NOTE(review): unreachable -- kindOf() only returns class/method/statement
            ixLine = self.docComment(ixLine)
            (ixLine, line, level) = self.getLine(ixLine)
        else:
            self._python._logger.error('unrecognized input in line {}: {}'.format(ixLine + 1, line) )
            ixLine += 1
            (ixLine, line, level) = self.getLine(ixLine)
            kind = self.kindOf(line)
        # guarantee progress of at least one line
        return (ixLine if ixLine > ixStart else ixLine + 1, line, kind)

    def convertMethod(self, ixLine, line):
        '''Converts a method definition and its body.
        @param ixLine: the index of the first line of the method
        @param line: NOTE(review): callers pass the indention level here; the value
                     is overwritten by skipEmptyLines() before first use
        @return: the tuple (ixLine, line, kind) with the index and content of the next line below the method
        '''
        self._hasReturn = False
        (ixLine, line) = self.skipEmptyLines(ixLine)
        level = self.getIndentLevel(ixLine)
        kind = self.kindOf(line)
        if kind == 'method':
            ixLine = self.docComment(ixLine + 1)
            ixEnd = self._symbols.parseMethod(ixLine)
            out = line.replace('def ', '')
            self._currentMethod = out.split()[0].split('(')[0]
            params = line[line.find('(')+1:line.find(')')].split(',')
            out = self._currentMethod + '('
            for param in params:
                # 'self' has no TypeScript counterpart
                if param == 'self':
                    continue
                param = param.strip()
                if not out.endswith('('):
                    out += ', '
                (name, aType) = self._symbols.findType("local", param)
                if aType == None:
                    aType = 'string'
                if name == None:
                    out += param + ': string'
                else:
                    # keep a possible default value: param[len(name):]
                    out += name + ': ' + aType + param[len(name):]
            out += ')'
            if self._hasReturn:
                (aType, isConst) = self._symbols.getType('local', '$return', 'string')
                out += ': ' + aType
            out += ' {'
            # NOTE(review): out already ends with ' {'; out[0:-1] + ' {' yields a
            # double blank before the brace -- confirm whether this is intended
            self.put(level, out[0:-1] + ' {')
            kind = 'statement'
            (ixLine, line, level) = self.getLine(ixLine)
            while not self.endOfContent(ixLine) and kind == 'statement' and (ixEnd == None or ixLine <= ixEnd):
                (ixLine, line, kind) = self.convertItem(ixLine, line, ixEnd)
            self.appendSemicolon()
            self.put(level - 1, '}')
            self.put(0, '')
        return (ixLine, line, kind)

    def convertStatement(self, ixLine, line, ixEnd = None):
        '''Converts a sequence of statements.
        @param ixLine: the index of the first line to process
        @param line: NOTE(review): callers pass the indention level here; the value
                     is overwritten by skipComments() before first use
        @param ixEnd: None or the last line to process
        @return: the tuple (ixLine, line, kind) with the index and content of the next line below the statements
        '''
        (ixLine, line) = self.skipComments(ixLine)
        # stack of (level, comment) tuples of the blocks whose '}' is still pending
        openBlocks = []
        while not self.endOfContent(ixLine) and (ixEnd == None or ixLine <= ixEnd):
            (ixLine, line) = self.skipEmptyLines(ixLine)
            (ixLine, line) = self.skipComments(ixLine)
            if self._currentMethod == '__init__':
                if self._baseClass != None:
                    ix = line.find(self._baseClass)
                    ix += 1
                    if ix > 0:
                        # the call of the base class constructor becomes super(...)
                        args = line.split('(self,')[1].strip()[0:-1]
                        self.put(self.getIndentLevel(ixLine), 'super({});'.format(args))
                        (ixLine, line, currentLevel) = self.getLine(ixLine + 1)
                # register assignments to 'self.*' as class variables
                (name, aType) = self._symbols.findType('class', line, 'string')
            self._logger.log('{}: {}'.format(ixLine, line), 3)
            isFirstAssignment = self._symbols.isFirstAssignment(line)
            (name, aType) = self._symbols.findType('local', line, 'string')
            (aType, isConst) = self._symbols.getType('local', name)
            if name == None:
                line = None
            elif isFirstAssignment:
                # first assignment: TypeScript needs a let/const declaration
                line = ('let ' if not isConst else 'const ') + line
            kind = self.kindOf(line)
            if kind != 'statement':
                break
            else:
                if line == None:
                    # no assignment found: re-read the current line as-is
                    (ixLine, line) = self.skipComments(ixLine)
                    kind = self.kindOf(line)
                    if kind != 'statement':
                        break
            currentLevel = self.getIndentLevel(ixLine)
            #ignoreIf = toPush != None and toPush[1] == '// else'
            ignoreIf = line.startswith('else:') or line.startswith('elif ')
            # close all blocks which are at least as deep as the current line
            while len(openBlocks) > 0 and openBlocks[-1][0] >= currentLevel :
                (level2, body) = openBlocks.pop()
                if not (ignoreIf and level2 == currentLevel and body.startswith('// if')):
                    self.put(level2, '} ' + body)
            toPush = self.putStatement(currentLevel, line)
            self.appendSemicolon()
            if toPush != None:
                openBlocks.append(toPush)
            # if level2 < currentLevel and not (line.startswith('else:') or line.startswith('else ')):
            (ixLine, line, currentLevel) = self.getLine(ixLine + 1)
        if len(openBlocks) > 0:
            # NOTE(review): only one pending block is closed here -- confirm that
            # deeper nesting cannot remain open at this point
            (level2, body) = openBlocks.pop()
            self.put(level2, '}' + body)
        return (ixLine, line, self.kindOf(line))

    def docComment(self, ixLine):
        '''Converts a Python doc comment into a JSDoc style comment.
        @param ixLine: the index in _python where the comment starts.
        @pre: the first non blank symbol is """ or 3 ticks (')
        @return: the index of the next line below the doc comment
        '''
        self._hasReturn = False
        line = self._python._lines[ixLine].strip()
        marker = line[0:3]
        if marker == '"""' or marker == "'''":
            level = self.getIndentLevel(ixLine)
            # the JSDoc comment is put one level left of the Python doc string
            commentIndent = 0 if level < 1 else level - 1
            self.put(commentIndent, '/**')
            self.put(commentIndent, ' * ' + line[3:])
            ixLine += 1
            while not self.endOfContent(ixLine):
                line = self._python._lines[ixLine].strip()
                ixLine += 1
                if line.endswith(marker):
                    self.put(commentIndent, ' * ' + line[0:-3])
                    break
                else:
                    self.put(commentIndent, ' * ' + line)
            self.unputEmpty()
            self.put(commentIndent, ' */')
        return ixLine

    def endOfContent(self, ixLine):
        '''Returns whether the source is completely processed.
        @param ixLine: the line index to test (None or a non int counts as "done")
        @return True: the content is completely processed
        '''
        if ixLine == None or type(ixLine) != int:
            rc = True
        else:
            rc = ixLine >= len(self._python._lines)
        return rc

    def find(self, token, ixLine):
        '''Searches the given token starting with a given index.
        The end-of-module marker ("if __name__ == '__main__':") stops the search too.
        @param token: string to detect (at line start)
        @param ixLine: the start index
        @return: the line index above the line with the token or (if not found) the last line of the source
        '''
        rc = None
        while rc == None and not self.endOfContent(ixLine):
            (ixLine, line, level) = self.getLine(ixLine)
            if line.startswith(token) or self._regEndOfModule.match(line):
                rc = ixLine - 1
            ixLine += 1
        if rc == None:
            rc = len(self._python._lines)
        return rc

    def findIdentItem(self):
        '''Inspects some lines of the input to detect the width of one indention step.
        Counts indented lines starting with typical keywords and derives the most
        plausible indention width from the common factors of the found indents.
        @return: the string representing one level of indent.
        '''
        regIndent = re.compile('^(\s+)(\w+)')
        ixLine = 0
        indents = dict()
        # lengths[n]: number of lines indented by exactly n characters (1..8)
        lengths = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        countRelevantLines = 0
        linesWithBlanks = 0
        self._indentWidth = 1
        countIndenters = 0
        while ixLine < len(self._python._lines):
            line = self._python._lines[ixLine]
            if line.startswith('def '):
                countIndenters += 1
            matcher = regIndent.match(line)
            ixLine += 1
            if matcher != None and matcher.group(1) != '':
                if matcher.group(2) in ['def']:
                    countIndenters += 1
                if countIndenters or matcher.group(2) in ['if', 'else', 'for', 'while', 'return']:
                    countRelevantLines += 1
                    indent = matcher.group(1)
                    level = len(indent)
                    if level > 0 and level < 9:
                        lengths[level] += 1
                    if indent[0] == ' ':
                        linesWithBlanks += 1
                    if level in indents:
                        indents[level] += 1
                    else:
                        indents[level] = 1
            # a sample of 50 relevant lines is enough
            if countRelevantLines > 50:
                break
        factors = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        for ix in indents.keys():
            for factor in range(2, len(factors)):
                if ix % factor == 0:
                    factors[factor] += 1
        for factor in range(1, len(factors)):
            # a factor can exist but no matching length: factor=2 and indent=' '*4
            # lengths[factor] can be incorrect, we assume at least 50% correct:
            if self._indentWidth < factors[factor] and lengths[factor] >= countIndenters:
                self._indentWidth = factor
        item = ' ' if linesWithBlanks >= countRelevantLines - linesWithBlanks else '\t'
        rc = item * self._indentWidth if self._indentWidth > 1 else '\t'
        self._logger.log('indention width: {} item: {}'.format(self._indentWidth, '\\t' if item == '\t' else '" "'))
        return rc

    def getLine(self, ixLine):
        '''Gets the info of the next relevant line at or below a given index.
        Comments and empty lines are skipped repeatedly until the index is stable.
        @param ixLine: the index of the line
        @return: the tuple (ixLine, line, level). Because of comments the ixLine may be incremented
        '''
        changed = True
        ix = ixLine
        while changed:
            (ix, line) = self.skipComments(ix)
            (ix, line) = self.skipEmptyLines(ix)
            changed = ix != ixLine
            ixLine = ix
        level = self.getIndentLevel(ixLine)
        return (ixLine, line, level)

    def getIndentLevel(self, ixLine):
        '''Returns the indention level of the python source line with index ixLine.
        @param ixLine: the index of the source line
        @return: the indention level (0 for tab indented lines -- see below)
        '''
        rc = 0
        if not self.endOfContent(ixLine):
            ix = 0
            line = self._python._lines[ixLine]
            while ix < len(line) and line[ix].isspace():
                ix += 1
            if ix > 0:
                # NOTE(review): tab indented lines always yield level 0 -- confirm
                if line[0] != '\t':
                    rc = ix // self._indentWidth
        return rc

    def put(self, level, line, position = None):
        '''Puts a given line into the typescript lines.
        Reserved Python words are replaced by their TypeScript counterparts.
        @param level: the indention level
        @param line: the line to put
        @param position: None: append; otherwise: insert at this index
        '''
        def subst(matcher):
            # replaces one reserved word found by _regReservedWords
            rc = self._reservedWords[matcher.group(1)]
            return '?' if rc == None else rc
        if not self._lockPut:
            if line.startswith('}'):
                # NOTE(review): no-op -- probably a left-over debugging hook
                line += ''
            if type(level) != int:
                # NOTE(review): a non int level is "corrected" by adding 1 -- confirm intent
                level += 1
            if level > 0:
                line = self._indentItem * level + line
            if position == None:
                self._typeScript._lines.append(self._regReservedWords.sub(subst, line))
            else:
                self._typeScript._lines.insert(position, line)

    def putStatement(self, level, line):
        '''Completes a statement, e.g. inserts '(' for if, while..., and puts it to the output.
        @param level: the indention level
        @param line: line to inspect
        @return: None or the tuple (level, comment) describing an opened block
        '''
        out = line
        toPush = None
        if out.endswith(':'):
            # 'xxx:' becomes 'xxx) {'; the matching '(' is inserted below
            out = out[0:-1] + ') {'
            if out.startswith('if '):
                toPush = (level, '// if')
                out = out.replace('if ', 'if (')
            elif out.startswith('else) {'):
                # 'else:' has already been rewritten to 'else) {' above
                toPush = (level, '// else')
                out = '} else {'
            elif out.startswith('elif '):
                toPush = (level, '// elif')
                out = out.replace('elif ', 'else if (')
            elif out.startswith('while '):
                toPush = (level, '// while')
                out = out.replace('while ', 'while (')
            elif out.startswith('for '):
                toPush = (level, '// for')
                name = line.split()[1]
                # an unknown loop variable needs a declaration
                if self._symbols.isVariable('local', name):
                    out = out.replace('for ', 'for (')
                else:
                    out = out.replace('for ', 'for (let ')
        if out != '':
            self.put(level, out)
        return toPush

    def unputEmpty(self):
        '''If the last line of the output is empty (or a lonely '*') it will be removed.
        '''
        if len(self._typeScript._lines) > 0 and (self._typeScript._lines[-1].strip() == '' or self._typeScript._lines[-1].strip() == '*'):
            self._typeScript._lines.pop()

    def skipEmptyLines(self, ixLine):
        '''Skips empty lines of the python source.
        @param ixLine: the index in _python to start
        @return: a tuple(ixLine, line): the index and the stripped content of the next not empty line
        '''
        line = ''
        while not self.endOfContent(ixLine):
            line = self._python._lines[ixLine].strip()
            if line == '':
                ixLine += 1
            else:
                break
        return (ixLine, line)

    def skipComments(self, ixLine):
        '''Skips '#' comment lines, converting each of them to a '//' comment.
        @param ixLine: the index in _python where the comment starts.
        @return: a tuple(ixLine, line): the index and the stripped content of the line below the comment
        '''
        line = ''
        while not self.endOfContent(ixLine):
            line = self._python._lines[ixLine].strip()
            if line.startswith('#'):
                self.put(self.getIndentLevel(ixLine), '//' + line[1:])
                ixLine += 1
            else:
                break
        return (ixLine, line)
+
if __name__ == '__main__':
    # library module: nothing to do when started stand alone
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 19.04.2018
+
+@author: hm
+'''
+import re
+import os
+import datetime
+
# date: 'yyyy.mm.dd' or 'yyyy-mm-dd' (groups: year, month, day)
stringUtilRexprDate = re.compile(r'^(\d{4})[.-](\d\d?)[.-](\d\d?)')
# time: 'hh:mm' or 'hh:mm:ss' (groups: hours, minutes, optional seconds)
stringUtilRexprTime = re.compile(r'^(\d\d?):(\d\d?)(?::(\d\d?))?$')
# integer: hexadecimal (group 1), octal (group 2) or decimal (group 3)
# NOTE(review): only the last alternative is anchored with '$' -- partial matches
# like '018' (octal alternative matching '01' only) are possible
stringUtilRexprInt = re.compile(r'^0[xX]([0-9a-fA-F]+)|0([0-7]+)|(\d+)$')
+
def arrayContains(lines, regExpr):
    '''Checks whether at least one line of a list contains a regular expression.
    @param lines: the text lines to inspect
    @param regExpr: a pattern string or an already compiled regular expression
    @return: True: at least one item of lines contains the regular expression
    '''
    pattern = re.compile(regExpr) if type(regExpr) == str else regExpr
    for current in lines:
        if pattern.search(current) != None:
            return True
    return False
+
def asInt(value, defaultValue = None):
    '''Converts a string into an integer if possible.
    Supported notations: decimal, hexadecimal ('0x...') and octal (leading '0').
    @param value: the string value to convert
    @param defaultValue: the result if value is not a valid integer
    @return: defaultValue: value is not a valid integer
             otherwise: the value as int
    '''
    # same pattern as stringUtilRexprInt: hex (group 1), octal (group 2), decimal (group 3)
    matcher = re.match(r'^0[xX]([0-9a-fA-F]+)|0([0-7]+)|(\d+)$', value)
    if matcher == None:
        rc = defaultValue
    else:
        try:
            if value.startswith('0x') or value.startswith('0X'):
                rc = int(matcher.group(1), 16)
            elif value.startswith('0'):
                rc = int(value, 8)
            else:
                rc = int(value)
        except ValueError:
            # fix: a partial regexpr match like '018' is no valid octal number;
            # the original let the ValueError escape instead of using the default
            rc = defaultValue
    return rc
+
def escChars(text):
    '''Replaces meta characters by escape sequences, e.g. a tab by '\\t'.
    @param text: the text to convert
    @return: the text with escaped meta characters
    '''
    # one single pass handles backslash, tab and newline at once
    table = str.maketrans({'\\': '\\\\', '\t': '\\t', '\n': '\\n'})
    return text.translate(table)
+
def firstMatch(aList, regExpr, start=0):
    '''Delivers the match object of the first line matching a regular expression.
    @param aList: the lines to inspect
    @param regExpr: a compiled regular expression, e.g. re.compile(r'^\w+ =\s(.*)$')
    @param start: the index of the first line to inspect
    @return: None: no line matches
             otherwise: the match object of the first hit
    '''
    rc = None
    for ix in range(start, len(aList)):
        rc = regExpr.search(aList[ix])
        if rc != None:
            break
    return rc
+
def formatSize(size):
    '''Converts a file size into a human readable string of minimal length.
    @param size: the size in bytes
    @return: a string with a number and a unit, e.g. '103 Byte' or '1.500 KB'
    '''
    if size < 1000:
        return str(size) + ' Byte'
    # find the first power of 1000 which scales the value below 1000
    for unit, divisor in (('KB', 1000.0), ('MB', 1000000.0), ('GB', 1000000000.0), ('TB', 1000000000000.0)):
        if size < divisor * 1000 or unit == 'TB':
            return '{:.3f} {:s}'.format(size / divisor, unit)
+
def fromFile(filename, sep = None):
    '''Reads a file into a string or a list of strings.
    @param filename: the name of the file to read
    @param sep: None: the result is one string
                otherwise: the content is split by this separator
    @return: the file content; '' (or its split result) if the file does not exist
    '''
    rc = ''
    if os.path.exists(filename):
        with open(filename, 'r') as fp:
            rc = fp.read()
    return rc if sep == None else rc.split(sep)
+
def grepInFile(filename, regExpr, limit = None, group = None):
    '''Searches a file for lines matching a regular expression.
    @param filename: the name of the file to inspect
    @param regExpr: a pattern string or a compiled regular expression
    @param limit: None or the maximal count of returned entries
    @param group: None: the whole (stripped) line is collected
                  otherwise: only the content of this regexpr group
    @return: a list of found lines or groups (see group), may be empty
    '''
    if type(regExpr) == str:
        regExpr = re.compile(regExpr)
    rc = []
    if os.path.exists(filename):
        with open(filename, 'r') as fp:
            for rawLine in fp:
                stripped = rawLine.strip()
                matcher = regExpr.search(stripped)
                if matcher == None:
                    continue
                rc.append(stripped if group == None else matcher.group(group))
                if limit != None:
                    limit -= 1
                    if limit <= 0:
                        break
    return rc
+
def hasContent(filename, beginOfComment = '#'):
    '''Tests whether a file contains more than blank lines and comments.
    @param filename: the file to inspect
    @param beginOfComment: lines starting with this string count as comments
    @return: True: the file exists and has at least one non empty non comment line
    '''
    if os.path.exists(filename):
        with open(filename, 'r') as fp:
            for rawLine in fp:
                stripped = rawLine.strip()
                if stripped != '' and not stripped.startswith(beginOfComment):
                    return True
    return False
+
def join(separator, args):
    '''Joins list items into one string, converting the items if needed.
    @param separator: the string put between two items
    @param args: None or the items to join; the items need not be strings
    @return: '' for args == None, otherwise the joined string
    '''
    if args == None:
        return ''
    return separator.join(str(item) for item in args)
+
def limitItemLength(array, maxLength, elipsis = '...'):
    '''Returns a copy of the array with each item limited to a maximal length.
    @param array: the source items
    @param maxLength: the maximal length of each item of the result
    @param elipsis: appended to a shortened item when there is room for it
    @return: a new array with the (possibly shortened) items
    '''
    lenElipsis = len(elipsis)
    def cut(item):
        # short items stay untouched; shortened ones get the elipsis if it fits
        if len(item) <= maxLength:
            return item
        if maxLength >= lenElipsis:
            return item[0:maxLength - lenElipsis] + elipsis
        return item[0:maxLength]
    return [cut(item) for item in array]
+
def minimizeArrayUtfError(lines, logger = None):
    '''Converts a list of byte lines into a list of UTF-8 strings.
    Lines which are no valid UTF-8 are decoded with a minimized loss.
    @param lines: the byte lines to decode
    @param logger: None or the error logger
    @return: the list of decoded lines
    '''
    rc = []
    for current in lines:
        try:
            decoded = current.decode('utf-8')
        except UnicodeDecodeError:
            decoded = minimizeStringUtfError(current, logger)
        rc.append(decoded)
    return rc
+
def minimizeStringUtfError(line, logger = None):
    '''Converts a byte sequence into an UTF-8 string.
    If the whole sequence cannot be decoded it is split recursively to minimize
    the part which must be decoded with a one-byte fallback encoding.
    @param line: the byte sequence to decode
    @param logger: None or the error logger
    @return: the decoded string
    '''
    rc = ''
    def convert(part):
        '''Tries to decode a part as UTF-8.
        @param part: the byte sequence to decode
        @return: None: decoding failed
                 otherwise: the decoded string
        '''
        try:
            rc = part.decode('utf-8')
        except UnicodeDecodeError:
            if logger != None:
                logger.error('cannot decode: ' + part.decode('ascii', 'ignore')[0:80])
            rc = None
        # fix: the original had no return statement, so convert() always delivered
        # None and every input was mis-decoded with the latin-1 fallback
        return rc
    if len(line) < 10:
        part = convert(line)
        if part != None:
            rc += part
        else:
            # undecodable and too short to split: fall back to one byte per char
            try:
                rc = line.decode('latin-1')
            except:
                rc = line.decode('ascii')
    else:
        half = int(len(line) / 2)
        part = convert(line[0:half])
        if part != None:
            rc += part
        else:
            rc += minimizeStringUtfError(line[0:half], logger)
        part = convert(line[half:])
        if part != None:
            rc += part
        else:
            rc += minimizeStringUtfError(line[half:], logger)
    return rc
+
def regExprCompile(pattern, location, logger = None, isCaseSensitive = False):
    '''Compiles a regular expression pattern with error handling.
    @param pattern: the regular expression to compile
    @param location: a hint for the error message, e.g. a configuration file name
    @param logger: None (errors are printed) or the error logger
    @param isCaseSensitive: False: matching ignores the case
    @return: None: an error occurred
             otherwise: the compiled regular expression
    '''
    try:
        return re.compile(pattern, 0 if isCaseSensitive else re.I)
    except Exception as exc:
        msg = 'error in regular expression in {}: {}'.format(location, str(exc))
        if logger == None:
            print('+++ ' + msg)
        else:
            logger.error(msg)
    return None
+
def toFile(filename, content, separator=''):
    '''Writes a string, bytes or a list into a file.
    @param filename: the name of the file to write
    @param content: a string, a byte sequence or a list (joined by separator)
    @param separator: the separator used when content is a list
    '''
    data = separator.join(content) if type(content) == list else content
    # bytes content needs the binary mode
    with open(filename, 'wb' if type(content) == bytes else 'w') as fp:
        fp.write(data)
+
def toFloat(value):
    '''Converts a string into a float.
    Possible data types: int, date, datetime, time, float.
    Value of date/datetime: seconds since 1.1.1970
    Value of time: seconds since midnight
    @param value: the string to convert (non strings are converted to str first)
    @return: the value as float
             or an error message string if nothing matched
    '''
    if type(value) == float:
        rc = value
    else:
        if type(value) != str:
            value = str(value)
        matcher = stringUtilRexprDate.match(value)
        if matcher != None:
            length = len(matcher.group(0))
            # skip the date and the separator behind it; a time part may follow
            value = value[length+1:]
            rc = datetime.datetime(int(matcher.group(1)), int(matcher.group(2)), int(matcher.group(3))).timestamp()
            matcher = stringUtilRexprTime.match(value)
            if matcher != None:
                hours, mins = int(matcher.group(1)), int(matcher.group(2))
                secs = (hours * 60 + mins)*60
                rc += secs
                if matcher.group(3):
                    rc += int(matcher.group(3))
        else:
            matcher = stringUtilRexprTime.match(value)
            if matcher != None:
                hours, mins = int(matcher.group(1)), int(matcher.group(2))
                rc = (hours * 60 + mins)*60
                if matcher.group(3):
                    rc += int(matcher.group(3))
            else:
                matcher = stringUtilRexprInt.match(value)
                if matcher != None:
                    if matcher.group(3):
                        rc = float(matcher.group(3))
                    elif matcher.group(1):
                        rc = float(int(value[2:], 16))
                    elif matcher.group(2):
                        # NOTE(review): like asInt() this can raise ValueError for a
                        # partial match (e.g. '018') -- confirm inputs are validated
                        rc = float(int(value, 8))
                else:
                    try:
                        rc = float(value)
                    except ValueError:
                        rc = 'float (or int or date(time)) expected, found: ' + value
    return rc
+
def toFloatAndType(value):
    '''Converts a string into a float and detects its data type.
    Possible data types: int, date, datetime, time, float.
    Value of date/datetime: seconds since 1.1.1970
    Value of time: seconds since midnight
    @param value: the string to convert
    @return: [float, dataType] or [error_message, dataType]
    '''
    dataType = 'undef'
    if type(value) == float:
        dataType = 'float'
        rc = value
    else:
        # NOTE(review): unlike toFloat() a non string value (e.g. int) is not
        # converted to str first -- .match() would raise; confirm callers pass strings
        matcher = stringUtilRexprDate.match(value)
        if matcher != None:
            dataType = 'date'
            length = len(matcher.group(0))
            # skip the date and the separator behind it; a time part may follow
            value = value[length+1:]
            rc = datetime.datetime(int(matcher.group(1)), int(matcher.group(2)), int(matcher.group(3))).timestamp()
            matcher = stringUtilRexprTime.match(value)
            if matcher != None:
                # 'date' + 'time' = 'datetime'
                dataType += 'time'
                hours, mins = int(matcher.group(1)), int(matcher.group(2))
                secs = (hours * 60 + mins)*60
                rc += secs
                if matcher.group(3):
                    rc += int(matcher.group(3))
        else:
            matcher = stringUtilRexprTime.match(value)
            if matcher != None:
                hours, mins = int(matcher.group(1)), int(matcher.group(2))
                dataType = 'time'
                rc = (hours * 60 + mins)*60
                if matcher.group(3):
                    rc += int(matcher.group(3))
            else:
                matcher = stringUtilRexprInt.match(value)
                if matcher != None:
                    dataType = 'int'
                    if matcher.group(3):
                        rc = float(matcher.group(3))
                    elif matcher.group(1):
                        rc = float(int(value[2:], 16))
                    elif matcher.group(2):
                        # NOTE(review): can raise ValueError for a partial octal
                        # match (e.g. '018') -- see stringUtilRexprInt
                        rc = float(int(value, 8))
                else:
                    try:
                        rc = float(value)
                        dataType = 'float'
                    except ValueError:
                        rc = 'float (or int or date(time)) expected, found: ' + value
    return [rc, dataType]
+
def toString(value, dataType, floatPrecision = None):
    '''Converts a numeric value into a string.
    @param value: the numeric value (or an already formatted string for (date)time types)
    @param dataType: 'date', 'datetime', 'time', 'float', 'int'
    @param floatPrecision: None or the number of digits behind the decimal point
    @return: the value as string
    '''
    if dataType == 'date':
        date = datetime.datetime.fromtimestamp(value)
        rc = date.strftime('%Y.%m.%d')
    elif dataType == 'datetime':
        if type(value) == str and value.find(':') >= 0:
            rc = value
        else:
            date = datetime.datetime.fromtimestamp(value)
            rc = date.strftime('%Y.%m.%d %H:%M')
    elif dataType == 'time':
        # fix: the original compared type(value) with the string literal 'str',
        # which is never true, so preformatted values were not passed through
        if type(value) == str and value.find(':') >= 0:
            rc = value
        else:
            # fix: integer division is needed; '/' yields floats which the
            # '{:2d}' format specification rejects with a ValueError
            secs = int(value)
            rc = '{:2d}:{:2d}'.format(secs // 3600, secs % 3600 // 60)
    elif floatPrecision != None:
        if type(value) == str:
            value = float(value)
        aFormat = '{' + ':.{}f'.format(floatPrecision) + '}'
        rc = aFormat.format(value)
    else:
        rc = '{}'.format(value)
    return rc
+
def tailOfWord(words, wordPrefix):
    '''Returns the part of a word behind the word prefix.
    Example: words: "-e! -m" wordPrefix: "-e" result: "!"
    @param words: a string with words separated by space or tab
    @param wordPrefix: the word starting with this prefix will be searched
    @return: None: word prefix not found
             otherwise: the word suffix
    '''
    rc = None
    if words.startswith(wordPrefix):
        ixStart = 0
    else:
        ixStart = words.find(wordPrefix)
        if ixStart > 0 and not words[ixStart-1].isspace():
            # the prefix is inside a word: search again with a preceding separator
            ixStart = words.find(' ' + wordPrefix)
            if ixStart < 0:
                ixStart = words.find('\t' + wordPrefix)
            if ixStart >= 0:
                # fix: the found index points at the separator, not at the prefix;
                # without the correction the returned tail was shifted one char left
                ixStart += 1
    if ixStart >= 0:
        ixStart += len(wordPrefix)
        ixEnd = words.find(' ', ixStart)
        ixEnd2 = words.find('\t', ixStart)
        # take the nearest of space and tab as end of the word
        if ixEnd < 0 or ixEnd2 > 0 and ixEnd2 < ixEnd:
            ixEnd = ixEnd2
        if ixEnd < 0:
            ixEnd = len(words)
        rc = words[ixStart:ixEnd]
    return rc
+
def unescChars(text):
    '''Returns the text with the escape sequences \\n, \\t and \\\\ resolved.
    @param text: the text to convert
    @return: the text with unescaped chars
    '''
    # fix: the original used chained replace() calls; replacing '\\n' before '\\\\'
    # corrupted an escaped backslash followed by 'n' (e.g. r'\\n' became '\<newline>').
    # A single left-to-right pass consumes each escape sequence exactly once.
    result = []
    ix = 0
    length = len(text)
    while ix < length:
        ch = text[ix]
        if ch == '\\' and ix + 1 < length:
            follower = text[ix + 1]
            if follower == 'n':
                result.append('\n')
                ix += 2
                continue
            if follower == 't':
                result.append('\t')
                ix += 2
                continue
            if follower == '\\':
                result.append('\\')
                ix += 2
                continue
        # unknown sequences and plain characters are copied unchanged
        result.append(ch)
        ix += 1
    return ''.join(result)
+
if __name__ == '__main__':
    # library module: nothing to do when started stand alone
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 08.06.2018
+
+@author: hm
+'''
+import re
+import os.path
+import sys
+import base.MemoryLogger
+
# shortcut for the case insensitive regular expression flag
IGNORE=re.RegexFlag.IGNORECASE
# markers used as placeholders in processed texts
MARKER_MANUAL = '°manual°'
MARKER_EXAMPLE = '°example°'
+
class Pos:
    '''A position (line and column) inside a text.'''

    def __init__(self, line, col):
        '''Constructor.
        @param line: the line index
        @param col: the column index
        '''
        self._line = line
        self._col = col

    def adaptDeleted(self, start, end):
        '''Adjusts the position after the text between start and end was deleted.
        @param start: the start position of the deleted region
        @param end: the end position of the deleted region
        '''
        behindStart = self._line > start._line or (self._line == start._line and self._col > start._col)
        if not behindStart:
            # at or before the deleted region: clamp to the region start
            self._line = start._line
            self._col = start._col
        elif self._line > end._line:
            # completely below the region: only the line count shrinks
            self._line -= end._line - start._line
        elif self._line == end._line:
            self._line = start._line
            if self._col > end._col:
                self._col = start._col
            else:
                self._col = start._col + self._col - end._col
        # positions strictly between start line and end line keep their values

    def adaptInserted(self, start, lines):
        '''Adjusts the position after lines have been inserted.
        @param start: the position of the insertion
        @param lines: the number of inserted lines
        '''
        if self._line > start._line:
            self._line += lines

    def clone(self, source):
        '''Copies the state of another position into this instance.
        @param source: the Pos instance to copy from
        '''
        self._line = source._line
        self._col = source._col

    def less(self, position):
        '''Compares a given position with this instance.
        @param position: the position to compare. Type: Pos
        @return: True: position lies before (above) this instance
        '''
        if position._line != self._line:
            return position._line < self._line
        return position._col < self._col
+
class Method:
    '''Describes a callable method of a variable, including its signature.'''

    def __init__(self, name, argNames, argTypes, defaultValues, resultType):
        '''Constructor.
        @param name: the method's name
        @param argNames: the list of argument names, e.g. ['length', 'start']
        @param argTypes: the list of argument types, e.g. ['int', 'text']
        @param defaultValues: the list of argument default values, e.g. [3, 'nice']
        @param resultType: the data type of the method's result, e.g. 'int'
        '''
        self._name = name
        self._argNames = argNames
        self._argTypes = argTypes
        self._defaultValues = defaultValues
        self._resultType = resultType

    def checkArgs(self, args):
        '''Validates the argument vector and fills in missing default values.
        Items of args may be converted in place (variable values, int conversion).
        @param args: the argument vector: each item contains int, string or Variable
        @return: None: ok
                 otherwise: the error message
        '''
        rc = None
        if len(args) > len(self._argNames):
            rc = 'too many arguments: {:d} / {:d}'.format(len(args), len(self._argNames))
        else:
            for ix, arg in enumerate(args):
                expected = self._argTypes[ix]
                if isinstance(arg, Variable):
                    if expected.startswith('var.'):
                        # 'var.<type>': a variable reference of that type is required
                        wanted = expected[4:]
                        if arg._type != wanted:
                            rc = 'not an expected variable type {:s} for argument {:d} ({:s}): {:s}'.format(wanted, ix+1, self._argNames[ix], arg._name)
                    elif arg._type != expected:
                        rc = 'not an expected type {:s} for argument {:d} ({:s}): {:s}'.format(expected, ix+1, self._argNames[ix], arg._name)
                    elif expected == 'int' or expected == 'text':
                        # the variable's value replaces the variable itself
                        args[ix] = arg._value
                elif expected == 'int' and type(arg) != int:
                    try:
                        args[ix] = int(arg)
                    except ValueError:
                        rc = 'not an int value for argument {:d} ({:s}): {:s}'.format(ix+1, self._argNames[ix], arg)
                if rc:
                    break
            # append default values for the missing trailing arguments
            for ix in range(len(args), len(self._defaultValues)):
                args.append(self._defaultValues[ix])
        return rc
+
class Variable:
    '''Base class of variables.
    '''
    def __init__(self, name, varType):
        '''Constructor.
        @param name: the variable's name
        @param varType: the variable's type: 'text', 'dict' or 'list'
        '''
        self._type = varType
        self._name = name

    def getMethod(self, name):
        '''Gets the Method instance given by name.
        The base class offers no methods at all.
        @param name: the methods name
        @return None: not found
        '''
        return None
+
class DictVariable(Variable):
    '''Implements a variable containing a dictionary: a map from string keys to values.
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: the variable's name
        '''
        Variable.__init__(self, name, 'dict')
        self._dict = dict()
        # fix: the former per-instance _addMethod('keys', ..., None, 'list') call
        # overwrote the module level registration with defaultValues=None, which
        # makes Method.checkArgs() fail on len(self._defaultValues)

    def call(self, name, argv, textProcessor):
        '''Calls a variable specific method.
        @param name: name of the method to call
        @param argv: argument vector
        @param textProcessor: delivers the environment
        @return: the return value of the method
        '''
        rc = None
        if name == 'keys':
            # fix: a DictVariable has no _list; the keys are stored into the
            # list variable given as first argument (passed by reference)
            argv[0]._list = list(self._dict.keys())
        return rc

    def getMethod(self, name):
        '''Gets the Method instance given by name.
        @param name: the methods name
        @return None: not found
        otherwise: the Method instance
        '''
        return None if name not in DictVariable._methods else DictVariable._methods[name]

    _methods = dict()
    @staticmethod
    def _addMethod(name, argNames, argTypes, defaultValues, resultType):
        DictVariable._methods[name] = Method(name, argNames, argTypes, defaultValues, resultType)
# fix: the 'listVar' argument is a list variable passed by reference ('var.list'), not a 'dict'
DictVariable._addMethod('keys', ['listVar'], ['var.list'], [None], None)
+
class IntegerVariable(Variable):
    '''Implements a variable containing an integer.
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: the variable's name
        '''
        super().__init__(name, 'int')
        # integers start with the neutral value
        self._value = 0
+
class ListVariable(Variable):
    '''Implements a variable containing a list of strings.
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: the variable's name
        '''
        Variable.__init__(self, name, 'list')
        self._list = []

    def call(self, name, argv, textProcessor):
        '''Calls a variable specific method.
        @param name: name of the method to call
        @param argv: argument vector
        @param textProcessor: delivers the environment
        @return: the return value of the method
        '''
        result = None
        if name == 'join':
            glue = argv[0]
            result = glue.join(self._list)
        elif name == 'size':
            result = len(self._list)
        return result

    def getMethod(self, name):
        '''Gets the Method instance given by name.
        @param name: the methods name
        @return None: not found
        otherwise: the Method instance
        '''
        if name in ListVariable._methods:
            return ListVariable._methods[name]
        return None

    _methods = dict()

    @staticmethod
    def _addMethod(name, argNames, argTypes, defaultValues, resultType):
        ListVariable._methods[name] = Method(name, argNames, argTypes, defaultValues, resultType)

ListVariable._addMethod('size', [], [], [], 'int')
ListVariable._addMethod('join', ['glue'], ['text'], ['\n'], None)
+
class TextVariable(Variable):
    '''Implements a variable containing a simple string.
    '''
    def __init__(self, name, value = ''):
        '''Constructor.
        @param name: the variable's name
        @param value: the initial value of the variable
        '''
        Variable.__init__(self, name, 'text')
        self._value = value

    def call(self, name, argv, textProcessor):
        '''Calls a variable specific method.
        @param name: name of the method to call
        @param argv: argument vector
        @param textProcessor: delivers the environment
        @return: the return value of the method
        '''
        result = None
        if name == 'length':
            result = len(self._value)
        elif name == 'split':
            separator = argv[1]
            # a separator like /<pattern>/ is treated as regular expression
            # NOTE(review): the default separator r'\s+' is NOT slash delimited and
            # is therefore split literally — confirm that this is intended
            isRegExpr = len(separator) > 3 and separator[0] == '/' and separator[-1] == '/'
            if isRegExpr:
                argv[0]._list = re.split(separator[1:-1], self._value, argv[2])
            else:
                argv[0]._list = self._value.split(separator, argv[2])
        return result

    def getMethod(self, name):
        '''Gets the Method instance given by name.
        @param name: the methods name
        @return None: not found
        otherwise: the Method instance
        '''
        if name in TextVariable._methods:
            return TextVariable._methods[name]
        return None

    _methods = dict()

    @staticmethod
    def _addMethod(name, argNames, argTypes, defaultValues, resultType):
        TextVariable._methods[name] = Method(name, argNames, argTypes, defaultValues, resultType)

TextVariable._addMethod('length', [], [], [], 'int')
TextVariable._addMethod('split', ['listVar', 'separator', 'maxSplit'], ['list', 'text', 'int'], [None, r'\s+', 0x7fffffff], None)
+
+class TextProcessor:
+ '''Engine to manipulate text data.
+ <current position>:
+ The processor has exactly one current position. It can be changed by the statements
+ 'goto' and 'find'. The current position can be notified by '#'.
+ <marks>:
+ The processor knows 10 "marks", notified by '#0' .. '#9'.
+ A mark can be set by the "mark" command. It copies the current position into the
+ specified mark.
+ <pattern>:
+ A pattern is notified by a delimiter (a non blank and non alfanumeric character),
+ a regular expression and the same delimiter, e.g. /\s(\w+)/ or !([a-f0-9])?!
+ <_range>:
+ The "_range" command defines a _range with a start and an end position.
+ If a _range exists all statements are executed only inside the _range.
+ Example: the hit of a replacement command is outside the _range:
+ the replacement will not be done.
+ If a "goto" or "find" command defines a current position outside the _range
+ the command sequence will be stopped.
+ <statements>:
+ !<var>[<index>] = { <int> | string }
+ d(elete) <length>|<mark>
+ else
+ f(ind) <pattern>
+ f(ind)b(ackwards)
+ fi
+ g(oto) <line> [<col>] | <mark>
+ i(nsert) <string>
+ if
+ l(oad)
+ m(ark) <mark>
+ n(umericassign)
+ p(rint)
+ r(ange) [ <markStart> [ <markEnd> ]]
+ read
+ s(ubstitute) /<pattern>/<replacement>/flags
+ v(ariable)
+ w(rite)
+ '''
def __init__(self, filename=None, content=None, logger = None):
    '''Constructor.
    @param filename: None or the file with the lines to process
    @param content: string: content to process, will be splitted into a list of lines
        array: list of lines
    @param logger: None (a MemoryLogger is created) or the logger to use
    '''
    self._logger = logger if logger else base.MemoryLogger.MemoryLogger()
    self._name = None
    # number of errors found while parsing/interpreting
    self._errors = 0
    # the text data to process: a list of lines
    self._lines = None
    self._lineNo = None
    self._changed = False
    self._ignoreMacros = False
    self._scriptName = None
    self._reFlags = 0
    self._currentPos = Pos(0, 0)
    self._startRange = Pos(0, 0)
    # name -> Variable instance
    self._vars = dict()
    # cache of compiled regular expressions, see _cacheRegExpr()
    self._reExpr = dict()
    self._endRange = Pos(0x7fffffff, 0x7fffffff)
    # Note: index 10: beginOfFile index 11: endOfFile
    self._marks = [Pos(-1, -1), Pos(-1, -1), Pos(-1, -1), Pos(-1, -1), Pos(-1, -1),
        Pos(-1, -1), Pos(-1, -1), Pos(-1, -1), Pos(-1, -1), Pos(-1, -1),
        Pos(0, 0), Pos(0, 0)]
    if content != None:
        if type(content) == list:
            self._lines = content
        elif type(content) == str:
            self._lines = content.split('\n')
        else:
            self._logger.error('unexpected content type: ' + str(type(content)))
    elif filename != None:
        self._readFile(filename)
    self._clearProgram()
+
+ def _adaptDeleted(self, start, end):
+ '''Adapts the internal positions changed by a deletion.
+ @param start: start position
+ @param end: end position
+ '''
+ self._startRange.adaptDeleted(start, end)
+ self._endRange.adaptDeleted(start, end)
+ for ix in range(len(self._marks)):
+ self._marks[ix].adaptDeleted(start, end)
+
+ def _adaptInserted(self, start, lines):
+ '''Adapts the internal positions changed by an insertion.
+ @param start: start position
+ @param lines: a list of inserted lines
+ '''
+ aCount = len(lines)
+ self._startRange.adaptInserted(start, aCount)
+ self._endRange.adaptInserted(start, aCount)
+ for ix in range(len(self._marks)):
+ self._marks[ix].adaptInserted(start, aCount)
+
def _assign(self, statements, doExecute=True):
    '''Handles the "assign" command.
    '$' <var>[ '[' <int>|<key> ']' = { <int> | <delim><string><delim> } <opts>
    @param statements: a string starting with a "assignment" command
    @param doExecute: False: no action, syntax check and command splitting
    @return: the length of the "assigment" command
    '''
    if statements == MARKER_MANUAL:
        return '''<var> = { <int> | <delim><string><delim> | <var_expression>} <opts>
<var>:
 '!'<name> or '!<name>'[' {<integer> | <string>}']'
<var_expression>
 { '!'<name> | '!<name>'.'<method>'('<args>')' }
<args>:
 { '' | <arg> { ',' <arg> }* }
<arg>:
 { <number> | <delim><string><delim> | '!'<variable> }
<opt>:
 -p<char> or --var-prefix=<char>
 Variables in <string> will be expanded
 -e<char> or --meta-esc=<char>
 String meta characters will be expanded: if <char> == '\': \t (tabulator) \n \r \\x<hexdigit><hexdigit>
'''
    elif statements == MARKER_EXAMPLE:
        return '''!number = 33
!title = 'The Bible'
!color["red"] = /f00/
!names[3] = $name
!argv=!arguments.split(";")
!text="%{prefix}title: %color[/red/]-%items[%ix]" -p%
'''
    elif not doExecute:
        # parse step: first try a pure method call...
        #.........................1...1..2...2..3......3
        matcher = self._match(r'\$(\w+)\.(\w+)\(([^()]*)\)[\s;]*',
            'call', statements, False)
        if matcher:
            argv = self._extractArgv(matcher.group(3))
            # count==3
            #..................var...............method............argv.
            self._args.append([matcher.group(1), matcher.group(2), argv])
        else:
            # ...otherwise an assignment (with number, string or method call as value)
            # ..................,.....1...1a....2......2..a........3....4..45...5.....6...6..7...7..8.....8...3....9
            matcher = self._match(r'\$(\w+)(?:\[([^\]]*)\])?\s*=\s*(\d+|(\W)(.*?)\4|\$(\w+)\.(\w+)\(([^()]*)\))\s*((?:-\w\S*|--[\w-]+(=\S+)?)\s*)*[\s;]*',
                'assignment', statements)
            if matcher:
                if matcher.group(6) != None:
                    argv = self._extractArgv(matcher.group(8))
                    # count==6
                    #...........................name...............var............method.....argv..options
                    self._args.append([matcher.group(1), matcher.group(6), matcher.group(7), argv, matcher.group(9), '!call'])
                else:
                    # count==5
                    # ................name...............key...............number............string...........options
                    self._args.append([matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(5), matcher.group(9)])
    else:
        # interpret step: the argument count distinguishes the three parsed forms
        matcher = None
        count = len(self._args[self._ixStatement])
        if count == 6:
            # assignment with call
            key = None
            (name, var, method, argv, options, dummy) = self._args[self._ixStatement]
            value = self._callMethod(var, method, argv)
            if type(value) == int:
                number = value
                string = None
            else:
                string = value
                number = None
        if count == 5:
            (name, key, number, string, options) = self._args[self._ixStatement]

        if count == 3:
            # pure method call: only the side effect matters
            (var, method, argv) = self._args[self._ixStatement]
            self._callMethod(var, method, argv)
        else:
            # NOTE(review): after the 'unknown variable' error the code still accesses
            # self._vars[name] — confirm that _error() aborts or that this cannot happen
            if name not in self._vars:
                self._error('unknown variable: ' + name)
            if string != None:
                number = None
            varInfo = self._vars[name]
            (key, string) = self._checkOptsAndExpand(options, [key, string])
            if varInfo._type == 'int':
                if key != None:
                    self._error('index not meaningful for int variable {:s}'.format(name))
                elif number != None:
                    varInfo._value = int(number)
                else:
                    try:
                        varInfo._value = int(string) if string else 0
                    except ValueError:
                        self._error("assignment of non integer to variable {:s}: '{:s}'".format(
                            name, string))
            elif varInfo._type == 'text':
                if key != None:
                    self._error('index not meaningful for test variable {:s}'.format(name))
                else:
                    varInfo._value = string if number == None else number
            elif varInfo._type == 'dict':
                if key == None:
                    self._error('missing index for dict variable ' + name)
                else:
                    varInfo._dict[key] = string if string != None else number
            elif varInfo._type == 'list':
                if key == None:
                    self._error('missing index for list variable ' + name)
                elif key == '':
                    # empty index: append at the end
                    varInfo._list.append(string if string != None else number)
                else:
                    try:
                        index = int(key)
                    except ValueError:
                        self._error('index {:s} is not a number for list variable {:s}'.format(
                            key, name))
                        index = None
                    if index != None:
                        if index < 0 or index >= len(varInfo._list):
                            self._error('index {:d} out of _range for list variable {:s}: {:d}'.format(
                                index, name, len(varInfo._list) - 1))
                        else:
                            varInfo._list[index] = string if string != None else number
            else:
                pass
    return None if matcher == None else matcher.end()
+
def _cacheRegExpr(self, pattern, name):
    '''Returns a compiled regular expression using a cache.
    NOTE(review): the cache is keyed by name only — a second call with the same
    name but a different pattern returns the stale expression; callers encode
    the variable part (e.g. the escape character) into the name to avoid this.
    @param pattern: the regular expression pattern
    @param name: the cache key (derived from the command name)
    @return: the compiled regular expression
    '''
    if name in self._reExpr:
        reExpr = self._reExpr[name]
    else:
        reExpr = base.StringUtils.regExprCompile(pattern, 'cache pattern', self._logger)
        self._reExpr[name] = reExpr
    return reExpr
+
def _callMethod(self, name, method, argv):
    '''Calculates the value of a method belonging to a given variable.
    @param name: variable name
    @param method: name of the method
    @param argv: the argument vector
    @return: None on error, otherwise the result of the method call
    '''
    rc = None
    if name not in self._vars:
        self._error('unknown variable ' + name)
    else:
        varInfo = self._vars[name]
        methodInfo = varInfo.getMethod(method)
        if methodInfo == None:
            self._error('unknown method {:s} for variable {:s} with type {:s}'.format(method, name, varInfo._type))
        else:
            # checkArgs() normalizes argv in place: unwraps scalar variables and adds defaults
            error = methodInfo.checkArgs(argv)
            if error != None:
                self._error(error)
            else:
                rc = varInfo.call(method, argv, self)
    return rc
+
+ def _checkOptions(self, options, shortOpts, longOpts):
+ '''Tests whether the options are valid.
+ @param options: the options to inspect, e.g. '-m2 -a. --max-depth=4'
+ @param shortOpts: a string with all short options (one char) e.g. 'afv'
+ @param longOpts: a blank separated list of long options, e.g. 'max-depth min-depth'
+ '''
+ if options != None:
+ opts = options.split()
+ longs = longOpts.split(' ')
+ for opt in opts:
+ if opt.startswith('--'):
+ ix = opt.find('=')
+ name = opt[2:] if ix < 0 else opt[2:ix]
+ if name not in longs:
+ self._error('unknown long option: ' + name + ' allowed: ' + longOpts)
+ elif opt.startswith('-'):
+ if opt[1] not in shortOpts:
+ self._error('unknown short option: ' + opt[1] + ' allowed: ' + shortOpts)
+
def _checkOptsAndExpand(self, options, toChange, additionalShortOpts = '', additionalLongOpts = ''):
    '''Check the options and expand if ordered.
    @param options: the list of the current options, e.g. "-v+ --max-depth=3"
    @param toChange: string or a list of strings to change. If option of expanding is given
        this string or these strings will be expanded.
    @param additionalShortOpts: '' or a string with all short options but 'e' and 'p'
    @param additionalLongOpts: '' or a string with all long options but 'meta-esc' and 'var-prefix'
        NOTE(review): the value is concatenated without a separating blank — callers
        probably must pass a leading blank; confirm
    @return: the expanded version of toChange (string or list of strings)
    '''
    self._checkOptions(options, 'ep' + additionalShortOpts, 'var-prefix meta-esc' + additionalLongOpts)
    escMeta = self._parseOption(options, 'e', 'meta-esc')
    if escMeta != None:
        if len(escMeta) != 1 or escMeta.isalnum():
            self._error('invalid meta-esc (length=1, non alfanum): ' + escMeta)
        else:
            # expand escape sequences like \t in every given string
            if type(toChange) == str:
                toChange = self._expandMeta(toChange, escMeta)
            else:
                rc = []
                for value in toChange:
                    rc.append(self._expandMeta(value, escMeta))
                toChange = rc
    varPrefix = self._parseOption(options, 'p', 'var-prefix')
    if toChange != None and varPrefix != None:
        if len(varPrefix) != 1 or varPrefix.isalnum():
            self._error('invalid var-prefix (length=1, non alfanum): ' + varPrefix)
        else:
            # expand variable references like &name in every given string
            if type(toChange) == str:
                toChange = self._expandVar(toChange, varPrefix)
            else:
                rc = []
                for value in toChange:
                    rc.append(self._expandVar(value, varPrefix))
                toChange = rc
    return toChange
+
+ def _clearProgram(self):
+ '''Resets the internal structs representing the program.
+ '''
+ # list of full statements, e.g. ["g7 2;i "a", "p #8"]
+ self._statements = []
+ # list of statement names, e.g. ["g", "i", "p"]
+ self._commands = []
+ # index in args/statements/commands
+ self._ixStatement = -1
+ # list of parts of the parsed statements, e.g. [[7, None, None, None, None],[None, "a"], ["8"]]
+ self._args = []
+ # for compound statements:
+ # _infoOfBlock[<indexOfIf>] = ['if', <indexOfElse>, <indexOfFi>]
+ # _infoOfBlock[<indexOfElse>] = ['else', <indexOfFi>]
+ self._infoOfBlock = dict()
+ # parsing: _openBlocks[topOfStack>] = ['if', <indexOfIf>]
+ self._openBlocks = []
+
def _delete(self, statements, doExecute=True):
    '''Handles the "delete" command.
    d(elete) <length> [c(hars)|l(ines)]|<mark>
    @param statements: a string starting with a "delete" command
    @param doExecute: False: no action, syntax check and command splitting
    @return: the length of the "delete" command
    '''
    if not doExecute:
        # ..................,....a.......a....b..1...1...2.c.......c...d.......d.2...3..3b
        matcher = self._match(r'd(?:elete)?\s*(?:(\d+)\s*(c(?:hars?)?|l(?:ines?)?)?|#(\d))[\s;]*',
            'delete', statements)
        self._args.append([matcher.group(1), matcher.group(2), matcher.group(3)])
    else:
        matcher = None
        (length, unit, markNo) = self._args[self._ixStatement]
        if length != None:
            length = int(length)
            if unit != None and unit.startswith('l'):
                # delete whole lines, clipped at the end of the text
                ixEnd = len(self._lines)
                if self._currentPos._line + length < ixEnd:
                    ixEnd = self._currentPos._line + length
                self._deleteBetween(Pos(self._currentPos._line, 0), Pos(ixEnd, 0))
            else:
                # delete characters inside the current line
                self._deleteBetween(self._currentPos, Pos(self._currentPos._line, self._currentPos._col + length))
        else:
            # delete between the current position and a mark
            mark = self._marks[int(markNo)]
            if mark._line < 0:
                self._error('undefined mark: ' + statements[0:20])
            elif self._currentPos.less(mark):
                # mark is above the current position:
                self._deleteBetween(mark, self._currentPos)
                self._currentPos = mark
            else:
                self._deleteBetween(self._currentPos, mark)
    return None if matcher == None else matcher.end()
+
def _deleteBetween(self, start, end):
    '''Deletes the text between two given positions.
    @param start: the start position (inclusive)
    @param end: the end position (exclusive)
    '''
    if start._line == end._line:
        # deletion inside one line
        line = self._lines[start._line]
        length = len(line)
        if end._col > length:
            self._lines[start._line] = line[0:start._col]
        else:
            self._lines[start._line] = line[0:start._col] + line[end._col:]
    else:
        # multi line deletion: merge the remainders of the first and the last line
        line = self._lines[start._line][0:start._col]
        line2 = self._lines[end._line]
        if end._col < len(line2):
            line += self._lines[end._line][end._col:]
        self._lines[start._line] = line
        # NOTE(review): the loop below runs down to start._line inclusive and
        # therefore deletes the merged line built above as well — probable
        # off-by-one; expected stop would be start._line (exclusive). Confirm.
        for ix in range(min(len(self._lines) - 1, end._line), start._line - 1, -1):
            del self._lines[ix]
    self._adaptDeleted(start, end)
+
def _else(self, doExecute):
    '''Handles the "else" command.
    @param doExecute: False: parsing step True: interpreting step
    '''
    if not doExecute:
        ixElse = len(self._args)
        # "else" has no arguments but needs a slot to keep the indexes in sync
        self._args.append(None)
        blocks = len(self._openBlocks) - 1
        if blocks < 0:
            self._error('else without if')
        elif self._openBlocks[blocks][0] != 'if':
            self._error('else without if: end of {:s} expected'.format(self._openBlocks[blocks][0]))
        else:
            ixIf = self._openBlocks[blocks][1]
            if self._infoOfBlock[ixIf][1] != None:
                self._error('2nd else found. First is statement ' + str(self._infoOfBlock[ixIf][1]))
            else:
                # the index of the matching fi is patched in later by _fi()
                self._infoOfBlock[ixElse] = ['else', None]
                self._infoOfBlock[ixIf][1] = ixElse
    else:
        # reaching "else" while interpreting means the then-block was executed:
        # skip to the matching fi
        ixFi = self._infoOfBlock[self._ixStatement][1]
        self._ixStatement = ixFi
+
+ def _error(self, msg):
+ '''Handles an error.
+ @param msg: the error message
+ '''
+ self._errors += 1
+ no = self._ixStatement + 1
+ if no <= 0:
+ no = len(self._statements) + 1
+ self._logger.error(msg + ' [statement {:d}]'.format(no))
+
def _executeScript(self, script):
    '''Executes a script: parses all lines, then interprets the program.
    @param script: a list of lines representing the script
    '''
    self._lineNo = 0
    for line in script:
        self._lineNo += 1
        # lines starting with '#' are comments
        if not line.strip().startswith('#'):
            self.parse(line)
    # NOTE(review): interpreting once after parsing the whole script — confirm
    # against the original indentation (per-line interpretation would re-run
    # all previously parsed statements)
    self.interpret()
+
+ def _expandMeta(self, string, escChar = '\\', varChar = '&'):
+ '''Expands variables and meta characters.
+ @param string: the string containing meta characters and variables, e.g. "\\t\\x2e"
+ @param escChar: the introduction of a meta character, e.g. '\\'
+ @return string with each meta char and variable replaced by its value
+ '''
+ # ..........................1.....................1
+ pattern = '\\' + escChar + '([tnr]|x[0-9a-fA-F]{2})'
+ reExpr = self._cacheRegExpr(pattern, 'expandmeta' + escChar)
+ rc = ''
+ lastPos = 0
+ for matcher in reExpr.finditer(string):
+ found = matcher.group(1)
+ tag = found[0]
+ if tag == 't':
+ expanded = '\t'
+ elif tag == 'n':
+ expanded = '\n'
+ elif tag == 'r':
+ expanded = '\r'
+ else:
+ expanded = chr(int(found[1:3], 16))
+ rc += string [lastPos:matcher.start()] + expanded
+ lastPos = matcher.end()
+ rc += string[lastPos:]
+ return rc
+
def _expandVar(self, string, varChar = '&'):
    '''Expands variables.
    @param string: the string containing variables, e.g. "\t&{x}y&dict[key] &list[3]"
    @param varChar: the introduction of a variable, e.g. '&'
    @return string with each variable replaced by its value
    '''
    # replace scalar variables: may be in index expression of a compound variables, e.g. &x[&ix]
    pattern = '\\' + varChar + '(\w+|\{\w+\})'
    reExpr = self._cacheRegExpr(pattern, 'expandvar' + varChar)
    rc = ''
    lastPos = 0
    for matcher in reExpr.finditer(string):
        found = matcher.group(1)
        endPos = matcher.end()
        if endPos < len(string) and string[endPos] == '[':
            # a compound variable: handled by the second pass below
            continue
        name = found[1:-1] if found.startswith('{') else found
        if name not in self._vars:
            self._error('unknown variable {:s} while string expanding'.format(name))
            break
        varInfo = self._vars[name]
        if varInfo._type not in ['int', 'text']:
            self._error('wrong type {:s} of variable {:s} while string expanding'.format(varInfo._type, name))
            break
        rc += string [lastPos:matcher.start()] + str(varInfo._value)
        lastPos = matcher.end()
    rc += string[lastPos:]
    #replace dictionaries and lists:
    # ..........................1...........1..2......2
    pattern = '\\' + varChar + '(\w+|\{\w+\})\[([^\]]+)\]'
    reExpr = self._cacheRegExpr(pattern, 'expandcompound' + varChar)
    string = rc
    rc = ''
    lastPos = 0
    for matcher in reExpr.finditer(string):
        found = matcher.group(1)
        name = found[1:-1] if found.startswith('{') else found
        if name not in self._vars:
            self._error('unknown variable {:s} while string expanding'.format(name))
            break
        varInfo = self._vars[name]
        index = matcher.group(2)
        if varInfo._type == 'list':
            # NOTE(review): int(index) raises ValueError for a non numeric index —
            # the following check suggests int() was expected to return 0 instead
            intVal = int(index)
            if intVal == 0 and index != '0':
                self._error('index "{:s}" of list variable {:s} is not an integer'.format(index, name))
                break
            # NOTE(review): the upper bound check uses '>' — index == len passes
            # and raises IndexError below; confirm whether '>=' was intended
            if intVal < 0 or intVal > len(varInfo._list):
                self._error('wrong index {:d} of list variable {:s}: 0-{:d}'.format(intVal, name, -1+len(varInfo._list)))
                break
            expanded = varInfo._list[intVal]
        elif varInfo._type == 'dict':
            if index not in varInfo._dict:
                self._error('wrong index {:s} for dict variable {:s}'.format(index, name))
                break
            expanded = varInfo._dict[index]
        else:
            self._error('variable {:s} is not a list or a dictionary'.format(name))
            break
        rc += string [lastPos:matcher.start()] + expanded
        lastPos = matcher.end()
    rc += string[lastPos:]
    return rc
+
def _extractArgv(self, args):
    '''Puts an argument list into a list ("argument vector").
    @param args: the arguments as string: a comma separated list of terms, e.g. "3, 'wow', buffer"
    @return: the argument vector, e.g. [3, 'wow', <Variable-instance>]
    '''
    argv = []
    # .........................a..1..12...2...3...3.4...........4a
    matcher = self._match(r'\s*(?:(\W)(.*?)\1|(\d+)|([a-zA-Z]\w*))?\s*', 'arg', args)
    while args != '' and matcher != None:
        if matcher.group(2) != None:
            # a delimited string
            argv.append(matcher.group(2))
        elif matcher.group(3) != None:
            # a number
            argv.append(int(matcher.group(3)))
        elif matcher.group(4) != None:
            # a variable name: passed as Variable instance
            name = matcher.group(4)
            if name not in self._vars:
                self._error('unknown variable ' + name)
                break
            argv.append(self._vars[name])
        # continue behind the current argument and an optional comma
        args = args[matcher.end():]
        if args.startswith(','):
            args = args[1:]
        matcher = self._reExpr['arg'].match(args)
    if args != '' and self._errors == 0:
        self._error('invalid syntax in argument list: ' + args)
    return argv
+
def _fi(self, doExecute):
    '''Handles the "fi" command (end of an if block).
    @param doExecute: False: parsing step True: interpreting step
    '''
    if not doExecute:
        ixFi = len(self._args)
        # "fi" has no arguments but needs a slot to keep the indexes in sync
        self._args.append(None)
        blocks = len(self._openBlocks) - 1
        if blocks < 0:
            self._error('fi without if')
        elif self._openBlocks[blocks][0] != 'if':
            self._error('unexpected fi: expected block end of ' + self._openBlocks[blocks][0])
        else:
            # back patch the fi index into the if (and the else, if present)
            ixIf = self._openBlocks[blocks][1]
            # self._infoOfBlock[ixFi] = ['fi', ixIf]
            self._infoOfBlock[ixIf][2] = ixFi
            ixElse = self._infoOfBlock[ixIf][1]
            if ixElse != None:
                self._infoOfBlock[ixElse][1] = ixFi
            self._openBlocks.pop()
    # interpreting: "fi" is a no-operation
+
def _find(self, statements, doExecute=True):
    '''Handles the "find" command.
    f(ind) [b(ackwards)] <pattern><flags>
    @param statements: a string starting with a "find" command
    @param doExecute: False: no action, syntax check and command splitting
    @return: the length of the "find" command
    '''
    if not doExecute:
        # .......................a.....a.....1b..........b....1.2..23...3.....4....4.5
        matcher = self._match(r'f(?:ind)?\s*(b(?:ackwards)?\s*)?(\W)(.*?)\2\s*([ic])?(\s*(?:(?:-\w\S*|--[\w-]+(?:=\S+)?)\s*)*)[\s;]*',
            'find', statements)
        self._args.append([matcher.group(1) != None, matcher.group(3), matcher.group(4), matcher.group(5)])
    else:
        matcher = None
        (backwards, pattern, flags, options) = self._args[self._ixStatement]
        pattern = self._checkOptsAndExpand(options, pattern)
        if backwards:
            aTuple = self.rindexOf(pattern, self._currentPos._line, self._currentPos._col, flags)
        else:
            aTuple = self.indexOf(pattern, self._currentPos._line, self._currentPos._col, flags)
        if aTuple == None:
            self._error('pattern not found: ' + pattern)
        else:
            # a successful find moves the current position to the hit
            self._currentPos._line = aTuple[0]
            self._currentPos._col = aTuple[1]
    return None if matcher == None else matcher.end()
+
+ def _firstPos(self, firstLine, firstCol):
+ '''Calculates the first position respecting the _range
+ @param firstLine: the line number to start
+ @param firstCol: the column to start
+ @return: a tuple (line, col)
+ '''
+ ix = max(firstLine, 0, self._startRange._line)
+ col = 0
+ if ix == firstLine and firstCol > col:
+ col = firstCol
+ if self._startRange._line == ix and self._startRange._col > firstCol:
+ firstCol = self._startRange._col
+ return (ix, col)
+
def _for(self, statements, doExecute=True):
    '''Handles the "for" command.
    A for statement has a variable, a value storage and a block.
    Each loop through the block is introduced by setting the variable to the next value from the storage.
    If the storage is a list variable the values are the list values.
    If the storage is a dict variable the values are the keys of the dict.
    If the storage is a string the values are the word in the string (separated by blanks).
    NOTE(review): this method looks unfinished: the interpreting branch only
    expands its arguments without any loop handling, and nothing is returned.
    @param statements: a string starting with a "for" command
    @param doExecute: False: no action, syntax check and command splitting
    '''
    if not doExecute:
        # ..........................1...1....a..2...2.3..34...4..a5
        matcher = self._match(r'for (\w+) in (?:(\w+)|(\W)(.*?)\3|range)(\s*(?:(?:-\w\S*|--[\w-]+(?:=\S+)?)\s*)*)',
            'for', statements)
        ixIf = len(self._args)
        self._openBlocks.append(['if', ixIf])
        self._infoOfBlock[ixIf] = ['if', None, None]
        # NOTE(review): the pattern above defines only 5 groups —
        # matcher.group(6) will raise IndexError; confirm the intended groups
        self._args.append([matcher.group(2), matcher.group(3), matcher.group(5), matcher.group(6)])
    else:
        matcher = None
        (pattern, flags, expression, options) = self._args[self._ixStatement]
        (pattern, expression) = self._checkOptsAndExpand(options, [pattern, expression])
+
def _if(self, statements, doExecute=True):
    '''Handles the "if" command.
    The if statement has a condition and one or two blocks (then block, else block)
    Depending on the condition the first or the second block will be executed.
    @param statements: a string starting with a "if" command
    @param doExecute: False: no action, syntax check and command splitting
    @return: the length of the "if" command
    '''
    if not doExecute:
        # ...........................a.........1..12...2.....3....3..4..45...5..a6
        matcher = self._match(r'if\s+(?:find\s+(\W)(.*?)\1\s*([ic])?|(\W)(.*?)\4)(\s*(?:(?:-\w\S*|--[\w-]+(?:=\S+)?)\s*)*)then',
            'if', statements)
        ixIf = len(self._args)
        # register the open block; else/fi indexes are patched in later
        self._openBlocks.append(['if', ixIf])
        self._infoOfBlock[ixIf] = ['if', None, None]
        self._args.append([matcher.group(2), matcher.group(3), matcher.group(5), matcher.group(6)])
    else:
        matcher = None
        (pattern, flags, expression, options) = self._args[self._ixStatement]
        (pattern, expression) = self._checkOptsAndExpand(options, [pattern, expression])
        if pattern != None:
            # condition form 1: "if find <pattern>": true if the pattern is found
            aTuple = self.indexOf(pattern, self._currentPos._line, self._currentPos._col, flags)
            condition = aTuple != None
            if condition:
                # a successful "if find" moves the current position to the hit
                self._currentPos._line = aTuple[0]
                self._currentPos._col = aTuple[1]
        elif expression != None:
            # condition form 2: a string expression: false if empty or '0'
            condition = expression != '' and expression != '0'
        else:
            self._error('I am confused: if: no pattern and no expression')
            condition = False
        if not condition:
            # skip the then block: continue at the else (or fi)
            ixElse = self._infoOfBlock[self._ixStatement][1]
            self._ixStatement = ixElse
    return None if matcher == None else matcher.end()
+
def _insert(self, statements, doExecute=True):
    '''Handles the "insert" command.
    Inserts a given string at the current position.
    i(nsert) <delim>string<delim> [<options>]
    @param statements: a string starting with a "insert" command
    @param doExecute: False: no action, syntax check and command splitting
    @return: the length of the "insert" command
    '''
    if not doExecute:
        # .......................a.......a....1..12...2..3...b..c.................d......d.c...b.3
        matcher = self._match(r'i(?:nsert)?\s*(\W)(.*?)\1(\s*(?:(?:-\w\S*|--[\w-]+(?:=\S+)?)\s*)*)[\s;]*',
            'insert', statements)
        self._args.append([matcher.group(2), matcher.group(3)])
    else:
        matcher = None
        (toInsert, options) = self._args[self._ixStatement]
        toInsert = self._checkOptsAndExpand(options, toInsert)
        if toInsert != None:
            lines = toInsert.split('\n')
            ix = self._currentPos._line
            col = self._currentPos._col
            if len(lines) == 1:
                # single line: insert inside the current line
                self._lines[ix] = self._lines[ix][0:col] + toInsert + self._lines[ix][col:]
            elif ix >= len(self._lines):
                # current position behind the end: append all lines
                for line in lines:
                    self._lines.append(line)
            else:
                # multi line: split the current line, put the pieces around the insertion
                self._lines.insert(ix + 1, lines[len(lines) - 1] + self._lines[ix][col:])
                self._lines[ix] = self._lines[ix][0:col] + lines[0]
                for ix2 in range(1, len(lines) - 1):
                    self._lines.insert(ix + ix2, lines[ix2])
            # NOTE(review): _adaptInserted() shifts positions by len(lines), which
            # is 1 too many (a N-line insertion adds N-1 new lines) — confirm
            self._adaptInserted(self._currentPos, lines)

    return None if matcher == None else matcher.end()
+
+ def _goto(self, statements, doExecute=True):
+ '''Handles the "goto" command.
+ Changes the current position by line/column (absolute and relative) or by a given mark
+ g(oto) [+|-]<line> [[+|-]<col>] | <mark>
+ @param statements: a string starting with a "delete" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "goto" command
+ '''
+ if not doExecute:
+ # .......................a.....a....b..1.....12...2c.....3.....34...4c...5.....-.5b
+ matcher = self._match(r'g(?:oto)?\s*(?:([+-]?)(\d+)(?:\s*([-+]?)(\d+))?|#([<>0-9]))?[\s;]*',
+ 'goto', statements)
+ self._args.append([matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(4), matcher.group(5)])
+ else:
+ matcher = None
+ (signLine, line, signCol, col, markNo) = self._args[self._ixStatement]
+ if line != None:
+ line = int(line)
+ if signLine == '':
+ self._currentPos._line = line
+ elif signLine == '+':
+ self._currentPos._line += line
+ else:
+ self._currentPos._line -= line
+ if col == None:
+ self._currentPos._col = 0
+ else:
+ col = int(col)
+ if signCol == '':
+ self._currentPos._col = col
+ elif signCol == '+':
+ self._currentPos._col += col
+ else:
+ self._currentPos._col -= col
+ elif markNo != None:
+ ixMark = self._indexOfMark(markNo)
+ if self._marks[ixMark] < 0:
+ self._error('undefined mark in "goto" command: #' + str(ixMark))
+ else:
+ self._currentPos.clone(self._marks[ixMark])
+ else:
+ self._errors('I am confused')
+ return None if matcher == None else matcher.end()
+
+ def _indexOfMark(self, name):
+ '''Returns the index number of the mark given by name.
+ @param name: '<', '>' or a decimal digit
+ @return: 0..11
+ '''
+ if name == '<':
+ rc = 10
+ elif name == '>':
+ rc = 11
+ self._marks[rc]._line = len(self._lines)
+ else:
+ rc = int(name)
+ return rc
+
+ def _load(self, statements, doExecute=True):
+ '''Handles the "load" command.
+ Transfers content from the file content (current position) to a given variable.
+ l(oad) <variable> { #[0-9]|<number> [c(ars)|l(ines)] }
+ @param statements: a string starting with a "load" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "load" command
+ '''
+ if not doExecute:
+ # .......................1...1.......2...2...3.4.......4.5...5...6.a.......a...b.....b...6.3
+ matcher = self._match(r'l(oad)?\s+\$?(\w+)\s*(#([<>0-9])|(\d+)\s*(l(?:ines?)?|c(?:hars?)?)?)?[\s;]*', 'load', statements)
+ if matcher:
+ self._args.append([matcher.group(2), matcher.group(4), matcher.group(5), matcher.group(6)])
+ else:
+ matcher = None
+ loadAll = False
+ (name, markNo, number, unit) = self._args[self._ixStatement]
+ if markNo != None:
+ ixMark = self._indexOfMark(markNo)
+ mark = self._marks[ixMark]
+ if mark.less(self._currentPos):
+ content = self._loadRange(self._currentPos, mark)
+ else:
+ content = self._loadRange(mark, self._currentPos)
+ elif number != None:
+ number = int(number)
+ if unit == None or unit.startswith('c'):
+ content = self._loadRange(self._currentPos,
+ Pos(self._currentPos._line, self._currentPos._col + number))
+ else:
+ content = self._loadRange(self._currentPos,
+ Pos(self._currentPos._line + number, 0))
+ else:
+ loadAll = True
+ if name not in self._vars:
+ self._error('unknown variable {:s} in load'.format(name))
+ else:
+ varInfo = self._vars[name]
+ if loadAll and varInfo._type != 'list':
+ bof = self._indexOfMark('<')
+ eof = self._indexOfMark('>')
+ content = content = self._loadRange(self._marks[bof], self._marks[eof])
+ if varInfo._type == 'int':
+ try:
+ varInfo._value = int(content)
+ except ValueError:
+ self._error('wrong value {:s} for the int variable {:s}'.format(content[0:20], name))
+ elif varInfo._type == 'text':
+ varInfo._value = content
+ elif varInfo._type == 'list':
+ if loadAll:
+ varInfo._list = self._lines[:]
+ else:
+ varInfo._list = content.split('\n')
+ else:
+ self._error('cannot load into the {:s} variable {:s}'.format(varInfo._type, name))
+ return None if matcher == None else matcher.end()
+
    def _loadRange(self, start, end):
        '''Returns the content between two positions as one string.
        @param start: the start position
        @param end: the end position (excluded). end > start
        @return: the text between start and end, lines separated by '\n'
        '''
        countLines = len(self._lines)
        if start._line == end._line:
            # both positions in the same line: a simple (clipped) substring
            rc = self._lines[start._line][start._col:min(len(self._lines[start._line]), end._col)]
        else:
            # the tail of the first line ...
            rc = self._lines[start._line][start._col:]
            # ... all full lines in between (clipped to the existing lines) ...
            for ix in range(start._line + 1, min(end._line, countLines)):
                rc += '\n' + self._lines[ix]
            # ... and the head of the end line, if it exists
            if end._line < countLines:
                endCol = min(end._col, len(self._lines[end._line]))
                rc += '\n' + self._lines[end._line][0:endCol]
        return rc
+
+ def _mark(self, statements, doExecute=True):
+ '''Handles the "mark" command.
+ Sets a given mark to the current position.
+ m(ark) #[0-9]
+ @param statements: a string starting with a "mark" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "mark" command
+ '''
+ if not doExecute:
+ # .......................a.....a.....1.....1
+ matcher = self._match(r'm(?:ark)?\s*#([0-9]);]*', 'mark', statements)
+ self._args.append(int(matcher.group(1)))
+ else:
+ matcher = None
+ no = self._args[self._ixStatement]
+ self._marks[no].clone(self._currentPos)
+ return None if matcher == None else matcher.end()
+
+ def _match(self, pattern, name, statements, cryOnError=True):
+ '''Returns a re.Matcher instance of a pattern for a given command.
+ A cache of compiled reg. expressions will be maintained.
+ @param pattern: the regular expression pattern
+ @param name: the name of the command
+ @param statements: a text starting a statements with the given command
+ '''
+ reExpr = self._cacheRegExpr(pattern, name)
+ matcher = reExpr.match(statements)
+ if matcher == None and cryOnError:
+ self._error('syntax error in "{:s}" command. Syntax: {:s}\n{:s}'.format(
+ name, pattern, statements[0:40], ) )
+ return matcher
+
+ def _numericAssignment(self, statements, doExecute=True):
+ '''Handles the "assign" command.
+ '$' <var> {-=|+=|*=|/=|%=|:=} { <term> {-|+|*|/|%} <term> }
+ @param statements: a string starting with a "numeric assignment" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "numeric assignment" command
+ '''
+ if not doExecute:
+ # ..................,.....1...1...2.......2....a..3.....3...4...4a...b..5.......5...c..6.....6...7...7cb
+ matcher = self._match(r'\$(\w+)\s*([-+*/%:])=\s*(?:(-?\d+)|\$(\w+))\s*(?:([-+*/%])\s*(?:(-?\d+)|\$(\w+)))?[\s;]*',
+ 'numassign', statements)
+ if matcher:
+ self._args.append([matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(4),
+ matcher.group(5), matcher.group(6), matcher.group(7)])
+ else:
+ matcher = None
+ (name, opAssignment, number1, var1, op, number2, var2) = self._args[self._ixStatement]
+ if name not in self._vars:
+ self._error('unknown variable: ' + name)
+ else:
+ varInfo = self._vars[name]
+ term1 = self._term(number1, var1)
+ if term1 != None and op != None:
+ term2 = self._term(number2, var2)
+ if term2 != None:
+ if op == '+':
+ term1 += term2
+ elif op == '-':
+ term1 -= term2
+ elif op == '*':
+ term1 *= term2
+ elif op == '/':
+ if term2 == 0:
+ self._error('division by 0')
+ term1 = None
+ else:
+ term1 = int(term1 / term2)
+ elif op == '%':
+ if term2 == 0:
+ self._error('0 at % operator')
+ term1 = None
+ else:
+ term1 %= term2
+ if term1 != None:
+ if opAssignment == ':':
+ varInfo._value = term1
+ elif varInfo._value != None:
+ if opAssignment == '+':
+ varInfo._value += term1
+ elif opAssignment == '-':
+ varInfo._value -= term1
+ elif opAssignment == '*':
+ varInfo._value *= term1
+ elif opAssignment == '/':
+ if term1 == 0:
+ self._error('division by 0')
+ varInfo._value = None
+ else:
+ varInfo._value = int(varInfo._value / term1)
+ elif opAssignment == '%':
+ varInfo._value %= term1
+ return None if matcher == None else matcher.end()
+
+ def _oneStatement(self, cmd, statements, doExecute):
+ '''Executes or parses one statements.
+ @param cmd: the statement name
+ @param statements: the statements. The first of them will be executed / parsed
+ @param doExecute: True: the statement will be interpreted. Otherwise it will be parsed to split
+ @param opt: optional parameter
+ @return: the length of the first statement
+ '''
+ length = None
+ if cmd == 'assign':
+ length = self._assign(statements, doExecute)
+ elif cmd == 'numeric':
+ length = self._numericAssignment(statements, doExecute)
+ elif cmd == 'd' or cmd == 'delete':
+ length = self._delete(statements, doExecute)
+ elif cmd == 'else':
+ length = 4
+ self._else(doExecute)
+ elif cmd == 'f' or cmd == 'find' or cmd == 'fb' or cmd == 'findbackwards' or cmd == 'fbackwards':
+ length = self._find(statements, doExecute)
+ elif cmd == 'fi':
+ length = 2
+ self._fi(doExecute)
+ elif cmd == 'g' or cmd == 'goto':
+ length = self._goto(statements, doExecute)
+ elif cmd == 'i' or cmd == 'insert':
+ length = self._insert(statements, doExecute)
+ elif cmd == 'if':
+ length = self._if(statements, doExecute)
+ elif cmd == 'l' or cmd == 'load':
+ length = self._load(statements, doExecute)
+ elif cmd == 'm' or cmd == 'mark':
+ length = self._mark(statements, doExecute)
+ elif cmd == 'p' or cmd == 'print':
+ length = self._print(statements, doExecute)
+ elif cmd == 'r' or cmd == '_range':
+ length = self._range(statements, doExecute)
+ elif cmd == 'read':
+ length = self._read(statements, doExecute)
+ elif cmd == 's' or cmd == 'substitute':
+ length = self._substitute(statements, doExecute)
+ elif cmd == 'v' or cmd == 'var' or cmd == 'variable':
+ length = self._variable(statements, doExecute)
+ elif cmd == 'w' or cmd == 'write':
+ length = self._write(statements, doExecute)
+ else:
+ self._logger.error('unknown command: ' + cmd)
+ return length
+
+ def _parseOption(self, options, shortOption, longOption):
+ '''Gets the value of a given option from all options.
+ @param options: a string with all found options
+ @param shortOption: a char as short option name
+ @param longOption: a string as long option name
+ @return: None: option not found
+ otherwise: the value of the option
+ '''
+ rc = None
+ if options != None and options != '':
+ rc = base.StringUtils.tailOfWord(options, '--' + longOption + '=')
+ if rc == None:
+ rc = base.StringUtils.tailOfWord(options, '--' + longOption)
+ if rc == None:
+ rc = base.StringUtils.tailOfWord(options, '-' + shortOption)
+ return rc
+
    def _print(self, statements, doExecute=True):
        '''Handles the "print" command.
        Prints a part of the file content or a given string to stdout or the logger.
        p(rint) { #[<>0-9]|<number> [c(hars)|l(ines)]|<delim><string><delim> } [<opts>]
        @param statements: a string starting with a "print" command
        @param doExecute: False: no action, syntax check and command splitting
        @return: the length of the "print" command
        '''
        if not doExecute:
            # .......................a......a....b...1.....1.2...2...3.c.......c...d.......d.3..4..45...5..b6...e..f.................g......g.f...e.6
            matcher = self._match(r'p(?:rint)?\s+(?:#([<>0-9])|(\d+)\s*(l(?:ines?)?|c(?:hars?)?)?|(\W)(.*?)\4)(\s*(?:(?:-\w[^;\s]*|--[\w-]+(?:=\S+)?)\s*)*)[\s;]*',
                'print', statements)
            self._args.append([matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(5), matcher.group(6)])
        else:
            matcher = None
            (markNo, number, unit, content, options) = self._args[self._ixStatement]
            if content != None:
                # a literal string was given: expand variables/escapes in it
                content = self._checkOptsAndExpand(options, content, 'l', 'to-log')
            else:
                # only validate the options; the content comes from the file below
                self._checkOptions(options, 'l', 'to-log')
            toLog = self._parseOption(options, 'l', 'to-log')
            if markNo != None:
                # print the text between the mark and the current position
                ixMark = self._indexOfMark(markNo)
                mark = self._marks[ixMark]
                # NOTE(review): the argument order looks inverted against _loadRange's
                # "end > start" contract (same pattern as in _load) -- confirm Pos.less()
                if mark.less(self._currentPos):
                    content = self._loadRange(self._currentPos, mark)
                else:
                    content = self._loadRange(mark, self._currentPos)
            elif number != None:
                number = int(number)
                # the default unit is chars
                if unit == None or unit.startswith('c'):
                    content = self._loadRange(self._currentPos,
                        Pos(self._currentPos._line, self._currentPos._col + number))
                else:
                    content = self._loadRange(self._currentPos,
                        Pos(self._currentPos._line + number, 0))
            # do not print a trailing empty line
            if content.endswith('\n'):
                content = content[0:-1]
            # the option -l / --to-log redirects the output into the logger
            if toLog == None:
                print(content)
            else:
                self._logger.log(content)
        return None if matcher == None else matcher.end()
+
    def _lastPos(self, lastLine, lastCol):
        '''Calculates the last position respecting the _range.
        @param lastLine: the line number to start
        @param lastCol: the column to start
        @return: a tuple (line, col)
        '''
        # clip the line index to the existing lines and to the end of the range
        ix = min(lastLine, len(self._lines) - 1, self._endRange._line)
        col = len(self._lines[ix])
        if ix == lastLine and lastCol < col:
            col = lastCol
        # NOTE(review): this assigns to the parameter "lastCol" which is never read
        # afterwards -- possibly "col" was intended here; confirm before changing
        if self._endRange._line == ix and self._endRange._col < lastCol:
            lastCol = self._endRange._col
        return (ix, col)
+
+ def _range(self, statements, doExecute=True):
+ '''Handles the "_range" command.
+ Sets a line _range for operating.
+ r(ange) [ <markStart> [ <markEnd> ]]
+ @param statements: a string starting with a "_range" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "_range" command
+ '''
+ if not doExecute:
+ # ....................a......a....b..1...1...c...2..2.cb
+ matcher = re.match('^r(?:ange)?\s*(?:(#\d)\s*(?:#(\d)?))?[;\s]*', statements)
+ self._args.append([int(matcher.group(1)), matcher.group(2)])
+ else:
+ matcher = None
+ (start, end) = self._args[self._ixStatement]
+ if end == None:
+ self._startRange = Pos(0, 0)
+ self._endRange = Pos(len(self._lines), 0)
+ else:
+ self._startRange.clone(self._marks[start])
+ if end == None:
+ self._endRange = Pos(len(self._lines), 0)
+ else:
+ self._endRange.clone(self._marks[int(end)])
+ return None if matcher == None else matcher.end()
+
+ def _read(self, statements, doExecute=True):
+ '''Handles the "read" command.
+ Reads the file given by name
+ r(ead) <delim><string><delim> [<opts>]
+ @param statements: a string starting with a "write" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "write" command
+ '''
+ if not doExecute:
+ # .............................1..12...2..3...c..d.................e......e.d...c.3
+ matcher = self._match(r'read\s*(\W)(.*?)\1(\s*(?:(?:-\w[^\s;]*|--[\w-]+(?:=\S+)?)\s*)*)[\s;]*', 'read', statements)
+ self._args.append([matcher.group(2), matcher.group(3)])
+ else:
+ matcher = None
+ (name, options) = self._args[self._ixStatement]
+ if name == None:
+ name = self._name
+ else:
+ name = self._checkOptsAndExpand(options, name)
+ self._readFile(name)
+ return None if matcher == None else matcher.end()
+
+ def _readFile(self, name):
+ '''Reads a given file into the internal lines.
+ @param name: the filename
+ '''
+ self._lines = []
+ if not os.path.exists(name):
+ self._error('file does not exists: ' + name)
+ else:
+ with open(name, "r") as fp:
+ self._lines = fp.read().split('\n')
+
+ def _readStdIn(self):
+ '''Reads the content of stdin and returns the array of lines.
+ @return: the list of lines
+ '''
+ rc = []
+ for line in sys.stdin:
+ if line.endswith('\n'):
+ line = line[0:-1]
+ if line.endswith('\r'):
+ line = line[0:-1]
+ rc.append(line)
+ return rc
+
+ def _substitute(self, statements, doExecute=True):
+ '''Handles the "find" command.
+ s(ubstitute) <delim>pattern<delim>replacement<delim> [c|i] <opts>
+ @param statements: a string starting with a "substitute" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "substitute" command
+ '''
+ if not doExecute:
+ # .......................a...........a....1..12...2..3...3.....4....4.5...b..c...............d....d.c...b.5
+ matcher = self._match(r's(?:ubstitute)?\s*(\W)(.*?)\1(.*?)\1\s*([ic])?(\s*(?:(-\w\S*|--[\w-]+(=\S+)?)\s*)*)[\s;]*',
+ 'substitute', statements)
+ self._args.append([matcher.group(2), matcher.group(3), matcher.group(4), matcher.group(5)])
+ else:
+ matcher = None
+ (pattern, replacement, flags, options) = self._args[self._ixStatement]
+ (pattern, replacement) = self._checkOptsAndExpand(options, [pattern, replacement],
+ 'l', ' limit')
+ limit = self._parseOption(options, 'l', 'limit')
+ if limit == None:
+ limit = 0x7fffffff
+ else:
+ try:
+ limit = int(limit)
+ except ValueError:
+ self._error('+++ option limit is invalid: ' + limit)
+ limit = 1
+ self.replace(pattern, replacement, limit, self._currentPos._line, self._currentPos._col, flags)
+ return None if matcher == None else matcher.end()
+
+ def _term(self, number, name):
+ '''Returns the numeric value of a term: a number or a numeric variable.
+ @param number: None or a numeric constant
+ @param name: None or a variable name. Must contain a numeric value
+ @return: None: error found
+ otherwise: the numeric value of the term
+ '''
+ rc = None
+ if number != None:
+ rc = int(number)
+ elif name != None:
+ if name not in self._vars:
+ self._error('unknown variable: ' + name)
+ else:
+ varInfo = self._vars[name]
+ if varInfo._type == 'int':
+ rc = varInfo._value
+ elif varInfo._type == 'text':
+ try:
+ rc = int(varInfo._value)
+ except ValueError:
+ self._error('text variable {:s} contains no numeric value: {:s}'.format(name, varInfo._value))
+ else:
+ self._error('numeric term expected but variable {:s} has type {:s}'.format(name, varInfo._type))
+ else:
+ self._error('I am confused: no num, no term')
+ return rc
+
+ def _variable(self, statements, doExecute=True):
+ '''Handles the "variable" command.
+ c(reate)\s+(var(iable)?\s+$(\w+)|dict(ionary)?\s+%(\w+)|array\s+@(\w+))
+ @param statements: a string starting with a "delete" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "delete" command
+ '''
+ if not doExecute:
+ #........................a....b.......b.a..1.c.....c...d.....d...e.....e...f....f.1...2...23g........g.3
+ matcher = self._match(r'v(?:ar(?:iable)?)? (t(?:ext)?|l(?:ist)?|d(?:ict)?|i(?:nt)?)\s+(\w+)((?:\s+\w+)+)?[\s;]*',
+ 'var', statements)
+ names = [matcher.group(2)]
+ rest = matcher.group(3)
+ if rest != None:
+ for name in rest.split():
+ names.append(name)
+ aType = matcher.group(1)
+ self._args.append(aType)
+ for name in names:
+ if name in self._vars:
+ msg = '' if aType == self._vars[name]._type else ' with a different type: ' + self._vars[name]._type
+ self._error('variable {:s} defined twice'.format(name) + msg)
+ continue
+ if aType.startswith('t'):
+ self._vars[name] = TextVariable(name)
+ elif aType.startswith('d'):
+ self._vars[name] = DictVariable(name)
+ elif aType.startswith('l'):
+ self._vars[name] = ListVariable(name)
+ elif aType.startswith('i'):
+ self._vars[name] = IntegerVariable(name)
+ else:
+ self._error('unknown variable type ' + aType)
+ break
+ else:
+ matcher = None
+ return None if matcher == None else matcher.end()
+
+ def _write(self, statements, doExecute=True):
+ '''Handles the "write" command.
+ Write the lines into a file
+ w(rite) [<delim><filename><delim>] [<opts>] }
+ @param statements: a string starting with a "write" command
+ @param doExecute: False: no action, syntax check and command splitting
+ @return: the length of the "write" command
+ '''
+ if not doExecute:
+ # .......................a......a....b..1..12...2..b.3...c..d.................e......e.d...c.3
+ matcher = self._match(r'w(?:rite)?\s*(?:(\W)(.*?)\1)?(\s*(?:(?:-\w\S*|--[\w-]+(?:=\S+)?)\s*)*)[\s;]*', 'write', statements)
+ self._args.append([matcher.group(2), matcher.group(3)])
+ else:
+ matcher = None
+ (name, options) = self._args[self._ixStatement]
+ if name == None:
+ name = self._name
+ else:
+ name = self._checkOptsAndExpand(options, name)
+ if name == None or name == '':
+ self._error('missing file name for writing')
+ else:
+ with open(name, "w") as fp:
+ for line in self._lines:
+ fp.write(line + '\n')
+ return None if matcher == None else matcher.end()
+
+ def execute(self, argv):
+ '''Executes a sequence of statements on a given file.
+ @param argv: the arguments: [<statements>] or [<statements>, <file>]
+ @return: None: OK
+ otherwise: the error message
+ '''
+ rc = None
+ if len(argv) == 0:
+ rc = 'missing statements'
+ else:
+ statements = argv[0]
+ if len(argv) > 1:
+ if argv[1] != '-':
+ self._readFile(argv[1])
+ else:
+ self._lines = self._readStdIn()
+ self.parseAndInterpret(statements)
+ return rc
+
+ def parseAndInterpret(self, statements):
+ '''Parses and executes a statement list.
+ @param statements: a sequence of statements
+ @return: 0: success 1: syntax error
+ '''
+ self._clearProgram()
+ self.parse(statements)
+ if self._errors > 0:
+ rc = 1
+ else:
+ rc = self.interpret()
+ return rc
+
    def indexOf(self, pattern, startLine=0, startCol = 0, flags = None):
        '''Returns the first index of a pattern given by a reg. expression.
        Note: the _range is respected while the search
        @param pattern: a regular expression to search
        @param startLine: the index of the first line to search: 0..N-1
        @param startCol: the index of the char in the first line for starting the search: 0..M-1
        @param flags: flags for re.compile(): 'i': ignore case 'c': case sensitive otherwise: re.RegexFlag e.g. re.I
        @return: None: not found
                otherwise: a tuple (ixLine, ixCol, length, matcher) of the expression
        '''
        ix = startLine
        maxLine = len(self._lines)
        # do not search behind the end of the range
        if self._endRange._line < maxLine:
            maxLine = self._endRange._line
        # rest of a line behind a previous hit
        # NOTE(review): the loop breaks at the first hit, so the unprocessedLine
        # branch below seems to be unreachable -- confirm
        unprocessedLine = None
        # count of chars cut away at the line start (translates match offset to line column)
        colPrefixLength = 0
        if flags == None:
            flags = self._reFlags
        elif flags == 'i':
            flags = IGNORE
        elif flags == 'c':
            flags = 0

        reExpr = base.StringUtils.regExprCompile(pattern, 'indexOf', self._logger, flags == 0)
        ixLine = None
        while unprocessedLine != None or ix < maxLine:
            if unprocessedLine != None:
                toInspect = unprocessedLine
            else:
                colPrefixLength = 0
                toInspect = self._lines[ix]
                if ix == startLine and startCol > 0:
                    # the first line may start behind column 0
                    toInspect = toInspect[startCol:]
                    colPrefixLength = startCol
            ix += 1
            matcher = reExpr.search(toInspect)
            if matcher == None:
                unprocessedLine = None
            else:
                # hit found: remember position, stop searching
                ixLine = ix - 1
                ixCol = matcher.start()
                length = matcher.end() - ixCol
                unprocessedLine = toInspect[ixCol + length:]
                colPrefixLength += ixCol + length;
                break
        # "length" is defined whenever ixLine is set (the break above)
        return None if ixLine == None else (ixLine, colPrefixLength - length, length, matcher)
+
+ def indexOfMultiple(self, patterns, startLine=0, startCol = 0):
+ '''Returns the first index of a squences of patterns given by reg. expressions.
+ @param patterns: a list of regular expressions to search
+ @param startLine: the index of the first line to search: 0..N-1
+ @param startCol: the index of the char in the first line for starting the search: 0..M-1
+ @return: None: not found
+ otherwise: a tuple (ixLine, ixCol, length, matcher) of the expression
+ '''
+ rc = None
+ for pattern in patterns:
+ rc = self.indexOf(pattern, startLine, startCol)
+ if rc == None:
+ break
+ else:
+ startLine = rc[0]
+ startCol = rc[1] + rc[2]
+ return rc
+
+ def interpret(self):
+ '''Interprets all statements.
+ '''
+ rc = 0
+ self._ixStatement = 0
+ doExecute = True
+ while self._ixStatement < len(self._statements) and self._errors == 0:
+ self._oneStatement(self._commands[self._ixStatement], self._statements[self._ixStatement], doExecute)
+ self._ixStatement += 1
+ return rc
+
+ def rindexOf(self, pattern, startLine=0x7fffffff, startCol = 0x7ffffff, flags = None):
+ '''Returns the last index of a pattern given by a reg. expression.
+ Note: the _range is respected while the search
+ @param pattern: a regular expression to search
+ @param startLine: the index of the first line (from the end) to search: 0..N-1
+ @param startCol: the index of the char in the first line for starting the search: 0..M-1
+ @param flags: flags for re.compile(), e.g. re.I
+ @return: None: not found
+ otherwise: a tuple (ixLine, ixCol, length, matcher) of the expression
+ '''
+ if flags == 'i':
+ flags = IGNORE
+ elif flags == 'c':
+ flags = 0
+ else:
+ flags = self._reFlags
+ reExpr = re.compile(pattern, flags if flags != None else self._reFlags)
+ def find(line, start, end):
+ matcherList = []
+ for m in reExpr.finditer(line, start, end):
+ matcherList.append(m)
+ count = len(matcherList)
+ if count == 0:
+ return None
+ # Bug in finditer(): if a nonempty match is preceeded by an empty it eats the first char
+ if count > 1 and matcherList[count - 2].group() == '' and matcherList[count - 1].group() != '':
+ # search again 1 char above the last hit:
+ return reExpr.search(max(0, matcherList.start() - 1))
+ else:
+ return matcherList[count - 1]
+ (ix, lastCol) = self._lastPos(startLine, startCol)
+ firstCol = 0 if ix != self._startRange._line else self._startRange._col
+ matcher = None if lastCol <= 0 else find(self._lines[ix], firstCol, lastCol)
+ if matcher == None:
+ (ixFirst, firstCol) = self._firstPos(0, 0)
+ # search in full lines:
+ ix -= 1
+ while ix > ixFirst:
+ matcher = find(self._lines[ix], 0, len(self._lines[ix]))
+ if matcher != None:
+ break
+ ix -= 1
+ if matcher == None:
+ firstCol = 0 if self._startRange._line != ixFirst else self._startRange._col
+ matcher = find(self._lines[ixFirst], firstCol, len(self._lines[ixFirst]))
+ rc = None if matcher == None else (ix, matcher.start(), matcher.end() - matcher.start(), matcher)
+ return rc
+
+ def parse(self, statements):
+ '''Splits a command sequence into single statements.
+ Note: the method can be called multiple times to collect some sources.
+ @param statements: a sequence of statements to parse
+ '''
+ doExecute = False
+ while statements != None and len(statements) > 0:
+ # ..........................12......2.........3........3.......1
+ matcher = re.match(r'^[\s;]*(([a-z]+)|\$\w+\s*([-+*/%:])=|\$\w+)', statements)
+ if matcher == None:
+ self._logger.error('unknown command: ' + statements.strip()[0:40])
+ ix = statements.find(';')
+ if ix > 0:
+ statements = statements[ix:]
+ else:
+ break
+ elif matcher.group(2) != None:
+ cmd = matcher.group(2)
+ elif matcher.group(3) != None:
+ cmd = 'numeric'
+ else:
+ cmd = 'assign'
+ if matcher.start(1) > 0:
+ statements = statements[matcher.start(1):]
+ length = self._oneStatement(cmd, statements, doExecute)
+ if length == None:
+ length = statements.find(';')
+ if self._errors == 0:
+ self._error('cannot parse: ' + statements[0:length])
+ if length < 0:
+ break;
+ self._statements.append(statements[0:length])
+ self._commands.append(cmd)
+ statements = statements[length:]
+
+ def replace(self, pattern, replacement, limit = 0x7ffffff, startLine = 0, startCol = 0, flags = None):
+ '''Replaces a search expression given by patterns with a replacement.
+ The last reg. expression will be replaced by a given string
+ @param pattern: a regular expression to search
+ @param replacement: the reg. expression will be replaced by this string
+ @param limit: the search/replacement process will be done in such many lines
+ @param startLine: the index of the line to start search
+ @param startCol: the index of the column (of the startLine) to start search
+ @param flags: search flags:
+ string: 'i': ignore case 'c': case sensitive None: default
+ int: flags like re.I|re.D
+ @return: False: not found True: found and replaced
+ '''
+ rc = False
+ hasMacros = not self._ignoreMacros and re.search(r'\[1-9]', replacement) != None
+ replacement2 = replacement
+ while limit > 0:
+ limit -= 1
+ aTuple = self.indexOf(pattern, startLine, startCol, flags)
+ rc = aTuple != None
+ if not rc:
+ break
+ else:
+ (startLine, startCol, length, matcher) = aTuple
+ line = self._lines[startLine]
+ if hasMacros:
+ replacement2 = matcher.expand(replacement)
+ line = line[0:startCol] + replacement2 + line[startCol+length:]
+ self._lines[startLine] = line
+ self._changed = True
+ startCol += len(replacement2)
+ return rc
+
+ def script(self, argv):
+ '''Reads a sequence of statements from a file or stdin and executes them on a given file.
+ @param argv: the arguments: [<script>] or [<script>, <file>]
+ @return: None: OK
+ otherwise: the error message
+ '''
+ rc = None
+ if len(argv) == 0:
+ rc = 'missing script'
+ else:
+ if argv[0] == '-':
+ script = self._readStdIn()
+ else:
+ self._scriptName = argv[0]
+ if not os.path.exists(argv[0]):
+ rc = 'script {:s} does not exist'.format(argv[0])
+ else:
+ with open(argv[0], 'r') as fp:
+ script = fp.read().split()
+ if rc == None:
+ if len(argv) > 1:
+ if argv[1] != '-':
+ self._readFile(argv[1])
+ else:
+ self._lines = self._readStdIn()
+ self._executeScript(script)
+ return rc
+
+ def setIgnoreCase(self, ignoreCase=True):
+ '''Sets or clears the case sensivity of the following searches.
+ @param ignoreCase: True: the search will be case sensitive
+ '''
+ if ignoreCase:
+ self._reFlags |= IGNORE
+ else:
+ self._reFlags &= IGNORE
+
+if __name__ == '__main__':
+ pass
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import datetime
+import threading
+from base.Logger import Logger
+
class ThreadLogger(Logger):
    '''A logger usable by multiple threads.
    Serializes console and file output with a lock and prefixes each
    message with a timestamp and the name of the current thread.
    '''

    def __init__(self, logfile, verbose):
        '''Constructor.
        @param logfile: the file for logging
        @param verbose: True: logging to stdout too
        '''
        # the lock serializes concurrent log() calls
        self._lock = threading.Lock()
        Logger.__init__(self, logfile, verbose)

    def log(self, message, minLevel = 0):
        '''Logs a message.
        @param message: the message to log
        @param minLevel: the logging is done only if _verboseLevel >= minLevel
        @return: True: OK False: error on log file writing
        '''
        rc = False
        now = datetime.datetime.now()
        # fix: use the public Thread.name attribute instead of the private _name
        message = now.strftime('%Y.%m.%d %H:%M:%S') + '[{}] {:s}'.format(threading.current_thread().name, message)
        with self._lock:
            if self._verboseLevel >= minLevel:
                print(message)
            with open(self._logfile, 'a') as fp:
                rc = True
                fp.write(message + '\n')
        return rc
+
+if __name__ == '__main__':
+ logger = ThreadLogger('/tmp/test.log', True)
+ logger.log('Hi');
--- /dev/null
+'''
+Created on 05.07.2018
+
+@author: hm
+'''
+import zipfile
+import re
+import os
+import time
+import base.MemoryLogger
+from _ast import arg
+from fnmatch import fnmatch
+
+class Zipper(object):
+ '''
+ Maintain a zip archive.
+ '''
+
+ def __init__(self, name, mode = 'w', logger = None,
+ noCompression = 'zip|tgz|gzip|gz|bz2|lz|lzma|xz|7z|deb|rpm|cab|rar|jar|war|ear'
+ + '|jpg|png|gif|tiff?jpeg|gzip|mp3|mpeg|mp4|m4p||flv|f4[abv]|vob|ogv|avi|mov'
+ + '|odt|ott|ods|odg|otg|odp|odb|doc|docx|xlsx?|xltx|xlw'):
+ '''
+ Constructor.
+ @param name: the archive filename
+ @param mode: 'w' create a new archive 'a': append to an achive 'r': open readonly.
+ @param noCompression: a '|' separated list of file extensions: these files will be stored without expression
+ '''
+ self._name = name
+ self._createStatistic = ZipCreateStatistic()
+ self._extractStatistic = ZipExtractStatistic()
+ self._logger = logger if logger != None else base.MemoryLogger.MemoryLogger(1)
+ self._mode = mode
+ self._fullNames = None
+ self._nodes = []
+ self._zip = None
+ try:
+ self._zip = zipfile.ZipFile(name, mode, zipfile.ZIP_DEFLATED)
+ except Exception as exc:
+ self._logger.error('cannot {:s} archive {:s}: {:s}'.format('read' if mode=='r' else 'write', name, str(exc)))
+ self._rexprNoCompression = re.compile(r'\.(' + noCompression + ')$', re.RegexFlag.IGNORECASE);
+ self._dst = time.daylight
+
+ def append(self, name, internalName = None, compression=True):
+ '''Appends a file to the archive.
+ @param name: the full name of the file to store
+ @param internalName: this name will be stored. If None the name will be used (without a preceding '/')
+ @param compression: False: never compress the file True: compress if file extension "allows" this
+ '''
+ self._createStatistic._files += 1
+ if not compression:
+ self._createStatistic._uncompressedFiles += 1
+ if self._mode == 'r':
+ raise Exception('cannot append to readonly archive')
+ if not compression:
+ self._createStatistic._uncompressedFiles += 1
+ else:
+ mapper = self._rexprNoCompression.search(name)
+ if mapper != None:
+ compression = False
+ self._createStatistic._uncompressedByExt += 1
+ try:
+ if not os.path.islink(name):
+ self._zip.write(name, internalName, zipfile.ZIP_DEFLATED if compression else zipfile.ZIP_STORED)
+ else:
+ self._createStatistic._links += 1
+ zipInfo = zipfile.ZipInfo(internalName)
+ zipInfo.create_system = 3
+ # long type of hex val of '0xA1ED0000L',
+ # say, symlink attr magic...
+ zipInfo.external_attr = 2716663808
+ self._zip.writestr(zipInfo, os.readlink(name))
+ except Exception as exc:
+ self._logger.error('cannot read: {:s} [{:s}]'.format(name, str(exc)))
+
+ def appendDir(self, dirName, lengthBase=0, regExprFilesIgnored = None, recursive=True, regExprDirsIgnored=None,
+ regExprDirsNoCompression = None, compression=True):
+ '''Appends all file of a directory to the archive.
+ @param dirName: the full name of the directory to store
+ @param lengthBase: if > 0: the stored name will be cut at the top by this number of characters
+ @param regExprFilesIgnored: None or a compiled regular expression of file which are not stored,
+ e.g. re.compile('~$')
+ @param recursive: True: all subdirectories will be stored too
+ @param regExprDirsIgnored: None or a compiled regular expression of directories which are not stored,
+ e.g. re.compile(r'^\.(metadata|project)$');
+ @param regExprDirsNoCompression: None or the compiled regular expression of directories which will not be compressed
+ e.g. re.compile(r'^\.(git|svn|cvs)$');
+ @param compressed: False: none of the files/dirs will be compressed
+ '''
+ self._createStatistic._directories += 1
+ if not compression:
+ self._createStatistic._uncompressedDirs += 1
+ files = os.listdir(dirName)
+ dirs = []
+ fullPrefix = dirName + os.sep
+ internalPrefix = (dirName if lengthBase == 0 else dirName[lengthBase:]) + '/'
+ self.appendDirectoryEntry(dirName, internalPrefix)
+ if os.sep != '/':
+ internalPrefix = internalPrefix.replace(os.sep, '/')
+ for name in files:
+ full = fullPrefix + name
+ if not os.path.isdir(full):
+ if regExprFilesIgnored == None:
+ self.append(full, internalPrefix + name, compression)
+ elif regExprFilesIgnored.search(name):
+ self._createStatistic._ignoredFiles += 1
+ else:
+ self.append(full, internalPrefix + name, compression)
+ elif recursive:
+ if regExprDirsIgnored == None:
+ dirs.append(name)
+ elif regExprDirsIgnored.search(name) == None:
+ dirs.append(name)
+ else:
+ self._createStatistic._ignoredDirs += 1
+ for name in dirs:
+ if not compression:
+ compr = False
+ else:
+ if regExprDirsNoCompression == None:
+ compr = True
+ else:
+ if name.find('.git') >= 0:
+ pass
+ matcher = regExprDirsNoCompression.search(name)
+ if matcher == None:
+ compr = True
+ else:
+ compr = False
+ self._createStatistic._uncompressedRoots += 1
+ self.appendDir(fullPrefix + name, lengthBase, regExprFilesIgnored, recursive, regExprDirsIgnored,
+ regExprDirsNoCompression, compr)
+
+ def appendDirectoryEntry(self, dirName, internalName):
+ '''Appends a directory into the zip archive.
+ @param dirName: the full directory name
+ @param internalName: the name in the archive
+ '''
+ if not internalName.endswith('/'):
+ internalName += '/'
+ statInfo = os.lstat(dirName)
+ mTime = time.localtime(statInfo.st_mtime)
+ zipInfo = zipfile.ZipInfo(internalName, mTime)
+ zipInfo.external_attr = statInfo.st_mode << 16
+ self._zip.writestr(zipInfo, '')
+
+ def close(self):
+ '''Frees the resources.
+ '''
+ self._zip.close()
+
+ def create(self, argv):
+ '''Creates a zip archive.
+ @param argv: the program arguments (behind the archive>)
+ @return: None: OK otherwise: the error message
+ '''
+ rc = None
+ mode = 'f'
+ regExprFilesIgnored = None
+ regExprDirsIgnored = None
+ regExprDirsNoCompression = None
+ logStatistic = False
+ for arg in argv:
+ if arg == '--store-node-only':
+ mode = 'n'
+ elif arg == '--statistic':
+ logStatistic = True
+ elif arg == '--shortest-path':
+ mode = 's'
+ elif arg.startswith('--ignored-files='):
+ regExprFilesIgnored = base.StringUtils.regExprCompile(arg[16:], 'ignored files pattern', self._logger)
+ elif arg.startswith('--ignored-dirs='):
+ regExprDirsIgnored = base.StringUtils.regExprCompile(arg[15:], 'ignored dirs pattern', self._logger)
+ elif arg.startswith('--already-compressed-dirs='):
+ expr = arg[26:]
+ regExprDirsNoCompression = base.StringUtils.regExprCompile(expr, 'already compressed dirs pattern', self._logger)
+ elif arg.startswith('-'):
+ rc = 'unknown option: ' + arg
+ else:
+ if arg.endswith(os.sep):
+ arg = arg[:-1]
+ if mode == 'f':
+ start = 0
+ elif mode == 's':
+ start = len(arg)
+ else:
+ start = arg.rfind(os.sep) + 1
+ if not os.path.exists(arg):
+ self._logger.error('file does not exist: ' + arg)
+ elif os.path.isdir(arg):
+ compr = True
+ if regExprDirsNoCompression != None:
+ node = os.path.basename(arg)
+ compr = regExprDirsNoCompression.search(node) == None
+ if not compr:
+ self._createStatistic._uncompressedRoots += 1
+ self.appendDir(arg, start, regExprFilesIgnored, True, regExprDirsIgnored, regExprDirsNoCompression, compr)
+ else:
+ if mode == 's':
+ start = start = arg.rfind(os.sep) + 1
+ elif mode == 'n':
+ start = arg.rfind(os.sep, 0, start - 1)
+ self.append(arg, arg[start:])
+ if rc:
+ if len(argv) == 0:
+ rc = 'missing input file(s)'
+ if logStatistic:
+ self._createStatistic.log(self._logger)
+ return rc
+
+ def extract(self, argv):
+ '''Creates a zip archive.
+ @param argv: the program arguments (behind the archive>)
+ @return: None: OK otherwise: the error message
+ '''
+ rc = None
+ patternMode = 's'
+ targetMode = 'n'
+ writeMode = 'o'
+ destination = '.' + os.sep
+ for arg in argv:
+ if arg.startswith('-d') or arg.startswith('--destination='):
+ destination = arg[2:] if arg.startswith('-d') else arg[14:]
+ if not os.path.isdir(destination):
+ self._logger.error('not a directory: ' + destination)
+ break
+ destination += os.sep
+ elif arg == '-u' or arg == '--update':
+ writeMode = 'u'
+ elif arg == '-o' or arg == '--overwrite':
+ writeMode = 'o'
+ elif arg == '-t' or arg == '--not-overwrite':
+ writeMode = 'n'
+ elif arg == '-f' or arg == '--freshen':
+ writeMode = 'f'
+ elif re.match(r'(?:-r|--reg-expr(?:ession)?)$', arg):
+ patternMode = 'r'
+ elif re.match(r'(?:-w|--wildcards?)$', arg):
+ patternMode = 'w'
+ elif re.match(r'(?:-s|--substring)$', arg):
+ patternMode = 's'
+ elif re.match(r'(?:-p|--path)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-f|--full(?:-name)?)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-p|--path)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-n|--node)$', arg):
+ targetMode = 'n'
+ elif arg.startswith('-'):
+ self._logger.error('unknown option: ' + arg)
+ else:
+ if self._fullNames == None:
+ self._fullNames = self._zip.namelist()
+ if patternMode == 'r':
+ pattern = base.StringUtils.regExprCompile(arg, 'pattern', self._logger)
+ for name in self._fullNames:
+ if targetMode == 'p':
+ item = os.path.dirname(name)
+ elif targetMode == 'n':
+ item = os.path.basename(name)
+ else:
+ item = name
+ if patternMode == 'f':
+ found = item == arg
+ elif patternMode == 'r':
+ found = pattern.search(item)
+ elif patternMode == 's':
+ found = item.find(arg) >= 0
+ else:
+ found = fnmatch(item, arg)
+ if found:
+ self._extractStatistic._matches += 1
+ doWrite = False
+ full = destination + name
+ if writeMode == 'o':
+ doWrite = True
+ elif writeMode == 'u':
+ try:
+ mtime = os.lstat(full).st_mtime
+ info = self._zip.getinfo(name)
+ ziptime = time.mktime((info.date_time[0], info.date_time[1], info.date_time[2],
+ info.date_time[3], info.date_time[4], info.date_time[5], 0, 0, self._dst))
+ doWrite = ziptime < mtime
+ if doWrite:
+ self._extractStatistic._updates += 1
+ except OSError:
+ doWrite = True
+ self._extractStatistic._creations += 1
+ elif writeMode == 'f':
+ doWrite = os.path.exists(full)
+ if doWrite:
+ self._extractStatistic._refreshs += 1
+ elif writeMode == 't':
+ doWrite = not os.path.exists(full)
+ else:
+ self._logger.error('I am confused: writeMode=' + writeMode)
+ if doWrite:
+ try:
+ self._zip.extract(name, destination)
+ except Exception as exc:
+ self._logger.error('cannot extract {:s}: {:s}'.format(full, str(exc)) )
+ return rc
+
+ def info(self, argv):
+ '''Displays info about a zip archive.
+ @param argv: the program arguments (behind the archive>)
+ @return: None: OK otherwise: the error message
+ '''
+ rc = None
+ nameOnly = False
+ patternMode = 's'
+ targetMode = 'n'
+ for arg in argv:
+ if arg == '-1' or arg == '--name-only':
+ nameOnly = True
+ elif re.match(r'(?:-r|--reg-expr(?:ession)?)$', arg):
+ patternMode = 'r'
+ elif re.match(r'(?:-w|--wildcards?)$', arg):
+ patternMode = 'w'
+ elif re.match(r'(?:-s|--substring)$', arg):
+ patternMode = 's'
+ elif re.match(r'(?:-p|--path)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-f|--full(?:-name)?)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-p|--path)$', arg):
+ targetMode = 'p'
+ elif re.match(r'(?:-n|--node)$', arg):
+ targetMode = 'n'
+ elif arg.startswith('-'):
+ self._logger.error('unknown option: ' + arg)
+ else:
+ if self._fullNames == None:
+ self._fullNames = self._zip.namelist()
+ if patternMode == 'r':
+ pattern = base.StringUtils.regExprCompile(arg, 'r pattern', self._logger)
+ for name in self._fullNames:
+ if targetMode == 'p':
+ item = os.path.dirname(name)
+ elif targetMode == 'n':
+ item = os.path.basename(name)
+ else:
+ item = name
+ if patternMode == 'f':
+ found = item == arg
+ elif patternMode == 'r':
+ found = pattern.search(item)
+ elif patternMode == 's':
+ found = item.find(arg) >= 0
+ else:
+ found = fnmatch(item, arg)
+ if found:
+ if nameOnly:
+ self._logger.log(name)
+ else:
+ info = self._zip.getinfo(name)
+ (year, month, day, hour, minute, second) = info.date_time
+ percent = 0 if info.file_size == 0 else int(info.compress_size * 100 / info.file_size)
+ self._logger.log('{:12d} {:3}% {:d}.{:02d}.{:02d}-{:02d}:{:02d}:{:02d} {:s}'.format(
+ info.file_size, percent, year, month, day, hour, minute, second, name))
+ return rc
+
    def infosOf(self, node=None, path=None):
        '''Returns a list of ZipInfo instances given by path and node name or patterns.
        @param node: None or complete node name.
            If a directory is given and filename is None: all files of the directory will be returned
            string: the complete node name
            compiled regular expression: all matching files will be returned,
            e.g. re.compile('[.]txt$', re.I)
        @param path: None of the complete path of the files to return
            string: the complete path, ending with '/'
            compiled regular expression: files of all matching directory will be returned,
            e.g. re.compile('^(etc|home)/$')
        @return: a list of ZipInfo instances. May be empty
        '''
        rc = []
        allInfos = node == None and path == None
        # the full name list is fetched lazily and cached for later calls
        if not allInfos and self._fullNames == None:
            self._fullNames = self._zip.namelist()
        countNodes = len(self._nodes)
        if allInfos:
            # no filter at all: simply return all entries
            rc = self._zip.infolist()
        elif type(node) == str and type(path) ==str:
            # exact match: both parts are plain strings, at most one hit
            full = path + node
            for current in self._fullNames:
                if full == current:
                    info = self._zip.getinfo(full)
                    rc.append(info)
                    break
        elif node == None and type(path) == str:
            # all direct members of the given directory (no deeper levels):
            # the last '/' of the member must end exactly where the path ends
            pathLength = len(path)
            for current in self._fullNames:
                if current.startswith(path) and pathLength == current.rfind('/') + 1:
                    info = self._zip.getinfo(current)
                    rc.append(info)
        else:
            # generic case: node and/or path may be a compiled regular expression.
            # self._nodes caches the node part of each full name, parallel to self._fullNames.
            for ix in range(len(self._fullNames)):
                full = self._fullNames[ix]
                pathCurrent = None
                if ix < countNodes:
                    # NOTE(review): only the node is cached, so pathCurrent stays None for
                    # cached entries - the fallback branches below must handle that case
                    nodeCurrent = self._nodes[ix]
                else:
                    ix2 = full.rfind('/')
                    if ix2 < 0:
                        nodeCurrent = full
                        pathCurrent = ''
                    else:
                        nodeCurrent = full[ix2+1:]
                        pathCurrent = full[0:ix2+1]
                    self._nodes.append(nodeCurrent)
                # first filter: the node part
                if node == None:
                    found = True
                elif type(node) == str:
                    found = node == nodeCurrent
                else:
                    found = node.search(nodeCurrent)
                # second filter: the path part
                if found:
                    if path == None:
                        found = True
                    elif type(path) == str:
                        if pathCurrent != None:
                            found = pathCurrent == path
                        else:
                            # NOTE(review): len(node) assumes node is a string in this branch;
                            # with a regular expression as node this would raise - confirm callers
                            found = full.startswith(path) and len(path) + len(node) == len(full)
                    else:
                        if pathCurrent != None:
                            found = path.search(pathCurrent)
                        else:
                            if path.search(full) == None:
                                found = False
                            else:
                                # derive the path part from the full name and re-check the pattern
                                pathCurrent = full[0:len(full)- len(nodeCurrent)]
                                found = path.search(pathCurrent)
                if found:
                    info = self._zip.getinfo(full)
                    rc.append(info)
        return rc
+
class ZipExtractStatistic:
    '''Counters collected while extracting a zip archive.
    '''
    def __init__(self):
        '''Initializes all counters with 0.'''
        self._updates = self._overwrites = self._refreshs = 0
        self._creations = self._matches = 0
+
class ZipCreateStatistic:
    '''Counters collected while building a zip archive.
    '''
    def __init__(self):
        '''Initializes all counters with 0.'''
        self._links = self._directories = self._files = 0
        self._ignoredDirs = self._ignoredFiles = 0
        self._uncompressedFiles = self._uncompressedDirs = 0
        self._uncompressedRoots = self._uncompressedByExt = 0

    def log(self, logger):
        '''Writes the counters in a human readable form.
        @param logger: the messages are written to this logger
        '''
        logger.log('file(s): {:d} dir(s): {:d} links: {:d}'.format(self._files, self._directories, self._links))
        logger.log('ignored file(s): {:d} ignored dir(s): {:d}'.format(self._ignoredFiles, self._ignoredDirs))
        logger.log('uncompressed file(s)/by extension: {:d}/{:d} uncompressed dir(s)/roots: {:d}/{:d}'.format(
            self._uncompressedFiles, self._uncompressedByExt, self._uncompressedDirs, self._uncompressedRoots))
\ No newline at end of file
--- /dev/null
+apt-get install python3-mysqldb libmysqlclient-dev
--- /dev/null
+[PHP]
+
+;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;
+
+
+
+
+
+
+; ! boolean NOT
+
+
+
+
+
+;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;
+
+
+
+
+;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;
+
+
+
+
+
+; log_errors
+
+
+
+
+
+
+
+
+
+
+;;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;;
+
+
+
+;;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;;
+
+engine = On
+
+short_open_tag = Off
+
+precision = 14
+
+; reasons.
+; Off = Disabled
+output_buffering = 4096
+
+; is doing.
+
+
+
+zlib.output_compression = Off
+
+
+
+implicit_flush = Off
+
+unserialize_callback_func =
+
+; precision.
+serialize_precision = -1
+
+;open_basedir =
+
+disable_functions = pcntl_alarm,pcntl_fork,pcntl_waitpid,pcntl_wait,pcntl_wifexited,pcntl_wifstopped,pcntl_wifsignaled,pcntl_wifcontinued,pcntl_wexitstatus,pcntl_wtermsig,pcntl_wstopsig,pcntl_signal,pcntl_signal_get_handler,pcntl_signal_dispatch,pcntl_get_last_error,pcntl_strerror,pcntl_sigprocmask,pcntl_sigwaitinfo,pcntl_sigtimedwait,pcntl_exec,pcntl_getpriority,pcntl_setpriority,pcntl_async_signals,
+
+disable_classes =
+
+
+
+
+
+zend.enable_gc = On
+
+
+
+;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;
+
+expose_php = Off
+
+;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;
+
+max_execution_time=600
+
+max_input_time=600
+
+
+
+memory_limit=2048M
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+; Error handling and logging ;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;
+; of PHP
+;
+error_reporting = E_ALL & ~E_DEPRECATED & ~E_STRICT
+
+display_errors = Off
+
+display_startup_errors = Off
+
+log_errors = On
+
+log_errors_max_len = 1024
+
+ignore_repeated_errors = Off
+
+ignore_repeated_source = Off
+
+report_memleaks = On
+
+
+
+
+
+html_errors = On
+
+
+
+
+
+;error_log = php_errors.log
+
+
+
+
+
+;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;
+
+
+
+variables_order = "GPCS"
+
+request_order = "GP"
+
+register_argc_argv = Off
+
+auto_globals_jit = On
+
+
+post_max_size=512M
+
+auto_prepend_file =
+
+auto_append_file =
+
+;
+default_mimetype = "text/html"
+
+default_charset = "UTF-8"
+
+
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;
+;
+
+doc_root =
+
+user_dir =
+
+
+
+enable_dl = Off
+
+
+;cgi.nph = 1
+
+
+
+
+
+
+;cgi.rfc2616_headers = 0
+
+;cgi.check_shebang_line=1
+
+;;;;;;;;;;;;;;;;
+; File Uploads ;
+;;;;;;;;;;;;;;;;
+
+file_uploads = On
+
+; specified).
+
+upload_max_filesize=512M
+
+max_file_uploads=100
+
+;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;
+
+allow_url_fopen = On
+
+allow_url_include = Off
+
+;from="john@doe.com"
+
+
+default_socket_timeout = 60
+
+
+;;;;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;;;;
+
+;
+;
+;
+;
+;
+;
+;
+;
+;
+
+
+
+;;;;;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;
+
+[CLI Server]
+cli_server.color = On
+
+[Date]
+
+
+
+
+
+[filter]
+
+
+[iconv]
+
+
+
+[imap]
+
+[intl]
+
+[sqlite3]
+
+
+[Pcre]
+
+
+
+[Pdo]
+
+
+[Pdo_mysql]
+pdo_mysql.default_socket=
+
+[Phar]
+
+
+
+[mail function]
+SMTP = localhost
+smtp_port = 25
+
+
+
+
+mail.add_x_header = Off
+
+;mail.log =
+
+[ODBC]
+
+
+
+
+odbc.allow_persistent = On
+
+odbc.check_persistent = On
+
+odbc.max_persistent = -1
+
+odbc.max_links = -1
+
+odbc.defaultlrl = 4096
+
+odbc.defaultbinmode = 1
+
+[Interbase]
+ibase.allow_persistent = 1
+
+ibase.max_persistent = -1
+
+ibase.max_links = -1
+
+
+
+
+
+ibase.timestampformat = "%Y-%m-%d %H:%M:%S"
+
+ibase.dateformat = "%Y-%m-%d"
+
+ibase.timeformat = "%H:%M:%S"
+
+[MySQLi]
+
+mysqli.max_persistent = -1
+
+
+mysqli.allow_persistent = On
+
+mysqli.max_links = -1
+
+mysqli.default_port = 3306
+
+mysqli.default_socket =
+
+mysqli.default_host =
+
+mysqli.default_user =
+
+mysqli.default_pw =
+
+mysqli.reconnect = Off
+
+[mysqlnd]
+mysqlnd.collect_statistics = On
+
+mysqlnd.collect_memory_statistics = Off
+
+; file.
+
+
+
+
+
+
+
+[OCI8]
+
+
+
+
+
+
+
+
+
+
+[PostgreSQL]
+pgsql.allow_persistent = On
+
+pgsql.auto_reset_persistent = Off
+
+pgsql.max_persistent = -1
+
+pgsql.max_links = -1
+
+pgsql.ignore_notice = 0
+
+pgsql.log_notice = 0
+
+[bcmath]
+bcmath.scale = 0
+
+[browscap]
+
+[Session]
+session.save_handler = files
+
+;
+;
+;
+;
+;
+;
+;
+
+session.use_strict_mode = 0
+
+session.use_cookies = 1
+
+
+session.use_only_cookies = 1
+
+session.name = PHPSESSID
+
+session.auto_start = 0
+
+session.cookie_lifetime = 0
+
+session.cookie_path = /
+
+session.cookie_domain =
+
+session.cookie_httponly =
+
+session.cookie_samesite =
+
+session.serialize_handler = php
+
+session.gc_probability = 0
+
+session.gc_divisor = 1000
+
+session.gc_maxlifetime = 1440
+
+
+session.referer_check =
+
+session.cache_limiter = nocache
+
+session.cache_expire = 180
+
+session.use_trans_sid = 0
+
+session.sid_length = 26
+
+session.trans_sid_tags = "a=href,area=href,frame=src,form="
+
+
+session.sid_bits_per_character = 5
+
+
+
+
+
+
+
+
+[Assertion]
+zend.assertions = -1
+
+
+
+
+
+
+
+[COM]
+
+
+
+
+
+;com.code_page=
+
+[mbstring]
+
+
+
+
+
+
+
+
+
+
+
+[gd]
+
+[exif]
+
+
+
+
+
+
+[Tidy]
+
+tidy.clean_output = Off
+
+[soap]
+soap.wsdl_cache_enabled=1
+
+soap.wsdl_cache_dir="/tmp"
+
+soap.wsdl_cache_ttl=86400
+
+soap.wsdl_cache_limit = 5
+
+[sysvshm]
+
+[ldap]
+ldap.max_links = -1
+
+[dba]
+
+[opcache]
+;opcache.enable=1
+
+;opcache.enable_cli=0
+
+
+
+
+
+
+
+
+
+
+
+; passes
+
+
+
+; are cached.
+
+
+
+;opcache.error_log=
+
+
+
+
+
+; errors.
+;opcache.mmap_base=
+
+;opcache.file_cache=
+
+
+
+;opcache.file_cache_fallback=1
+
+
+
+
+
+[curl]
+
+[openssl]
+;openssl.cafile=
+
+
+; Local Variables:
+; End:
+
+
+xdebug.remote_port=9000
+xdebug.remote_enable=On
+xdebug.remote_handler=dbgp
+xdebug.remote_host=127.0.0.1
+xdebug.remote_autostart=1
--- /dev/null
+'''
+Created on 11.05.2018
+
+@author: hm
+'''
+import smtplib
+import re
+import imghdr
+import email.message
+
+import base.MemoryLogger
+
class EMail:
    '''Builds and sends an email with a plain text part and an optional HTML alternative.'''

    def __init__(self, subject, text=None, html=None):
        '''Constructor.
        @param subject: the subject of the mail
        @param text: the ascii text of the mail
        @param html: the html text of the mail
        '''
        self._subject = subject
        self._text = text
        self._html = html
        self._multiPart = email.message.EmailMessage()
        if subject is not None:
            self._multiPart['Subject'] = subject
        if text is not None:
            self._multiPart.set_content(text)
        if html is not None:
            self._multiPart.add_alternative(html, subtype='html')

    def addImage(self, filename):
        '''Adds an image file as attachment.
        @param filename: this file will be appended, should be MIME type "image"
        '''
        with open(filename, 'rb') as fp:
            img_data = fp.read()
        # NOTE(review): imghdr is deprecated (removed in Python 3.13) - consider another subtype detection
        self._multiPart.add_attachment(img_data, maintype='image', subtype=imghdr.what(None, img_data))

    def sendTo(self, sender, recipient, cc=None, bcc=None, logger=None):
        '''Sends the email to the given recipients.
        @param sender: None or an email address of the sender
        @param recipient: the recipient or a list of recipients
        @param cc: None or a carbon copy recipient or a list of CC
        @param bcc: None or blind carbon copy recipient or a list of BCC
        @param logger: None or logger
        @return: dictionary: empty: success otherwise: e.g. { "three@three.org" : ( 550 ,"User unknown" ) }
        '''
        def normEmail(address):
            # accepts a list or a blank separated string and returns a comma separated string
            if type(address) == list:
                rc = ', '.join(address)
            elif address.find(' ') > 0:
                rc = re.sub(r' +', ', ', address.strip())
            else:
                rc = address
            return rc
        # fix: formerly initialized with a list though the contract (and send_message()) use a dictionary
        rc = {}
        try:
            server = smtplib.SMTP(self._host, self._port)
            server.ehlo()
            if self._withTls:
                server.starttls()
                server.ehlo()
            server.login(self._username, self._code)
            # NOTE(review): calling sendTo() twice would add duplicated headers - confirm single use
            self._multiPart['From'] = sender
            self._multiPart['To'] = normEmail(recipient)
            if cc is not None:
                self._multiPart['Cc'] = normEmail(cc)
            if bcc is not None:
                self._multiPart['BCC'] = normEmail(bcc)
            rc = server.send_message(self._multiPart)
        except Exception as exc:
            if logger is not None:
                logger.error('sending email failed: {}\n{}\n{}'.format(str(exc), self._subject, self._text))
        return rc

    def setSmtpLogin(self, host, port, username, code, withTls=True):
        '''Sets the login data for the SMTP server.
        @param host: the SMTP server, e.g. 'smtp.gmx.net'
        @param port: the port of the SMTP service
        @param username: the user known to the SMTP service
        @param code: the password of the SMTP service
        @param withTls: True: TLS encryption will be used
        '''
        self._host = host
        self._port = port
        self._username = username
        self._code = code
        self._withTls = withTls
+
def sendStatusEmail(recipient, subject, body, sender, host, port, user, code, withTls, logger):
    '''Sends an email with a status info.
    @param recipient: the email address of the recipient or a blank separated list of recipients
    @param subject: a short info
    @param body: HTML or plain text
    @param sender: the email address of the sender
    @param host: the SMTP host
    @param port: the SMTP port
    @param user: the SMTP user
    @param code: the SMTP password
    @param withTls: True: TLS (encryption) is used
    @param logger: for error logging
    '''
    email = EMail(subject, body)
    parts = recipient.split(' ')
    email.setSmtpLogin(host, port, user, code, withTls)
    # additional recipients are handled as carbon copies
    cc = None if len(parts) < 2 else parts[1:]
    # fix: formerly the logger was passed as the bcc parameter of sendTo()
    rc = email.sendTo(sender, parts[0], cc, None, logger)
    if len(rc) == 0:
        logger.log('email sent to ' + recipient)
    else:
        logger.error('sending email "{}" failed: {}'.format(subject, str(rc)))
+
def main():
    '''Manual smoke test: sends a German test email via a GMX account.'''
    logger = base.MemoryLogger.MemoryLogger(1)
    # NOTE(review): the closing tag below is broken ('</p' instead of '</p>') - intended?
    email = EMail('Testmail von Python', 'Hi!\nHat funktoniert', '''
<html>
<body>
<h1>Hi</h1>
<p>Hat funktioniert!</p
</body>
</html>
    ''')
    # email.setSmtpLogin('smtp.1und1.de', 587, 'software@infeos.de', '4WebApp2018!')
    # SECURITY(review): hard-coded credentials in source code - move them to a configuration file
    email.setSmtpLogin('smtp.gmx.net', 587, 'hm.neutral@gmx.de', '1G.e.h.t.H.e.i.m', True)
    email.sendTo('hamatoma@gmx.de', 'hamatoma@gmx.de', None, None, logger)
+
+if __name__ == '__main__':
+ main()
--- /dev/null
+'''
+Created on 21.04.2018
+
+@author: hm
+'''
+
+import os.path
+import shutil
+import time
+import subprocess
+import re
+import net.TcpTaskHandler
+
class FileTcpTaskHandler(net.TcpTaskHandler.TcpTaskHandler):
    '''Handler for file operations requested over a TCP connection.
    For security reasons only files in predefined directories can be processed.
    Most of the tasks answer 'OK' on success or an error message otherwise.
    '''
    def __init__(self, name, baseDirectories, logger, blocksize = 8*1024*1024):
        '''Constructor.
        @param name: if more than one instance exists the client uses this name to address the handler
        @param baseDirectories: a list of base directories; only files in these directory trees can be processed
        @param logger: a logger, type ThreadLogger
        @param blocksize: put and get will transfer blocks with this size
        '''
        self._name = name
        self._directories = baseDirectories
        self._blocksize = blocksize
        net.TcpTaskHandler.TcpTaskHandler.__init__(self, logger)

    def argumentError(self, message, serverThread, exception = None):
        '''Sends an error answer to the client, e.g. on a wrong number of arguments.
        @param message: the error message
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        @param exception: None or the raised exception; its detail text is appended to the answer
        '''
        info = ''
        if exception is not None and len(exception.args) > 1:
            info = ' ' + exception.args[1]
        serverThread.send('+++ "file": ' + message + info)

    def copy(self, args, serverThread):
        '''Copies a file.
        @param args: the command arguments: source and target
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if len(args) < 2:
            self.argumentError('copy: missing target', serverThread)
        elif not self.validPath(args[0]) or not self.validPath(args[1]):
            self.argumentError('copy: invalid path', serverThread)
        else:
            try:
                shutil.copyfile(args[0], args[1], follow_symlinks=False)
                serverThread.send('OK')
            except OSError as exc:
                self.argumentError('copy failed', serverThread, exc)

    def delete(self, args, serverThread):
        '''Deletes a file.
        @param args: the command arguments: source
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if not self.validPath(args[0]):
            self.argumentError('delete: invalid path', serverThread)
        else:
            try:
                os.unlink(args[0])
                serverThread.send('OK')
            except OSError as exc:
                self.argumentError('delete failed', serverThread, exc)

    def diskFree(self, args, serverThread):
        '''Answers with the disk space info of the locally mounted filesystems.
        @param args: the command arguments: not used
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        try:
            with subprocess.Popen('/bin/df', stdout=subprocess.PIPE) as proc:
                data = proc.stdout.read().decode()
                lines = data.split('\n')
                # example: /dev/mapper/ocz1t-home 32896880 15790076 15412700 51% /home
                regExpr = re.compile(r'(\S+)\s+(\d+)\s+(\d+)\s+\d+\s+\d+%\s+(\S+)')
                answer = 'OK\n'
                for line in lines:
                    matcher = regExpr.match(line)
                    if matcher:
                        dev, total, used, mount = matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(4)
                        # pseudo filesystems are not interesting
                        if dev != 'tmpfs' and not mount.startswith('/run/'):
                            answer += '\t'.join([mount, total, used]) + '\n'
                serverThread.send(answer)
        except OSError as exc:
            # fix: formerly the message was a copy&paste leftover ('delete failed')
            self.argumentError('df failed', serverThread, exc)

    def fulfill(self, args, serverThread):
        '''Fulfills the tasks 'file': (move, copy, delete, mkdir, df, put, get).
        @param args: the command arguments. args[0] contains the command
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        @return: True: request is handled False: request is unknown, next chain member should be asked
        '''
        rc = True
        if args[0] == 'file':
            if len(args) < 3:
                serverThread.send('+++ "file": missing arguments')
            elif args[2] == 'df':
                self.diskFree(args[3:], serverThread)
            elif len(args) < 4:
                serverThread.send('+++ "file": missing arguments')
            elif args[1] != self._name:
                # another handler instance is addressed
                rc = False
            elif args[2] == 'move':
                self.move(args[3:], serverThread)
            elif args[2] == 'copy':
                self.copy(args[3:], serverThread)
            elif args[2] == 'delete':
                self.delete(args[3:], serverThread)
            elif args[2] == 'mkdir':
                self.mkDir(args[3:], serverThread)
            elif args[2] == 'get':
                self.get(args[3:], serverThread)
            elif args[2] == 'put':
                self.put(args[3:], serverThread)
            else:
                # fix: formerly "rc = self.argumentError(...)" set rc to None, so the already
                # answered request was wrongly reported as unhandled
                self.argumentError('unknown subcommand: ' + args[2], serverThread)
        else:
            rc = False
        return rc

    def get(self, args, serverThread):
        '''Sends a block of a local file to the client.
        Format of the answer:
        'OK' SIZE OFFSET LENGTH MTIME MODE OWNER GROUP '\n'
        All numbers are hexadecimal. MTIME: microseconds
        @param args: the command arguments: source and offset
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if len(args) < 2:
            self.argumentError('get: offset', serverThread)
        elif not self.validPath(args[0]):
            self.argumentError('get: invalid path', serverThread)
        else:
            try:
                offset = int(args[1])
                info = os.stat(args[0])
                # the last block may be shorter than the blocksize
                length = self._blocksize if info.st_size - offset > self._blocksize else info.st_size - offset
                head = 'OK {:x} {:x} {:x} {:x} {:x} {:x} {:x}\n'.format(info.st_size, offset, length, int(info.st_mtime_ns / 1000), info.st_mode, info.st_uid, info.st_gid)
                with open(args[0], 'rb') as fp:
                    fp.seek(offset)
                    data = head.encode() + fp.read(self._blocksize)
                serverThread.sendBinary(data)
            except Exception as exc:
                self.argumentError('get: file not found', serverThread, exc)

    def mkDir(self, args, serverThread):
        '''Creates a subdirectory.
        @param args: the command arguments: source [mtime mode uid gid]
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        name = args[0]
        if not self.validPath(name):
            self.argumentError('mkdir: invalid path', serverThread)
        else:
            try:
                mode = 0o777 if len(args) < 3 else int(args[2], 16)
                os.makedirs(name, mode, True)
                if len(args) >= 5:
                    atime = int(time.time() * 1000000000)
                    mtime = int(args[1], 16) * 1000
                    os.utime(name, ns=(atime, mtime))
                    os.chmod(name, mode)
                    # fix: formerly os.setuid() was called (which changes the process uid and
                    # does not take a path) - os.chown() sets owner and group of the directory
                    os.chown(name, int(args[3], 16), int(args[4], 16))
                serverThread.send('OK')
            except OSError as exc:
                self.argumentError('mkdir failed', serverThread, exc)

    def move(self, args, serverThread):
        '''Moves a file.
        @param args: the command arguments: source and target
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if len(args) < 2:
            self.argumentError('move: missing target', serverThread)
        elif not self.validPath(args[0]) or not self.validPath(args[1]):
            self.argumentError('move: invalid path', serverThread)
        else:
            try:
                os.replace(args[0], args[1])
                serverThread.send('OK')
            except OSError as exc:
                # fix: formerly the exception detail was not passed on
                self.argumentError('move failed', serverThread, exc)

    def put(self, args, serverThread):
        '''Receives a file from the client.
        @param args: the command arguments: source SIZE OFFSET LENGTH MTIME MODE OWNER GROUP
        All numbers are hexadecimal. MTIME: microseconds
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        name = args[0]
        if len(args) < 8:
            self.argumentError('put: missing group', serverThread)
        elif not self.validPath(args[0]):
            self.argumentError('put: invalid path', serverThread)
        else:
            try:
                # fix: formerly the hexadecimal string was compared with the int 0,
                # so the file was always opened in append mode
                offset = int(args[2], 16)
                mode = 'wb' if offset == 0 else 'ab'
                with open(name, mode) as fp:
                    fp.write(serverThread._firstBlock)
                    while True:
                        data = serverThread.readBinary()
                        if not data:
                            break
                        fp.write(data)
                    fileNo = fp.fileno()
                    os.fchmod(fileNo, int(args[5], 16))
                    os.fchown(fileNo, int(args[6], 16), int(args[7], 16))
                # the modification time must be set after closing (writing would change it again)
                atime = int(time.time() * 1000000000)
                mtime = int(args[4], 16) * 1000
                os.utime(name, ns=(atime, mtime))
                serverThread.send('OK')
            except Exception as exc:
                self.argumentError('put failed', serverThread, exc)

    def validPath(self, path):
        '''Tests whether the path is inside a directory tree specified in _directories[]
        @param path: path to test
        @return: True: path is valid False: invalid path
        '''
        rc = False
        # normalize to neutralize '..' tricks before the prefix test
        path = os.path.normpath(path)
        for aDir in self._directories:
            if path.startswith(aDir):
                rc = True
                break
        return rc
+
+if __name__ == '__main__':
+ pass
\ No newline at end of file
--- /dev/null
+#! /usr/bin/python3
+'''
+Created on 07.04.2018
+
+@author: hm
+'''
+from ftplib import FTP
+import re
+import datetime
+import stat
+import os
+import base.StringUtils
+
+class FtpEngine:
+ def __init__(self, host, port, user, passw, logger, verboseLevel = 1):
+ '''Constructor.
+ @param host: the server name, e.g. 'ftp.hamatoma.de'
+ @param port: the server port
+ @param user: the username for login
+ @param passw: the password for login
+ @param logger: the logger
+ @param verboseLevel: details of logging: 1: summary 2: directory 3: file
+ '''
+ self.sep = '/'
+ self._logger = logger
+ self._ftp = FTP()
+ self._ftp.connect(host, port, 30)
+ self._ftp.login(user, passw)
+ self._printDepth = 0
+ features = self._ftp.sendcmd('FEAT')
+ if features.find('MLST') < 0:
+ self._logger.error ('+++ ftp command MLSD not supported')
+ self._maxDepth = 2048
+ self._utc = datetime.timezone.utc
+ self._verboseLevel = verboseLevel
+ def close(self):
+ '''Closes the FTP connection and frees the resouces.
+ '''
+ self._ftp.close()
+
    def compare(self, ftpDir, localDir, depth):
        '''Compares a local directory tree with a remote FTP directory tree.
        Differences are reported via the logger; nothing is modified.
        @param ftpDir: the remote directory
        @param localDir: the local directory
        @param depth: the depth of the directory
        @return: a tuple (total, fileCount, dirCount)
        '''
        # log the directory name for the start point or when verbose logging is wanted
        if depth == 0 and self._verboseLevel == 1 or self._verboseLevel >= 2:
            self._logger.log('= {:s}:'.format(ftpDir))
        fileCount = 0
        dirCount = 1
        total = 0
        dirs = []
        sepLocal = '' if localDir == '/' else '/'
        sepFtp = '' if ftpDir == '/' else '/'
        localFiles = os.listdir(localDir)
        # MLSD delivers (name, facts) tuples with the requested facts
        for name, facts in self._ftp.mlsd(ftpDir, ['type', 'size', 'modify']):
            if name == '.' or name == '..':
                continue
            try:
                ix = localFiles.index(name)
            except:
                ix = -1
            if ix < 0:
                self._logger.error('+++ missing local file: ' + localDir + sepLocal + name)
            else :
                # each found entry is removed: the remainder is missing on the FTP side
                del(localFiles[ix])
            aType = facts['type']
            if aType == 'dir':
                if depth < self._maxDepth:
                    if name != '.' and name != '..':
                        dirs.append(name)
                        self.testFile(localDir + sepLocal + name, facts)
            elif aType == 'file':
                total += int(facts['size'])
                fileCount += 1
                self.testFile(localDir + sepLocal + name, facts)
            elif aType == 'OS.unix=symlink':
                self.testFile(localDir + sepLocal + name, facts)
        for name in localFiles:
            self._logger.error('+++ missing ftp file: ' + localDir + sepLocal + name)
        # depth first: compare the subdirectories recursively and accumulate the counters
        for node in dirs:
            (diffTotal, diffFiles, diffDirs) = self.compare(ftpDir + sepFtp + node, localDir + sepLocal + node, depth + 1)
            total += diffTotal
            fileCount += diffFiles
            dirCount += diffDirs
        if depth < self._printDepth:
            self._logger.log('{:12d} [{:6d} {:5d}] {:s}'.format(total, fileCount, dirCount, ftpDir))
        return (total, fileCount, dirCount)
+
+ def diskUsage(self, directory, depth):
+ '''Calculate the space used by a given FTP directory.
+ This method uses the MLSD extension of the FTP server.
+ @param directory: name of the directory to inspect
+ @param depth: the depth of the directory relative to the start point
+ @return: a tuple (total, fileCount, dirCount)
+ '''
+ if depth == 0 and self._verboseLevel == 1 or self._verboseLevel >= 2:
+ self._logger.log('= {:s}:'.format(directory))
+ fileCount = 0
+ dirCount = 1
+ total = 0
+ dirs = []
+ infos = self._ftp.mlsd(directory, ['type', 'size'])
+ for name, facts in infos:
+ aType = facts['type']
+ if aType == 'dir':
+ if depth < self._maxDepth:
+ if name != '.' and name != '..':
+ dirs.append(name)
+ pass
+ elif aType == 'file' or aType == 'OS.unix=symlink':
+ total += int(facts['size'])
+ fileCount += 1
+ # print('dirs: ' + ' '.join(dirs))
+ separator = '' if directory == '/' else '/'
+ for node in dirs:
+ (diffTotal, diffFiles, diffDirs) = self.diskUsage(directory + separator + node, depth + 1)
+ total += diffTotal
+ fileCount += diffFiles
+ dirCount += diffDirs
+ if depth < self._printDepth:
+ print('{:d} [{:d} {:d}] {:s}'.format(total, fileCount, dirCount, directory))
+ return (total, fileCount, dirCount)
+
+ def diskUsage2(self, directory, depth):
+ '''Calculate the space used by a given FTP directory.
+ This method uses the LIST command which is always available.
+ @param directory: name of the directory to inspect
+ @param depth: the depth of the directory relative to the start point
+ @return: a tuple (total, fileCount, dirCount)
+ '''
+ if depth == 0 and self._verboseLevel == 1 or self._verboseLevel >= 2:
+ self._logger.log('= {:s}:'.format(directory))
+ self._dirs += 1
+ lines = []
+ self._ftp.retrlines('LIST ' + directory, lines.append)
+ sizes = 0
+ dirs = []
+ for line in lines:
+ matcher = re.match('(\S+)\s+\d+\s+\d+\s+\d+\s+(\d+)\s+\S+\s+\S+\s+\S+\s+(.*)', line)
+ if matcher:
+ flags = matcher.group(1)
+ size = matcher.group(2)
+ name = matcher.group(3)
+ if flags[0] == 'd':
+ if depth < self._maxDepth:
+ if name != '.' and name != '..':
+ dirs.append(name)
+ elif flags[0] == '-':
+ sizes += int(size)
+ self._files += 1
+ # print('dirs: ' + ' '.join(dirs))
+ if directory == '/':
+ for node in dirs:
+ sizes += self.diskUsage2(directory + node, depth + 1)
+ else:
+ for node in dirs:
+ sizes += self.diskUsage2(directory + '/' + node, depth + 1)
+ return sizes
+
+ def listFile(self, name, facts):
+ '''Lists one file.
+ @param name: the full filename
+ @param facts: the file's properties
+ '''
+ dt = facts['modify']
+ aType = facts['type']
+ if aType == 'file':
+ size = int(facts['size'])
+ size = base.StringUtils.formatSize(size)
+ self._logger.log('{:10s} {}.{}.{}/{}:{}:{} {:s}'.format(size, dt[0:4], dt[4:6], dt[6:8], dt[8:10], dt[10:12], dt[12:14], name))
+ elif aType == 'dir':
+ self._logger.log('{:10s} {}.{}.{}/{}:{}:{} {:s}'.format('<dir>', dt[0:4], dt[4:6], dt[6:8], dt[8:10], dt[10:12], dt[12:14], name))
+ elif aType == 'OS.unix=symlink':
+ self._logger.log('{:10s} {}.{}.{}/{}:{}:{} {:s}'.format('<link>', dt[0:4], dt[4:6], dt[6:8], dt[8:10], dt[10:12], dt[12:14], name))
+ else:
+ self._logger.log('{:10s} {}.{}.{}/{}:{}:{} {:s}'.format('<' + aType + '>', dt[0:4], dt[4:6], dt[6:8], dt[8:10], dt[10:12], dt[12:14], name))
+
+ def listTree(self, ftpDir, depth):
+ '''Compares a local directory tree with a remote FTP directory tree.
+ @param ftpDir: the remote directory
+ @param depth: the depth of the directory
+ @return: a tuple (total, fileCount, dirCount)
+ '''
+ if depth == 0 and self._verboseLevel == 1 or self._verboseLevel >= 2:
+ self._logger.log('= {:s}:'.format(ftpDir))
+ fileCount = 0
+ dirCount = 1
+ total = 0
+ dirs = []
+ sepFtp = '' if ftpDir == '/' else '/'
+ for name, facts in self._ftp.mlsd(ftpDir, ['type', 'size', 'modify']):
+ if name == '.' or name == '..':
+ continue
+ aType = facts['type']
+ if aType == 'dir':
+ if depth < self._maxDepth:
+ if name != '.' and name != '..':
+ dirs.append(name)
+ self.listFile(ftpDir + sepFtp + name, facts)
+ elif aType == 'file':
+ total += int(facts['size'])
+ fileCount += 1
+ self.listFile(ftpDir + sepFtp + name, facts)
+ elif aType == 'OS.unix=symlink':
+ self.listFile(ftpDir + sepFtp + name, facts)
+ for node in dirs:
+ (diffTotal, diffFiles, diffDirs) = self.listTree(ftpDir + sepFtp + node, depth + 1)
+ total += diffTotal
+ fileCount += diffFiles
+ dirCount += diffDirs
+ return (total, fileCount, dirCount)
+
+ def removeTree(self, directory):
+ '''Deletes a directory with all its files and subdirectories.
+ @param directory: the directory to remove
+ @return a tuple (bytes, dirCount, fileCount)
+ '''
+ self._logger.log('= {:s}:'.format(directory), 2)
+ fileCount = 0
+ dirCount = 1
+ total = 0
+ dirs = []
+ try:
+ infos = self._ftp.mlsd(directory, ['type', 'size'])
+ for name, facts in infos:
+ aType = facts['type']
+ if aType == 'dir':
+ if name != '.' and name != '..':
+ dirs.append(name)
+ pass
+ elif aType == 'file' or aType == 'OS.unix=symlink':
+ full = directory + self.sep + name
+ try:
+ self._ftp.delete(full)
+ self._logger.log(full, 3)
+ except Exception as exc:
+ self._logger.error('cannot delete {:s}: {:s}'.format(full, str(exc)))
+ total += int(facts['size'])
+ fileCount += 1
+ # print('dirs: ' + ' '.join(dirs))
+ separator = '' if directory == self.sep else self.sep
+ for node in dirs:
+ full = directory + separator + node
+ (diffTotal, diffFiles, diffDirs) = self.removeTree(full)
+ total += diffTotal
+ fileCount += diffFiles
+ dirCount += diffDirs
+ except Exception as exc:
+ self._logger.error('removeTree() failed: ' + str(exc))
+ if directory != '/':
+ try:
+ self._ftp.rmd(directory)
+ except Exception as exc:
+ self._logger.error('cannot delete directory {:s}: {:s}'.format(directory, str(exc)))
+ return (total, fileCount, dirCount)
+
+ def testFile(self, localFile, facts):
+ '''Tests a local file against facts given from the FTP server
+ @param localFile: the name of the local file
+ @param facts: the data of the FTP file
+ '''
+ self._logger.log(localFile, 3)
+ try:
+ statInfo = os.lstat(localFile)
+ if stat.S_ISDIR(statInfo.st_mode):
+ localType = 'dir'
+ elif stat.S_ISLNK(statInfo.st_mode):
+ localType = 'OS.unix=symlink'
+ elif stat.S_ISREG(statInfo.st_mode):
+ localType = 'file'
+ else:
+ localType = 'unknown';
+ if localType != facts['type']:
+ self._logger.error('+++ different type: {:s} / {:s}: {:s}'.format(localType, facts['type'], localFile))
+ else:
+ if localType != 'dir':
+ size = int(facts['size'])
+ if statInfo.st_size != size:
+ print('+++ different sizes: {:d} / {:d} '.format(statInfo.st_size, size) + localFile)
+ localDate = datetime.datetime.fromtimestamp(statInfo.st_mtime, self._utc)
+ localModified = localDate.strftime('%Y%m%d%H%M%S')
+ ftpModified = facts['modify'][0:14]
+ if localModified != ftpModified:
+ self._logger.error('+++ different modified date: {:s} / {:s} '.format(localModified, ftpModified))
+ except OSError:
+ self._logger.error('+++ missing local: ' + localFile)
+
+ def serverInfo(self):
+ '''Logs some FTP server info.
+ '''
+ self._logger.log(self._ftp.getwelcome())
+ self._logger.log('Features:')
+ self._logger.log(self._ftp.sendcmd('FEAT'))
--- /dev/null
+'''
+Created on 17.05.2018
+
+@author: hm
+'''
+
+import http.client
+import urllib.parse
+import base.Logger
+import ssl
+import urllib3
+
class HttpClient:
    '''Implements a HTTP or HTTPS client with optional following of relocations.
    '''
    def __init__(self, verboseLevel, logger):
        '''Constructor.
        @param verboseLevel: > 1: each request is logged
        @param logger: the logger, type Logger
        '''
        self._logger = logger
        self._verboseLevel = verboseLevel
        # body of the last GET request (bytes) or None
        self._data = None
        # last response object or None (no response / error)
        self._response = None
        self._pool = urllib3.PoolManager()

    def close(self):
        '''Frees the resources.
        '''
        pass

    def _handleSingleRequest(self, url, method, timeout=10):
        '''Handles a single HTTP request (not following relocations).
        Sets self._response and (for GET) self._data.
        @param url: the URL of the website
        @param method: the request method, e.g. 'HEAD'
        @param timeout: timeout of the request in seconds
        '''
        self._data = None
        self._response = None
        try:
            # retries=False: relocations are followed by handleRequest() itself
            self._response = self._pool.request(method, url, timeout=timeout, retries=False)
            if method == 'GET':
                self._data = self._response.read()
                # some urllib3 versions deliver the body only in the data attribute
                if self._data == b'' and self._response.data is not None:
                    self._data = self._response.data
        except Exception as exc:
            self._logger.error('error on processing [{}] {}: {}'.format(method, url, str(exc)))
        if self._verboseLevel > 1:
            if self._response is not None:
                self._logger.debug('url: {} status: {} reason: {}'.format(url, self._response.status, self._response.reason))
            else:
                self._logger.debug('url: {}: no response'.format(url))

    def _handleSingleRequest2(self, url, method, timeout=10):
        '''Handles a single HTTP request using http.client (not following relocations).
        @param url: the URL of the website
        @param method: the request method, e.g. 'HEAD'
        @param timeout: connect timeout in seconds
        '''
        parts = urllib.parse.urlparse(url)
        netloc = parts.netloc
        self._data = None
        self._response = None
        # reusing connection ("pipelining") is not supported by http.client
        if parts.scheme == 'https':
            # certificate errors are deliberately ignored: only availability is checked
            context = ssl._create_unverified_context()
            connection = http.client.HTTPSConnection(netloc, timeout=timeout, context=context)
        elif parts.scheme == 'http':
            connection = http.client.HTTPConnection(netloc, timeout=timeout)
        else:
            # bug fix: formerly any other scheme raised UnboundLocalError
            self._logger.error('unsupported scheme [{}]: {}'.format(parts.scheme, url))
            return
        path = '/' if parts.path == '' else parts.path
        if parts.query != '':
            # keep the query part: take path+query directly from the url
            ix = url.find(path)
            if ix > 0:
                path = url[ix:]
        try:
            connection.request(method, path)
            self._response = connection.getresponse()
            if method == 'GET':
                self._data = self._response.read()
        except Exception as exc:
            self._logger.error('error on processing [{}] {}: {}'.format(method, url, str(exc)))
        connection.close()
        if self._response is not None:
            self._logger.log('url: {} status: {} reason: {}'.format(url, self._response.status, self._response.reason), 2)
        else:
            self._logger.log('url: {}: no response'.format(url), 2)

    def getContent(self, url, relocationCount = 0):
        '''Returns the content of a website.
        @param url: the URL of the website
        @param relocationCount: number of relocations to follow
        @return: None (on error) or the html content
        '''
        self.handleRequest(url, 'GET', relocationCount)
        return self._data

    def getHead(self, url, relocationCount = 5):
        '''Returns the content type of a website (taken from the response header).
        @param url: the URL of the website
        @param relocationCount: number of relocations to follow
        @return: '' or the content-type
        '''
        self.handleRequest(url, 'HEAD', relocationCount)
        rc = self.getHeaderField('content-type', '')
        return rc

    def getHeaderField(self, field, defaultValue = None):
        '''Gets the value of a header field of the last response.
        @param field: name of the header field (case insensitive), e.g. 'content-length'
        @param defaultValue: returned if the field is unknown
        @return: defaultValue: the field is unknown or there is no response
        otherwise: the value of the field
        '''
        rc = defaultValue
        # bug fix: formerly an AttributeError was raised when no response exists
        if self._response is not None:
            for key in self._response.headers.keys():
                if key.lower() == field.lower():
                    rc = self._response.headers[key]
                    break
        return rc

    def getRealUrl(self, url):
        '''Returns the first not relocated URL of a given URL.
        @param url: URL to inspect
        @return: <url>: url is not relocated otherwise: the first not relocated URL of a chain
        '''
        self._handleSingleRequest(url, 'HEAD')
        while self._response is not None and 300 < self._response.status < 400:
            url = self.getHeaderField('location', '')
            self._handleSingleRequest(url, 'HEAD')
        return url

    def handleRequest(self, url, method, relocationCount):
        '''Handles a HTTP request, following up to relocationCount relocations.
        @param url: the URL of the website
        @param method: the request method, e.g. 'HEAD'
        @param relocationCount: number of relocations to follow
        @return: url of the end of the relocation chain
        '''
        self._handleSingleRequest(url, method)
        # 499: artificial status meaning "no response at all"
        status = 499 if self._response is None else self._response.status
        while relocationCount > 0 and 301 <= status < 400:
            relocationCount -= 1
            url = self.getHeaderField('location', '')
            self._handleSingleRequest(url, method)
            status = 499 if self._response is None else self._response.status
        return url
+
if __name__ == '__main__':
    # manual smoke test: needs network access
    logger = base.Logger.Logger('/tmp/httpclient.log', True)
    client = HttpClient(1, logger)
    url = 'https://wiki.hamatoma.de'
    data = client.getHead(url)
    # NOTE(review): getHead() returns the content-type, not the content-length — the label is misleading
    print('content-length of {}: {}'.format(url, data))
    client.close()
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 21.04.2018
+
+@author: hm
+'''
+
import os.path
import time

import base.LinuxUtils
import base.StringUtils
import net.TcpTaskHandler
+
class LinuxTcpTaskHandler(net.TcpTaskHandler.TcpTaskHandler):
    '''Handler for linux specific operations: system info and file transfer.
    '''
    def __init__(self, logger):
        '''Constructor.
        @param logger: a logger, type ThreadLogger
        '''
        net.TcpTaskHandler.TcpTaskHandler.__init__(self, logger)

    def argumentError(self, message, serverThread, exception = None):
        '''Handles an argument error, e.g. wrong number of arguments.
        @param message: the error message
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        @param exception: None or the raised exception
        '''
        info = ''
        if exception is not None and len(exception.args) > 1:
            info = ' ' + exception.args[1]
        serverThread.send('+++ "file": ' + message + info)

    def fulfill(self, args, serverThread):
        '''Fulfills the tasks 'diskfree', 'users', 'load' and 'memoryinfo'.
        @param args: the command arguments. args[0] contains the command
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        @return: True: request is handled False: request is unknown, next chain member should be asked
        '''
        rc = True
        if args[0] == 'diskfree':
            infos = base.LinuxUtils.diskFree()
            answer = 'OK\n'
            for info in infos:
                # bug fix: formerly the whole list (infos) was joined for every row
                answer += base.StringUtils.join('\t', info) + '\n'
            serverThread.send(answer)
        elif args[0] == 'users':
            infos = base.LinuxUtils.users()
            answer = 'OK\n'
            for info in infos:
                answer += base.StringUtils.join('\t', info) + '\n'
            serverThread.send(answer)
        elif args[0] == 'load':
            info = base.LinuxUtils.load()
            answer = 'OK\n' + base.StringUtils.join('\t', info)
            # bug fix: the answer was built but never sent
            serverThread.send(answer)
        elif args[0] == 'memoryinfo':
            info = base.LinuxUtils.memoryInfo()
            answer = 'OK\n' + base.StringUtils.join('\t', info)
            serverThread.send(answer)
        else:
            rc = False
        return rc

    def get(self, args, serverThread):
        '''Copies a local file to the client.
        Format of the answer:
        'OK' '\n' SIZE OFFSET LENGTH MTIME MODE OWNER GROUP '\n' <data>
        All numbers are hexadecimal. MTIME: microseconds
        @param args: the command arguments: source and offset
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if len(args) < 2:
            self.argumentError('get: offset', serverThread)
        elif not self.validPath(args[0]):
            self.argumentError('get: invalid path', serverThread)
        else:
            try:
                offset = int(args[1])
                info = os.stat(args[0])
                # NOTE(review): _blocksize is expected to be provided by the base class / configuration — verify
                length = self._blocksize if info.st_size - offset > self._blocksize else info.st_size - offset
                head = 'OK\n{:x} {:x} {:x} {:x} {:x} {:x} {:x}\n'.format(info.st_size, offset, length, int(info.st_mtime_ns / 1000), info.st_mode, info.st_uid, info.st_gid)
                with open(args[0], 'rb') as fp:
                    fp.seek(offset)
                    data = head.encode() + fp.read(self._blocksize)
                serverThread.sendBinary(data)
            except Exception as exc:
                self.argumentError('get: file not found', serverThread, exc)

    def mkDir(self, args, serverThread):
        '''Creates a subdirectory (including missing parents).
        @param args: the command arguments: source [mtime mode uid gid] (hexadecimal numbers)
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        name = args[0]
        if not self.validPath(name):
            self.argumentError('mkdir: invalid path', serverThread)
        else:
            try:
                mode = 0o777 if len(args) < 3 else int(args[2], 16)
                os.makedirs(name, mode, True)
                if len(args) >= 5:
                    atime = int(time.time() * 1000000000)
                    mtime = int(args[1], 16) * 1000
                    os.utime(name, ns=(atime, mtime))
                    os.chmod(name, mode)
                    # bug fix: formerly os.setuid() (wrong function, wrong arity) was used
                    os.chown(name, int(args[3], 16), int(args[4], 16))
                serverThread.send('OK')
            except OSError as exc:
                self.argumentError('mkdir failed', serverThread, exc)

    def move(self, args, serverThread):
        '''Moves (renames) a file.
        @param args: the command arguments: source and target
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        if len(args) < 2:
            self.argumentError('move: missing target', serverThread)
        elif not self.validPath(args[0]) or not self.validPath(args[1]):
            self.argumentError('move: invalid path', serverThread)
        else:
            try:
                os.replace(args[0], args[1])
                serverThread.send('OK')
            except OSError as exc:
                self.argumentError('move failed', serverThread, exc)

    def put(self, args, serverThread):
        '''Receives a file from the client.
        @param args: the command arguments: source SIZE OFFSET LENGTH MTIME MODE OWNER GROUP (hexadecimal numbers)
        @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
        '''
        name = args[0]
        if len(args) < 8:
            self.argumentError('put: missing group', serverThread)
        elif not self.validPath(name):
            self.argumentError('put: invalid path', serverThread)
        else:
            try:
                # bug fix: the offset was compared as string with 0, so append mode was always used
                offset = int(args[2], 16)
                mode = 'wb' if offset == 0 else 'ab'
                # with-statement: the file is closed even on error (formerly leaked)
                with open(name, mode) as fp:
                    # the first data block arrives together with the command line
                    fp.write(serverThread._firstBlock)
                    while True:
                        data = serverThread.readBinary()
                        if not data:
                            break
                        fp.write(data)
                    atime = int(time.time() * 1000000000)
                    mtime = int(args[4], 16) * 1000
                    fileNo = fp.fileno()
                    os.fchmod(fileNo, int(args[5], 16))
                    os.fchown(fileNo, int(args[6], 16), int(args[7], 16))
                os.utime(name, ns=(atime, mtime))
                serverThread.send('OK')
            except Exception as exc:
                self.argumentError('put failed', serverThread, exc)

    def validPath(self, path):
        '''Tests whether the path is inside a directory tree specified in _directories[].
        @param path: path to test
        @return: True: path is valid False: invalid path
        '''
        path = os.path.normpath(path)
        # NOTE(review): _directories is expected to be provided by the base class / configuration — verify
        for aDir in self._directories:
            if path.startswith(aDir):
                return True
        return False
+
if __name__ == '__main__':
    # library module: nothing to do when started directly
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 14.04.2018
+
+@author: hm
+'''
+import socket
+from base.Logger import Logger
+import traceback
+import base.StringUtils
+
class TcpClient:
    '''A client for communication with TcpServer.
    The connection is opened in the constructor and closed by close().
    '''
    def __init__(self, host, port, logger):
        '''Constructor: opens the connection to the server.
        @param host: the host of the server
        @param port: the port of the server
        @param logger: the logger
        '''
        self._host = host
        self._port = port
        self._logger = logger
        self._verbose = True
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.connect((self._host, self._port))

    def send(self, command, args = None):
        '''Sends a command with arguments without waiting for an answer.
        @param command: command name
        @param args: arguments as list
        '''
        message = command
        if args is not None:
            message += '\t' + base.StringUtils.join('\t', args)
        message += '\n'
        try:
            self._socket.sendall(message.encode())
        except Exception as exc:
            self._logger.error(exc.__class__.__name__ + ': ' + base.StringUtils.join(' ', exc.args))
            traceback.print_exc(3)

    def sendAndReceive(self, command, args = None):
        '''Sends a command with arguments and waits for the answer.
        @param command: command name
        @param args: arguments as list
        @return: answer from the server
        '''
        parts = [command]
        if args is not None:
            parts.extend(str(item) for item in args)
        # request format: command and arguments separated by tabs, terminated by newline
        data = ('\t'.join(parts) + '\n').encode()
        try:
            self._socket.sendall(data)
            data = self._socket.recv(0x10000).decode()
        except Exception as exc:
            self._logger.error(exc.__class__.__name__ + ': ' + base.StringUtils.join(' ', exc.args))
        return data

    def close(self):
        '''Closes the connection and invalidates the socket.
        '''
        self._socket.close()
        self._socket = None
+
if __name__ == '__main__':
    # manual smoke test: needs a running TcpServer on localhost:58112
    logger = Logger('/tmp/client.log', 1)
    client = TcpClient('localhost', 58112, logger)
    data = client.sendAndReceive('echo', ['A', 1, 'C'])
    logger.log('echo answer: ' + str(data))
    data2 = client.sendAndReceive('time')
    logger.log('time answer: ' + data2)
    data = client.sendAndReceive('echo', ['B', 2, 'C'])
    logger.log('echo answer: ' + str(data))
    # 'bye' requests the server halt, 'stop' finishes the server thread
    client.send('bye')
    client.send('stop')
    client.close()
    print('starting client2:')
    # a second connection is needed to let the server leave its blocking accept() call
    client2 = TcpClient('localhost', 58112, logger)
    client2.send('stop')
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+
import socket
import sys
import threading
import traceback

import base.StringUtils
import base.ThreadLogger
from net.TcpTaskHandler import TcpTaskHandler
+
class ServerThread(threading.Thread):
    '''Handles a connection to one client in an own thread.
    '''
    def __init__(self, threadId, ip, port, clientSocket, server, verboseLevel = 0):
        '''Constructor.
        @param threadId: a unique number of the thread
        @param ip: the client address
        @param port: the client port
        @param clientSocket: the client specific socket
        @param server: the 'parent' of the thread, type TcpServer
        @param verboseLevel: detail level of logging 0: none 1: summary 2: normal 3: often
        '''
        self._id = threadId
        self._ip = ip
        self._verboseLevel = verboseLevel
        self._port = port
        self._server = server
        self._socket = clientSocket
        # set to True (e.g. by a task handler) to finish the thread after the current request
        self._shouldStop = False
        # the current request line (string, without trailing newline)
        self._request = None
        # rest of the first received chunk behind the request line (bytes), e.g. file data for 'put'
        self._firstBlock = None
        self._locker = threading.Lock()
        server._logger.log('{:d}: thread started for {:s}:{:d}'.format(threadId, ip, port), 1)
        threading.Thread.__init__(self, name='T{:03d}'.format(threadId))

    def lock(self):
        '''Locks the thread for exclusive access.
        '''
        self._locker.acquire()

    def send(self, data):
        '''Sends an answer to the client.
        @param data: the data to send, type: string
        '''
        self._socket.send(data.encode())

    def sendBinary(self, data):
        '''Sends an answer to the client.
        @param data: the data to send, type: bytes
        '''
        self._socket.send(data)

    def run(self):
        '''Does the job of a thread.
        Receives requests from the client and forwards each to the task handler chain
        until a handler sets _shouldStop or an empty request arrives.
        '''
        self._server._logger.log('Starting thread {:d}'.format(self._id), 1)
        separator = '\n'.encode()
        # inform the handler chain about the new connection
        self._server._handlerChain.fulfill(['threadstart'], self)
        while not self._shouldStop :
            try:
                data = self._socket.recv(self._server._bufferSize)
                # the request is the part up to the first newline; the rest is binary payload
                ix = data.find(separator)
                self._request = data[0:ix].decode()
                if ix < len(data) - 1:
                    self._firstBlock = data[ix+1:]
                if self._request == '':
                    self._server._logger.error('empty command. aborting...')
                    break
                # the arguments are separated by tabs; args[0] is the command
                if self._request.find('\t') < 0:
                    args = [self._request]
                else:
                    args = self._request.split('\t')
                # NOTE(review): base.StringUtils is not imported in this module — verify
                self._server._logger.log('Thread {}: received: {}'.format(self._id, base.StringUtils.limitItemLength(args, 40)), 3)
                handler = self._server._handlerChain
                # walk the handler chain until one handler accepts the request
                while not handler.fulfill(args, self):
                    handler = handler._nextHandler
                    if handler == None:
                        self._server._logger.error('unknown request (no handler found) from {:s}:{:d} {}'.format(self._ip, self._port, self._request[0:50]))
                        break
            except Exception as exc:
                # NOTE(review): re-raising terminates the thread on any handler error — confirm intended
                self._server._logger.error('exception {:s} raised:'.format(exc.__class__.__name__))
                traceback.print_exc(2)
                raise exc
        self._server._handlerChain.fulfill(['threadend'], self)
        self._socket.close()
        self._socket = None
        self._server._logger.log('Thread {:d}: finished'.format(self._id), 2)

    def unlock(self):
        '''Ends the locking.
        '''
        self._locker.release()
+
class TcpServer:
    '''Implements a multithreaded TCP server.
    Note: the constructor does not return until the server halts: it enters the accept loop.
    '''
    def __init__(self, port, logger, handler, host='0.0.0.0', verboseLevel=0):
        '''Constructor. Enters the accept loop (blocking).
        @param port: port for listening
        @param logger: logging unit, type ThreadLogger
        @param handler: the task handler (start of the handler chain), type TcpTaskHandler
        @param host: listening host
        @param verboseLevel: detail level of logging 0: none 1: summary 2: normal 3: often
        '''
        if sys.version_info[0] < 3:
            logger.error('python version 3 expected, found: ' + str(sys.version_info[0]))
        self._port = port
        self._host = host
        self._logger = logger
        self._handlerChain = handler
        self._verboseLevel = verboseLevel
        self._connection = None
        # maximal size of one received chunk
        self._bufferSize = 64*1024
        # set to True (e.g. by the 'bye' task) to leave the accept loop
        self._shouldHalt = False
        logger.info('listening on {:s}:{:d}'.format(host, port))
        self.listen()

    def listen(self):
        '''Accept loop: waits for client connections and starts one ServerThread per client.
        Finished threads are removed from the bookkeeping list; on halt all threads are joined.
        '''
        tcpServer = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # allows a quick restart of the server (no wait for TIME_WAIT sockets)
        tcpServer.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        tcpServer.bind((self._host, self._port))
        threads = []
        threadId = 0

        while not self._shouldHalt:
            threadId += 1
            tcpServer.listen(4)
            self._logger.log('Multithreaded Python server : Waiting for connections from TCP clients...', 1)
            (clientSocket, (ip, port)) = tcpServer.accept()
            newthread = ServerThread(threadId, ip, port, clientSocket, self, self._verboseLevel)
            newthread.start()
            # remove already finished threads; iterate backwards so deletions keep indexes valid
            maxIx = len(threads) - 1
            for ix in range(maxIx + 1):
                if not threads[maxIx - ix].is_alive():
                    self._logger.log('removed from threads: {:d}'.format(threads[maxIx - ix]._id), 3)
                    del threads[maxIx - ix]
            threads.append(newthread)
        self._logger.log('waiting for finish of all ({:d}) threads ...'.format(len(threads)), 2)
        for t in threads:
            t.join()
        tcpServer.close()
+
if __name__ == '__main__':
    # manual smoke test: starts a server on port 58112 (blocks until 'bye' is received)
    logger = base.ThreadLogger.ThreadLogger('/tmp/TcpServer.log', True)
    handler = TcpTaskHandler(logger)
    server = TcpServer(58112, logger, handler)
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
+
+import datetime
+
+class TcpTaskHandler:
+ '''Implements a base class which "knows" one ore more commands sent to the server.
+ Derived classes must implement fulfill(self, args, serverThread).
+ fulfill() should use args[0] to identify the current command.
+ Writing an answer is done by using serverThread.send()
+ This base class knows the commands 'echo', 'time', 'stop' and 'bye'
+ Some task handlers can build a chain to handle many commands with different handlers: use setNextHandler()
+ '''
+ def __init__(self, logger):
+ self._nextHandler = None
+ self._logger = logger
+ self._verboseLevel = 0
+
+ def fulfill(self, args, serverThread):
+ '''FulFills the tasks "echo", "time", "stop" and "bye".
+ @param args: the command arguments. args[0] contains the command
+ @param serverThread: the handler with type ServerThread or derived. Used for writing an answer to the client
+ @return: True: request is handled False: request is unknown, next chain member should be asked
+ '''
+ rc = True
+
+ if args[0] == 'time':
+ now = datetime.datetime.now()
+ serverThread.send( now.strftime('%Y.%m.%d %H:%M:%S %s'))
+ elif args[0] == 'echo':
+ serverThread.send("\t".join(args[1:]))
+ elif args[0] == 'stop':
+ serverThread._shouldStop = True
+ elif args[0] == 'bye':
+ serverThread._server._shouldHalt = True
+ else:
+ rc = False
+ return rc
+
+ def setNextHandler(self, handler):
+ '''Sets the next handler in the handler chain.
+ @param handler: the next handler, derived from TcpBaseStreamHandler
+ '''
+ if handler != None:
+ handler.setHandler(self._nextHandler)
+ self._nextHandler = handler
+
+ def close(self):
+ '''Finishes the work: all resources will be freed.
+ '''
+ pass
+
+ def setVerboseLevel(self, level, recursive=True):
+ '''Sets the detail level of logging.
+ @param level: the new level
+ @param recursive: True: all handlers in the chain should set this level
+ '''
+ self._verboseLevel = level
+ if recursive and self._nextHandler != None:
+ self._nextHandler.setVerboseLevel(level, True)
+
if __name__ == '__main__':
    # library module: nothing to do when started directly
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
+
class PackageTest:
    '''Aggregates the results of several unit test modules of one package.
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: the name of the package
        '''
        self._name = name
        # number of executed test modules
        self._modules = 0
        # accumulated error count of all modules
        self._errors = 0
        # accumulated assert count of all modules
        self._asserts = 0

    def run(self, clazz):
        '''Instantiates one test class, runs it and accumulates its results.
        @param clazz: the test class (not an instance)
        '''
        instance = clazz()
        instance.run()
        self._modules += 1
        self._errors += instance._errors
        self._asserts += instance._asserts

    def _finish(self):
        '''Prints the summary of all executed test modules.
        '''
        print('= {:s}: {:d} modules with {:d} asserts and {:d} error(s)'.format(self._name, self._modules, self._asserts, self._errors))
+
if __name__ == '__main__':
    # library module: nothing to do when started directly
    pass
\ No newline at end of file
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+
+import re
+import traceback
+import os.path
+import tempfile
+import posix
+import fnmatch
+
+import base.FileHelper
+import base.BaseTool
+import fnmatch
+
class UnitTestAppInfo:
    '''Minimal application stand-in for unit tests: carries the usage function only.
    '''
    def __init__(self, usage):
        '''Constructor.
        @param usage: the function/data displaying the usage message
        '''
        self._usage = usage
+
+class UnitTestCase:
    def __init__(self):
        '''Constructor.
        Initializes the counters and loggers and switches the global unit test mode on.
        '''
        # real (numeric) user id of the current process
        self._userId = posix.getuid()
        self._isRoot = self._userId == 0
        self._inTestSuite = False
        # number of failed asserts
        self._errors = 0
        # name of the concrete (derived) test class
        self._name = type(self).__name__
        # number of executed asserts
        self._asserts = 0
        # NOTE(review): self.usage is expected to be defined by this class or a subclass — verify
        self._application = UnitTestAppInfo(self.usage)
        # global switch: tells BaseTool that a unit test is running
        base.BaseTool.baseToolUnitTestActive = True
        # NOTE(review): base.MemoryLogger is not in the visible import block — verify
        self._logger = base.MemoryLogger.MemoryLogger(True)
        self._silentLogger = base.MemoryLogger.MemoryLogger(False)
+
+ def _describeDifference(self, str1, str2, prefix = ''):
+ '''Logs the difference of two strings.
+ @param str1: first string to compare
+ @param str2: second string to compare
+ @param prefix: '' or a prefix
+ @return: a string describing the difference
+ '''
+ count1 = len(str1)
+ count2 = len(str2)
+ ixDiff = -1
+ for ix in range(min(count1, count2)):
+ if str1[ix] != str2[ix]:
+ ixDiff = ix
+ break;
+ if ixDiff != -1:
+ rc = '+++ {:s}different at pos {:d}: {:s}/{:s}\n'.format(prefix, ixDiff + 1, str1[ixDiff:ixDiff+5], str2[ixDiff:ixDiff+5])
+ elif count1 > count2:
+ rc = '+++ expected is longer:\n'
+ else:
+ rc = '+++ expected is shorter:\n'
+ rc += str1 + '\n' + str2
+ if ixDiff != -1:
+ rc += '\n' + ('=' * ixDiff) + '^'
+ pass
+ return rc
+
+ def assertDirExists(self, current):
+ '''Tests whether a given directory exists.
+ @param current: the directory to inspect
+ @returns: True: the directory exists
+ '''
+ self._asserts += 1
+ rc = True
+ if not os.path.exists(current):
+ rc = self.error('directory does not exist: ' + current)
+ elif not os.path.isdir(current):
+ rc = self.error('file {} exists but it is not a directory'.format(current))
+ return rc
+
+ def assertEquals(self, expected, current):
+ '''Tests whether two values are equal.
+ @param expected: the expected value
+ @param current: the value to test
+ @return: True: values are equal
+ '''
+ rc = False
+ self._asserts += 1
+ if type(expected) != type(current):
+ self.error('different types: ' + str(type(expected)) + ' / ' + str(type(current)))
+ else:
+ rc = expected == current
+ if not rc:
+ if type(expected) is str:
+ if expected.find('\n') < 0:
+ self.error(self._describeDifference(expected, current))
+ else:
+ listExpected = expected.splitlines()
+ listCurrent = current.splitlines()
+ ixDiff = -1
+ for ix in range(min(len(listExpected), len(listCurrent))):
+ if listExpected[ix] != listCurrent[ix]:
+ self.error(self._describeDifference(listExpected[ix], listCurrent[ix], 'in line {:d}: '.format(ix + 1)))
+ ixDiff = ix
+ break
+ if ixDiff == -1 and len(listExpected) != len(listCurrent):
+ if len(listExpected) < len(listCurrent):
+ msg = 'expected has fewer lines: {:d}/{:d}\n'.format(len(listExpected), len(listCurrent)) + listCurrent[len(listExpected)]
+ else:
+ msg = 'expected has more lines: {:d}/{:d}\n'.format(len(listExpected), len(listCurrent)) + listExpected[len(listCurrent)]
+ self.error(msg)
+ elif type(expected) is int:
+ self.error('different: {:d}/{:d} [0x{:x}/0x{:x}]'.format(expected, current, expected, current))
+ else:
+ self.error('different: {:s} / {:s}'.format(str(expected), str(current)))
+ return rc
+
+ def assertFalse(self, current):
+ '''Tests whether a value is False.
+ @param current: value to test
+ @returns: True: the value is False
+ '''
+ rc = True
+ self._asserts += 1
+ if current != False:
+ rc = self.error('+++ False expected, found: ' + str(current))
+ return rc
+
+ def assertFileContent(self, expectedFile, currentContent):
+ '''Compares a given string with an expected file content.
+ @param expectedFile: the name of the file with the expected content, e.g. 'data/abc.xyz'
+ @param currentContent: the content to compare
+ @returns: True: file content is the expected
+ '''
+ rc = True
+ self._asserts += 1
+ full = expectedFile if expectedFile.startswith(os.sep) else os.path.dirname(__file__) + os.sep + expectedFile
+ if not os.path.exists(full):
+ rc =self.error('missing file: ' + expectedFile)
+ else:
+ with open(full, 'r') as fp:
+ expected = fp.read()
+ rc = self.assertEquals(expected, currentContent)
+ if not rc:
+ tempFile = self.tempFile(os.path.basename(expectedFile))
+ with open(tempFile, "w") as fp:
+ fp.write(currentContent)
+ self.log('meld {} {}'.format(tempFile, full))
+ return rc
+
+ def assertFileContains(self, expected, currentFile):
+ '''Tests whether a given file contains a given content.
+ @param expected: content to search. May be a string or a re.RegExpr instance
+ @param currentFile: file to inspect
+ @returns: True: the file contains the expected string
+ '''
+ rc = True
+ self._asserts += 1
+ if not os.path.isfile(currentFile):
+ rc = self.error('missing file ' + currentFile)
+ else:
+ found = False
+ with open(currentFile) as fp:
+ # lineNo = 0
+ for line in fp:
+ #lineNo += 1
+ #if lineNo == 126:
+ # lineNo = 126
+ if type(expected) == str:
+ if line.find(expected) >= 0:
+ found = True
+ break
+ else:
+ if expected.search(line) != None:
+ found = True
+ break
+ if not found:
+ text = expected if type(expected) == str else expected.pattern
+ rc = self.error('missing content {:s} in {:s}'.format(text[0:20], currentFile))
+ return rc
+
+ def assertFileExists(self, filename):
+ '''Tests whether a given file exists.
+ @param filename: the name of the file to test
+ @returns: True: the file exists
+ '''
+ rc = True
+ self._asserts += 1
+ if not os.path.exists(filename):
+ rc = self.error('file does not exist: ' + filename)
+ return rc
+
+ def assertFileNotContains(self, unexpectedContent, currentFile):
+ '''Tests whether a given file contains not a given content.
+ @param unexpectedContent: content to search
+ @param currentFile: file to inspect
+ '''
+ rc = True
+ self._asserts += 1
+ if not os.path.isfile(currentFile):
+ rc = self.error('missing file ' + currentFile)
+ else:
+ found = False
+ with open(currentFile) as fp:
+ for line in fp:
+ if line.find(unexpectedContent) >= 0:
+ found = True
+ break
+ if found:
+ rc = self.error('unexpected content {:s} in {:s}'.format(unexpectedContent[0:20], currentFile))
+ return rc
+
+ def assertFileNotExists(self, filename):
+ '''Tests whether a given file does not exist.
+ @param filename: the name of the file to test
+ @returns: True: the file does not exist
+ '''
+ rc = True
+ self._asserts += 1
+ if os.path.exists(filename):
+ rc = self.error('file exists: ' + filename)
+ return rc
+
+ def assertMatches(self, expectedRegExpr, current, flags=0):
+ '''Tests whether a string matches a given regular expression.
+ @param expectedRegExpr: regular expression
+ @param current: string to test
+ @param flags: flags for re.match, e.g. re.IGNORECASE or re.MULTILINE
+ @returns: True: the value matches the expected
+ '''
+ rc = True
+ self._asserts += 1
+ if not re.search(expectedRegExpr, current, flags):
+ rc = self.error('+++ does not match\n' + expectedRegExpr + '\n' + current)
+ return rc
+
+ def assertNodeExists(self, path, nodePattern):
+ '''Tests whether at least one file exists in a given path with a given node pattern.
+ @param path: the directory to inspect
+ @param nodePattern: a pattern with unix wildcards to inspect
+ @return True: node found
+ '''
+ nodes = os.listdir(path)
+ found = False
+ for node in nodes:
+ if fnmatch.fnmatch(node, nodePattern):
+ found = True
+ break
+ if not found:
+ self.error('node {} not found in {}'.format(nodePattern, path))
+ return found
+
+ def assertNone(self, current):
+ '''Tests whether a value is None.
+ @param current: value to test
+ @returns: True: the value is None
+ '''
+ rc = True
+ self._asserts += 1
+ if current != None:
+ rc = self.error('+++ None expected, found: ' + str(current))
+ return rc
+
+ def assertNotNone(self, current):
+ '''Tests whether a value is not None.
+ @param current: value to test
+ @returns: True: the value is not None
+ '''
+ self._asserts += 1
+ if current == None:
+ self.error('+++ unexpected None found')
+
+ def assertTrue(self, current):
+ '''Tests whether a value is True.
+ @param current: value to test
+ @returns: True: the value is True
+ '''
+ rc = True
+ self._asserts += 1
+ if current != True:
+ rc = self.error('+++ True expected, found: ' + str(current))
+ return rc
+
+ def clearDirectory(self, path):
+ '''Removes all files and subdirs in a given directory.
+ @param path: name of the directory
+ '''
+ base.FileHelper.clearDirectory(path, self._logger)
+
+ def ensureDirectory(self, directory):
+ '''Ensures that the given directory exists.
+ @param directory: the complete name
+ @return: None: could not create the directory
+ otherwise: the directory's name
+ '''
+ rc = base.FileHelper.ensureDirectory(directory, self._logger)
+ return rc
+
+ def ensureFileDoesNotExist(self, filename):
+ '''Ensures that a file does not exist.
+ @param filename: the file to delete if it exists.
+ '''
+ base.FileHelper.ensureFileDoesNotExist(filename, self._logger)
+
+ def error(self, message):
+ '''Displays an error with backtrace.
+ @param message: error message
+ @returns: False
+ '''
+ self._errors += 1
+ print(message)
+ info = traceback.extract_stack()
+ # ignore runtime methods:
+ while len(info) > 1 and (info[0].filename.find('/pysrc/') > 0 or info[0]._line.startswith('tester.run()')
+ or info[0].filename.find('UnitTestCase.py') > 0 and info[0]._line.startswith('method()')):
+ del info[0]
+ # ignore UnitTest internals:
+ length = len(info)
+ while length > 1 and info[length - 1].filename.find('UnitTestCase.py') > 0:
+ del info[length - 1]
+ length -= 1
+ for entry in info:
+ print('{:s}:{:d} {:s}'.format(entry.filename, entry.lineno, entry.line))
+ return False
+
+ def log(self, message):
+ '''Displays a message
+ @param message: message
+ '''
+ print(message)
+
+ def getSummary(self):
+ '''Return the summary message.
+ @return the summary message
+ '''
+ return self._summary
+
+ def setInTestSuite(self, value):
+ '''Sets the attribute.
+ @param value: the new value of _inTestSuite
+ '''
+ self._inTestSuite = value
+
+ def run(self):
+ '''Searches the methods starting with 'test' and call them.
+ '''
+ for item in self.__dir__():
+ if item.startswith('test'):
+ method = getattr(self, item)
+ print('= ' + item)
+ method()
+ self._summary = '=== unit {:s}: {:d} assert(s) with {:d} error(s)'.format(self._name, self._asserts, self._errors)
+ print(self._summary)
+ if hasattr(self, '_finish'):
+ self._finish()
+
+ def tempDir(self, node, subdir = None):
+ '''Builds the name of a directory and ensures that the directory exists.
+ @param node: the directory's name (without path)
+ @param subdir: None or the name of a directory inside the temp dir
+ @return: None: cannot create directory
+ otherwise: the name of an existing temporary directory (with path)
+ '''
+ rc = tempfile.gettempdir()
+ if subdir != None:
+ rc += os.sep + subdir
+ if node != None:
+ if rc[-1] != os.sep:
+ rc += os.sep
+ rc += node
+ rc = self.ensureDirectory(rc)
+ return rc
+
+ def tempFile(self, node, subdir = None, subdir2 = None):
+ '''Builds the name of a temporary file and ensures that the parent directories exist.
+ @param node: the file's name (without path)
+ @param subdir: None or the name of a directory inside the temp dir
+ @param subdir2: None or the name of a directory inside subdir
+ @return: the name of a temporary file (with path), e.g. /tmp/subdir/subdir2/node
+ '''
+ rc = tempfile.gettempdir() + os.sep
+ if subdir != None:
+ rc += subdir
+ rc = self.ensureDirectory(rc)
+ if subdir[-1] != os.sep:
+ rc += os.sep
+ if subdir2 != None:
+ rc += subdir2
+ self.ensureDirectory(rc)
+ if subdir2[-1] != os.sep:
+ rc += os.sep
+ rc += node
+ return rc
+
+ def usage(self, message):
+ print(message)
+ self.assertFalse(True)
+
+ def xtestMyself(self):
+ self.assertEquals(3, 4)
+ self.assertEquals('Hello', 'Hallo')
+ self.assertEquals('abc\nhallo', 'abc\nhello')
+ self.assertNone('not none')
+ self.assertNotNone(None)
+
if __name__ == '__main__':
    # self test of the base class itself:
    tester = UnitTestCase()
    tester.run()
\ No newline at end of file
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
+
+import os.path
+import re
+import sys
+import importlib
+
+sys.path.insert(0, '/usr/share/pyrshell')
+
class UnitTestSuite:
    '''Tests a group of test cases: collects test modules, instantiates and runs them
    and prints a summary of all asserts/errors.
    '''
    def __init__(self, name):
        '''Constructor.
        @param name: name of the suite (for logging)
        '''
        self._name = name
        self._testCases = []
        # list of [moduleName, package] pairs to import later:
        self._imports = []
        # base directory of the project sources (used to locate the test modules):
        self._base = '/home/ws/py/pyrshell/'
        self._summary = []

    def addByPattern(self, relPath, pattern = r'.*[.]py$'):
        '''Adds the test cases given by a directory and a filename pattern (of modules, not test cases).
        @param relPath: the directory containing the modules to test, relative to the parent of 'unittest'
        @param pattern: a regular expression for selecting the modules
        '''
        basePath = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
        path = basePath + os.sep + relPath
        regExpr = re.compile(pattern)
        for node in os.listdir(path):
            if regExpr.match(node):
                self.addFromFile(relPath, node)

    def addFromFile(self, package, node):
        '''Adds a test case given by the name of the module under test.
        The test module name is built by replacing the '.py' suffix with 'Test';
        it is only registered if such a file exists below the 'unittest' tree.
        @param package: the package name of the module
        @param node: the file containing the module
        '''
        moduleName = node[0:-3] + 'Test'
        full = self._base + 'unittest/' + package + '/' + moduleName + '.py'
        if os.path.exists(full):
            self._imports.append([moduleName, package])
            self._testCases.append(moduleName)

    def addList(self, testCases):
        '''Adds a list of test cases for inspecting (duplicates are ignored).
        @param testCases: a list of class names
        '''
        for item in testCases:
            if item not in self._testCases:
                self._testCases.append(item)

    def instantiate(self, clazz ):
        '''Resolves a dotted name ('module.Class') to the class object.
        Note: the module is imported, the attribute is returned uncalled.
        @param clazz: the dotted name of the class
        @return the class object
        '''
        parts = clazz.split('.')
        moduleName = ".".join(parts[:-1])
        className = parts[-1]
        module = importlib.import_module(moduleName)
        instance = getattr(module, className)
        return instance

    def process(self):
        '''Imports the collected test case classes, runs each of them and collects the summaries.'''
        tests = []
        for name, package in self._imports:
            if name.startswith('__'):
                continue
            # '*ToolTest' modules always live in the 'appl' package:
            if name.find('ToolTest') > 0:
                package = 'appl'
            # resolve the class once (the original resolved and instantiated twice):
            clazz = self.instantiate('unittest.' + package + '.' + name + '.' + name)
            tests.append((clazz, clazz()))
        for (clazz, instance) in tests:
            # re-initialize so each case starts with fresh counters:
            instance.__init__()
            instance.setInTestSuite(True)
            instance.run()
            self._summary.append(instance.getSummary())

    def summary(self):
        '''Prints the summary lines of all executed test cases and, on errors, the totals.'''
        print('=== Summary ===')
        errors = ''
        countErrors = 0
        asserts = 0
        units = 0
        for item in self._summary:
            print(item)
            units += 1
            # expected format (word indexes):
            # 0...1....2...............3..4.........5....6
            # === unit LinuxUtilsTest: 84 assert(s) with 0 error(s)
            parts = item.split()
            asserts += int(parts[3])
            countErrors += int(parts[6])
            if parts[6] != '0':
                errors += '{}: {} '.format(parts[2][0:-1], parts[6])
        if errors != '':
            print ('=== {} units with {} assert(s) and {} error(s) in:\n{}'.format(
                units, asserts, countErrors, errors))
+
if __name__ == '__main__':
    # run the test cases of all packages (removed: unused 'paths = sys.path'):
    suite = UnitTestSuite('base')
    suite.addByPattern('base')
    suite.addByPattern('net')
    suite.addByPattern('appl')
    suite.process()
    suite.summary()
\ No newline at end of file
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import os.path
+import time
+import datetime
+import shutil
+
+import base.MemoryLogger
+import base.StringUtils
+import appl.BackupTool
+import appl.DirTool
+import appl.TextTool
+
+debug = False
+
def usage(msg=None):
    '''Dummy usage callback required by the application info objects.
    @param msg: ignored; present only to match the expected callback signature
    @return: a constant usage text
    '''
    return 'test usage'
+
class BackupToolTest(UnitTestCase):
    '''Unit test of the BackupTool application.
    Builds complete configuration trees below the temp directory and runs
    whole backup jobs against them.
    NOTE(review): relies on base.JavaConfig, base.BaseTool, base.FileHelper and
    appl.BackupBase which are not imported in this module's visible import block —
    presumably pulled in transitively; verify.
    '''

    def __init__(self):
        '''Constructor: reads the database admin credentials and builds a TextTool helper.'''
        UnitTestCase.__init__(self)
        config = base.JavaConfig.JavaConfig('/etc/pyrshell/dbtool.conf', self._logger)
        self._admin = config.getString('admin.user', 'root')
        self._adminCode = config.getString('admin.code', '')
        self._textTool = appl.TextTool.TextTool(self.getOptions('TextTool'))

    def findBackupStorage(self, path):
        '''Returns a subdirectory of a given backup target directory.
        Note: if more than one subdirectory exists the last one found wins.
        @param path: the directory to inspect
        @return: None: no subdirectory found; otherwise: the full name of a subdirectory
        '''
        rc = None
        nodes = os.listdir(path)
        for node in nodes:
            full = path + os.sep + node
            if os.path.isdir(full):
                rc = full
        return rc

    def getOptions(self, application='BackupTool'):
        '''Builds the global options for a given application.
        @param application: the application name, e.g. 'BackupTool'
        @return: a GlobalOptions instance logging to stdout ('-')
        '''
        appInfo = base.BaseTool.ApplicationInfo(application.lower(), 'appl.' + application, usage)
        rc = base.BaseTool.GlobalOptions(appInfo)
        rc._logFiles.append('-')
        return rc

    def makeBackupConfig(self, clear=True):
        '''Builds a complete backup configuration tree below the temp directory.
        Creates webapp data, webapps.d/ and dirs.d/ config files and backup.conf.
        @param clear: True: the configuration directory is emptied first
        @return: the name of the configuration directory
        '''
        baseDir = self.tempDir('config', 'unittest')
        if clear:
            self.clearDirectory(baseDir)
        targetDir = self.makeTargetDir()
        appDataDir = self.tempDir('blog', 'unittest')
        base.StringUtils.toFile(appDataDir + os.sep + 'index.php', '<?php\ninfo();')
        base.StringUtils.toFile(appDataDir + os.sep + 'new.php', '<?php\nnew();')
        oldFile = appDataDir + os.sep + 'old.php'
        base.StringUtils.toFile(oldFile, '<?php\nold();')
        # make one file look old so age-dependent jobs can be tested:
        base.FileHelper.setModified(oldFile, None, datetime.datetime(2018,1,2,3,4,5))

        appDir = self.ensureDirectory(baseDir + os.sep + 'webapps.d')
        dirDir = self.ensureDirectory(baseDir + os.sep + 'dirs.d')
        base.StringUtils.toFile(appDir + os.sep + 'testdb.conf', '''
db=backuptooltest
user={}
password={}
sql.file=testdb
directory={:s}
'''.format(self._admin, self._adminCode, appDataDir))
        base.StringUtils.toFile(baseDir + os.sep + 'backup.conf', '''
target.path={:s}
job.dayly=job.sysinfo job.sys job.db job.minimal.1 job.minimal.2 job.gitlab.backup
job.weekly=job.sysinfo job.sys job.db.all job.full
job.monthly=job.sysinfo job.sys job.full
job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded
job.full=&saveDirByZip @%etc%/dirs.d/std.*.conf:directory
job.db=&saveMysql @%etc%/webapps.d/*.conf:db:user:password:sql.file
job.minimal.1=&saveDirByZipLatest 7 @%etc%/webapps.d/*.conf:directory
job.minimal.2=&saveDirByZipLatest 7 @%etc%/dirs.d/std.*.conf:directory
job.save.clean=&deleteFile @%etc%/webapps.d/*.conf:directory:sql.file
job.db.all=&saveAllDatabases
job.gitlab.backup=&gitlabBackup
job.sysinfo=&systemInfo
# Reading metadata from mysql:
mysql.user={}
mysql.code={}
# Email data:
# Receiver of the error messages: may be a blank separated list of email addresses
send.always=False
location=unittest
admin.email=hm.neutral@gmx.de
smtp.host=smtp.gmx.de
smtp.port=587
smtp.sender=hm.unittest@gmx.de
smtp.user=hm.unittest@gmx.de
smtp.code=33S.e.c.r.e.t
smtp.tls=True
'''.format(targetDir, self._admin, self._adminCode))
        appl.BackupTool.BackupTool.createDirsD(dirDir, None, False)
        return baseDir

    def makeConfigSet(self):
        '''Builds a directory with two valid config files and one backup file ('~').
        @return: the name of the directory
        '''
        baseDir = self.tempDir('configset', 'unittest')
        base.StringUtils.toFile(baseDir + os.sep + 'example1.conf', 'db=adam\nuser=alice')
        base.StringUtils.toFile(baseDir + os.sep + 'example2.conf', 'db=bob\nuser=berta')
        base.StringUtils.toFile(baseDir + os.sep + 'bad.conf~', 'db=charly\nuser=carol')
        return baseDir

    def makeDirDirectSync(self):
        '''Builds a configuration with a '&synchronize' job using direct arguments.
        @return: tuple (configuration directory, a source-only file, a target-only file)
        '''
        baseDir = self.tempDir('rsyncdirect', 'unittest')
        syncTarget = self.tempDir('trgdir', 'unittest')
        syncSource = self.tempDir('syncsrc', 'unittest')
        base.StringUtils.toFile(baseDir + os.sep + 'backup.conf', '''
target.path={:s}
job.often=&synchronize --delete {:s} {:s}
'''.format(syncTarget, syncSource, syncTarget))
        srcFile = syncSource + os.sep + 'file.sync.txt'
        trgFile = syncTarget + os.sep + 'trg.only.txt'
        base.StringUtils.toFile(srcFile, 'src')
        base.StringUtils.toFile(trgFile, 'trg')
        return (baseDir, srcFile, trgFile)

    def makeDirToSync(self):
        '''Builds a source and a target tree with source-only, target-only,
        younger and older files for synchronize() tests.
        Note: uses time.sleep(1) to force distinguishable timestamps (slow).
        @return: tuple (source directory, target directory)
        '''
        srcDir = self.tempDir('srcsync', 'unittest')
        trgDir = self.tempDir('trgsync', 'unittest')
        shutil.rmtree(trgDir, False)
        srcSub = srcDir + os.sep + 'subdir'
        self.ensureDirectory(srcSub)
        trgSub = trgDir + os.sep + 'subdir'
        self.ensureDirectory(trgSub)
        for ix in range(1, 3+1):
            full = srcDir + os.sep + 'src_only{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'src{:d}: {:s}'.format(ix, full))
            full = trgDir + os.sep + 'trg_only{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'trg{:d}: {:s}'.format(ix, full))
            full = srcSub + os.sep + 'src_only{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'src{:d}: {:s}'.format(ix, full))
            full = trgSub + os.sep + 'trg_only{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'trg{:d}: {:s}'.format(ix, full))
        for ix in range(1, 2+1):
            # source written first => target version is younger:
            full = srcDir + os.sep + 'src_younger{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'src{:d}: {:s}'.format(ix, full))
            time.sleep(1)
            full = trgDir + os.sep + 'src_younger{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'trg{:d}: {:s}'.format(ix, full))
        for ix in range(1, 2+1):
            # target written first => source version is younger:
            full = trgDir + os.sep + 'src_older{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'trg{:d}: {:s}'.format(ix, full))
            time.sleep(1)
            full = srcDir + os.sep + 'src_older{:d}.txt'.format(ix)
            base.StringUtils.toFile(full, 'src{:d}: {:s}'.format(ix, full))
        return (srcDir, trgDir)

    def makeDirIndirectSync(self):
        '''Builds a configuration with a '&synchronize' job reading its arguments
        from a dirs.d/sync.*.conf file.
        @return: tuple (configuration directory, a source-only file, a target-only file)
        '''
        baseDir = self.tempDir('syncindirect', 'unittest')
        syncTarget = self.tempDir('trgdir', 'unittest')
        syncSource = self.tempDir('sync2src', 'unittest')
        dirDir = baseDir + os.sep + 'dirs.d'
        self.ensureDirectory(dirDir)
        base.StringUtils.toFile(dirDir + os.sep + 'sync.test.conf', '''
source={:s}
target={:s}
delete=True
'''.format(syncSource, syncTarget))
        # NOTE(review): the template below uses only the first of the three format arguments:
        base.StringUtils.toFile(baseDir + os.sep + 'backup.conf', '''
target.path={:s}
job.often=&synchronize @%etc%/dirs.d/sync.*.conf:source:target:delete
'''.format(syncTarget, syncSource, syncTarget))
        srcFile = syncSource + os.sep + 'file.sync.txt'
        trgFile = syncTarget + os.sep + 'trg.only.txt'
        base.StringUtils.toFile(srcFile, 'src')
        base.StringUtils.toFile(trgFile, 'trg')
        return (baseDir, srcFile, trgFile)

    def makeMinimalConfigDir(self):
        '''Builds a configuration directory containing only an empty backup.conf.
        @return: the name of the configuration directory
        '''
        dirName = self.tempFile('pyrshell', 'etc')
        os.makedirs(dirName, 0o777, True)
        base.StringUtils.toFile(dirName + os.sep + 'backup.conf', '')
        return dirName

    def makeTree(self, baseDir = None):
        '''Builds a directory tree with files of compressible content.
        @param baseDir: None (temp dir is used) or the base directory
        @return: the name of the created tree root ('<base>/srcdir')
        '''
        baseDir = self.tempFile('unittest') if baseDir == None else baseDir
        text = 'Dies ist ein langer Text, der hoffentlich ein wenig komprimiert werden kann. Wenn nicht, kann man nichts machen' * 1000
        dirName = baseDir + os.sep + 'srcdir'
        os.makedirs(dirName, 0o777, True)
        # layout:
        # /tmp/unittest/srcdir: file[1-5].txt dir[123] .git linkConfig
        # /tmp/unittest/srcdir/dirX: levelX.txt subdir1
        # /tmp/unittest/srcdir/dirX/subdir1: a.jpg b.txt c.odt
        # /tmp/unittest/srcdir/.git: config aa bb
        # /tmp/unittest/srcdir/.git/xx: xx1234 xx2345 xx3456
        for ix in range(1, 5+1):
            fn = dirName + os.sep + 'file{:d}.txt'.format(ix)
            base.StringUtils.toFile(fn, 'no{:d}: {:s}'.format(ix, text))
        for ix in range(1, 3+1):
            subdir = dirName + os.sep + 'dir' + str(ix)
            os.makedirs(subdir, 0o777, True)
            fn = subdir + os.sep + 'level{:d}'.format(ix)
            base.StringUtils.toFile(fn, 'level{:d}: {:s}'.format(ix, text))
            dir2 = subdir + os.sep + 'subdir1'
            os.makedirs(dir2, 0o777, True)
            for node in ['a.jpg', 'b.txt', 'c.odt']:
                fn = dir2 + os.sep + node
                base.StringUtils.toFile(fn, node + ': ' + text)
        return dirName

    def makeTargetDir(self):
        '''Builds an empty backup target directory.
        @return: the name of the directory
        '''
        targetDir = self.tempDir('trgdir', 'unittest')
        self.clearDirectory(targetDir)
        return targetDir

    def testEnsureDir(self):
        '''ensureDirectory() must create missing parent directories too.'''
        if debug:
            return
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        baseDir = self.tempFile('tmp111')
        subdir = baseDir + os.sep + 'tmp222'
        if os.path.isdir(subdir):
            os.rmdir(subdir)
        if os.path.isdir(baseDir):
            os.rmdir(baseDir)
        self.assertFalse(os.path.isdir(baseDir))
        tool = appl.BackupTool.BackupTool(options)
        tool.ensureDirectory(subdir)
        self.assertTrue(os.path.isdir(subdir))
        self.assertEquals(0, tool._logger._errors)

    def testEnsureFileNotExist(self):
        '''ensureFileDoesNotExist() must delete an existing file and tolerate a missing one.'''
        if debug:
            return
        fn = self.tempFile('file', 'unittest')
        if os.path.exists(fn):
            os.unlink(fn)
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        tool = appl.BackupTool.BackupTool(options)
        tool.ensureFileDoesNotExist(fn)
        base.StringUtils.toFile(fn, 'Hi')
        self.assertTrue(os.path.exists(fn))
        tool.ensureFileDoesNotExist(fn)
        self.assertFalse(os.path.exists(fn))
        self.assertEquals(0, tool._logger._errors)

    def testUsage(self):
        '''main() without a command must show the usage without crashing.'''
        if debug:
            return
        appl.BackupTool.main(['backuptool', '-q'])

    def testSaveDirByTar(self):
        '''saveDirectoryByTar() must create a .tgz archive in the target directory.'''
        if debug:
            return
        source = self.makeTree()
        target = self.tempDir('trgdir', 'unittest')
        name = 'test'
        archive = target + os.sep + name + '.tgz'
        if os.path.exists(archive):
            os.unlink(archive)
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        tool = appl.BackupTool.BackupTool(options)
        tool.saveDirectoryByTar(name, source, target, None)
        self.assertTrue(os.path.exists(archive))
        self.assertEquals(0, tool._logger._errors)

    def testSaveDirByZip(self):
        '''saveDirectoryByZip() must create a .zip archive in the target directory.'''
        if debug:
            return
        source = self.makeTree()
        target = self.tempDir('trgdir', 'unittest')
        name = 'test'
        archive = target + os.sep + name + '.zip'
        if os.path.exists(archive):
            os.unlink(archive)
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        tool = appl.BackupTool.BackupTool(options)
        tool.saveDirectoryByZip(name, source, target, None)
        self.assertTrue(os.path.exists(archive))
        self.assertEquals(0, tool._logger._errors)

    def testLogFile(self):
        '''logFile() must expand the placeholders %f %s %t %r.
        NOTE(review): the expected string assumes the temp dir is /tmp — confirm.
        '''
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        tool = appl.BackupTool.BackupTool(options)
        fn = self.tempFile('example.txt', 'unittest')
        base.StringUtils.toFile(fn, 'Hi')
        ftime = time.mktime((2018,1,2,3,4,5,0,0,0))
        os.utime(fn, (ftime, ftime))
        self.assertEquals('f: /tmp/unittest/example.txt s: 2 Byte t: 2018.01.02 03:04:05 r: 0 sec', tool.logFile(fn, 'f: %f s: %s t: %t r: %r', time.time()))

    def testConfigurationSet(self):
        '''ConfigurationSet must iterate the matching files (ignoring backups '~')
        and deliver the requested parameters of each file.
        '''
        if debug:
            return
        configDir = self.makeConfigSet()
        logger = base.MemoryLogger.MemoryLogger(1)
        configSet = appl.BackupBase.ConfigurationSet(configDir + os.sep + '*.conf', logger)
        count = 0
        files = dict()
        for filename in configSet.nextFile():
            count += 1
            files[filename] = 1
        self.assertEquals(2, count)
        self.assertTrue(configDir + os.sep + 'example1.conf' in files)
        self.assertTrue(configDir + os.sep + 'example2.conf' in files)
        self.assertFalse(configDir + os.sep + 'bad.conf~' in files)

        configSet = appl.BackupBase.ConfigurationSet(configDir + os.sep + '*.conf:db:user', logger)
        count = 0
        dbs = dict()
        users = dict()
        for params in configSet.nextParameters():
            count += 1
            dbs[params[0]] = 1
            users[params[1]] = 1
        self.assertEquals(2, count)
        self.assertTrue('adam' in dbs)
        self.assertTrue('bob' in dbs)
        self.assertTrue('alice' in users)
        self.assertTrue('berta' in users)
        self.assertEquals(0, logger._errors)

    def testBackupDayly(self):
        '''Runs the complete 'dayly' backup job set and checks the database dump.'''
        if debug:
            return
        target = self.makeTargetDir()
        configDir = self.makeBackupConfig()
        # restrict the sys backup to /etc/nginx to keep the test fast:
        self._textTool.replace([r'=/etc', '=/etc/nginx',
            os.path.dirname(target) + os.sep + 'config/dirs.d/sys.etc.conf'])
        # options._errorFilter = 'Keine Berechtigung'
        appl.BackupTool.main(['backuptool', '-v3', '--configuration-directory=' + configDir,
            'backup', 'dayly'])
        storage = self.findBackupStorage(target + os.sep + 'dayly')
        self.assertFileExists(storage + os.sep + 'testdb.sql.gz')

    def testBackupWeekly(self):
        '''Runs the complete 'weekly' backup job set and checks the created archives.'''
        if debug:
            return
        target = self.makeTargetDir()
        configDir = self.makeBackupConfig()
        # restrict the sys backup to /etc/nginx to keep the test fast:
        self._textTool.replace([r'=/etc', '=/etc/nginx',
            os.path.dirname(target) + os.sep + 'config/dirs.d/sys.etc.conf'])
        # options._errorFilter = 'Keine Berechtigung'
        appl.BackupTool.main(['backuptool', '-v3', '--configuration-directory=' + configDir,
            'backup', 'weekly'])
        storage = self.findBackupStorage(target + os.sep + 'weekly')
        self.assertFileExists(storage + os.sep + 'srv.zip')
        self.assertFileExists(storage + os.sep + 'etc=nginx.tgz')
        self.assertFileExists(storage + os.sep + 'winfothek.sql.gz')
        self.assertFileExists(storage + os.sep + 'system.info.gz')
        self.assertFileExists(storage + os.sep + 'usr=local.tgz')

    def testBackupMonthly(self):
        '''Runs the complete 'monthly' backup job set and checks the created archives.'''
        if debug:
            return
        target = self.makeTargetDir()
        configDir = self.makeBackupConfig()
        # restrict the sys backup to /etc/nginx to keep the test fast:
        self._textTool.replace([r'=/etc', '=/etc/nginx',
            os.path.dirname(target) + os.sep + 'config/dirs.d/sys.etc.conf'])
        # options._errorFilter = 'Keine Berechtigung'
        appl.BackupTool.main(['backuptool', '-v3', '--configuration-directory=' + configDir,
            'backup', 'monthly'])
        storage = self.findBackupStorage(target + os.sep + 'monthly')

        self.assertFileExists(storage + os.sep + 'srv.zip')
        self.assertFileExists(storage + os.sep + 'etc=nginx.tgz')
        self.assertFileExists(storage + os.sep + 'system.info.gz')
        self.assertFileExists(storage + os.sep + 'usr=local.tgz')

    def testExample(self):
        '''The 'example' command must write the example configuration files.'''
        if debug:
            return
        configDir = self.makeBackupConfig(True)
        logFile = self.tempFile('example.log', 'unittest')
        argv = ['dummy', '--configuration-directory=' + configDir, '-l' + logFile, 'example']
        appl.BackupTool.main(argv)
        self.assertFileExists(configDir + os.sep + 'backup.conf')
        self.assertFileExists(configDir + os.sep + 'webapps.d/testdb.conf')

    def testWebapps(self):
        '''The 'webapps' command must run without crashing (no result assertions).'''
        if debug:
            return
        logFile = self.tempFile('webapps.log', 'unittest')
        argv = ['dummy', '-l' + logFile, 'webapps', 'Secret', 'next', 'shop']
        appl.BackupTool.main(argv)

    def testSync(self):
        '''synchronize() with delete=True must copy source-only files, delete
        target-only files and only overwrite older target files.
        '''
        if debug:
            return

        (srcDir, trgDir) = self.makeDirToSync()
        options = self.getOptions()
        tool = appl.BackupTool.BackupTool(options)
        tool.synchronize(srcDir, trgDir, True)

        for ix in range(1, 3+1):
            full = trgDir + os.sep + 'src_only{:d}.txt'.format(ix)
            self.assertTrue(os.path.exists(full))
            full = trgDir + os.sep + 'trg_only{:d}.txt'.format(ix)
            self.assertFalse(os.path.exists(full))

            full = trgDir + os.sep + 'subdir' + os.sep + 'src_only{:d}.txt'.format(ix)
            self.assertTrue(os.path.exists(full))
            full = trgDir + os.sep + 'subdir' + os.sep + 'trg_only{:d}.txt'.format(ix)
            self.assertFalse(os.path.exists(full))

        for ix in range(1, 2+1):
            # younger source files must have been copied:
            full = trgDir + os.sep + 'src_younger{:d}.txt'.format(ix)
            content = base.StringUtils.fromFile(full)
            self.assertTrue(content.startswith('src'))
            # younger target files must have been kept:
            full = trgDir + os.sep + 'src_older{:d}.txt'.format(ix)
            content = base.StringUtils.fromFile(full)
            self.assertTrue(content.startswith('trg'))
        self.assertEquals(0, tool._logger._errors)

    def testSyncJobDirect(self):
        '''A '&synchronize' job with direct arguments must copy/delete as configured.'''
        if debug:
            return

        (baseDir, srcFile, trgFile) = self.makeDirDirectSync()
        options = self.getOptions()
        options._configDir = baseDir
        tool = appl.BackupTool.BackupTool(options)
        tool.doBackup('often')
        file2 = os.path.dirname(trgFile) + os.sep + os.path.basename(srcFile)
        self.assertFileExists(file2)
        self.assertFileNotExists(trgFile)
        self.assertEquals(0, tool._logger._errors)

    def testSyncJobIndirect(self):
        '''A '&synchronize' job with arguments from dirs.d/*.conf must copy/delete as configured.'''
        if debug:
            return

        (baseDir, srcFile, trgFile) = self.makeDirIndirectSync()
        options = self.getOptions()
        options._configDir = baseDir
        tool = appl.BackupTool.BackupTool(options)
        tool.doBackup('often')
        file2 = os.path.dirname(trgFile) + os.sep + os.path.basename(srcFile)
        self.assertFileExists(file2)
        self.assertFileNotExists(trgFile)
        self.assertEquals(0, tool._logger._errors)

    def testFindDbInfoWinfothek(self):
        '''findDbInfo() must extract the db credentials from a winfothek-style config.'''
        if debug:
            return
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        baseDir = self.tempDir('unittest')
        config = self.tempFile('winfothek.conf', 'unittest', 'resources')
        base.StringUtils.toFile(config, '''# Unittest
.dburl=mysql:host=localhost;dbname=testdb
.dbuser=jonny
.dbcode=Sekret
 ''')
        tool = appl.BackupTool.BackupTool(options)
        (db, user, code) = tool.findDbInfo(baseDir, 'xbackup', 'xbackup2')
        self.ensureFileDoesNotExist(config)
        self.assertEquals('testdb', db)
        self.assertEquals('jonny', user)
        self.assertEquals('Sekret', code)

    def testFindDbInfoWordPress(self):
        '''findDbInfo() must extract the db credentials from a WordPress wp-config.php.'''
        if debug:
            return
        options = self.getOptions()
        options._configDir = self.makeMinimalConfigDir()
        baseDir = self.tempDir('unittest')
        config = self.tempFile('wp-config.php', 'unittest')
        base.StringUtils.toFile(config, '''# Unittest
// ** MySQL settings ** //
define('DB_NAME', 'testdb'); // Der Name der Datenbank, die du benutzt.
define('DB_USER', 'eva'); // Dein MySQL-Datenbank-Benutzername.
define('DB_PASSWORD', 'sEkret'); // Dein MySQL-Passwort
 ''')
        tool = appl.BackupTool.BackupTool(options)
        (db, user, code) = tool.findDbInfo(baseDir, 'xbackup', 'xbackup2')
        self.ensureFileDoesNotExist(config)
        self.assertEquals('testdb', db)
        self.assertEquals('eva', user)
        self.assertEquals('sEkret', code)

    def testSnapshot(self):
        '''The 'snapshot' command must create a copy identical to the data directory.'''
        if debug:
            return
        dataDir = self.makeTree()
        snapshotBase = os.path.dirname(dataDir) + os.sep + 'dayly'
        self.ensureDirectory(snapshotBase)
        self.clearDirectory(snapshotBase)
        appl.BackupTool.main(['backuptool', '-v3', 'snapshot', 'dayly', dataDir])
        dirTool = appl.DirTool.DirTool(self.getOptions())
        snaptshotDir = self.findBackupStorage(snapshotBase)
        dirTool.compare([dataDir, snaptshotDir, '--test-content'])

    def testSnapshotClearIfMissing(self):
        '''The 'snapshot' command with --create-if-missing must create the missing base directory.'''
        if debug:
            return
        dataDir = self.makeTree()
        snapshotBase = os.path.dirname(dataDir) + os.sep + 'monthly'
        self.ensureFileDoesNotExist(snapshotBase)
        appl.BackupTool.main(['backuptool', '-v3', 'snapshot', 'monthly', dataDir, '--create-if-missing'])
        dirTool = appl.DirTool.DirTool(self.getOptions())
        nodes = os.listdir(snapshotBase)
        if self.assertEquals(1, len(nodes)):
            dirTool._testContent = True
            dirTool.compare([dataDir, snapshotBase + os.sep + nodes[0], '--test-content'])
+
if __name__ == '__main__':
    # run all test methods of this test case:
    tester = BackupToolTest()
    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import os
+import shutil
+import base.BaseTool
+import sys
+# import from base.BaseTool BaseTool
+
+# True: most tests return immediately (useful while debugging a single test)
+debug = False
+
+# usage text returned by getUsage()
+vBaseToolTestUsage = '?'
+def usage(msg = None):
+ '''Pseudo usage function handed to ApplicationInfo in the tests.
+ @param msg: None or a message; a message signals an unexpected call and is printed
+ @return: a constant pseudo usage text
+ '''
+ if msg != None:
+ print('+++ usage() is called with an argument: ' + msg)
+ return 'usage: this is an pseudo usage'
+
+def getUsage():
+ global vBaseToolTestUsage
+ return vBaseToolTestUsage
+
+class BaseToolTest(UnitTestCase):
+ '''Unit tests for base.BaseTool: directory/file helpers, grep, systemd script creation, option parsing.'''
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ self._appInfo = base.BaseTool.ApplicationInfo('basetooltest', 'BaseToolTest', usage)
+ self._options = base.BaseTool.GlobalOptions(self._appInfo, 3)
+ self._options._logFiles = ['-']
+ self._tool = base.BaseTool.BaseTool(self._options, 'testbasetool')
+ self._tool._logger = self._silentLogger
+
+ def testEnsureDirectory(self):
+ '''ensureDirectory() creates a missing directory and tolerates an existing one.'''
+ if debug : return
+ dir1 = self.tempDir('dir1', 'basetooltest')
+ shutil.rmtree(dir1, True)
+ self._tool.ensureDirectory(dir1)
+ self.assertFileExists(dir1)
+ self._tool.ensureDirectory(dir1)
+ self.assertFileExists(dir1)
+
+ def testEnsureDirectoryStatic(self):
+ '''Same as testEnsureDirectory() but for the static variant in BasicStatics.'''
+ if debug : return
+ dir2 = self.tempDir('dir2', 'basetooltest')
+ shutil.rmtree(dir2, True)
+ base.BaseTool.BasicStatics.ensureDirectory(dir2)
+ self.assertFileExists(dir2)
+ base.BaseTool.BasicStatics.ensureDirectory(dir2)
+ self.assertFileExists(dir2)
+
+ def testEnsureFileNotExists(self):
+ '''ensureFileDoesNotExist() deletes an existing file and tolerates a missing one.'''
+ if debug : return
+ file1 = self.tempFile('file1.txt', 'basetooltest')
+ # NOTE(review): base.StringUtils is not imported in this module -- presumably available via base.BaseTool; verify
+ base.StringUtils.toFile(file1, 'not allowed')
+ self.assertFileExists(file1)
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ tool.ensureFileDoesNotExist(file1)
+ self.assertFileNotExists(file1)
+ tool.ensureFileDoesNotExist(file1)
+
+ def testErrorFileToLog(self):
+ '''errorFileToLog() copies a file's lines plus a prefix message into the logger.'''
+ if debug : return
+ file1 = self.tempFile('file1.txt', 'basetooltest')
+ base.StringUtils.toFile(file1, 'xy failed\nmissing blub')
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ tool._logger = logger = self._silentLogger
+ tool.errorFileToLog(file1, 'UnitTest!')
+ self.assertTrue(logger.contains('UnitTest!'))
+ self.assertTrue(logger.contains('xy failed'))
+ self.assertTrue(logger.contains('missing blub'))
+
+ def testGrep(self):
+ '''BasicStatics.grep() returns the lines of a file matching a pattern.'''
+ if debug : return
+ file1 = self.tempFile('file1.txt', 'basetooltest')
+ base.StringUtils.toFile(file1, 'öäü Umlaut\na=b')
+ aList = base.BaseTool.BasicStatics.grep('*', file1)
+ self.assertEquals(2, len(aList))
+ self.assertEquals('öäü Umlaut\n', aList[0])
+ self.assertEquals('a=b', aList[1])
+ aList = base.BaseTool.BasicStatics.grep('a=', file1)
+ self.assertEquals(1, len(aList))
+ self.assertEquals('a=b', aList[0])
+ aList = base.BaseTool.BasicStatics.grep('U*ut\n', file1)
+ self.assertEquals(1, len(aList))
+ self.assertEquals('öäü Umlaut\n', aList[0])
+
+ def testClearDirectory(self):
+ '''clearDirectory() removes files and subdirectories but keeps the directory itself.'''
+ if debug : return
+ file1 = self.tempFile('x1.txt', 'basetooltest')
+ base.StringUtils.toFile(file1, 'xy failed\nmissing blub')
+ self.assertTrue(os.path.exists(file1))
+ file2 = self.tempFile('x1.txt', 'basetooltest', 'dir2')
+ base.StringUtils.toFile(file2, 'xy failed\nmissing blub')
+ self.assertTrue(os.path.exists(file1))
+ baseDir = os.path.dirname(file1)
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ tool.clearDirectory(baseDir)
+ self.assertFalse(os.path.exists(file1))
+ self.assertFalse(os.path.exists(file2))
+ files = os.listdir(baseDir)
+ self.assertEquals(0, len(files))
+ self.assertTrue(os.path.exists(baseDir))
+
+ def testFullPathToName(self):
+ '''fullPathToName() flattens a path into a single node name ('/' becomes '=').'''
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ self.assertEquals('etc=ssh=sshd_config', tool.fullPathToName('/etc/ssh/sshd_config'))
+
+ def testCreateSystemDScript(self):
+ '''createSystemDScript() writes a systemd unit file for the given service data.'''
+ tool = base.BaseTool.InstallTool('sample', self._options)
+ # NOTE(review): the concatenation below has no os.sep -- tempDir('', ...) presumably ends with a separator; verify
+ tool._systemDPath = self.tempDir('', 'testsystemd')
+ tool.createSystemDScript('systest', 'systeststarter', 'usample', 'gsample', 'sample script')
+ self.assertFileContent(tool._systemDPath + 'sample.service', '''[Unit]
+Description=sample script.
+After=syslog.target
+[Service]
+Type=simple
+User=usample
+Group=gsample
+WorkingDirectory=/etc/pyrshell
+EnvironmentFile=-/etc/pyrshell/systest.env
+ExecStart=/usr/local/bin/systeststarter daemon
+ExecReload=/usr/local/bin/systeststarter reload
+SyslogIdentifier=systest
+StandardOutput=syslog
+StandardError=syslog
+Restart=always
+RestartSec=3
+[Install]
+WantedBy=multi-user.target
+''')
+
+ def testEnsureEnvironmentFile(self):
+ '''ensureEnvironmentFile() writes an *.env file with the given prefix, config dir, logfile and extra lines.'''
+ tool = base.BaseTool.InstallTool('sample', self._options)
+ tool._configPath = self.tempDir('testbasetool') + os.sep
+ tool.ensureEnvironmentFile('SAMPLE_', '/abc/def', '/var/log/local/test.log', 'A=B\nC=D')
+ fn = tool._configPath + 'sample.env'
+ self.assertFileContains('PYTHONPATH=', fn)
+ # double underscore: prefix 'SAMPLE_' is joined with '_CONFIG' etc.
+ self.assertFileContains('SAMPLE__CONFIG=/abc/def', fn)
+ self.assertFileContains('SAMPLE__APPL=sample', fn)
+ self.assertFileContains('SAMPLE__LOGFILE=/var/log/local/test.log', fn)
+ self.assertFileContains('A=B', fn)
+ self.assertFileContains('C=D', fn)
+
+ def testIntegerArgument(self):
+ '''integerOption() extracts the integer value of a --name=value option.'''
+ self.assertEquals(22, self._tool.integerOption('--count=22'))
+
+ def testIntegerArgumentError(self):
+ '''integerOption() reports malformed values and returns the default for missing ones.'''
+ self.log('expecting error "not an integer"')
+ self.assertNone(self._tool.integerOption('--count=22x'))
+
+ self.log('expecting error "missing integer"')
+ self.assertEquals(-1, self._tool.integerOption('--count:33', -1))
+
+ def testEnsureSymbolicLink(self):
+ '''ensureSymbolicLink() creates, updates and replaces a symbolic link.'''
+ tempDir = self.tempDir('jail', 'unittest')
+ target = tempDir + os.sep + 'parent'
+ # creating base dir and target:
+ self.ensureFileDoesNotExist(tempDir)
+ # side effect wanted: the sibling directory must exist as link source
+ sibling = self.tempDir('sibling', 'unittest')
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ tool.ensureSymbolicLink('../../sibling', target)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling', os.readlink(target))
+ # changing link source:
+ sibling2 = self.tempDir('sibling2', 'unittest')
+ tool.ensureSymbolicLink('../../sibling2', target)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling2', os.readlink(target))
+ # removing existing target:
+ self.ensureFileDoesNotExist(target)
+ base.StringUtils.toFile(target, 'anything')
+ tool.ensureSymbolicLink('../../sibling2', target)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling2', os.readlink(target))
+
+ def testEnsureSymbolicLinkErrors(self):
+ '''ensureSymbolicLink() must report an error if the target is an existing directory.'''
+ tempDir = self.tempDir('jail', 'unittest')
+ target = tempDir + os.sep + 'parent'
+ self.ensureDirectory(target)
+ # creating base dir and target:
+ self.ensureFileDoesNotExist(tempDir)
+ sibling = self.tempDir('sibling', 'unittest')
+ tool = base.BaseTool.BaseTool(self._options, 'unittest.conf')
+ self._logger.log('= expecting error is directory')
+ tool.ensureSymbolicLink('../../sibling', target)
+
+if __name__ == '__main__':
+ # import sys;sys.argv = ['', 'Test.testName']
+ tester = BaseToolTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import shutil
+import os
+
+import appl.BenchTool
+import base.BaseTool
+
+# True: most tests return immediately (useful while debugging a single test)
+debug = False
+
+def usage(msg=None):
+ '''Pseudo usage function for the tests.
+ @param msg: unused; kept for signature compatibility
+ @return: a constant usage text
+ '''
+ return 'test usage'
+
+class BenchToolTest(UnitTestCase):
+ '''Tests for appl.BenchTool: the benchmark sub commands must run without logged errors.
+ NOTE(review): the copytree() sources are hard-coded workstation paths -- the test only works on that host.
+ '''
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ self._finish()
+ self._baseDir = self.tempDir('bench', 'unittest')
+ shutil.copytree('/home/ws/py/pyrshell/appl', self._baseDir + os.sep + 'pyrshell/appl')
+ shutil.copytree('/home/ws/py/pyrshell/base', self._baseDir + os.sep + 'pyrshell/base')
+ shutil.copytree('/home/ws/py/pyrshell/unittest', self._baseDir + os.sep + 'pyrshell/unittest')
+
+ def _finish(self):
+ '''Removes the temporary test directory tree.'''
+ shutil.rmtree(self.tempDir('unittest'))
+
+ def testBenchCalculate(self):
+ '''The "calculate" benchmark must finish without logged errors.'''
+ # guard is always False: the test runs even in debug mode
+ if False and debug:
+ return
+ appl.BenchTool.main(['benchtool', '-v3', 'calculate', self._baseDir, '--max-loops=20', '--factor-print=10'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testBenchFiles(self):
+ '''The "files" benchmark must finish without logged errors.'''
+ if debug:
+ return
+ appl.BenchTool.main(['benchtool', '-v3', 'files', self._baseDir, '--max-loops=20', '--max-written=100', '--factor-print=10'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+if __name__ == '__main__':
+ # import sys;sys.argv = ['', 'Test.testName']
+ tester = BenchToolTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import os
+
+import appl.DbTool
+import base.BaseTool
+import shutil
+
+# True: most tests return immediately (useful while debugging a single test)
+debug = False
+
+def usage(msg=None):
+ '''Pseudo usage function for the tests.
+ @param msg: unused; kept for signature compatibility
+ @return: a constant usage text
+ '''
+ return 'test usage'
+
+class DbToolTest(UnitTestCase):
+ '''Integration tests for appl.DbTool.
+ NOTE(review): needs a running database server and admin credentials in /etc/pyrshell/dbtool.conf;
+ some tests also expect an existing db 'timetracking'.
+ '''
+
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ # NOTE(review): base.JavaConfig / base.ProcessHelper are not imported here -- presumably provided via base.BaseTool; verify
+ config = base.JavaConfig.JavaConfig('/etc/pyrshell/dbtool.conf', self._logger)
+ self._admin = config.getString('admin.user')
+ self._adminCode = config.getString('admin.code')
+ self._tool = appl.DbTool.DbTool(self.getOptions())
+ self._processHelper = base.ProcessHelper.ProcessHelper(3, self._logger)
+
+ def _finish(self):
+ '''Removes the temporary test directory tree.'''
+ shutil.rmtree(self.tempDir('unittest'))
+
+ def buildSqlFile(self, filename, directory = None, table = None):
+ '''Creates a *.sql.gz file
+ @param filename: the filename (without trailing '.gz')
+ @param directory: None: it is chosen automatically otherwise: the "home" of the file
+ @param table: None or the name of a table
+ @return: the full filename e.g. "/tmp/unittest/dbtool/example.sql.gz"
+ '''
+ if table == None:
+ table = 'dummytable'
+ if directory == None:
+ directory = self.tempDir('dbtool', 'unittest')
+ fnSql = directory + os.sep + filename
+ base.FileHelper.ensureFileExists(fnSql, '''-- MySQL dump 10.16 Distrib 10.1.38-MariaDB, for debian-linux-gnu (x86_64)
+DROP TABLE IF EXISTS `{}`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `dummytable` (
+ `id` int(11) DEFAULT NULL
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+/*!40101 SET character_set_client = @saved_cs_client */;
+'''.format(table))
+ self._processHelper.execute(['gzip', fnSql], True)
+ return fnSql + '.gz'
+
+ def getOptions(self):
+ '''Builds the global options for a DbTool instance.
+ @return: a GlobalOptions instance logging to stdout with verbose level 0
+ '''
+ appInfo = base.BaseTool.ApplicationInfo('dbtool', 'appl.DbTool', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._logFiles.append('-')
+ rc._verboseLevel = 0
+ return rc
+
+ def dbExists(self, db, testOnly = False):
+ '''Tests whether a database exists.
+ @param db: the database name
+ @param testOnly: False: a missing db is an assertion failure
+ @return: True: the db exists
+ '''
+ dbs = self._tool.findAllDbs(self._admin, self._adminCode)
+ rc = db in dbs
+ if not testOnly:
+ self.assertTrue(rc)
+ return rc
+
+ def userExists(self, user, testOnly = False):
+ '''Tests whether a database user exists.
+ @param user: the user name
+ @param testOnly: False: a missing user is an assertion failure
+ @return: True: the user exists
+ '''
+ users = self._tool.findAllUsers(self._admin, self._adminCode)
+ rc = user in users
+ if not testOnly:
+ self.assertTrue(rc)
+ return rc
+
+ def deleteDbAndUser(self, db, user):
+ '''Removes a database and/or a db user without confirmation.
+ @param db: None or the database to delete
+ @param user: None or the user to delete
+ '''
+ self._tool._forceDeleting = True
+ if db != None:
+ self._tool.deleteDb(db, self._admin, self._adminCode)
+ if user != None:
+ self._tool.deleteUser(user, self._admin, self._adminCode)
+ self._tool._forceDeleting = False
+
+ def testCreateDbAndUser(self):
+ if debug:
+ return
+ db = 'dbdummy2'
+ user = 'uwordpress'
+ self.deleteDbAndUser(db, user)
+ appl.DbTool.main(['dbtool', 'create-db-and-user', db, user, 'TopSecret'])
+ self.dbExists(db)
+ self.userExists(user)
+ self.deleteDbAndUser(db, user)
+
+ def testCreateBackupUser(self):
+ if debug:
+ return
+ # <user> <passwd> <user-adm> <passwd-adm>
+ user = 'ubackup'
+ self.deleteDbAndUser(None, user)
+ appl.DbTool.main(['dbtool',
+ 'create-backup-user',
+ user,
+ 'StaySecret'])
+ self.userExists(user)
+ self.deleteDbAndUser(None, user)
+
+ def testAllDbs(self):
+ if debug:
+ return
+ # all-dbs <admin> <admin-passwd> [<internal-too>]
+ appl.DbTool.main(['dbtool',
+ 'all-dbs'])
+ dbs = appl.DbTool.result()
+ self.assertTrue(dbs != None and 'timetracking' in dbs)
+
+ def testCreateWebappDbs(self):
+ if debug:
+ return
+ # create-webapp-dbs <admin> <admin-passwd>
+ appl.DbTool.main(['dbtool',
+ 'create-webapp-dbs', self._admin, self._adminCode])
+
+ def testImportWebApp(self):
+ if debug:
+ return
+ self.deleteDbAndUser('dbexample', 'uexample')
+ configDir = self.tempDir('webapps.d', 'unittest')
+ testDir = os.path.dirname(configDir)
+ fn = configDir + os.sep + 'example.infeos.de.conf'
+ base.StringUtils.toFile(fn, """db=dbexample
+user=uexample
+password=hi
+ """)
+ fn = self.tempFile('example.infeos.de.sql')
+ base.StringUtils.toFile(fn, '''create table tdummy (
+id integer,
+name varchar(255)
+);
+drop table tdummy;''')
+ self._tool.createDbAndUser('dbexample', 'uexample', 'hi', self._admin, self._adminCode)
+ # <domain> <sql-file>
+ appl.DbTool.main(['dbtool',
+ '--configuration-directory=' + testDir,
+ 'import-webapp',
+ 'example.infeos.de',
+ fn])
+ self.dbExists('dbexample')
+ self.userExists('uexample')
+ self.deleteDbAndUser('dbexample', 'uexample')
+
+ def testImportAllWebApps(self):
+ if debug:
+ return
+ dirSql = self.tempDir('dbtooltest')
+ for node in ['example1.sql', 'example2.sql']:
+ fn = self.tempFile(node, 'dbtooltest')
+ base.StringUtils.toFile(fn, """
+ """)
+ # <sql-directory>
+ appl.DbTool.main(['dbtool',
+ '-v3',
+ 'import-all-webapps',
+ dirSql])
+ # NOTE(review): '?' is obviously a placeholder db name -- the check is only reached as root; verify intention
+ if base.BaseTool.BaseTool(self.getOptions(), 'dbtool.conf')._isRoot:
+ self.dbExists('?')
+
+ def testExportDb(self):
+ if debug:
+ return
+ fn = self.tempFile('db.sql', 'dbtooltest')
+ self.ensureFileDoesNotExist(fn)
+ appl.DbTool.main(['dbtool', '-q',
+ 'export-db', 'timetracking', self._admin, self._adminCode, fn])
+ self.assertFileContains('CREATE TABLE', fn)
+
+ def testExportWebApp(self):
+ if debug:
+ return
+ fn = self.tempFile('db.sql.gz', 'dbtooltest')
+ self.ensureFileDoesNotExist(fn)
+ appl.DbTool.main(['dbtool', '-q',
+ 'export-webapp', 'timetracking', fn])
+ self.assertFileExists(fn)
+
+ def testCreateAdmin(self):
+ if debug:
+ return
+ self.deleteDbAndUser(None, 'admin2')
+ appl.DbTool.main(['dbtool', '-v3', 'create-admin', 'admin2', 'NeverGood', self._admin, self._adminCode])
+ self.deleteDbAndUser(None, 'admin2')
+
+ def testDeleteAdmin(self):
+ if debug:
+ return
+ appl.DbTool.main(['dbtool', '-v3', 'create-admin', 'testadmin', 'NeverGood'])
+ appl.DbTool.main(['dbtool', '-v3', 'delete-user', 'testadmin', self._admin, self._adminCode])
+
+ def testExampleAllUsers(self):
+ if debug:
+ return
+ appl.DbTool.main(['dbtool', 'all-users', self._admin, self._adminCode])
+ lines = appl.DbTool.result()
+ self.assertTrue(self._admin in lines)
+
+ def testExampleSaveWebApp(self):
+ if debug:
+ return
+ dirConfig = self.tempDir('webapps.d', 'unittest')
+ domain = 'example.com'
+ home = self.tempDir('example.com', 'unittest')
+ base.StringUtils.toFile(home + os.sep + 'phpinfo.php', '<?php\nphpinfo();')
+ base.StringUtils.toFile(dirConfig + os.sep + domain + '.conf', '''db=testdb
+user=testuser
+password=TopSecret
+directory={}
+excluded=
+'''.format(home))
+ archive = self.tempFile('example.com.tgz', 'unittest')
+ appl.DbTool.main(['dbtool', '-v3', '--test-target-dir=' + dirConfig,
+ 'save-webapp', domain, archive])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ current = '\n'.join(tool.tableOfContent(archive))
+ self.assertEquals('''./
+./phpinfo.php
+./db/
+./db/example.com.save-webapp.sql.gz''', current)
+
+ # example: dbtool -v3 create-webapp-configuration example.com /home/example.com dbexample usrexample NeverUsed
+ def testExampleCreateWebAppConfiguration(self):
+ if debug:
+ return
+ dirConfig = self.tempDir('webapps.d', 'unittest')
+ domain = 'example.com'
+ home = self.tempDir('example.com', 'unittest')
+ appl.DbTool.main(['dbtool', '-v3', '--test-target-dir=' + dirConfig,
+ 'create-webapp-configuration', 'example.com', home, 'dbexample', 'usrexample', 'NeverUsed'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContent(dirConfig + os.sep + domain + '.conf', '''db=dbexample
+user=usrexample
+password=NeverUsed
+sql.file=example.com_dbexample
+directory=/tmp/unittest/example.com
+excluded=''')
+
+ def testExampleCreateAndImportAllWebapps(self):
+ if debug:
+ return
+ self._finish()
+ dirConfig = self.tempDir('webapps.d', 'unittest')
+ domain = 'example.com'
+ home = self.tempDir('example.com', 'unittest')
+ self.buildSqlFile('example.com_dbexample.sql', home)
+ configFile = dirConfig + os.sep + domain + '.conf'
+ base.StringUtils.toFile(configFile, '''db=dbexample
+user=usrexample
+password=NeverUsed
+sql.file=example.com_dbexample
+directory={}
+excluded='''.format(home), self._logger)
+ appl.DbTool.main(['dbtool', '-v3', '--test-target-dir=' + dirConfig,
+ 'create-and-import-all-webapps', home])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testExampleCreateAndImportWebApp(self):
+ # guard is always False: the test runs even in debug mode
+ if False and debug:
+ return
+ self._finish()
+ dirConfig = self.tempDir('webapps.d', 'unittest')
+ domain = 'example2.com'
+ home = self.tempDir(domain, 'unittest')
+ self.buildSqlFile('example2.com_dbexample2.sql', home)
+ configFile = dirConfig + os.sep + domain + '.conf'
+ base.StringUtils.toFile(configFile, '''db=dbexample2
+user=usrexample2
+password=NeverUsed
+sql.file=example2.com_dbexample2
+directory={}
+excluded='''.format(home), self._logger)
+ appl.DbTool.main(['dbtool', '-v3', '--test-target-dir=' + dirConfig,
+ 'create-and-import-webapp', domain, home, 'admin', '*Never77Forget'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testExampleFilterRecordsFromSql(self):
+ if debug:
+ return
+ fnIn = self.tempFile('input.sql', 'unittest', 'sql')
+ fnOut = self.tempFile('output.sql', 'unittest', 'sql')
+ base.StringUtils.toFile(fnIn, '''-- MySQL dump 10.17 Distrib 10.3.17-MariaDB, for debian-linux-gnu (x86_64)
+
+DROP TABLE IF EXISTS `user`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user` (`user_id` int(10) unsigned NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=104 DEFAULT CHARSET=utf8;
+LOCK TABLES `user` WRITE;
+/*!40000 ALTER TABLE `user` DISABLE KEYS */;
+INSERT INTO `user` VALUES (1,'root',NULL,NULL),(2,'jonny',33,NULL),(3,'eva',44,NULL),(4,'admin',55,NULL),(5,'jussuf',64,NULL);
+UNLOCK TABLES;
+DROP TABLE IF EXISTS `usergroup`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `usergroup` (`user_id` int(10) unsigned NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=104 DEFAULT CHARSET=utf8;
+LOCK TABLES `user` WRITE;
+/*!40000 ALTER TABLE `usergroup` DISABLE KEYS */;
+INSERT INTO `usergroup` VALUES (1,'root',NULL,NULL),(2,'gjonny',33,NULL),(3,'geva',44,NULL),(4,'gadmin',55,NULL),(5,'gjussuf',64,NULL);
+UNLOCK TABLES;
+''')
+ appl.DbTool.main(['dbtool', '-v3', 'filter-records', fnIn, 'user', fnOut, "--exclude=,'root',"])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContent(fnOut, '''-- MySQL dump 10.17 Distrib 10.3.17-MariaDB, for debian-linux-gnu (x86_64)
+
+LOCK TABLES `user` WRITE;
+
+INSERT INTO `user` VALUES
+(2,'jonny',33,NULL),
+(3,'eva',44,NULL),
+(4,'admin',55,NULL),
+(5,'jussuf',64,NULL);
+UNLOCK TABLES;''')
+
+ def testExampleExportAllDbs(self):
+ if debug:
+ return
+ target = self.tempDir('sql', 'unittest')
+ appl.DbTool.main(['dbtool', '-v3', 'export-all-dbs', target])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ nodes = os.listdir(target)
+ self.assertTrue(len(nodes) > 0)
+
+ def testExampleDeleteDbAndUser(self):
+ # guard is always False: the test runs even in debug mode
+ if False and debug:
+ return
+ appl.DbTool.main(['dbtool', '-v3', 'create-db-and-user', 'dbtestdelete', 'usrtestdelete', 'ForgetIt'])
+ appl.DbTool.main(['dbtool', '-v3', 'delete-db-and-user', 'dbtestdelete', 'usrtestdelete', '--no-saving', '--no-confirmation'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testExampleDeleteDbAndUser2(self):
+ if debug:
+ return
+ # tool.deleteDbAndUser() stops for user input (confirmation)
+ if self._inTestSuite:
+ return
+ appl.DbTool.main(['dbtool', '-v3', 'create-db-and-user', 'd42', 'u42', 'ForgetIt'])
+ appl.DbTool.main(['dbtool', '-v3', 'delete-db-and-user', 'd42', 'u42'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ # NOTE(review): the pattern names 'dbtestdelete2' but the db created above is 'd42' -- verify
+ self.assertNodeExists(self.tempDir(None), 'dbtestdelete2.sql.[0-9]*.gz')
+
+ def testExampleArchiveWebapp(self):
+ # guard is always False: the test runs even in debug mode
+ if False and debug:
+ return
+ domain = 'huber42.de'
+ dirArchive = self.tempDir('archive', 'unittest')
+ dirWebapp = self.tempDir(domain, 'unittest')
+ base.StringUtils.toFile(dirWebapp + os.sep + 'index.html', '<html><body>Hi!</body></html>')
+ dirConf = self.tempDir('archive.conf', 'unittest')
+ fnConfig = dirConf + os.sep + domain + '.conf'
+ base.StringUtils.toFile(fnConfig, '''db=dbhuber42
+user=usrhuber42
+password=ForgetIt
+directory={}
+sql.file=dbhuber42_sql
+excluded=
+'''.format(dirWebapp))
+ fnNginx = dirConf + os.sep + domain
+ base.StringUtils.toFile(fnNginx, '# nginx file ' + domain)
+ appl.DbTool.main(['dbtool', '-v3', 'create-db-and-user', 'dbhuber42', 'usrhuber42', 'ForgetIt'])
+ appl.DbTool.main(['dbtool', '-v3', '--test-target-dir=' + dirConf,
+ 'archive-webapp', domain, dirArchive])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ fnScript = self.tempFile('Remove.' + domain)
+ self.assertFileContent(fnScript, '''#! /bin/bash
+dbtool -v3 delete-db-and-user dbhuber42 usrhuber42 --no-saving $1
+rm -Rf /tmp/unittest/huber42.de /tmp/unittest/archive.conf/huber42.de /tmp/unittest/archive.conf/huber42.de.conf
+rm -f /etc/ssl/certs/huber42.de.pem /etc/ssl/private/huber42.de.pem /etc/nginx/sites-enabled/huber42.de
+''')
+ tool._processHelper.execute(['/bin/bash', fnScript, '--no-confirmation'], True)
+
+if __name__ == '__main__':
+ tester = DbToolTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import shutil
+import os
+import datetime
+
+import appl.DirTool
+import base.BaseTool
+import base.StringUtils
+
+# True: most tests return immediately (useful while debugging a single test)
+debug = False
+
+def usage(msg=None):
+ '''Pseudo usage function for the tests.
+ @param msg: unused; kept for signature compatibility
+ @return: a constant usage text
+ '''
+ return 'test usage'
+
+class DirToolTest(UnitTestCase):
+ def __init__(self):
+ '''Builds the example directory trees under /tmp/%examples used by all tests.'''
+ UnitTestCase.__init__(self)
+ self._finish()
+ # replaces the logger set by the base class with a memory logger
+ self._logger = base.MemoryLogger.MemoryLogger(1)
+ base.FileHelper.clearDirectory(self.tempDir('unittest'))
+ # 'build-examples' creates /tmp/%examples/dir1 and dir2 with known differences
+ appl.DirTool.main(['dirtool', '-q', 'build-examples'])
+ self._exampleBase = '/tmp/%examples'
+ self._dir1 = self._exampleBase + '/dir1'
+ self._dir2 = self._exampleBase + '/dir2'
+ self._sourceDir = self._dir1
+ self._targetDir = self._dir2
+ self._safeDir = self._exampleBase + '/safe'
+ self.ensureDirectory(self._safeDir)
+
+ def _finish(self):
+ shutil.rmtree(self.tempDir('unittest'))
+
+ def createFile(self, baseDir, path, node, number):
+ '''Creates a test file with a deterministic modification time derived from the number.
+ @param baseDir: the base directory
+ @param path: '' or a relative path below baseDir
+ @param node: a filename pattern; {} is replaced by number
+ @param number: seeds the filename and the synthetic timestamp
+ '''
+ fn = baseDir + os.sep + ('' if path == '' else path + os.sep) + node.format(number)
+ # NOTE(review): self._fileContent is not set in __init__ -- presumably assigned by a caller; verify
+ base.StringUtils.toFile(fn, self._fileContent)
+ base.FileHelper.setModified(fn, None, datetime.datetime(2018, 3 + number % 7, 1 + number%28, 4+number % 18, 22-number % 22, 3*number%60))
+
+ def _makeMirror(self):
+ '''Rebuilds the example directories dir1/dir2 to undo changes of previous tests.'''
+ appl.DirTool.main(['dirtool', '-q', 'build-examples'])
+
+ def _makeMirror2(self):
+ appl.DirTool.main(['dirtool', '-q', 'build-examples'])
+
+ def getOptions(self):
+ '''Builds the global options for a DirTool instance.
+ @return: a GlobalOptions instance logging to stdout with verbose level 0
+ '''
+ # NOTE(review): no ApplicationInfo passed here, unlike the other test modules -- verify GlobalOptions() supports it
+ rc = base.BaseTool.GlobalOptions()
+ # NOTE(review): example text mentions wikitool.log -- looks copied from another test; verify
+ rc._exampleText = 'log.file=/var/log/local/wikitool.log'
+ rc._logFiles.append('-')
+ rc._verboseLevel = 0
+ # NOTE(review): self._applicationName is presumably provided by UnitTestCase -- confirm
+ rc._appInfo = self._applicationName
+ return rc
+
+ def testSync(self):
+ '''Checks "sync": the target is aligned to the source and replaced files are preserved in the safe dir.'''
+ if debug:
+ return
+ self._makeMirror()
+ '''
+ fn = '/usr/share/pyrshell/examples/dir2/file5.txt'
+ content = base.StringUtils.fromFile(fn)
+ aTime = os.path.getmtime(fn)
+ build(fn, content.replace('e', 'E'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(aTime)))
+ '''
+ # precondition: the generated examples contain the expected differences
+ self.assertFileNotExists(self._targetDir + '/file2.txt')
+ self.assertFileNotExists(self._targetDir + '/subdir1/data_6.txt')
+ self.assertFileContains('is a file instead of the name', self._targetDir + '/subdir2')
+ self.assertFileContains('line1', self._targetDir + '/new.txt')
+ self.assertFileContains('line3', self._targetDir + '/subdir1/new2.txt')
+ self.assertFileContains('newer content in file4.txt', self._targetDir + '/file4.txt')
+ self.assertFileContains('bla bla', self._targetDir + '/subdir3/data_5.txt')
+ self.assertFileContains('contEnt of thE filE filE4.txt', self._targetDir + '/file5.txt')
+ self.assertFileContains('contEnt of thE filE filE5.txt', self._targetDir + '/subdir3/data_6.txt')
+
+ base.FileHelper.clearDirectory(self._safeDir, self._logger)
+ # NOTE(review): argv[0] is 'filetool' though the program is dirtool -- presumably only used for messages
+ appl.DirTool.main(['filetool', '-v4', 'sync', self._sourceDir, self._targetDir, self._safeDir])
+
+ appl.DirTool.main(['filetool', 'compare', self._sourceDir, self._targetDir])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ # the overwritten/removed target files must have been rescued into the safe dir:
+ self.assertFileContains('is a file instead of the name', self._safeDir + '/subdir2')
+ self.assertFileContains('line1', self._safeDir + '/new.txt')
+ self.assertFileContains('line3', self._safeDir + '/subdir1/new2.txt')
+ self.assertFileContains('content of the file file4.txt', self._safeDir + '/subdir1/data_5.txt')
+ self.assertFileContains('content of the file file0.txt', self._safeDir + '/file1.txt')
+ self.assertFileContains('line2', self._safeDir + '/subdir3/data5.txt')
+ self.assertFileContains('newer content in file4.txt', self._safeDir + '/file4.txt')
+
+ def testSnapshot(self):
+ '''Checks "snapshot": the snapshot must be equal to the source directory.'''
+ if debug:
+ return
+ self._makeMirror()
+ target = os.path.dirname(self._targetDir) + os.sep + 'snapshot'
+ if os.path.exists(target):
+ shutil.rmtree(target)
+ base.FileHelper.clearDirectory(self._safeDir, self._logger)
+ appl.DirTool.main(['filetool', '-v4', 'snapshot', self._sourceDir, target])
+ appl.DirTool.main(['filetool', 'compare', self._sourceDir, target])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testInfo(self):
+ '''Checks "info": statistics output with default and custom --max-* limits (expected text is exact).'''
+ if debug:
+ return
+ self._makeMirror()
+ appl.DirTool.main(['filetool', '-v4', 'info', self._sourceDir, '--count=2', self._targetDir])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(base.BaseTool.result())
+ self.assertEquals('''Directories: 7 Files: 27 / 1.226 KB
+Ignored: 0 file(s) / 0 dir(s)
+The smallest files:
+2017.07.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_7.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.05.02 07:03:52 45 Byte /tmp/%examples/dir1/subdir2/data_5.txt
+The largest files:
+2018.02.03 07:03:51 45 Byte /tmp/%examples/dir1/file2.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The youngest files:
+2018.11.03 09:03:53 <dir> /tmp/%examples/dir2/subdir3
+2018.12.09 07:03:53 <dir> /tmp/%examples/dir1/subdir2''', out)
+ # second run with individual limits per category:
+ appl.DirTool.main(['filetool', '-v4', 'info', self._sourceDir, self._targetDir,
+ '--max-largest=3', '--max-oldest=2', '--max-youngest=7'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(base.BaseTool.result())
+ self.assertEquals('''Directories: 7 Files: 27 / 1.226 KB
+Ignored: 0 file(s) / 0 dir(s)
+The smallest files:
+2017.06.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_6.txt
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.07.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_7.txt
+2017.07.03 07:03:53 45 Byte /tmp/%examples/dir1/subdir3/data_7.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.05.02 07:03:52 45 Byte /tmp/%examples/dir1/subdir2/data_5.txt
+The largest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2018.02.03 07:03:51 45 Byte /tmp/%examples/dir1/file2.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The youngest files:
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir1/file5.txt
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir2/file5.txt
+2018.09.03 09:03:53 <dir> /tmp/%examples/dir2/subdir1
+2018.10.03 09:03:53 26 Byte /tmp/%examples/dir2/file4.txt
+2018.10.03 09:07:53 11 Byte /tmp/%examples/dir2/subdir3/data5.txt
+2018.11.03 09:03:53 <dir> /tmp/%examples/dir2/subdir3
+2018.12.09 07:03:53 <dir> /tmp/%examples/dir1/subdir2''', out)
+
+ def testExampleLargest(self):
+ '''Checks "largest" with a --pattern filter (expected text is exact).'''
+ if debug:
+ return
+ self._makeMirror()
+ appl.DirTool.main(['filetool', '-v4', 'largest', self._sourceDir, '--pattern=.*1.*'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(base.BaseTool.result())
+ self.assertEquals('''Directories: 4 Files: 1 / 37 Byte
+Ignored: 13 file(s) / 0 dir(s)
+The largest files:
+2018.01.03 07:03:50 37 Byte /tmp/%examples/dir1/file1.txt''', out)
+
+ def testExampleYoungest(self):
+ '''Checks "youngest" with --max-depth=0 (only the top level is scanned).'''
+ if debug:
+ return
+ self._makeMirror()
+ appl.DirTool.main(['filetool', '-v4', 'youngest', self._sourceDir, '--max-depth=0'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(base.BaseTool.result())
+ self.assertEquals('''Directories: 1 Files: 5 / 265 Byte
+Ignored: 0 file(s) / 3 dir(s)
+The youngest files:
+2018.03.08 07:03:53 <dir> /tmp/%examples/dir1/subdir1
+2018.03.10 07:03:53 <dir> /tmp/%examples/dir1/subdir3
+2018.04.03 07:03:53 61 Byte /tmp/%examples/dir1/file4.txt
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir1/file5.txt
+2018.12.09 07:03:53 <dir> /tmp/%examples/dir1/subdir2''', out)
+
+ def testExampleOldest(self):
+ '''Checks "oldest" with --file-only (directories are excluded from the listing).'''
+ if debug:
+ return
+ self._makeMirror()
+ appl.DirTool.main(['filetool', '-v4', 'oldest', self._sourceDir, '--file-only'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(base.BaseTool.result())
+ self.assertEquals('''Directories: 4 Files: 14 / 718 Byte
+Ignored: 0 file(s) / 0 dir(s)
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.05.02 07:03:52 45 Byte /tmp/%examples/dir1/subdir2/data_5.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+2017.06.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_6.txt
+2017.06.02 07:03:52 53 Byte /tmp/%examples/dir1/subdir2/data_6.txt''', out)
+
+ def testCheck(self):
+ '''Checks "check" on the example source directory: no errors expected.'''
+ if debug:
+ return
+ self._makeMirror()
+ appl.DirTool.main(['filetool', '-v4', 'check', self._sourceDir])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ def testCompare(self):
+ '''Checks "compare" with --test-content: the known differences of dir1/dir2 are reported verbosely.'''
+ if debug:
+ return
+ self._makeMirror2()
+ appl.DirTool.main(['filetool', '-v4', 'compare', self._sourceDir, self._targetDir, '--test-content'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(tool._differences)
+ self.assertEquals('''additional: /tmp/%examples/dir1/file2.txt
+date: 2017.05.01 07:03:51 / 2018.01.03 09:03:53 /tmp/%examples/dir1/subdir1/data_5.txt
+additional: /tmp/%examples/dir1/subdir1/data_6.txt
+missing counterpart: /tmp/%examples/dir1/subdir1/new2.txt
+different at position [5]: /tmp/%examples/dir1/subdir3/data_6.txt
+missing counterpart: /tmp/%examples/dir1/subdir3/data5.txt
+different types: dir / file /tmp/%examples/dir1/subdir2
+different at position [5]: /tmp/%examples/dir1/file5.txt
+size: 61 / 26 /tmp/%examples/dir1/file4.txt
+date: 2018.01.03 07:03:50 / 2018.01.03 09:03:53 /tmp/%examples/dir1/file1.txt
+missing counterpart: /tmp/%examples/dir1/new.txt''', out)
+ def testCompareShort(self):
+ '''Checks "compare" with --short: the same differences in the one-letter-prefix format.'''
+ if debug:
+ return
+ self._makeMirror2()
+ appl.DirTool.main(['dirtool', '-v4', 'compare', self._sourceDir, self._targetDir, '--short', '--test-content'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ out = '\n'.join(tool._differences)
+ self.assertEquals('''+ /tmp/%examples/dir1/file2.txt
+T 2017.05.01 07:03:51 / 2018.01.03 09:03:53 /tmp/%examples/dir1/subdir1/data_5.txt
++ /tmp/%examples/dir1/subdir1/data_6.txt
+- /tmp/%examples/dir1/subdir1/new2.txt
+C [5] /tmp/%examples/dir1/subdir3/data_6.txt
+- /tmp/%examples/dir1/subdir3/data5.txt
+! dir / file /tmp/%examples/dir1/subdir2
+C [5] /tmp/%examples/dir1/file5.txt
+S 61 / 26 /tmp/%examples/dir1/file4.txt
+T 2018.01.03 07:03:50 / 2018.01.03 09:03:53 /tmp/%examples/dir1/file1.txt
+- /tmp/%examples/dir1/new.txt''', out)
+
+ def testExampleCheck(self):
+ if debug:
+ return
+ appl.DirTool.main(['dirtool', '-v3', 'check', self._dir1, self._dir2])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+    def testExampleCompare1(self):
+        '''Compares the two example trees without content check (long output).'''
+        if debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'compare', self._dir1, self._dir2])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(tool._differences)
+        self.assertEquals('''additional: /tmp/%examples/dir1/file2.txt
+date: 2017.05.01 07:03:51 / 2018.01.03 09:03:53 /tmp/%examples/dir1/subdir1/data_5.txt
+additional: /tmp/%examples/dir1/subdir1/data_6.txt
+missing counterpart: /tmp/%examples/dir1/subdir1/new2.txt
+missing counterpart: /tmp/%examples/dir1/subdir3/data5.txt
+different types: dir / file /tmp/%examples/dir1/subdir2
+size: 61 / 26 /tmp/%examples/dir1/file4.txt
+date: 2018.01.03 07:03:50 / 2018.01.03 09:03:53 /tmp/%examples/dir1/file1.txt
+missing counterpart: /tmp/%examples/dir1/new.txt''', out)
+
+    def testExampleCompare2(self):
+        '''Compares with --ignore-time: the date-only differences disappear
+        from the --short output, content differences (C) remain.
+        '''
+        if debug:
+            return
+
+        appl.DirTool.main(['dirtool', '-v3', 'compare', self._dir1, self._dir2, '--ignore-time', '--short', '--test-content'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(tool._differences)
+        self.assertEquals('''+ /tmp/%examples/dir1/file2.txt
++ /tmp/%examples/dir1/subdir1/data_6.txt
+- /tmp/%examples/dir1/subdir1/new2.txt
+C [5] /tmp/%examples/dir1/subdir3/data_6.txt
+- /tmp/%examples/dir1/subdir3/data5.txt
+! dir / file /tmp/%examples/dir1/subdir2
+C [5] /tmp/%examples/dir1/file5.txt
+S 61 / 26 /tmp/%examples/dir1/file4.txt
+- /tmp/%examples/dir1/new.txt''', out)
+
+    def testExampleCompare3(self):
+        '''Compares with exclude/pattern filters, short output, ignored time and case.'''
+        if debug:
+            return
+
+        # NOTE(review): '--exlude' looks like a typo for '--exclude', yet the
+        # run is expected to finish with 0 errors — confirm which spelling
+        # DirTool actually accepts before changing it.
+        appl.DirTool.main(['dirtool', '-v4', 'compare', self._dir1, self._dir2, '-s', '--exlude=sub.*2', '--pattern=[^u]*[1-3].*', '-i', '--ignore-case'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(tool._differences)
+        self.assertEquals('''+ /tmp/%examples/dir1/file2.txt
+! dir / file /tmp/%examples/dir1/subdir2
+T 2018.01.03 07:03:50 / 2018.01.03 09:03:53 /tmp/%examples/dir1/file1.txt
+- /tmp/%examples/dir1/new.txt''', out)
+
+
+#dirtool info /usr/share/pyrshell/examples/dir1 --max-largest=2 --max-youngest=3 --file-only
+    def testExampleInfo1(self):
+        '''"info" with limited largest/youngest/oldest lists, files only.'''
+        if debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'info', self._dir1,
+            '--max-largest=2', '--max-youngest=3', '--max-oldest=1', '--file-only'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(base.BaseTool.result())
+        self.assertEquals('''Directories: 4 Files: 14 / 718 Byte
+Ignored: 0 file(s) / 0 dir(s)
+The smallest files:
+2017.06.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_6.txt
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.07.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_7.txt
+2017.07.03 07:03:53 45 Byte /tmp/%examples/dir1/subdir3/data_7.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+The largest files:
+2018.02.03 07:03:51 45 Byte /tmp/%examples/dir1/file2.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+The youngest files:
+2018.03.03 07:03:52 53 Byte /tmp/%examples/dir1/file3.txt
+2018.04.03 07:03:53 61 Byte /tmp/%examples/dir1/file4.txt
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir1/file5.txt''', out)
+
+    def testExampleInfo2(self):
+        '''"info" with --dir-only: all 14 files are reported as ignored and
+        only directories appear in the oldest/youngest lists.
+        '''
+        if debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'info', self._dir1,
+            '--max-largest=0', '--max-smallest=0', '--max-youngest=3', '--max-oldest=4', '--dir-only'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(base.BaseTool.result())
+        self.assertEquals('''Directories: 4 Files: 0 / 0 Byte
+Ignored: 14 file(s) / 0 dir(s)
+The oldest files:
+2018.03.08 07:03:53 <dir> /tmp/%examples/dir1/subdir1
+2018.03.10 07:03:53 <dir> /tmp/%examples/dir1/subdir3
+2018.12.09 07:03:53 <dir> /tmp/%examples/dir1/subdir2
+The youngest files:
+2018.03.08 07:03:53 <dir> /tmp/%examples/dir1/subdir1
+2018.03.10 07:03:53 <dir> /tmp/%examples/dir1/subdir3
+2018.12.09 07:03:53 <dir> /tmp/%examples/dir1/subdir2''', out)
+
+    def testExampleInfo3(self):
+        '''"info" with --file-only and suppressed largest/smallest lists.'''
+        if debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'info', self._dir1,
+            '--max-largest=0', '--max-smallest=0', '--max-youngest=3', '--max-oldest=4', '--file-only'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(base.BaseTool.result())
+        self.assertEquals('''Directories: 4 Files: 14 / 718 Byte
+Ignored: 0 file(s) / 0 dir(s)
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.05.02 07:03:52 45 Byte /tmp/%examples/dir1/subdir2/data_5.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+2017.06.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_6.txt
+The youngest files:
+2018.03.03 07:03:52 53 Byte /tmp/%examples/dir1/file3.txt
+2018.04.03 07:03:53 61 Byte /tmp/%examples/dir1/file4.txt
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir1/file5.txt''', out)
+
+    def testExampleListTar1(self):
+        '''"list" on a .tgz archive: summary only, empty raw output.
+        NOTE(review): the guard "False and debug" never skips this test —
+        confirm whether that override is intentional.
+        '''
+        if False and debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'list', '/usr/share/pyrshell/unittest/data/examples2.tgz'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(base.BaseTool.result())
+        self.assertEquals('''Directories: 9 Files: 27 / 1.226 KB
+Ignored: 0 file(s) / 0 dir(s)
+''', out)
+        out = tool._rawOutput
+        self.assertEquals('''
+''', out)
+
+    def testExampleListDir(self):
+        '''"list" on a directory with ordering options.
+        NOTE(review): permanently disabled by "if True: return" — re-enable
+        deliberately and re-verify the expected output before removing it.
+        '''
+        if True:
+            return
+        if debug:
+            return
+        appl.DirTool.main(['dirtool', '-v3', 'list', '/usr/share/pyrshell/unittest/data', '--order-date-size', '--file-only', '--mbyte-size'])
+        tool = base.BaseTool.latestTool()
+        self.assertEquals(0, tool._logger._errors)
+        out = '\n'.join(base.BaseTool.result())
+        self.assertEquals('''Directories: 4 Files: 14 / 718 Byte
+Ignored: 0 file(s) / 0 dir(s)
+The oldest files:
+2017.05.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_5.txt
+2017.05.02 07:03:52 45 Byte /tmp/%examples/dir1/subdir2/data_5.txt
+2017.05.03 07:03:53 69 Byte /tmp/%examples/dir1/subdir3/data_5.txt
+2017.06.01 07:03:51 37 Byte /tmp/%examples/dir1/subdir1/data_6.txt
+The youngest files:
+2018.03.03 07:03:52 53 Byte /tmp/%examples/dir1/file3.txt
+2018.04.03 07:03:53 61 Byte /tmp/%examples/dir1/file4.txt
+2018.05.03 07:03:54 69 Byte /tmp/%examples/dir1/file5.txt''', out)
+
+ def testExampleImageResize(self):
+ if False and debug:
+ return
+ target = self.tempDir('images', 'unittest')
+ appl.DirTool.main(['dirtool', '-v3', 'image-resize', '/usr/share/pyrshell/unittest/img', target, '--max-width=100', '--max-height=50'])
+ self.assertFileExists(target + os.sep + 'landscape.jpg')
+ self.assertFileExists(target + os.sep + 'portrait.jpg')
+
+if __name__ == '__main__':
+    # import sys;sys.argv = ['', 'Test.testName']
+    # run the whole DirTool suite when invoked as a script
+    tester = DirToolTest()
+    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import os.path
+import time
+import appl.FtpTool
+import base.StringUtils
+import shutil
+
+def usage(msg=None):
+ return 'test usage'
+
+debug = False
+
+class FtpToolTest(UnitTestCase):
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ self._baseDir = self.tempDir('ftptool', 'unittest')
+
+ def _finish(self):
+ shutil.rmtree(self._baseDir)
+
+ def getOptions(self):
+ appInfo = base.BaseTool.ApplicationInfo('ftptool', 'appl.FtpTool.py', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._logFiles.append('-')
+ return rc
+
+ def makeSourceTree(self):
+ text = 'Dies ist ein langer Text, der hoffentlich ein wenig komprimiert werden kann. Wenn nicht, kann man nichts machen' * 1000
+ dirName = self._baseDir + os.sep + 'src'
+ os.makedirs(dirName, 0o777, True)
+ os.chmod(dirName, 0o777)
+ # /tmp/zippertest/srcdir: file[1-5].txt dir[123] .git linkConfig
+ # /tmp/zippertest/srcdir/dirX: levelX.txt subdir1
+ # /tmp/zippertest/srcdir/dirX/subdir1: a.jpg b.txt c.odt
+ # /tmp/zippertest/srcdir/.git: config aa bb
+ # /tmp/zippertest/srcdir/.git/xx: xx1234 xx2345 xx3456
+ for ix in range(1, 5+1):
+ fn = dirName + os.sep + 'file{:d}.txt'.format(ix)
+ base.StringUtils.toFile(fn, 'no{:d}: {:s}'.format(ix, text))
+ os.chmod(fn, 0o666)
+ if not os.path.exists(fn):
+ os.symlink('file1.txt', fn)
+ for ix in range(1, 3+1):
+ subdir = dirName + os.sep + 'dir' + str(ix)
+ os.makedirs(subdir, 0o777, True)
+ os.chmod(subdir, 0o777)
+ fn = subdir + os.sep + 'level{:d}'.format(ix)
+ base.StringUtils.toFile(fn, 'level{:d}: {:s}'.format(ix, text))
+ dir2 = subdir + os.sep + 'subdir1'
+ os.makedirs(dir2, 0o777, True)
+ os.chmod(dir2, 0o777)
+ for node in ['a.jpg', 'b.txt', 'c.odt']:
+ fn = dir2 + os.sep + node
+ base.StringUtils.toFile(fn, node + ': ' + text)
+ os.chmod(fn, 0o666)
+ return dirName
+
+ def makeMinimalConfigDir(self):
+ dirName = self.tempFile('pyrshell', 'etc')
+ os.makedirs(dirName, 0o777, True)
+ base.StringUtils.toFile(dirName + os.sep + 'backup.conf', '')
+ return dirName
+
+ def makeTargetDir(self):
+ targetDir = self._baseDir + 'trgdir'
+ self.clearDirectory(targetDir)
+ return targetDir
+
+ def makeFtpConfig(self, clear=True):
+ baseDir = self.tempDir('config', 'backuptest')
+ if clear:
+ self.clearDirectory(baseDir)
+ targetDir = self.makeTargetDir()
+ appDataDir = self.tempDir('blog', 'backuptest')
+ base.StringUtils.toFile(appDataDir + os.sep + 'index.php', '<?php\ninfo();')
+ base.StringUtils.toFile(appDataDir + os.sep + 'new.php', '<?php\nnew();')
+ oldFile = appDataDir + os.sep + 'old.php'
+ base.StringUtils.toFile(oldFile, '<?php\nold();')
+ ftime = time.mktime((2018,1,2,3,4,5,0,0,0))
+ os.utime(oldFile, (ftime, ftime))
+
+ appDir = self.ensureDirectory(baseDir + os.sep + 'ftp.d')
+ base.StringUtils.toFile(appDir + os.sep + 'unittest.conf', '''
+host=localhost
+user=unittest
+port=21
+code=Secret
+'''.format(appDataDir))
+ base.StringUtils.toFile(baseDir + os.sep + 'ftptool.conf', '''# created by unittest
+log.file=/tmp/ftptooltest.log
+'''.format(targetDir, appDir))
+ return baseDir
+
+ def testFtpInfo(self):
+ if debug:
+ return
+ configDir = self.makeFtpConfig()
+ logFile = self.tempFile('ftpinfo.log', 'unittest')
+ self.ensureFileDoesNotExist(logFile)
+ argv = ['ftptool', '-c' + configDir, '-l' + logFile, 'info', 'unittest', 'localhost']
+ appl.FtpTool.main(argv)
+ lines = base.StringUtils.fromFile(logFile)
+ self.assertTrue(len(lines) > 0)
+ self.assertTrue(lines.find('MLST modify*;') > 0)
+
+ def testFtpRmTree(self):
+ if debug:
+ return
+ source = self.makeSourceTree()
+ configDir = self.makeFtpConfig()
+ logFile = self.tempFile('ftpinfo.log', 'ftptest')
+ self.ensureFileDoesNotExist(logFile)
+ ftpDir = '/home/unittest/unittest/' + os.path.basename(source)
+ argv = ['ftptool', '-c' + configDir, '-l' + logFile, '-v3', 'rmtree', 'unittest', ftpDir]
+ appl.FtpTool.main(argv)
+ self.assertFileNotExists(source)
+ lines = base.StringUtils.fromFile(logFile)
+ self.assertTrue(len(lines) > 0)
+ self.assertTrue(lines.find('/home/unittest/unittest/src/file2.txt') > 0)
+
+ def testFtpLsTree(self):
+ if False and debug:
+ return
+ source = self.makeSourceTree()
+ configDir = self.makeFtpConfig()
+ logFile = self.tempFile('ftpinfo.log', 'ftptest')
+ self.ensureFileDoesNotExist(logFile)
+ ftpDir = '/home/unittest/unittest/' + os.path.basename(source)
+ argv = ['ftptool', '-c' + configDir, '-l' + logFile, '-v3', 'lstree', 'unittest', ftpDir]
+ appl.FtpTool.main(argv)
+ lines = base.StringUtils.fromFile(logFile)
+ self.assertTrue(len(lines) > 0)
+ self.assertTrue(lines.find('/home/unittest/unittest/src/file2.txt') > 0)
+
+ def testExample(self):
+ if debug:
+ return
+ configDir = self.makeFtpConfig()
+ logFile = self.tempFile('ftpinfo.log', 'ftptest')
+ self.ensureFileDoesNotExist(logFile)
+ argv = ['ftptool', '-c' + configDir, '-l' + logFile, 'example']
+ appl.FtpTool.main(argv)
+ self.assertFileExists(configDir + os.sep + 'ftptool.conf.example')
+ self.assertFileExists(configDir + os.sep + 'ftp.d' + os.sep + 'example.conf')
+
+if __name__ == '__main__':
+    #import sys;sys.argv = ['', 'Test.testName']
+    # run the whole FtpTool suite when invoked as a script
+    tester = FtpToolTest()
+    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import os.path
+from unittest.UnitTestCase import UnitTestCase
+import appl.Monitor
+import base.Logger
+import base.MemoryLogger
+
+def usage(msg=None):
+ return 'test usage'
+
+debug = False
+
+class MonitorTest(UnitTestCase):
+
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ appInfo = base.BaseTool.ApplicationInfo('pymonitor', 'appl/Monitor.py', usage)
+ self._options = self.getOptions()
+ self._options._logFiles = []
+
+ def _finish(self):
+ self.clearDirectory(self.tempDir('unittest'))
+
+ def getOptions(self):
+ appInfo = base.BaseTool.ApplicationInfo('pymonitor', 'appl/Monitor.py', usage)
+ options = base.BaseTool.GlobalOptions(appInfo)
+ options._logFiles = []
+ return options
+
+ def checkObservedProperty(self, parent, name, warnLimit, errorLimit):
+ self.assertTrue(name in parent._observed)
+ if name in parent._observed:
+ prop = parent._observed[name]
+ self.assertEquals(name, prop._id)
+ self.assertEquals(warnLimit, prop._warnLimit)
+ self.assertEquals(errorLimit, prop._errorLimit)
+
+ def checkProperty(self, parent, name, value):
+ self.assertTrue(name in parent._properties)
+ if name in parent._properties:
+ prop = parent._properties[name]
+ self.assertEquals(name, prop._id)
+ if isinstance(value, appl.Monitor.ReceiverGroup):
+ self.assertEquals(value._properties['error'], prop._value._properties['error'])
+ self.assertEquals(value._properties['warning'], prop._value._properties['warning'])
+ else:
+ self.assertEquals(value, prop._value)
+
+ def checkReceivers(self, receiver1, receiver2):
+ self.assertTrue(isinstance(receiver1, appl.Monitor.ReceiverGroup))
+ self.assertTrue(isinstance(receiver2, appl.Monitor.ReceiverGroup))
+ self.assertEquals(receiver1._name, receiver2._name)
+ self.assertEquals(receiver1._properties['error']._value, receiver2._properties['error']._value)
+ self.assertEquals(receiver1._properties['warning']._value, receiver2._properties['warning']._value)
+
+ def checkDisk(self, host, name, usedWarn, usedError, receivers = None):
+ self.assertTrue(name in host._disks)
+ disk = host._disks[name]
+ self.assertEquals('Disk', disk._type)
+ self.assertEquals(name, disk._name)
+ self.assertEquals(usedWarn, disk._observed['used']._warnLimit)
+ self.assertEquals(usedError, disk._observed['used']._errorLimit)
+ if receivers != None:
+ self.assertEquals(receivers, disk._properties['receivers']._name)
+
+ def checkHost(self, monitor, name, address, receivers, load1Warn, load1Error, load5Warn, load5Error, load10Warn, load10Error, procWarn, procError):
+ self.assertTrue(name in monitor._hosts)
+ host = monitor._hosts[name]
+ self.checkProperty(host, 'address', address)
+ self.checkReceivers(receivers, host._properties['receivers'])
+ self.checkReceivers(receivers, host._properties['receivers'])
+ self.checkObservedProperty(host, 'load1', load1Warn, load1Error)
+ self.checkObservedProperty(host, 'load5', load5Warn, load5Error)
+ self.checkObservedProperty(host, 'load10', load10Warn, load10Error)
+ self.checkObservedProperty(host, 'processes', procWarn, procError)
+ return host
+
+ def testDump(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFile = ['/tmp/unittest.log']
+ monitor = appl.Monitor.Monitor(options, configDir)
+ dump = monitor.dump()
+ self.assertMatches(r'Disk "/":', dump)
+ pass
+
+ def testBasic(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFile = ['/tmp/unittest.log']
+ monitor = appl.Monitor.Monitor(options, configDir)
+ host = self.checkHost(monitor, '', 'localhost', monitor._receiverGroups[''], '4.0', '5.0', '3.1', '4.1', '2.2', '3.2', '500', '1000')
+ self.checkDisk(host, '', '81%', '91%', '')
+
+ host = self.checkHost(monitor, 'localhost', '127.0.0.1', monitor._receiverGroups['group1'], '3.3', '5.3', '2.4', '3.4', '1.5', '2.5', '502', '1002')
+ self.checkDisk(host, '', '81%', '91%', '')
+ self.checkDisk(host, '/', '70%', '80%', 'globalGroup')
+ self.checkDisk(host, '/home', '83%', '93%', 'group1')
+ self.checkDisk(host, '/opt', '84%', '94%', '')
+ self.assertEquals('globalGroup', host._disks['/']._properties['receivers']._name)
+ self.assertEquals('group1', host._observed['load1']._receivers._name)
+ self.assertEquals('group1', host._observed['load5']._receivers._name)
+ self.assertEquals('globalGroup', host._observed['load10']._receivers._name)
+ self.assertEquals('group1', host._observed['processes']._receivers._name)
+
+ def testFormatErrors(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration/error'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFiles = []
+ monitor = appl.Monitor.Monitor(options, configDir)
+ self._logger.log('=== Note many errors follow...')
+ self.assertTrue(monitor._logger.contains('unexpected "}"'))
+ self.assertTrue(monitor._logger.contains('nested definition of host'))
+ self.assertTrue(monitor._logger.contains('disk not inside a host definition'))
+ self.assertTrue(monitor._logger.contains('disk not inside a host definition. Found:'))
+ self.assertTrue(monitor._logger.contains('property outside a container'))
+ self.assertTrue(monitor._logger.contains('unknown property dummyProp'))
+ self.assertTrue(monitor._logger.contains('unknown receivergroup: unknownGroup1'))
+ self.assertTrue(monitor._logger.contains('unknown receivergroup: unknownGroup2'))
+ self.assertTrue(monitor._logger.contains('receiver group definition inside a container'))
+ self.assertTrue(monitor._logger.contains('unexpected input'))
+ self.assertTrue(monitor._logger.contains('missing "}"'))
+
+ def testObserveDisks(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration/observed'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFile = ['/tmp/unittest.log']
+ monitor = appl.Monitor.Monitor(options, configDir)
+ with open(self.tempFile('monitor.conf'), 'w') as fp:
+ fp.write(monitor.dump())
+ monitor._currentHostname = 'localhost'
+ rc = monitor.observeDisks()
+ if self.assertEquals(2, len(rc)):
+ # W |localhost:disk:/| disk usage warning free: 8.685 GiByte (29.5%) of 29.404 GiByte Limit: 10%||1568545510
+ self.assertMatches(r'W\|localhost:disk:/\|disk usage warning free: [0-9.]+ \S+ \([.0-9]+%\) of [0-9.]+ \S+ Limit: 10%\|', rc[0])
+ # E|localhost:disk:/home |disk usage error free: 28.331 GiByte (45.3%) of 62.496 GiByte Limit: 11%|monitor@hamatoma.de pit@hamatoma.de|1568555108
+ self.assertMatches(r'E\|localhost:disk:/home\|disk usage error free: [0-9.]+ \S+ \([.0-9]+%\) of [0-9.]+ \S+ Limit: 11%\|monitor\@hamatoma.de pit\@hamatoma.de', rc[1])
+ #self.assertMatches(r'W\|localhost:disk:/media/work\|disk usage warning: free: [0-9.]+ \S+ \([.0-9]+%\) of [0-9.]+ \S+ Limit: 81%\|', rc[2])
+
+ def testObserveCore(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration/observed'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFile = ['/tmp/unittest.log']
+ monitor = appl.Monitor.Monitor(options, configDir)
+ monitor._currentHostname = 'localhost'
+ rc = monitor.observeCore()
+ if self.assertEquals(5, len(rc)):
+ self.assertMatches(r'W\|localhost:core:load1\|core detection warning\s+\S+ Limit: \S+\s*\|one@hamatoma.de two@hamatoma.de', rc[0])
+ self.assertMatches(r'E\|localhost:core:load5\|core detection error\s+\S+ Limit: \S+\s*\|critical@hamatoma.de', rc[1])
+ self.assertMatches(r'W\|localhost:core:load10\|core detection warning\s+\S+ Limit: \S+\s*\|one@hamatoma.de two@hamatoma.de', rc[2])
+ self.assertMatches(r'E\|localhost:core:processes\|core detection error\s+\S+ Limit: \S+\s*\|critical@hamatoma.de', rc[3])
+ self.assertMatches(r'W\|localhost:core:swap\|core detection warning 0.0 Limit: 0\|one@hamatoma.de two@hamatoma.de\|', rc[4])
+
+ def testObserve(self):
+ if debug: return
+ configDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration/observed'
+ options = self.getOptions()
+ options._additionalConfigDir = configDir
+ options._logFile = ['/tmp/unittest.log']
+ monitor = appl.Monitor.Monitor(options, configDir)
+ (newProblems, closedProblems) = monitor.observe()
+ self.assertEquals(9, len(newProblems))
+ self.assertEquals(0, len(closedProblems))
+ self.assertEquals(9, len(monitor._openProblems))
+ no = -1
+ keys = sorted(list(monitor._openProblems.keys()))
+ for key in keys:
+ no += 1
+ if no % 2 == 0:
+ del monitor._openProblems[key]
+ host = monitor._hosts['localhost']
+ host._observed['processes'] = appl.Monitor.ObservedProperty('processes', 9999, 9999, None)
+ (newProblems, closedProblems) = monitor.observe()
+ self.assertEquals(5, len(newProblems))
+ self.assertEquals(1, len(closedProblems))
+ self.assertEquals(8, len(monitor._openProblems))
+ self._logger.log('- openProblems:\n' + '\n'.join(monitor._openProblems.values()))
+
+ def testExampleSiteCommand(self):
+ if debug:
+ return
+ nginxDir = os.path.dirname(os.path.dirname(__file__)) + '/configuration/sites'
+ appl.Monitor.main(['pymonitor', '-l-', '-v3', '--test-source-dir=' + nginxDir,
+ 'site', '--scan', nginxDir])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ lines = base.BaseTool.result()
+ if self.assertEquals(2, len(lines)):
+ self.assertEquals('site "monitor.infeos.eu" {\n\turl: http://monitor.infeos.eu:10116/works\n}', lines[0])
+ self.assertEquals('site "wiki.hamatoma.de" {\n\turl: https://wiki.hamatoma.de/index.php?title=Hauptseite\n}', lines[1])
+
+ def testExampleCreateSiteServer(self):
+ if debug:
+ return
+ self.tempDir('sites-enabled', 'unittest')
+ self.tempDir('sites-available', 'unittest')
+ baseDir = self.tempDir('www', 'unittest')
+ tmpDir = self.tempDir('unittest')
+ appl.Monitor.main(['pymonitor', '-v3', '--test-target-dir=' + tmpDir,
+ 'create-site-server', '--ip=208.33.99.5', '--domain=gustl.example.com', '--overwrite'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ expected = '''server {
+ listen 80;
+ server_name gustl.example.com 208.33.99.5;
+ root /tmp/unittest/www/gustl.example.com;
+ location / {
+ allow all;
+ }
+}'''
+ self.assertFileContent(tmpDir + '/sites-enabled/gustl.example.com', expected)
+ self.assertFileContent(tmpDir + '/sites-available/gustl.example.com', expected)
+ baseDir = baseDir + os.sep + 'gustl.example.com'
+ self.assertFileContains('<html>', baseDir + os.sep + 'index.html')
+ self.assertFileContains('<?php', baseDir + os.sep + 'index.php')
+ self.assertFileContains('<html>', baseDir + os.sep + 'index.php')
+ self.assertFileContains('gustl.example.com', baseDir + os.sep + 'domain.txt')
+
+ def testExampleCreateSiteServerApache(self):
+ if debug:
+ return
+ self.clearDirectory(self.tempDir('unittest'))
+ self.tempDir('sites-enabled', 'unittest')
+ self.tempDir('sites-available', 'unittest')
+ baseDir = self.tempDir('www', 'unittest')
+ tmpDir = self.tempDir('unittest')
+ appl.Monitor.main(['pymonitor', '-v3', '--test-target-dir=' + tmpDir,
+ 'create-site-server', '--overwrite', '--apache'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ expectedDomain = base.BaseTool.BasicStatics.hostname(True)
+ expected = '''<VirtualHost *:80>
+ ServerName {}
+ ServerAlias 192.168.2.50
+ ServerAdmin webmaster@localhost
+ DocumentRoot /tmp/unittest/www/{}
+ <Directory /tmp/unittest/www/{}>
+ AllowOverride all
+ Require all granted
+ Order allow,deny
+ allow from all
+ </Directory>
+</VirtualHost>'''.format(expectedDomain, expectedDomain, expectedDomain)
+ self.assertFileContent(tmpDir + '/sites-enabled/{}.conf'.format(expectedDomain), expected)
+ self.assertFileContent(tmpDir + '/sites-available/{}.conf'.format(expectedDomain), expected)
+ baseDir = baseDir + os.sep + expectedDomain
+ self.assertFileContains('<html>', baseDir + os.sep + 'index.html')
+ self.assertFileContains('<?php', baseDir + os.sep + 'index.php')
+ self.assertFileContains('<html>', baseDir + os.sep + 'index.php')
+ self.assertFileContains(expectedDomain, baseDir + os.sep + 'domain.txt')
+
+ def testExampleTest(self):
+ if debug:
+ return
+ appl.Monitor.main(['pymonitor', '-v3', 'test'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+if __name__ == '__main__':
+    #import sys;sys.argv = ['', 'Test.testName']
+    # run the whole Monitor suite when invoked as a script
+    tester = MonitorTest()
+    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import sys
+sys.path.insert(0, '/usr/share/pyrshell')
+from unittest.UnitTestCase import UnitTestCase
+import os
+import time
+import shutil
+import re
+
+import base.StringUtils
+import appl.RestoreTool
+import appl.BackupTool
+
+def usage(msg=None):
+ return 'test usage'
+
+debug = False
+
+class RestoreToolTest(UnitTestCase):
+
+    def __init__(self):
+        '''Initializes the suite and its shared backup target directory.'''
+        UnitTestCase.__init__(self)
+        # shared target; cleared again on each makeTargetDir() call
+        self._targetDir = self.tempDir('trgdir', 'unittest')
+
+ def makeTargetDir(self):
+ self.clearDirectory(self._targetDir)
+ return self._targetDir
+
+ def _finish(self):
+ shutil.rmtree('/tmp/unittest', True)
+ shutil.rmtree('/tmp/restoretool', True)
+
+    def makeTree(self):
+        '''Builds a small source tree below the temporary directory.
+        @return: the path of the created tree root
+        '''
+        text = 'Dies ist ein langer Text, der hoffentlich ein wenig komprimiert werden kann. Wenn nicht, kann man nichts machen' * 1000
+        dirName = self.tempFile('srcdir', 'unittest')
+        os.makedirs(dirName, 0o777, True)
+        # /tmp/zippertest/srcdir: file[1-5].txt dir[123] .git linkConfig
+        # /tmp/zippertest/srcdir/dirX: levelX.txt subdir1
+        # /tmp/zippertest/srcdir/dirX/subdir1: a.jpg b.txt c.odt
+        # /tmp/zippertest/srcdir/.git: config aa bb
+        # /tmp/zippertest/srcdir/.git/xx: xx1234 xx2345 xx3456
+        for ix in range(1, 5+1):
+            fn = dirName + os.sep + 'file{:d}.txt'.format(ix)
+            base.StringUtils.toFile(fn, 'no{:d}: {:s}'.format(ix, text))
+        for ix in range(1, 3+1):
+            subdir = dirName + os.sep + 'dir' + str(ix)
+            os.makedirs(subdir, 0o777, True)
+            fn = subdir + os.sep + 'level{:d}'.format(ix)
+            base.StringUtils.toFile(fn, 'level{:d}: {:s}'.format(ix, text))
+            dir2 = subdir + os.sep + 'subdir1'
+            os.makedirs(dir2, 0o777, True)
+            for node in ['a.jpg', 'b.txt', 'c.odt']:
+                fn = dir2 + os.sep + node
+                base.StringUtils.toFile(fn, node + ': ' + text)
+        return dirName
+
+    def makeBackup(self):
+        '''Runs a "dayly" backup and returns the backup target path.
+        NOTE(review): base.FileHelper is not imported in this module —
+        presumably made available transitively via appl.BackupTool; verify.
+        '''
+        target = self.makeTargetDir()
+        options = self.getOptions()
+        options._configDir = self.makeBackupConfig()
+        # suppress expected permission errors ("Keine Berechtigung")
+        options._errorFilter = 'Keine Berechtigung'
+        tool = appl.BackupTool.BackupTool(options)
+        base.FileHelper.ensureFileExists('/srv/.srv.exists')
+        tool.doBackup('dayly')
+        relPath = tool.relativeBackupPath('dayly')
+        rc = target + relPath
+        return rc
+
+    def makeBackupConfig(self, clear=True):
+        '''Builds a backup configuration directory with dirs.d entries.
+        @param clear: True: the configuration directory is emptied first
+        @return: the path of the configuration directory
+        '''
+        baseDir = self.tempDir('config', 'backuptest')
+        if clear:
+            self.clearDirectory(baseDir)
+        sourceDir = self.makeTree()
+        targetDir = self.makeTargetDir()
+        appDataDir = self.tempDir('blog', 'backuptest')
+        base.StringUtils.toFile(appDataDir + os.sep + 'index.php', '<?php\ninfo();')
+        base.StringUtils.toFile(appDataDir + os.sep + 'new.php', '<?php\nnew();')
+        oldFile = appDataDir + os.sep + 'old.php'
+        base.StringUtils.toFile(oldFile, '<?php\nold();')
+        # give old.php a well-known modification time in the past
+        ftime = time.mktime((2018,1,2,3,4,5,0,0,0))
+        os.utime(oldFile, (ftime, ftime))
+
+        appDir = self.ensureDirectory(baseDir + os.sep + 'webapps.d')
+        dirDir = self.ensureDirectory(baseDir + os.sep + 'dirs.d')
+        base.StringUtils.toFile(dirDir + os.sep + 'sys.etc.conf', '''
+directory=/etc/systemd
+excluded=
+''')
+        base.StringUtils.toFile(dirDir + os.sep + 'std.source.conf', '''
+directory={}
+excluded=
+'''.format(sourceDir))
+        # NOTE(review): the template below has one placeholder ({:s} for
+        # target.path) but .format() gets two arguments — appDir is unused.
+        base.StringUtils.toFile(baseDir + os.sep + 'backup.conf', '''
+target.path={:s}
+job.dayly=job.sysinfo job.sys job.full
+job.sys=&saveDirByTar @%etc%/dirs.d/sys.*.conf:directory:excluded
+job.full=&saveDirByZip @%etc%/dirs.d/std.*.conf:directory
+job.sysinfo=&systemInfo
+# Reading metadata from mysql:
+mysql.user=backup
+mysql.code=backup2backup
+# Email data:
+# Receiver of the error messages: may be a blank separated list of email addresses
+send.always=False
+location=unittest
+admin.email=hm.neutral@gmx.de
+smtp.host=smtp.gmx.de
+smtp.port=587
+smtp.sender=hm.unittest@gmx.de
+smtp.user=hm.unittest@gmx.de
+smtp.code=33S.e.c.r.e.t
+smtp.tls=True
+'''.format(targetDir, appDir))
+        appl.BackupTool.BackupTool.createDirsD(dirDir, None, False)
+        return baseDir
+
+    def makeMinimalConfigDir(self):
+        '''Creates a minimal configuration directory with an empty backup.conf.
+        NOTE(review): tempFile() is used to derive a directory name here —
+        presumably it only builds a path; verify against UnitTestCase.
+        '''
+        dirName = self.tempFile('pyrshell', 'etc')
+        os.makedirs(dirName, 0o777, True)
+        base.StringUtils.toFile(dirName + os.sep + 'backup.conf', '')
+        return dirName
+
+    def getOptions(self):
+        '''Returns fresh global options logging to stdout ('-').
+        NOTE(review): the ApplicationInfo names 'wiki'/'appl.WikiTool' — this
+        looks like a copy & paste from the WikiTool test; confirm whether the
+        name influences configuration lookup before renaming it.
+        '''
+        appInfo = base.BaseTool.ApplicationInfo('wiki', 'appl.WikiTool', usage)
+        rc = base.BaseTool.GlobalOptions(appInfo)
+        rc._logFiles.append('-')
+        return rc
+
+    def testRestoreByTar(self):
+        '''Restoring a tar archive must replace the target directory content.'''
+        if debug:
+            return
+        target = self.makeBackup()
+        archive = target + os.sep + 'etc=systemd.tgz'
+        options = self.getOptions()
+        tool = appl.RestoreTool.RestoreTool(options)
+        target2 = self.tempDir('trgdir2', 'unittest')
+        # pre-existing file and directory must be removed by the restore
+        fn = target2 + os.sep + 'toDelete.txt'
+        base.StringUtils.toFile(fn, 'to delete')
+        dir2 = target2 + os.sep + 'dir.to.remove'
+        tool.ensureDirectory(dir2)
+        tool.restoreDirectoryByTar(archive, target2, None, True)
+        self.assertFileNotExists(fn)
+        self.assertFileNotExists(dir2)
+        self.assertFileExists(target2 + os.sep + 'system.conf')
+
+    def testRestoreByZip(self):
+        '''Restoring a zip archive must replace the target directory content.'''
+        if debug:
+            return
+        target = self.makeBackup()
+        archive = target + os.sep + 'tmp=unittest=srcdir.zip'
+        options = self.getOptions()
+        tool = appl.RestoreTool.RestoreTool(options)
+        target2 = self.tempDir('trgdir2', 'unittest')
+        # pre-existing file and directory must be removed by the restore
+        fn = target2 + os.sep + 'toDelete.txt'
+        base.StringUtils.toFile(fn, 'to delete')
+        dir2 = target2 + os.sep + 'dir.to.remove'
+        tool.ensureDirectory(dir2)
+        tool.restoreDirectoryByZip(archive, target2, None)
+        self.assertFileNotExists(fn)
+        self.assertFileNotExists(dir2)
+        self.assertFileExists(target2 + os.sep + 'file2.txt')
+        self.assertFileExists(target2 + os.sep + 'dir2' + os.sep + 'level2')
+
+    def testMergeUsers(self):
+        '''restoreMergeUsersAndGroups() must append entries from the second
+        passwd/shadow/group set that are missing in the first.
+        Note: the guard "False and debug" keeps this test always enabled.
+        '''
+        if False and debug:
+            return
+        self._finish()
+        options = self.getOptions()
+        dirSource = self.tempDir('etc', 'unittest')
+        dirSecond = self.tempDir('etc2', 'unittest')
+        options._testSourceDir = os.path.dirname(dirSource)
+        options._testTargetDir = os.path.dirname(dirSource)
+        tool = appl.RestoreTool.RestoreTool(options)
+        fnPasswd = dirSource + os.sep + 'passwd'
+        base.StringUtils.toFile(fnPasswd, '''# test passwd
+root:x:0:0:root:/root:/bin/bash
+daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin
+bin:x:2:2:bin:/bin:/usr/sbin/nologin
+wk:x:1003:1003:,,,:/home/wk:/bin/bash
+redis:x:129:136::/var/lib/redis:/usr/sbin/nologin
+''')
+        base.StringUtils.toFile(dirSource + os.sep + 'shadow', '''# test shadow
+root:$6$ovR8mtHu$RXx8SzG1k5fYe9DIecA3nM7744otO9lyVwCUIKo.gMY0LziEb53dRRQRCyGlNBFqx/XULgrXQfDuYc59REHdY.:17732:0:99999:7:::
+daemon:*:17708:0:99999:7:::
+bin:*:17708:0:99999:7:::
+wk:$6$Fz9IlJZv$c7BY4/D5LeJrXpd.313wBsH3hAwU9xyPe7p322nf3g3X4kNO1dkLoh32rWEeIRCFqIt/MgglOBD/UGUoMR6rJ.:17777:0:99999:7:::
+redis:*:17962:0:99999:7:::
+''')
+        base.StringUtils.toFile(dirSource + os.sep + 'group', '''# test group
+root:x:0:
+daemon:x:1:
+bin:x:2:
+root:x:0:
+daemon:x:1:
+bin:x:2:
+''')
+        # === Merge files:
+        base.StringUtils.toFile(dirSecond + os.sep + 'passwd', '''# test passwd
+bin2:x:502:502:bin:/bin:/usr/sbin/nologin
+wk:x:1003:1003:,,,:/home/wk:/bin/bash
+user1:x:129:136::/var/lib/redis:/usr/sbin/nologin
+''')
+        base.StringUtils.toFile(dirSecond + os.sep + 'shadow', '''# test shadow
+bin2:*:17708:0:99999:7:::
+wk:$6$Fz9IlJZv$c7BY4/D5LeJrXpd.313wBsH3hAwU9xyPe7p322nf3g3X4kNO1dkLoh32rWEeIRCFqIt/MgglOBD/UGUoMR6rJ.:17777:0:99999:7:::
+user1:*:17962:0:99999:7:::
+''')
+        base.StringUtils.toFile(dirSecond + os.sep + 'group', '''# test group
+bin2:x:502:
+wk:x:1003:
+group1:x:2:user1
+''')
+        tool.restoreMergeUsersAndGroups(dirSecond)
+        dirTarget = dirSource
+        # duplicates (wk) must not be appended; new entries (bin2, user1) must
+        expected = '''# test passwd
+root:x:0:0:root:/root:/bin/bash
+daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin
+bin:x:2:2:bin:/bin:/usr/sbin/nologin
+wk:x:1003:1003:,,,:/home/wk:/bin/bash
+redis:x:129:136::/var/lib/redis:/usr/sbin/nologin
+bin2:x:502:502:bin:/bin:/usr/sbin/nologin
+user1:x:129:136::/var/lib/redis:/usr/sbin/nologin'''
+        current = base.StringUtils.fromFile(dirTarget + os.sep + 'passwd')
+        self.assertEquals(expected, current)
+        expected = '''# test shadow
+root:$6$ovR8mtHu$RXx8SzG1k5fYe9DIecA3nM7744otO9lyVwCUIKo.gMY0LziEb53dRRQRCyGlNBFqx/XULgrXQfDuYc59REHdY.:17732:0:99999:7:::
+daemon:*:17708:0:99999:7:::
+bin:*:17708:0:99999:7:::
+wk:$6$Fz9IlJZv$c7BY4/D5LeJrXpd.313wBsH3hAwU9xyPe7p322nf3g3X4kNO1dkLoh32rWEeIRCFqIt/MgglOBD/UGUoMR6rJ.:17777:0:99999:7:::
+redis:*:17962:0:99999:7:::
+# test shadow
+bin2:*:17708:0:99999:7:::
+user1:*:17962:0:99999:7:::
+'''
+        current = base.StringUtils.fromFile(dirTarget + os.sep + 'shadow')
+        self.assertEquals(expected, current)
+        expected = '''# test group
+root:x:0:
+daemon:x:1:
+bin:x:2:
+root:x:0:
+daemon:x:1:
+bin:x:2:
+bin2:x:502:
+wk:x:1003:
+group1:x:2:user1
+'''
+        current = base.StringUtils.fromFile(dirTarget + os.sep + 'group')
+        self.assertEquals(expected, current)
+
+    def testInitNetStatic(self):
+        '''Tests "restoretool net static" without further arguments: an
+        interfaces(5) file without a static entry gets a static configuration
+        with the default interface (enp2s0) and default addresses.
+        '''
+        if debug:
+            return
+        tempDir = self.tempDir('trgdir', 'unittest')
+        fn = tempDir + os.sep + 'interfaces'
+        # a minimal interfaces file containing only loopback and a dhcp entry
+        base.StringUtils.toFile(fn, '''# interfaces(5) file used by ifup(8) and ifdown(8)
+auto lo
+iface lo inet loopback
+
+auto wlan0
+iface wlan0 inet dhcp
+''')
+        appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(self._targetDir), 'net', 'static'])
+        # the tool must append a static block for the default interface
+        self.assertFileContains('auto enp2s0', fn)
+        self.assertFileContains('iface enp2s0 inet static', fn)
+        self.assertFileContains(' address 10.10.10.100', fn)
+        self.assertFileContains(' netmask 255.0.0.0', fn)
+        self.assertFileContains(' gateway 10.10.10.1', fn)
+
+ def testInitNetStatic2(self):
+ if debug:
+ return
+ fn = self._targetDir + os.sep + 'interfaces'
+ base.StringUtils.toFile(fn, '''# interfaces(5) file used by ifup(8) and ifdown(8)
+auto lo
+iface lo inet loopback
+iface enp2s0 inet static
+ address 192.168.3.100
+ netmask 255.255.255.0
+ gateway 192.168.3.1
+auto wlan0
+iface wlan0 inet dhcp
+''')
+ appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(self._targetDir),
+ 'net', 'static', '101'])
+ self.assertFileContains('auto enp2s0', fn)
+ self.assertFileContains('iface enp2s0 inet static', fn)
+ self.assertFileContains(' address 10.10.10.101', fn)
+ self.assertFileContains(' netmask 255.0.0.0', fn)
+ self.assertFileContains(' gateway 10.10.10.1', fn)
+
+ def testInitNetStatic3(self):
+ if debug:
+ return
+ fn = self._targetDir + os.sep + 'interfaces'
+ base.StringUtils.toFile(fn, '''# interfaces(5) file used by ifup(8) and ifdown(8)
+auto lo
+iface lo inet loopback
+iface wlp4s0 inet static
+ address 192.168.3.100
+ netmask 255.255.255.0
+ gateway 192.168.3.1
+auto wlan0
+iface wlan0 inet dhcp
+''')
+ appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(self._targetDir),
+ 'net', 'static', '25', 'w*'])
+ self.assertFileContains('auto wlp4s0', fn)
+ self.assertFileContains('iface wlp4s0 inet static', fn)
+ self.assertFileContains(' address 10.10.10.25', fn)
+ self.assertFileContains(' netmask 255.0.0.0', fn)
+ self.assertFileContains(' gateway 10.10.10.1', fn)
+
+ def testInitNetStatic4(self):
+ if debug:
+ return
+ fn = self._targetDir + os.sep + 'interfaces'
+ base.StringUtils.toFile(fn, '''# empty
+''')
+ appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(self._targetDir),
+ 'net', 'static', '192.25', 'e*'])
+ self.assertFileContains('auto enp2s0', fn)
+ self.assertFileContains('iface enp2s0 inet static', fn)
+ self.assertFileContains(' address 192.10.10.25', fn)
+ self.assertFileContains(' netmask 255.255.255.0', fn)
+ self.assertFileContains(' gateway 192.10.10.1', fn)
+
+ def testNetNameserver(self):
+ if debug:
+ return
+ tmpDir = self.tempDir('unittest')
+ shutil.copy2('/etc/systemd/resolved.conf', tmpDir)
+ appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(tmpDir),
+ 'net', 'nameserver', '192.168.178.254'])
+ self.assertFileContains('DNS=192.168.178.254', tmpDir + os.sep + 'resolved.conf')
+
+ def testNetNameserver2(self):
+ if debug:
+ return
+ tmpDir = self.tempDir('unittest')
+ shutil.copy2('/etc/systemd/resolved.conf', tmpDir)
+ appl.RestoreTool.main(['restoretool', '--test-target-dir={}'.format(tmpDir),
+ 'net', 'nameserver'])
+ self.assertFileContains('DNS=9.9.9.9', tmpDir + os.sep + 'resolved.conf')
+
+ def testExampleStorageDisk1(self):
+ if debug:
+ return
+ appl.RestoreTool.main(['restoretool', 'storage', 'disks', 'sd[a-f]'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ lines = tool._output
+ if self.assertTrue(lines != None and len(lines) > 0):
+ for line in lines:
+ self.assertMatches(r'UUID', line)
+
+ def testExampleStorageAutoFsDisk(self):
+ if debug:
+ return
+ tmpDir = self.tempDir('etc', 'unittest')
+ appl.RestoreTool.main(['restoretool', '--test-target-dir=' + tmpDir,
+ 'storage', 'autofs', 'usb-backup', '/', 'disk|*|/media/usb-backup|rw', '--auto-select=sd[a].*ntfs'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ fn = tmpDir + os.sep + 'auto.master'
+ self.assertFileContains(re.compile(r'^/-.*auto.usb-backup'), fn)
+ fn = tmpDir + os.sep + 'auto.usb-backup'
+ self.assertFileContains(re.compile(r'^[0-9A-Fa-f-]+\s+-fstype=\w+,rw,umask=002\s+:/media/usb-backup'), fn)
+
+ def testExampleInitApache(self):
+ if debug:
+ return
+ tmpDir = self.tempDir('etc', 'unittest')
+ fn = tmpDir + os.sep + 'ports.conf'
+ base.StringUtils.toFile(fn, '''# If you just change the port or add more ports here, you will likely also
+# have to change the VirtualHost statement in
+# /etc/apache2/sites-enabled/000-default.conf
+Listen 80
+<IfModule ssl_module>
+ Listen 443
+</IfModule>
+<IfModule mod_gnutls.c>
+ Listen 443
+</IfModule>
+''')
+ appl.RestoreTool.main(['restoretool', '-v3', '--test-target-dir=' + tmpDir,
+ 'init', 'apache', '--ports=81,444'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContains('Listen 81', fn)
+ self.assertFileContains('Listen 444', fn)
+
+ def testExampleInitNginx(self):
+ if debug:
+ return
+ tmpDir = self.tempDir('etc', 'unittest')
+ dirWellKnown = self.tempDir('letsencrypt', 'unittest')
+ fn = tmpDir + os.sep + 'letsencrypt.conf'
+ self.ensureFileDoesNotExist(fn)
+ appl.RestoreTool.main(['restoretool', '-v3', '--test-target-dir=' + tmpDir,
+ 'init', 'nginx', '--well-known=' + dirWellKnown])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertDirExists(dirWellKnown + os.sep + '.well-known')
+ self.assertFileContains('location ^~ /.well-known/acme-challenge/', fn)
+ self.assertFileContains(re.compile(r'root\s+' + dirWellKnown), fn)
+
+ def testExampleNetInfo(self):
+ if debug:
+ return
+ lines = base.BaseTool.result()
+ appl.RestoreTool.main(['restoretool', 'net', 'info'])
+ if self.assertTrue(lines != None and len(lines) > 0):
+ self.assertTrue(lines[0].startswith('sda3'))
+
+ def testExampleBtrFsCreateFsLoop(self):
+ if debug:
+ return
+ if not self._isRoot:
+ self._logger.log('not root. ignoring testExampleBtrFsCreateFsLoop')
+ else:
+ tempDir = self.tempDir('storage', 'unittest')
+ base.StringUtils.toFile(tempDir + os.sep + 'auto.master', '# auto.master')
+ image = tempDir + os.sep + 'fs.unittest.img'
+ appl.RestoreTool.main(['restoretool', '-v3', '--test-target-dir=' + tempDir,
+ 'btrfs', 'create-fs', '/dev/loop33', 'fs.unittest', '--image={}:150M'.format(image), '--force'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ fn = tempDir + os.sep + 'auto.btrfs.fs.unittest'
+ self.assertFileContains('LABEL=fs.unittest\t-fstype=btrfs\t:fs.unittest', fn)
+ self.assertFileContains(re.compile(r'/-\s+' + fn), tempDir + os.sep + 'auto.master')
+
+ def testExampleBtrCreateFsMove(self):
+ if debug:
+ return
+ if not self._isRoot:
+ self._logger.log('not root: ignoring testExampleBtrCreateFsMove')
+ else:
+ tempDir = self.tempDir('storage', 'unittest')
+ self.ensureDirectory(tempDir + os.sep + 'fs.unittest')
+ fnTab = tempDir + os.sep + 'fstab'
+ base.StringUtils.toFile(fnTab, '''# FSTab
+/dev/sda2 /boot ext4 defaults,noatime 0 2
+LABEL=swap swap swap defaults,noatime 0 2
+UUID=74a63fee-8fca-412e-9533-4d02547fe978 / btrfs subvol=@,defaults,noatime,space_cache,autodefrag 0 1
+UUID=74a63fee-8fca-412e-9533-4d02547fe978 /home btrfs subvol=@home,defaults,noatime,space_cache,autodefrag 0 2
+''')
+ appl.RestoreTool.main(['restoretool', '-v3', '--test-target-dir=' + tempDir,
+ 'btrfs', 'create-subvol', 'fs.unittest', 'db', tempDir + os.sep + 'db', '--options=nodatacow', '--move-files', '--force'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ line = 'UUID=74a63fee-8fca-412e-9533-4d02547fe978 /tmp/unittest/storage/db btrfs subvol=db,defaults,noatime,space_cache,autodefrag,nodatacow 0 0'
+ self.assertFileContains(line, fnTab)
+
+ def testExampleInitGrub(self):
+ if debug:
+ return
+ fnGrub = self.tempFile('grub.cfg', 'unittest', 'grub')
+ base.StringUtils.toFile(fnGrub, '''# Dummy grub file
+menuentry 'Buster XFCE on /dev/nvme0n1p6 NVSAMS' --class debian --class gnu-linux --class gnu --class os $menuentry_id_option 'gnulinux-simple-381089c0-2913-4623-9358-1ad56766b80b' {
+ # Text 1
+}
+menuentry 'Entry2' {
+ # Text 2
+}
+''', self._logger)
+ fnBlkId = self.tempFile('blkid.out', 'unittest', 'grub')
+ base.StringUtils.toFile(fnBlkId, '''# Dummy output of lsblk
+/dev/nvme0n1p1: LABEL="SYSTEM" UUID="184C-BC88" TYPE="vfat" PARTLABEL="EFI system partition" PARTUUID="801ef4fd-3afc-4378-b112-296be3955024"
+/dev/nvme0n1p3: LABEL="Windows" UUID="FA844F35844EF3A3" TYPE="ntfs" PARTLABEL="Basic data partition" PARTUUID="e7239890-b477-4d50-aa82-4fb6de1f5205"
+/dev/nvme0n1p6: LABEL="nvsams" UUID="381089c0-2913-4623-9358-1ad56766b80b" UUID_SUB="7ea15cb3-6a84-48f7-8a61-ffadc21733b8" TYPE="btrfs" PARTLABEL="Basic data partition" PARTUUID="99c411d2-6660-4f02-b855-aa189fb71daf"
+/dev/nvme0n1p7: LABEL="WinRE_DRV" UUID="C8C650B7C650A808" TYPE="ntfs" PARTLABEL="Basic data partition" PARTUUID="4891912c-4d85-4b3f-9fea-00be938a0f57"
+/dev/sda3: LABEL="source" UUID="16BE0656BE062F35" TYPE="ntfs" PARTUUID="979453a9-03"
+/dev/sda6: LABEL="system" UUID="7a497274-d30b-4860-a06b-c94e83d8df76" UUID_SUB="229f5912-31b6-4f70-a18f-063742429e23" TYPE="btrfs" PARTUUID="979453a9-06"
+/dev/nvme0n1: PTUUID="d3966240-6a16-484c-9d3f-498c9dcdf296" PTTYPE="gpt"
+/dev/nvme0n1p2: PARTLABEL="Microsoft reserved partition" PARTUUID="372f5f3e-1ac7-492c-9bc8-ba73e132e358"
+/dev/sda5: PARTUUID="979453a9-05"
+''', self._logger)
+ appl.RestoreTool.main(['restoretool', '-v3', '--test-target-dir=' + os.path.dirname(fnGrub),
+ 'init', 'grub'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ current = base.StringUtils.fromFile(fnGrub, '\n')
+ self.assertEquals('''# Dummy grub file
+menuentry 'Buster XFCE on /dev/nvme0n1p6 NVSAMS' --class debian --class gnu-linux --class gnu --class os $menuentry_id_option 'gnulinux-simple-381089c0-2913-4623-9358-1ad56766b80b' {
+ # Text 1
+}
+menuentry 'Entry2' {
+ # Text 2
+}
+'''.split('\n'), current)
+
+ def testExampleRestoreClone(self):
+ if debug:
+ return
+ self._finish()
+ tar = '/usr/share/pyrshell/unittest/data/example.tgz'
+ target = self.tempDir('bin', 'unittest')
+ target2 = os.path.dirname(target) + os.sep + 'bin2'
+ self.ensureFileDoesNotExist(target2)
+ appl.RestoreTool.main(['restoretool', '-v3',
+ 'restore', 'clone', tar, target])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertDirExists(target2)
+ self.assertFileExists(target2 + os.sep + 'All.sh')
+
+ def testExampleRestorePyrshell(self):
+ if debug:
+ return
+ self._finish()
+ tar = '/usr/share/pyrshell/unittest/data/etc.tgz'
+ target = self.tempDir('unittest')
+ target1 = self.tempDir('etc', 'unittest')
+ target2 = self.tempDir('etc2', 'unittest')
+ self.ensureFileDoesNotExist(target2)
+ appl.RestoreTool.main(['restoretool', '-v3',
+ 'restore', 'clone', tar, target1])
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-source-dir={}'.format(os.path.dirname(target2)),
+ '--test-target-dir={}'.format(os.path.dirname(target1)),
+ 'restore', 'pyrshell'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ target3 = self.tempDir('pyrshell', 'unittest')
+ self.assertFileExists(target3 + os.sep + 'backup.conf')
+ self.assertDirExists(target3 + os.sep + 'webapps.d')
+ unwanted = target3 + os.sep + 'unwanted.data'
+ base.StringUtils.toFile(unwanted, 'unwanted')
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-source-dir={}'.format(os.path.dirname(target2)),
+ '--test-target-dir={}'.format(os.path.dirname(target1)),
+ 'restore', 'pyrshell'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileNotExists(target3 + os.sep + 'unwanted.data')
+
+ def testExampleRestoreNginx(self):
+ if debug:
+ return
+ self._finish()
+ tar = '/usr/share/pyrshell/unittest/data/etc.tgz'
+ target = self.tempDir('unittest')
+ target1 = self.tempDir('etc', 'unittest')
+ source = self.tempDir('etc2', 'unittest')
+ self.ensureFileDoesNotExist(source)
+ appl.RestoreTool.main(['restoretool', '-v3',
+ 'restore', 'clone', tar, target1])
+ base.FileHelper.unpack('/usr/share/pyrshell/unittest/data/etc.work.tgz', target, self._logger)
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-source-dir={}'.format(os.path.dirname(source)),
+ '--test-target-dir={}'.format(os.path.dirname(target1)),
+ 'restore', 'nginx'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ target = target1 + os.sep + 'nginx'
+ self.assertFileExists(target + os.sep + 'nginx.conf')
+ self.assertFileExists(target + os.sep + 'snippets/letsencrypt.conf')
+ self.assertFileExists(target + os.sep + 'sites-enabled/life.caribou')
+ self.assertFileExists(target + os.sep + 'sites-available/life.caribou')
+ target = target1 + os.sep + 'ssl'
+ self.assertFileExists(target + os.sep + 'private/wiki.hamatoma.de.key')
+ self.assertFileExists(target + os.sep + 'certs/wiki.hamatoma.de.pem')
+
+ def testExampleRestoreEtc(self):
+ if debug:
+ return
+ self._finish()
+ target = self.tempDir('unittest')
+ tar = '/usr/share/pyrshell/unittest/data/etc.tgz'
+ target1 = self.tempDir('etc', 'unittest')
+ source = self.tempDir('unittest')
+ appl.RestoreTool.main(['restoretool', '-v3',
+ 'restore', 'clone', tar, target1])
+ base.FileHelper.unpack('/usr/share/pyrshell/unittest/data/etc.work.tgz', target, self._logger)
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-source-dir={}'.format(source),
+ '--test-target-dir={}'.format(target),
+ 'restore', 'etc'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ target = target1 + os.sep + 'nginx'
+ self.assertFileExists(target + os.sep + 'nginx.conf')
+ self.assertFileExists(target + os.sep + 'snippets/letsencrypt.conf')
+ self.assertFileExists(target + os.sep + 'sites-enabled/life.caribou')
+ self.assertFileExists(target + os.sep + 'sites-available/life.caribou')
+ target = target1 + os.sep + 'ssl'
+ self.assertFileExists(target + os.sep + 'private/wiki.hamatoma.de.key')
+ self.assertFileExists(target + os.sep + 'certs/wiki.hamatoma.de.pem')
+ dirTarget = self.tempDir('etc', 'unittest')
+ expected = '''# test passwd
+root:x:0:0:root:/root:/bin/bash
+daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin
+bin:x:2:2:bin:/bin:/usr/sbin/nologin
+wk:x:1003:1003:,,,:/home/wk:/bin/bash
+redis:x:129:136::/var/lib/redis:/usr/sbin/nologin
+bin2:x:502:502:bin:/bin:/usr/sbin/nologin
+user1:x:129:136::/var/lib/redis:/usr/sbin/nologin'''
+ current = base.StringUtils.fromFile(dirTarget + os.sep + 'passwd')
+ self.assertEquals(expected, current)
+
+ def testExampleRestoreLetsencryptOff(self):
+ if debug:
+ return
+ self._finish()
+ tempDir = self.tempDir('sites-enabled', 'unittest')
+ domain = 'www.example.de'
+ fn = tempDir + os.sep + domain
+ base.StringUtils.toFile(fn, '''# with
+ ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+ #ssl_certificate /etc/ssl/certs/www.example.de.pem;
+ #ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-target-dir={}'.format(tempDir),
+ 'reconfigure', 'letsencrypt', 'off', domain])
+ self.assertFileContent(fn, '''# with
+# ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+# ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+ ssl_certificate /etc/ssl/certs/www.example.de.pem;
+ ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+ def testExampleRestoreLetsencryptOn(self):
+ if debug:
+ return
+ self._finish()
+ tempDir = self.tempDir('sites-enabled', 'unittest')
+ domain = 'www.example.de'
+ fn = tempDir + os.sep + domain
+ base.StringUtils.toFile(fn, '''# with
+ ## ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+ ## ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+ ssl_certificate /etc/ssl/certs/www.example.de.pem;
+ ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-target-dir={}'.format(tempDir),
+ 'reconfigure', 'letsencrypt', 'on', domain])
+ self.assertFileContent(fn, '''# with
+ ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+# ssl_certificate /etc/ssl/certs/www.example.de.pem;
+# ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+
+ def testExampleRestoreLetsencryptAll(self):
+ if debug:
+ return
+ self._finish()
+ tempDir = self.tempDir('sites-enabled', 'unittest')
+ domain = 'www.example.de'
+ fn = tempDir + os.sep + domain
+ base.StringUtils.toFile(fn, '''# with
+ ## ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+ ## ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+ ssl_certificate /etc/ssl/certs/www.example.de.pem;
+ ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+ appl.RestoreTool.main(['restoretool', '-v3',
+ '--test-target-dir={}'.format(tempDir),
+ 'reconfigure', 'letsencrypt', 'on', '--all'])
+ self.assertFileContent(fn, '''# with
+ ssl_certificate /etc/letsencrypt/live/latest/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/latest/privkey.pem;
+# ssl_certificate /etc/ssl/certs/www.example.de.pem;
+# ssl_certificate_key /etc/ssl/private/www.example.de.key;
+''')
+
+    def testLetsencryptAdapt(self):
+        '''Tests reconfigureLetsencryptAdapt(): the "latest" symlink in the
+        letsencrypt live directory must be moved to the newest cert directory.
+        '''
+        if debug:
+            return
+        live = self.tempDir('live', 'unittest')
+        huber1 = live + os.sep + 'huber.de.01'
+        self.ensureDirectory(huber1)
+        # ensure the second directory gets a younger modification time
+        time.sleep(1)
+        self.ensureDirectory(live + os.sep + 'huber.de.02')
+        latest = live + os.sep + 'latest'
+        self.ensureFileDoesNotExist(latest)
+        # initially the symlink points to the older directory
+        os.symlink('huber.de.01', latest)
+        options = self.getOptions()
+        tool = appl.RestoreTool.RestoreTool(options)
+        tool._globalOptions._testTargetDir = os.path.dirname(live)
+        tool.reconfigureLetsencryptAdapt()
+        # now the link must point to the newest directory
+        self.assertEquals('huber.de.02', os.readlink(latest))
+
+if __name__ == '__main__':
+    # allows running this test module stand-alone
+    tester = RestoreToolTest()
+    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import appl.SvgTool
+import base.BaseTool
+
+debug = False
+
+def usage(msg=None):
+    '''Returns a dummy usage message for the test ApplicationInfo.'''
+    return 'test usage'
+
+class SvgToolTest(UnitTestCase):
+
+ def getOptions(self):
+ appInfo = base.BaseTool.ApplicationInfo('svgtool', 'appl.SvgTool', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._logFiles.append('-')
+ return rc
+ def stringToFloatArray(self, text):
+ rc = []
+ for y in text.split(';'):
+ rc.append(float(y))
+ return rc
+ def floatArrayToString(self, data):
+ rc = ''
+ for y in data:
+ rc += str(y) + ';'
+ return rc[0:-1]
+
+ def makeSet(self, xValues, yValues):
+ rc = []
+ xCol = appl.SvgTool.Column('x', rc)
+ yCol = appl.SvgTool.Column('y', rc)
+ for x in xValues.split(';'):
+ # time or datetime values will be untouched:
+ xCol._values.append(x if x.find(':') >= 0 else int(x))
+ yCol._values = self.stringToFloatArray(yValues)
+ rc.append(xCol)
+ rc.append(yCol)
+ return rc
+
+ def makeTool(self, xValues, yValues):
+ options = self.getOptions()
+ tool = appl.SvgTool.SvgTool(options)
+ tool._columns = self.makeSet(xValues, yValues)
+ return tool
+
+ def dumpSet(self, array):
+ xValues = ''
+ yValues = ''
+ self.assertEquals(2, len(array))
+ self.assertEquals(len(array[0]._values), len(array[1]._values))
+ for ix in range(len(array[0]._values)):
+ xValues += ';{}'.format(array[0]._values[ix])
+ yValues += ';{}'.format(array[1]._values[ix])
+ return [xValues, yValues]
+
+ def testShrinkData(self):
+ if debug:
+ return
+ tool = self.makeTool('1;2;3;4;5;6;7;8;9;10;11;12;13;14;15', '-1;2;0;0;-2;3;5;5;5;-3;4;0;0;0;2')
+ array = tool.shrinkData(5)
+ [x,y] = self.dumpSet(array)
+ self.assertEquals(';-1.0;2.0;5.0;-3.0;2.0', y)
+ self.assertEquals(';1;4.5;8.0;11.5;15.0', x)
+ tool = self.makeTool('2019-02-03 00:11;2019.02.04T12:13:14', '-1;2')
+ fn = self.tempFile('data.csv', 'svgtest')
+ fn2 = self.tempFile('data.html', 'svgtest')
+ tool.putCsv(fn)
+ argv = [fn, fn2]
+ tool.diagram(argv, None)
+
+ def testNormalize(self):
+ if debug:
+ return
+ tool = self.makeTool('1;2;3;4;5;6;7;8;9;10', '10;20;10;20;90;30;20;30;20;100')
+ fn = self.tempFile('data.csv', 'svgtest')
+ tool.putCsv(fn)
+ fn2 = self.tempFile('data.html', 'svgtest')
+ tool.putCsv(fn)
+ argv = [fn, fn2]
+ tool.diagram(argv, None)
+
+ def testMinMax(self):
+ if debug:
+ return
+ tool = self.makeTool('1;12;13;14;15;16;17;18;19;26', '3;-12;-13;-14;-15;-16;-17;-18;-19;-26')
+ tool._columns[0].findMinMax(80, 1.1);
+ self.assertEquals(18, tool._columns[0]._max)
+ self.assertEquals(13, tool._columns[0]._min)
+ tool._columns[1].findMinMax(80, 1.1);
+ self.assertEquals(-13.0, tool._columns[1]._max)
+ self.assertEquals(-18.0, tool._columns[1]._min)
+
+ def testMinMaxSpreadFactor(self):
+ if debug:
+ return
+ tool = self.makeTool('1;2;3;4;5;6;7;8;9;11', '-11;-12;-13;-14;-15;-16;-17;-18;-19;-21')
+ tool._columns[0].findMinMax(80, 1.401);
+ self.assertEquals(8, tool._columns[0]._max)
+ self.assertEquals(1, tool._columns[0]._min)
+ tool._columns[1].findMinMax(80, 1.401);
+ self.assertEquals(-11.0, tool._columns[1]._max)
+ self.assertEquals(-18.0, tool._columns[1]._min)
+
+ def testDiagram(self):
+ if debug:
+ return
+ fnCsv = self.tempFile('sinus.csv')
+ fnHtml = self.tempFile('sinus.html')
+ self.ensureFileDoesNotExist(fnCsv)
+ self.ensureFileDoesNotExist(fnHtml)
+ appl.SvgTool.main(['svgtool', 'example'])
+ self.assertFileExists(fnCsv)
+ self.assertFileContains('x;sin', fnCsv)
+ self.assertFileContains('12.30726762886598;-0.2562135827507867;0.9666201942924666;-0.2650612766665054', fnCsv)
+ appl.SvgTool.main(['svgtool', '-v3', 'x-y-diagram', fnCsv, fnHtml, '--width=512'])
+ self.assertFileContains('x;sin', fnCsv)
+ self.assertFileContains('</html>', fnHtml)
+ self.assertFileContains('<line x1="12" y1="97" x2="18" y2="97" stroke="green" stroke-width="1" />', fnHtml)
+
+ def testConvertToMovingAverage(self):
+ if debug:
+ return
+ tool = self.makeTool('1', '10')
+
+ dataAsString = '10;30;50;160;200;300'
+ data = self.stringToFloatArray(dataAsString)
+ tool.convertToMovingAverage(data, 4)
+ current = self.floatArrayToString(data)
+ self.assertEquals('30.0;62.5;110.0;177.5;220.0;250.0', current)
+
+ dataAsString = '10;20;30;40;50;160;170;180;190;200;300'
+ data = self.stringToFloatArray(dataAsString)
+ tool.convertToMovingAverage(data, 3)
+ current = self.floatArrayToString(data)
+ self.assertEquals('15.0;20.0;30.0;40.0;83.33333333333333;126.66666666666667;170.0;180.0;190.0;230.0;250.0', current)
+
+ def testDiagramMovingAverage(self):
+ if debug:
+ return
+ fnCsv = self.tempFile('sinus.csv')
+ fnHtml = self.tempFile('sinus.html')
+ self.ensureFileDoesNotExist(fnCsv)
+ self.ensureFileDoesNotExist(fnHtml)
+ appl.SvgTool.main(['svgtool', 'example', '12'])
+ self.assertFileExists(fnCsv)
+ self.assertFileContains('x;sin', fnCsv)
+ self.assertFileContains('36.92180288659794;-0.7013637757662801;0.7128035171369929;-0.9839510593092174', fnCsv)
+ appl.SvgTool.main(['svgtool', '-v3', 'x-y-diagram', fnCsv, fnHtml, '--moving-average=201'])
+ self.assertFileContains('x;sin', fnCsv)
+ self.assertFileContains('</html>', fnHtml)
+ self.assertFileContains('<tr style="color: blue"><td>x:</td><td></td><td>0.00</td><td>37.62</td><td>485 Werte</td></tr>', fnHtml)
+ self.assertFileContains('<tr style="color: black"><td>sin(x):</td><td>0.000092</td><td>-0.215542</td><td>0.213731</td><td></td></tr>', fnHtml)
+ self.assertFileContains('<tr style="color: red"><td>cos(x):</td><td>-0.002358</td><td>-0.113892</td><td>0.132246</td><td></td></tr>', fnHtml)
+ self.assertFileContains('<tr style="color: green"><td>tan(x):</td><td>0.000221</td><td>-0.152102</td><td>0.150606</td><td></td></tr>', fnHtml)
+
+ def testConvertToMovingAverage2(self):
+ if False and debug:
+ return
+ tool = self.makeTool('1', '10')
+
+ dataAsString = '10;30;50;0;0;0;0;200;300'
+ data = self.stringToFloatArray(dataAsString)
+ tool.convertToMovingAverage(data, 3)
+ current = self.floatArrayToString(data)
+ self.assertEquals('20.0;30.0;26.666666666666668;16.666666666666668;0.0;0.0;66.66666666666667;166.66666666666666;250.0', current)
+
+if __name__ == '__main__':
+    # allows running this test module stand-alone
+    tester = SvgToolTest()
+    tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import os
+import shutil
+import re
+import time
+
+import appl.TextTool
+import base.BaseTool
+import base.StringUtils
+
+debug = False
+
+def usage(msg=None):
+    '''Returns a dummy usage message for the test ApplicationInfo.'''
+    return 'test usage'
+
+class TextToolTest(UnitTestCase):
+    def __init__(self):
+        '''Builds fresh test data: a temp directory with two text files and a
+        TextTool instance used by the grep/replace tests.
+        '''
+        UnitTestCase.__init__(self)
+        self._finish()
+        # NOTE(review): base.FileHelper is not imported at the top of this
+        # module — presumably it becomes available via base.BaseTool; verify.
+        base.FileHelper.clearDirectory(self.tempDir('unittest'))
+        self._tempDir = self.tempDir('texttool', 'unittest')
+        # first.txt: three lines, each containing the word "line"
+        self._fn = self._tempDir + os.sep + 'first.txt'
+        base.StringUtils.toFile(self._fn, "line 1\nline 2\nThis text is in line 3")
+        # second.txt: a single line without the word "line"
+        self._fn2 = self._tempDir + os.sep + 'second.txt'
+        base.StringUtils.toFile(self._fn2, "How are you")
+        self._tool = appl.TextTool.TextTool(self.getOptions())
+
+ def _finish(self):
+ shutil.rmtree(self.tempDir('unittest'))
+
+    def getOptions(self):
+        '''Returns GlobalOptions for the tool under test.
+
+        NOTE(review): the ApplicationInfo is named 'wiki'/'appl.WikiTool'
+        although this module tests TextTool — looks like a copy&paste
+        leftover; confirm whether the name matters for these tests.
+        '''
+        appInfo = base.BaseTool.ApplicationInfo('wiki', 'appl.WikiTool', usage)
+        rc = base.BaseTool.GlobalOptions(appInfo, 1)
+        rc._exampleText = 'log.file=/var/log/local/wikitool.log'
+        rc._logFiles.append('-')
+        # keep the test output quiet
+        rc._verboseLevel = 0
+        return rc
+
+ def testGrep(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '-v4',
+ 'grep',
+ 'line|How',
+ self.tempFile('*.txt', 'unittest')])
+ result = base.BaseTool.result()
+ self.assertEquals("{}: How are you\n{}: line 1\n{}: line 2\n{}: This text is in line 3".format(self._fn2, self._fn, self._fn, self._fn), result)
+
+ def testGrepMiddleOfTheLine(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '-v4',
+ 'grep',
+ 'ine|ow',
+ self.tempFile('*.txt', 'unittest')])
+ result = base.BaseTool.result()
+ self.assertEquals("{}: How are you\n{}: line 1\n{}: line 2\n{}: This text is in line 3".format(self._fn2, self._fn, self._fn, self._fn), result)
+
+ def testGrepExcludedFile(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '-v4',
+ 'grep',
+ 'How',
+ self.tempFile('*.txt', 'unittest'),
+ '--excluded=:f*'])
+ result = base.BaseTool.result()
+ self.assertEquals("{}: How are you".format(self._fn2), result)
+
+ def testGrepExcludedDir(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '-v4',
+ '--exit-with-0',
+ 'grep',
+ 'How',
+ self.tempFile('*.txt', 'unittest'),
+ '--excluded=:/tmp/*/unit*'])
+ result = base.BaseTool.result()
+ self.assertEquals('', result)
+
+ def testGrepLineNo(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ 'line',
+ self.tempFile('first*txt'),
+ '--line-no'])
+ result = base.BaseTool.result()
+ self.assertEquals("{}-1: line 1\n{}-2: line 2\n{}-3: This text is in line 3".format(self._fn, self._fn, self._fn), result)
+
+ def testGrep2(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'text\sis',
+ self.tempFile('first*txt')])
+ self.assertEquals("{}: This text is in line 3".format(self._fn, self._fn, self._fn), base.BaseTool.result())
+
+ def testGrepList(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'text\sis',
+ self.tempFile('first*txt'),
+ '--list'])
+ self.assertEquals(self._fn, base.BaseTool.result())
+
+ def testGrepOnlyMatching(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'text\sis',
+ self.tempFile('first*txt'),
+ '--only-matching'])
+ result = base.BaseTool.result()
+ self.assertEquals("{}: text is".format(self._fn, self._fn, self._fn), result)
+
+ def testGrepOnlyMatchingLineNo(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'text\sis',
+ self.tempFile('first*txt'),
+ '--line-no',
+ '--only-matching'])
+ result = base.BaseTool.result()
+ self.assertEquals("{}-3: text is".format(self._fn, self._fn, self._fn), result)
+
+ def testGrepFormat(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'(\d+)',
+ self.tempFile('first*txt'),
+ r'--format=!full!-!no!: $1'])
+ self.assertEquals("{}-1: 1\n{}-2: 2\n{}-3: 3".format(self._fn, self._fn, self._fn), base.BaseTool.result())
+
+ def testGrepReverse(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'line\s+',
+ self.tempFile('*.txt', 'unittest'),
+ r'--reverse'])
+ self.assertEquals("{}: How are you".format(self._fn2), base.BaseTool.result())
+
+ def testGrepMissing(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ 'grep',
+ r'line\s+',
+ self.tempFile('*.txt', 'unittest'),
+ '--missing'])
+ self.assertEquals(self._fn2, base.BaseTool.result())
+
+ # Checks 'replace' with a $1 back-reference, writing to a separate output file.
+ def testReplace(self):
+ if debug:
+ return
+ fnOut = self._tempDir + os.sep + 'first.out'
+ appl.TextTool.main(['texttool',
+ 'replace',
+ r'line (\d+)',
+ 'zeile $1$1',
+ self.tempFile('*.txt', 'unittest'),
+ fnOut])
+ replaced = base.StringUtils.fromFile(fnOut)
+ self.assertEquals('zeile 11\nzeile 22\nThis text is in zeile 33\n', replaced)
+
+ # Same replacement as testReplace, but operating on a copy given as the
+ # only file argument (source and target are the same file).
+ def testReplace2(self):
+ if debug:
+ return
+ fnSrc = self.tempFile('first.txt', 'unittest', 'texttool')
+ fnOut = self.tempFile('copy.txt', 'unittest')
+ shutil.copy2(fnSrc, fnOut)
+ appl.TextTool.main(['texttool', '-v4',
+ 'replace',
+ r'line (\d+)',
+ 'zeile $1$1',
+ fnOut])
+ replaced = base.StringUtils.fromFile(fnOut)
+ self.assertEquals('zeile 11\nzeile 22\nThis text is in zeile 33\n', replaced)
+ self.ensureFileDoesNotExist(fnOut)
+
+ # Checks 'replace --count=1': at most one substitution per line.
+ def testReplaceCount(self):
+ if debug:
+ return
+ fnOut = self._tempDir + os.sep + 'first.out'
+ appl.TextTool.main(['texttool',
+ 'replace',
+ r'(\s+)([il])',
+ '$1*$2$2',
+ self.tempFile('*.txt', 'unittest'),
+ fnOut,
+ '--count=1'])
+ replaced = base.StringUtils.fromFile(fnOut)
+ self.assertEquals('line 1\nline 2\nThis text *iis in line 3\n', replaced)
+
+ # Checks in-place 'replace' (no explicit output file) with --count=2.
+ def testReplaceInPlace(self):
+ if debug:
+ return
+ source = self._fn
+ fn = self._tempDir + os.sep + 'third'
+ shutil.copy(source, fn)
+ appl.TextTool.main(['texttool',
+ 'replace',
+ r'(\s+)(\S+)(\s+)',
+ '$1*$2*$3',
+ fn,
+ '--count=2'])
+ replaced = base.StringUtils.fromFile(fn)
+ self.assertEquals('line 1\nline 2\nThis *text* *is* in line 3\n', replaced)
+
+ # Checks 'replace' driven by a tab-separated table file (@<table> syntax):
+ # each table row is a search/replacement pair applied in order.
+ def testReplaceByTable(self):
+ if debug:
+ return
+ table = self._tempDir + os.sep + 'table.data'
+ fnOut = self._tempDir + os.sep + 'first.out'
+ base.StringUtils.toFile(table, """e\t33
+xt\tXT
+33 \tE-
+""")
+ appl.TextTool.main(['texttool',
+ 'replace',
+ '@' + table,
+ self.tempFile('f*txt'),
+ fnOut])
+ replaced = base.StringUtils.fromFile(fnOut)
+ self.assertEquals('linE-1\nlinE-2\nThis t33XT is in linE-3\n', replaced)
+
+ # Table-driven 'replace' combined with --count=1 (one hit per line).
+ def testReplaceByTableCount(self):
+ if debug:
+ return
+ table = self._tempDir + os.sep + 'table.data'
+ base.StringUtils.toFile(table, """e\t33
+xt\tXT
+ei\tI
+""")
+ fnOut = self._tempDir + os.sep + 'first.out'
+ appl.TextTool.main(['texttool',
+ 'replace',
+ '@' + table,
+ self.tempFile('f*txt'),
+ fnOut,
+ '--count=1'])
+ replaced = base.StringUtils.fromFile(fnOut)
+ self.assertEquals('lin33 1\nlin33 2\nThis t33XT is in line 3\n', replaced)
+
+ # Checks TextTool.buildTargetname(): placeholders !full! !path! !node!
+ # !fn! !ext! in _target are expanded from the given source filename.
+ def testBuildTargetname(self):
+ if debug:
+ return
+ tool = appl.TextTool.TextTool(self.getOptions())
+ tool._target = '!full!->!path!+!node! | !path!#!fn!+!ext!'
+ self.assertEquals('/home/bin/abc.def->/home/bin/+abc.def | /home/bin/#abc+.def', tool.buildTargetname('/home/bin/abc.def'))
+
+ # Checks 'replace' on direct input: a leading '=' marks the argument as
+ # literal text instead of a filename; the result goes to BaseTool.result().
+ def testReplaceDirectInput(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '--quiet',
+ 'replace',
+ '/',
+ r'\\',
+ '=/data/by/device.com\n/data/by/other',
+ '--count=1'])
+ replaced = base.BaseTool.result()
+ self.assertEquals('\\\\data/by/device.com\n\\\\data/by/other\n', replaced)
+
+ # Checks 'adapt-configuration' with a 'prefix=' variable source: values
+ # from texttool.conf (keys stripped of the 'php.' prefix) overwrite the
+ # matching keys in the target configuration file.
+ def testAdaptConfig(self):
+ if debug:
+ return
+ fnVars = self._tempDir + os.sep + 'texttool.conf'
+ base.StringUtils.toFile(fnVars, '''php.memory_limit = 2048M
+php.upload_max_filesize = 512M
+
+''')
+ fnConfig = self._tempDir + os.sep + 'config.conf'
+ base.StringUtils.toFile(fnConfig, '''#PHP-Configuration:
+memory_limit = 512M
+upload_max_filesize = 2M
+''')
+ appl.TextTool.main(['texttool',
+ '-v3',
+ '--configuration-directory=' + self._tempDir,
+ 'adapt-configuration',
+ fnConfig,
+ 'prefix=php.'])
+ current = base.StringUtils.fromFile(fnConfig)
+ self.assertEquals('''#PHP-Configuration:
+memory_limit=2048M
+upload_max_filesize=512M
+''', current)
+
+ # Checks 'adapt-configuration' with an explicit variable file: the target
+ # takes the variable file's value (including its original spacing/quotes).
+ def testAdaptConfigFile(self):
+ if debug:
+ return
+ fnVars = self._tempDir + os.sep + 'vars.txt'
+ base.StringUtils.toFile(fnVars, '''abc=123
+command.log = "/var/log/command.log"
+''')
+ fnConfig = self._tempDir + os.sep + 'config.conf'
+ base.StringUtils.toFile(fnConfig, '''#configuration:
+abc=4
+command.log='/var/log/dummy.log'
+''')
+ appl.TextTool.main(['texttool',
+ '-v3',
+ 'adapt-configuration',
+ fnConfig,
+ fnVars])
+ current = base.StringUtils.fromFile(fnConfig)
+ self.assertEquals('''#configuration:
+abc=123
+command.log = "/var/log/command.log"
+''', current)
+
+ # Checks 'replace-or-insert' over a wildcard pattern: a file containing the
+ # regex gets the line replaced, a file without it gets the line inserted
+ # below the --below-anchor line.
+ def testReplaceOrInsert(self):
+ if debug:
+ return
+ fnVars1 = self._tempDir + os.sep + 'vars1.txt'
+ fnVars2 = self._tempDir + os.sep + 'vars2.txt'
+ base.StringUtils.toFile(fnVars1, '''#configuration:
+abc=4
+command.log='/var/log/dummy.log'
+''')
+ base.StringUtils.toFile(fnVars2, '''# Intro
+command.log='/var/log/dummy.log'
+[files]
+blabla=4
+''')
+ appl.TextTool.main(['texttool',
+ '-v3',
+ 'replace-or-insert',
+ r'^abc\s*=',
+ 'abc=432',
+ fnVars1.replace('vars1.', 'v*.'),
+ r'--below-anchor=^\[files\]'])
+ current = base.StringUtils.fromFile(fnVars1)
+ self.assertEquals('''#configuration:
+abc=432
+command.log='/var/log/dummy.log'
+''', current)
+ current = base.StringUtils.fromFile(fnVars2)
+ self.assertEquals('''# Intro
+command.log='/var/log/dummy.log'
+[files]
+abc=432
+blabla=4
+''', current)
+
+ # Checks '--excluded': vars1.txt matches the exclusion pattern ';*1*' and
+ # must stay untouched; vars2.txt gets the line appended (no anchor given).
+ def testReplaceOrInsertExclude(self):
+ if debug:
+ return
+ fnVars1 = self._tempDir + os.sep + 'vars1.txt'
+ fnVars2 = os.path.dirname(self._tempDir) + os.sep + 'vars2.txt'
+ content = '''#configuration:
+abc=4
+command.log='/var/log/dummy.log'
+'''
+ base.StringUtils.toFile(fnVars1, content)
+ base.StringUtils.toFile(fnVars2, '''# Intro
+command.log='/var/log/dummy.log'
+[files]
+blabla=4
+''')
+ appl.TextTool.main(['texttool',
+ '-v3',
+ 'replace-or-insert',
+ r'^abc\s*=',
+ 'abc=432',
+ fnVars2.replace('vars2.', 'v*.'),
+ r'--excluded=;*1*'])
+ current = base.StringUtils.fromFile(fnVars1)
+ self.assertEquals(content, current)
+ current = base.StringUtils.fromFile(fnVars2)
+ self.assertEquals('''# Intro
+command.log='/var/log/dummy.log'
+[files]
+blabla=4
+abc=432
+''', current)
+
+ # Checks '--max-depth=0' (no recursion into subdirectories, so vars1.txt in
+ # the deeper tempdir is untouched) plus '--above-anchor' insertion.
+ def testReplaceOrInsertMaxDepth(self):
+ if debug:
+ return
+ fnVars1 = self._tempDir + os.sep + 'vars1.txt'
+ fnVars2 = os.path.dirname(self._tempDir) + os.sep + 'vars2.txt'
+ content = '''#configuration:
+abc=4
+command.log='/var/log/dummy.log'
+'''
+ base.StringUtils.toFile(fnVars1, content)
+ base.StringUtils.toFile(fnVars2, '''# Intro
+command.log='/var/log/dummy.log'
+[files]
+blabla=4
+''')
+ appl.TextTool.main(['texttool',
+ '-v3',
+ 'replace-or-insert',
+ r'^abc\s*=',
+ 'abc=432',
+ fnVars2.replace('vars2.', 'v*.'),
+ r'--max-depth=0',
+ '--above-anchor=^#'])
+ current = base.StringUtils.fromFile(fnVars1)
+ self.assertEquals(content, current)
+ current = base.StringUtils.fromFile(fnVars2)
+ self.assertEquals('''abc=432
+# Intro
+command.log='/var/log/dummy.log'
+[files]
+blabla=4
+''', current)
+
+ # Checks the 'random' sub-command: with a fixed --seed the pseudo-random
+ # output is reproducible.
+ def testRandom0(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool',
+ '-v3',
+ 'random',
+ '--seed=hiWorld'])
+ current = base.BaseTool.result()
+ self.assertEquals('388449311', current)
+
+ # Checks findRegion() on an /etc/network/interfaces-like buffer: the eth0
+ # stanza spans lines 3..7, and removeRegion() deletes exactly that span.
+ def testFindRegion(self):
+ if debug:
+ return
+ tool = appl.TextTool.TextTool(self.getOptions())
+ tool._lines = '''auto lo
+iface lo inet loopback
+
+auto eth0
+iface eth0 inet static
+ address
+
+auto eth1
+iface eth1 inet dhcp
+'''.split('\n')
+ start, end = tool.findRegion(r'^(auto|iface)\s+eth0', True, r'^(auto|iface)', False, None, False)
+ self.assertEquals(3, start)
+ self.assertEquals(7, end)
+ tool.removeRegion()
+ self.assertEquals('''auto lo
+iface lo inet loopback
+
+auto eth1
+iface eth1 inet dhcp
+''', '\n'.join(tool._lines))
+
+ # Same region detection as testFindRegion, but replaceRegion() swaps the
+ # eth0 stanza for a wlan0 stanza while the rest of the buffer is kept.
+ def testReplaceRegion(self):
+ if debug:
+ return
+ tool = appl.TextTool.TextTool(self.getOptions())
+ tool._lines = '''auto lo
+iface lo inet loopback
+
+auto eth0
+iface eth0 inet static
+ address
+
+auto eth1
+iface eth1 inet dhcp
+'''.split('\n')
+ start, end = tool.findRegion(r'^(auto|iface)\s+eth0', True, r'^(auto|iface)', False, None, False)
+ self.assertEquals(3, start)
+ self.assertEquals(7, end)
+ tool.replaceRegion('auto wlan0\niface wlan0 dhcp\n')
+ self.assertEquals('''auto lo
+iface lo inet loopback
+
+auto wlan0
+iface wlan0 dhcp
+
+auto eth1
+iface eth1 inet dhcp
+''', '\n'.join(tool._lines))
+
+ # Checks currentFind(): returns the index of the first line matching the
+ # compiled regex, or None when nothing matches.
+ def testCurrentFind(self):
+ if debug:
+ return
+ tool = appl.TextTool.TextTool(self.getOptions())
+ tool._lines = '''auto lo
+iface lo inet loopback
+
+auto eth0
+iface eth0 inet static
+ address
+
+auto eth1
+iface eth1 inet dhcp
+'''.split('\n')
+ self.assertEquals(3, tool.currentFind(re.compile(r'.*\d')))
+ self.assertNone(tool.currentFind(re.compile(r'neverever')))
+
+ # Checks currentFind() with an explicit [start, end) search range: the
+ # match at index 5 is found for any start <= 5, not outside the range.
+ def testCurrentFindRange(self):
+ if debug:
+ return
+ tool = appl.TextTool.TextTool(self.getOptions())
+ tool._lines = '''auto lo
+iface lo inet loopback
+
+auto eth0
+iface eth0 inet static
+ address 192.168.6.2
+
+auto eth1
+iface eth1 inet dhcp
+'''.split('\n')
+ for start in range(6):
+ self.assertEquals(5, tool.currentFind(re.compile(r'.*address'), start, 6))
+ self.assertNone(tool.currentFind(re.compile(r'.*address'), 7, 9))
+
+ # Smoke test: 'build-examples' must run without raising; the generated
+ # example files under /usr/share/pyrshell are used by the testExample* tests.
+ def testBuildExamples(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'build-examples'])
+
+ # Runs the documented 'adapt' example against the shipped example data and
+ # asserts that no errors were logged.
+ def testExampleAdapt1(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'adapt',
+ '/usr/share/pyrshell/examples/data/php.ini',
+ '/usr/share/pyrshell/examples/config/php_minimal.conf'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Same 'adapt' example using the 'prefix=php.' variable source.
+ def testExampleAdapt2(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'adapt',
+ '/usr/share/pyrshell/examples/data/php.ini',
+ 'prefix=php.'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep' with an --excluded file pattern; only checks error count.
+ def testExampleGrep1(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep',
+ r'\bopen\b.*rb',
+ '/usr/share/pyrshell/base/*.py',
+ '--excluded=:*file*:*text*'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep' with --line-no and a $1 --format placeholder.
+ def testExampleGrep2(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep',
+ r'total:\s+([0-9.]+)',
+ '/usr/share/pyrshell/examples/data/sum.txt',
+ '--line-no', '--format=Sum: $1'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep --missing --list': list the names of files without a hit.
+ def testExampleGrep3(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep',
+ 'StringUtils',
+ '/usr/share/pyrshell/net/*.py',
+ '--missing',
+ '--list'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep --only-matching': print only the matched part of a line.
+ def testExampleGrep4(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep',
+ r'^((\d: \w+)|(\s*inet\s+[0-9.]+))',
+ '/usr/share/pyrshell/examples/data/ip.out.txt',
+ '--only-matching'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep --reverse': print non-matching lines.
+ def testExampleGrep5(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep',
+ r'total:',
+ '/usr/share/pyrshell/examples/data/sum.txt',
+ '--reverse'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Example 'grep -i --no-name -v': case-insensitive inverted match without
+ # filename prefix; the remaining line must be the one address not matching.
+ def testExampleGrep6(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'grep', '-i', '--no-name', '-v',
+ r'jonny',
+ '/usr/share/pyrshell/examples/data/first.addr'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ lines = base.BaseTool.result()
+ self.assertEquals('info@whow.com', lines)
+
+ # Example 'python-to-c' conversion: translates a Python module to
+ # /tmp/threadlogger.ts and checks one translated statement appears there.
+ def testExamplePythonToC1(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'python-to-c',
+ '/usr/share/pyrshell/base/ThreadLogger.py',
+ '/tmp/threadlogger.ts'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContains(r"logger.log('Hi');", '/tmp/threadlogger.ts')
+
+ # Example 'replace' with a value built from the current date.
+ # NOTE(review): the format 'DATE=%d%m.%Y' has no dot between day and month
+ # ('%d%m' vs '%d.%m') -- presumably intentional test data, but verify.
+ def testExampleReplace1(self):
+ if debug:
+ return
+ timestring = time.strftime('DATE=%d%m.%Y')
+ appl.TextTool.main(['texttool', '-v3', 'replace',
+ r'DATE=\S+',
+ timestring,
+ '/usr/share/pyrshell/examples/data/today.sh'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContains(timestring, '/usr/share/pyrshell/examples/data/today.sh')
+
+ # Example 'replace' with a !path!/!node! target template, exclusions and
+ # --max-depth=0: output files appear only for non-excluded top-level files.
+ def testExampleReplace2(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'replace',
+ 'jonny@gmx.de',
+ 'mr.universe@gmx.de',
+ '/usr/share/pyrshell/examples/data/*.addr',
+ '!path!universe_!node!.out',
+ '--excluded=:shop.addr:test*',
+ '--max-depth=0'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContains('mr.universe@gmx.de', '/usr/share/pyrshell/examples/data/universe_first.addr.out')
+ self.assertFileContains('mr.universe@gmx.de', '/usr/share/pyrshell/examples/data/universe_second.addr.out')
+ self.assertFileNotExists('/usr/share/pyrshell/examples/data/universe_shop.addr.out')
+ self.assertFileNotExists('/usr/share/pyrshell/examples/data/universe_test.addr.out')
+ self.assertFileNotExists('/usr/share/pyrshell/examples/data/subdir/universe_third.addr.out')
+
+ # Example 'replace' on direct input ('=' prefix), result via BaseTool.result().
+ def testExampleReplace3(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'replace',
+ 'hot',
+ 'cool',
+ '=should be hot'])
+ self.assertEquals('should be cool', base.BaseTool.result())
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+
+ # Example table-driven 'replace' (@file) on the shipped CSV example.
+ def testExampleReplace4(self):
+ if debug:
+ return
+ appl.TextTool.main(['texttool', '-v3', 'replace',
+ '@/usr/share/pyrshell/examples/config/german.txt',
+ '/usr/share/pyrshell/examples/data/addr.csv'])
+ self.assertFileContains('Nr;Vorname;Nachname', '/usr/share/pyrshell/examples/data/addr.csv')
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+
+ # Checks currentInsertAnchored(): inserts a line above an anchor regex,
+ # appends at the end when the anchor is None, and honours an explicit
+ # (start, end) range returned by findRegion().
+ def testCurrentInsertAnchored(self):
+ if debug:
+ return
+ lines = '''# missing auto lo
+iface lo inet loopback'''.split('\n')
+ self._tool._lines = lines[:]
+ self._tool.currentInsertAnchored('auto lo', r'iface\s+lo', False)
+ self.assertEquals('auto lo', self._tool._lines[1])
+ self.assertEquals('iface lo inet loopback', self._tool._lines[2])
+ self._tool._lines = lines[:]
+ # anchor None: the new line is appended after the existing content
+ self._tool.currentInsertAnchored('auto eth0', None)
+ self.assertEquals('iface lo inet loopback', self._tool._lines[1])
+ self.assertEquals('auto eth0', self._tool._lines[2])
+
+ lines = '''# missing auto lo
+auto lo
+iface lo inet loopback
+auto eth0
+iface eth0 inet dhcp
+auto wlan0
+iface wlan0 inet dhcp'''.split('\n')
+ self._tool._lines = lines[:]
+ (start, end) = self._tool.findRegion(r'auto eth', True, 'iface eth', True)
+ self.assertEquals(3, start)
+ self.assertEquals(5, end)
+ self._tool.currentInsertAnchored('auto eth1\niface eth1 inet dhcp', r'wlan0', False, start, end)
+ self.assertEquals('''# missing auto lo
+auto lo
+iface lo inet loopback
+auto eth0
+iface eth0 inet dhcp
+auto eth1
+iface eth1 inet dhcp
+auto wlan0
+iface wlan0 inet dhcp''', '\n'.join(self._tool._lines))
+
+ # Checks currentReplace(): replaces a matching line, or inserts the
+ # replacement relative to an anchor when no line matches; also with an
+ # explicit line range.
+ def testCurrentReplace(self):
+ if debug:
+ return
+ lines = '''[log]
+file=test.txt
+[db]
+file=test.sql
+[opts]'''.split('\n')
+ self._tool._lines = lines[:]
+ self._tool.currentReplace(r'file=', 'file=example.txt', r'\[db\]', False)
+ self._tool.currentReplace(r'level=', 'level=3', r'\[db\]', False)
+ self.assertEquals('file=example.txt', self._tool._lines[1])
+ self.assertEquals('level=3', self._tool._lines[3])
+ self._tool._lines = lines[:]
+ self._tool.currentReplace(r'file=', 'file=addr.sql', None, False, 1, 4)
+ self._tool.currentReplace(r'user=', 'user=bob', r'\[db\]', False, 1, 5)
+ self.assertEquals('file=addr.sql', self._tool._lines[1])
+ self.assertEquals('user=bob', self._tool._lines[3])
+
+ # Checks currentSetLine(): rewrites matching lines, optionally limited by a
+ # maximum hit count and by a [start, end) line range.
+ def testCurrentSetLine(self):
+ if debug:
+ return
+ lines = '''[log]
+file=test.txt
+[db]
+file=test.sql
+[opts]
+file=data.conf'''.split('\n')
+ self._tool._lines = lines[:]
+ self._tool.currentSetLine(r'file=', 'file=example')
+ self.assertEquals('file=example', self._tool._lines[1])
+ self.assertEquals('file=test.sql', self._tool._lines[3])
+ self.assertEquals('file=data.conf', self._tool._lines[5])
+ self._tool._lines = lines[:]
+ self._tool.currentSetLine(r'file=', 'file=default', 2)
+ self.assertEquals('file=default', self._tool._lines[1])
+ self.assertEquals('file=default', self._tool._lines[3])
+ self.assertEquals('file=data.conf', self._tool._lines[5])
+ self._tool._lines = lines[:]
+ self._tool.currentSetLine(r'file=', 'file=default', 2, 2, 4)
+ self.assertEquals('file=test.txt', self._tool._lines[1])
+ self.assertEquals('file=default', self._tool._lines[3])
+ self.assertEquals('file=data.conf', self._tool._lines[5])
+ self._tool._lines = lines[:]
+ self._tool.currentSetLine(r'file=', 'file=default', 2, 2, 6)
+ self.assertEquals('file=test.txt', self._tool._lines[1])
+ self.assertEquals('file=default', self._tool._lines[3])
+ self.assertEquals('file=default', self._tool._lines[5])
+
+ # Checks the 'replace-region' sub-command with literal start/end markers
+ # and --start-excluded: only the content between <body> tags is replaced.
+ def testExampleReplaceRegion(self):
+ if debug:
+ return
+ # texttool replace-region '<body>' '</body>' '<p>no access!</p>' /usr/share/pyrshell/examples/index.html
+ temp = self.tempDir('replace', 'unittest')
+ trg = temp + os.sep + 'index.html'
+ base.StringUtils.toFile(trg, '<html>\n<body>\n<h1>Remember</h1>\n<p>forget it!</p>\n</body>\n</html>')
+ appl.TextTool.main(['texttool', '-v3',
+ 'replace-region',
+ '<body>',
+ '</body>',
+ '<p>no access!</p>',
+ trg,
+ '--start-excluded',
+ '--max-depth=0'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContent(trg, '''<html>
+<body>
+<p>no access!</p>
+</body>
+</html>''')
+
+ # Same as above with regex region markers and --end-included: the line
+ # matching the end pattern is part of the replaced region.
+ def testExampleReplaceRegion2(self):
+ if debug:
+ return
+ # texttool replace-region '<body>' '</body>' '<p>no access!</p>' /usr/share/pyrshell/examples/index.html
+ temp = self.tempDir('replace', 'unittest')
+ trg = temp + os.sep + 'index.html'
+ base.StringUtils.toFile(trg, '<html>\n<body>\n<h1>Remember</h1>\n<p>forget it!</p>\n</body>\n</html>')
+ appl.TextTool.main(['texttool', '-v4',
+ 'replace-region',
+ r'<h[1-9]>',
+ '.*</p>',
+ '<p>no access!</p>',
+ trg,
+ '--end-included'])
+ tool = base.BaseTool.latestTool()
+ self.assertEquals(0, tool._logger._errors)
+ self.assertFileContent(trg, '''<html>
+<body>
+<p>no access!</p>
+</body>
+</html>''')
+# Script entry point: run the whole TextToolTest suite via the project's
+# custom test runner (not the stdlib unittest runner).
+if __name__ == '__main__':
+ # import sys;sys.argv = ['', 'Test.testName']
+ tester = TextToolTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import appl.WikiTool
+import base.BaseTool
+
+debug = False
+
+def usage(msg=None):
+ return 'test usage'
+
+# Tests for appl.WikiTool.gitlabToMediaWiki(): conversion of GitLab-flavoured
+# markdown (emphasis, links, headlines, lists, tables, fenced code) into
+# MediaWiki markup.
+class WikiToolTest(UnitTestCase):
+
+ # Builds the GlobalOptions instance the WikiTool constructor expects.
+ def getOptions(self):
+ appInfo = base.BaseTool.ApplicationInfo('wiki', 'appl.WikiTool', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._exampleText='log.file=/var/log/local/wikitool.log'
+ rc._logFiles.append('-')
+ rc._verboseLevel = 0
+ return rc
+
+ # Helper: writes 'current' to a temp file, converts it with
+ # gitlabToMediaWiki() and returns the converted text.
+ def check(self, current):
+ fnInput = self.tempFile('inp', 'wikitest')
+ fnOutput = self.tempFile('out', 'wikitest')
+ base.StringUtils.toFile(fnInput, current)
+ tool = appl.WikiTool.WikiTool(self.getOptions())
+ tool.gitlabToMediaWiki([fnInput, fnOutput])
+ out = base.StringUtils.fromFile(fnOutput)
+ return out
+
+ # Bold (__x__ / **x**) -> '''x''', italics (_x_) -> ''x''.
+ def testInlineCode(self):
+ if debug:
+ return
+ line = 'one __fat__ and **other fat** thing in _italics_ or _in another way_'
+ out = self.check(line)
+ self.assertEquals("one '''fat''' and '''other fat''' thing in ''italics'' or ''in another way''", out)
+
+ # [text](target) -> [[target|text]].
+ def testLink(self):
+ if debug:
+ return
+ line = '[abc def](Link) [Noch ein Text](Bedienungsanleitung)'
+ out = self.check(line)
+ self.assertEquals('[[Link|abc def]] [[Bedienungsanleitung|Noch ein Text]]', out)
+
+ # '#'-headlines -> '= ... =' style with matching depth.
+ def testHeadline(self):
+ if debug:
+ return
+ line = '#Haupt\n## Unter-1\n### Unter-2'
+ out = self.check(line)
+ self.assertEquals('= Haupt =\n== Unter-1 ==\n=== Unter-2 ===', out)
+
+ # Indented '-'/'+' list items -> '*'/'**' bullet depth.
+ def testUnorderedList(self):
+ if debug:
+ return
+ line = ' - erstens\n + 1.1\n - zweitens'
+ out = self.check(line)
+ self.assertEquals('* erstens\n** 1.1\n* zweitens', out)
+
+ # Pipe tables -> MediaWiki {| ... |} table markup; the first row becomes
+ # header cells ('!').
+ def testTable(self):
+ if debug:
+ return
+ line = """Tabelle
+| Id | Name |
+| 1 | Huber |
+| 2 | Maier |"""
+ out = self.check(line)
+ self.assertEquals("""Tabelle
+{|
+! Id
+! Name
+|-
+| 1
+| Huber
+|-
+| 2
+| Maier
+|}""", out)
+
+ # Fenced ```bash blocks -> <syntaxhighlight> sections; markup inside the
+ # fence is left untranslated.
+ # NOTE(review): 'if False and debug' disables the debug short-circuit for
+ # this one test -- looks like a leftover debugging override; confirm.
+ def testScript(self):
+ if False and debug:
+ return
+ line = """Script
+```bash
+FN=abc
+echo "nicht **fett**"
+```"""
+ out = self.check(line)
+ self.assertEquals("""Script
+<syntaxhighlight lang="bash" "line='line'>
+FN=abc
+echo "nicht **fett**"
+</syntaxhighlight>""", out)
+
+# Script entry point: run the WikiToolTest suite with the custom runner.
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = WikiToolTest()
+ tester.run()
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
+
+import unittest.base.MemoryLoggerTest
+import unittest.base.ThreadLoggerTest
+import unittest.base.LoggerTest
+import unittest.base.StringUtilsTest
+import unittest.base.LinuxUtilsTest
+
+# Aggregates results of several test-case classes: counts executed modules
+# and sums their error/assert counters.
+class Runner:
+ def __init__(self):
+ self._modules = 0
+ self._errors = 0
+ self._asserts = 0
+
+ # Instantiates the given test-case class, runs it and accumulates its
+ # _errors/_asserts counters.
+ def run(self, clazz):
+ tester = clazz()
+ tester.run()
+ self._modules += 1
+ self._errors += tester._errors
+ self._asserts += tester._asserts
+
+# Runs all base-layer test suites and prints a one-line summary.
+def main():
+ runner = Runner()
+ runner.run(unittest.base.LoggerTest.LoggerTest)
+ runner.run(unittest.base.MemoryLoggerTest.MemoryLoggerTest)
+ runner.run(unittest.base.ThreadLoggerTest.ThreadLoggerTest)
+ runner.run(unittest.base.StringUtilsTest.StringUtilsTest)
+ runner.run(unittest.base.LinuxUtilsTest.LinuxUtilsTest)
+
+ print('= BaseTester: {:d} modules with {:d} asserts and {:d} error(s)'.format(runner._modules, runner._asserts, runner._errors))
+if __name__ == '__main__':
+ main()
\ No newline at end of file
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.CryptoEngine
+import base.MemoryLogger
+import os.path
+import base64
+
+debug = False
+
+# Tests for base.CryptoEngine: encode/decode round trips over the engine's
+# character sets, binary encoding, one-time pads and seed handling.
+class CryptoEngineTest(UnitTestCase):
+
+ # Smoke test: the engine can be constructed with a MemoryLogger.
+ def testBasic(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ #self.log('random: ' + engine.nextString(60, 'ascii95'))
+
+ # encode()/decode() round trip for one value of the 'word' charset;
+ # no log messages may be produced.
+ def testEncode(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ text = 'Hi_World'
+ encoded = engine.encode(text, 'word')
+ self.log('=' + encoded)
+ decoded = engine.decode(encoded, 'word')
+ self.assertEquals(text, decoded)
+ self.assertEquals(0, len(logger.getMessages()))
+
+ # Round trip over every charset, once with random strings and once with
+ # the complete charset itself.
+ def testDecode(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ for aSet in engine.getCharSetNames():
+ text = engine.nextString(20, aSet)
+ encoded = engine.encode(text, aSet)
+ decoded = engine.decode(encoded, aSet)
+ self.assertEquals(text, decoded)
+ self.assertEquals(0, len(logger.getMessages()))
+ for aSet in engine.getCharSetNames():
+ text = engine.getCharSet(aSet)
+ encoded = engine.encode(text, aSet)
+ decoded = engine.decode(encoded, aSet)
+ self.assertEquals(text, decoded)
+ self.assertEquals(0, len(logger.getMessages()))
+
+ # Helper: builds a pseudo-random string of chars in range 1..127.
+ def buildBinary(self, length):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ engine.setSeedRandomly()
+ rc = ''
+ for ix in range(length):
+ rc += chr(engine.nextInt(127, 1))
+ return rc
+
+ # encodeBinary()/decodeBinary() round trip for lengths 2, 3 and 4
+ # (covers the padding cases of the block encoding).
+ def testEncodeBinaryBase(self):
+ if debug:
+ return
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ text = '12'
+ encoded = engine.encodeBinary(text)
+ decoded = engine.decodeBinary(encoded)
+ self.assertEquals(text, decoded)
+ text = '123'
+ encoded = engine.encodeBinary(text)
+ decoded = engine.decodeBinary(encoded)
+ self.assertEquals(text, decoded)
+ text = '1235'
+ encoded = engine.encodeBinary(text)
+ decoded = engine.decodeBinary(encoded)
+ self.assertEquals(text, decoded)
+
+ # NOTE(review): 'if self.assertTrue(False):' both logs a failed assert and
+ # skips the body -- the loop below never runs. Looks like a deliberately
+ # parked test; confirm whether it should be re-enabled or removed.
+ def testEncodeBinary(self):
+ if debug:
+ return
+ if self.assertTrue(False):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ for length in range(20, 256):
+ text = self.buildBinary(length)
+ print(length)
+ encoded = engine.encodeBinary(text)
+ try:
+ decoded = engine.decodeBinary(encoded)
+ except Exception as exc:
+ self.assertEquals('', str(exc))
+ break
+ self.assertEquals(text, decoded)
+ self.assertEquals(0, len(logger.getMessages()))
+
+ # testCharSet() returns -1 for a clean charset and the index of the first
+ # foreign character otherwise.
+ def testTestCharSet(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ for name in engine.getCharSetNames():
+ aSet = engine.getCharSet(name)
+ self.assertEquals(-1, engine.testCharSet(aSet, name))
+ aSet += "\t"
+ self.assertEquals(len(aSet) - 1, engine.testCharSet(aSet, name))
+
+ # oneTimePad()/unpackOneTimePad() round trip for 99 users; the pads are
+ # also written to /tmp/otp_request.txt for the extern test below.
+ def testOneTimePad(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ fn = '/tmp/otp_request.txt'
+ with open(fn, 'w') as fp:
+ for user in range(1, 100):
+ data = 'X{:04x}y'.format(user)
+ pad = engine.oneTimePad(user, data)
+ padData = engine.unpackOneTimePad(pad)
+ self.assertEquals(user, padData[1])
+ self.assertEquals(data, padData[2])
+ fp.write('{:d}\t{:s}\t{:s}'.format(user, data, pad))
+
+ # NOTE(review): the guard '"x"+"y" == "xy"' is always true, so this test
+ # always returns immediately -- permanently disabled; confirm intent.
+ def testExternOneTimePad(self):
+ if "x"+"y" == "xy":
+ return
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ fn = '/tmp/otp.txt'
+ if self.assertTrue(os.path.exists(fn)):
+ with open(fn, 'r') as fp:
+ for line in fp:
+ [user, data, pad] = line.rstrip().split("\t")
+ padData = engine.unpackOneTimePad(pad, 3600)
+ self.assertEquals(int(user), padData[1])
+ self.assertEquals(data, padData[2])
+
+ # setSeedFromString() must yield a reproducible pseudo-random sequence
+ # for a given seed string.
+ def testSetSeedFromString(self):
+ if debug:
+ return
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ engine.setSeedFromString('')
+ self.assertEquals(231702727, engine.nextInt())
+ engine.setSeedFromString('x')
+ self.assertEquals(1157398379, engine.nextInt())
+ engine.setSeedFromString('blubber')
+ self.assertEquals(604275342, engine.nextInt())
+
+ # saveSeed()/restoreSeed() must reproduce the same random string.
+ def testSaveRestore(self):
+ if debug:
+ return
+ logger = base.MemoryLogger.MemoryLogger()
+ engine = base.CryptoEngine.CryptoEngine(logger)
+ engine.setSeedFromString('')
+ seed1 = engine.saveSeed()
+ value1 = engine.nextString(10, 'ascii94')
+ engine.restoreSeed(seed1)
+ value2 = engine.nextString(10, 'ascii94')
+ self.assertEquals(value1, value2)
+
+ # NOTE(review): the body of this base64 exploration is commented out with
+ # a string literal; only 'buffer' is assigned. Dead experimental code --
+ # candidate for removal.
+ def testBase64(self):
+ if False and debug:
+ return
+ buffer = b'x'
+ '''
+ for ix in range(256):
+ buffer = buffer[0:-1]
+ print("ix: " + str(ix))
+ encoded = base64.encodebytes(buffer)
+ decoded = base64.decodebytes(encoded)
+ if decoded != buffer:
+ print("Different: {:02x}".format(ix))
+ for ix2 in range(32, 128):
+ buffer += bytes(ix2)
+ encoded = base64.encodebytes(buffer)
+ decoded = base64.decodebytes(encoded)
+ if decoded != buffer:
+ print("Different: {:02x}, {:02x}".format(ix, ix2))
+ '''
+# Script entry point: run the CryptoEngineTest suite with the custom runner.
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = CryptoEngineTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import shutil
+import datetime
+import time
+import os.path
+
+import base.FileHelper
+import base.StringUtils
+
+debug = False
+
+def usage(msg=None):
+ return 'test usage'
+
+# Tests for base.FileHelper: filename splitting, tail, directory statistics,
+# path utilities and directory/file lifecycle helpers.
+class FileHelperTest(UnitTestCase):
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ # fixture: a three-line text file under <tmp>/unittest/filetool
+ self.tempDir('filetool', 'unittest')
+ self._fn = self.tempFile('first.txt', 'unittest', 'filetool')
+ base.StringUtils.toFile(self._fn, "line 1\nline 2\nThis file is in line 3")
+
+ # Cleanup hook of the custom framework: removes the fixture tree.
+ def _finish(self):
+ shutil.rmtree(self.tempDir('unittest'))
+
+ def getOptions(self):
+ rc = base.BaseTool.GlobalOptions()
+ rc._exampleFile = 'log.file=/var/log/local/filetool.log'
+ rc._logFiles.append('-')
+ rc._verboseLevel = 0
+ return rc
+
+ # Helper: asserts all five components of a splitFilename() result dict.
+ def checkPart(self, container, full, path, node, fn, ext):
+ self.assertEquals(path, container['path'])
+ self.assertEquals(full, container['full'])
+ self.assertEquals(node, container['node'])
+ self.assertEquals(fn, container['fn'])
+ self.assertEquals(ext, container['ext'])
+
+ # splitFilename(): path/node/fn/ext decomposition, including multi-dot
+ # names and dot-files (where '.config' is the whole node, no extension).
+ def testSplitFilename(self):
+ if debug:
+ return
+ container = base.FileHelper.splitFilename('/tmp/jonny.txt')
+ self.checkPart(container, '/tmp/jonny.txt', '/tmp/', 'jonny.txt', 'jonny', '.txt')
+ container = base.FileHelper.splitFilename('/home/authors/jonny.txt')
+ self.checkPart(container, '/home/authors/jonny.txt', '/home/authors/', 'jonny.txt', 'jonny', '.txt')
+ container = base.FileHelper.splitFilename('jonny.v2.txt')
+ self.checkPart(container, 'jonny.v2.txt', '', 'jonny.v2.txt', 'jonny.v2', '.txt')
+ container = base.FileHelper.splitFilename('.config')
+ self.checkPart(container, '.config', '', '.config', '.config', '')
+ # tail() with default count returns the last line of the fixture file.
+ def testTail(self):
+ if debug:
+ return
+ tail = base.FileHelper.tail(self._fn)
+ self.assertEquals(1, len(tail))
+ self.assertEquals('This file is in line 3', tail[0])
+
+ # tail() with count=2 and line numbering enabled prefixes '<no>: '.
+ def testTailNumbers(self):
+ if debug:
+ return
+ tail = base.FileHelper.tail(self._fn, 2, True)
+ self.assertEquals(2, len(tail))
+ asString = ''.join(tail)
+ self.assertEquals('2: line 2\n3: This file is in line 3', asString)
+
+ # directoryInfo() on /etc with a *.conf filter: counters must be positive
+ # and the youngest/largest top-5 lists filled.
+ def testDirectoryInfo(self):
+ if debug:
+ return
+ info = base.FileHelper.directoryInfo('/etc', r'.*\.conf')
+ self.assertTrue(info._fileCount > 0)
+ self.assertTrue(info._fileSizes > 0)
+ self.assertTrue(info._dirCount > 0)
+ self.assertTrue(info._ignoredDirs > 0)
+ # self.assertTrue(info._ignoredFiles > 0)
+ self.assertEquals(5, len(info._youngest))
+ self.assertEquals(5, len(info._largest))
+
+ # pathToNode(): path separators and ':' are flattened to '_'.
+ def testPathToNode(self):
+ if debug:
+ return
+ self.assertEquals('x__abc_def_x.txt', base.FileHelper.pathToNode('x:/abc/def/x.txt'))
+
+ # setModified(): accepts either a unix timestamp or a datetime and sets
+ # the file's mtime accordingly.
+ def testSetModified(self):
+ if debug:
+ return
+ fn = self.tempFile('test.txt', 'unittest')
+ base.StringUtils.toFile(fn, 'Hi')
+ yesterday = time.time() - 86400
+ januar = datetime.datetime(2016, 1, 2, 10, 22, 55)
+ januar2 = time.mktime(januar.timetuple())
+ base.FileHelper.setModified(fn, yesterday)
+ self.assertEquals(yesterday, os.path.getmtime(fn))
+ base.FileHelper.setModified(fn, None, januar)
+ self.assertEquals(januar2, os.path.getmtime(fn))
+
+ # distinctPaths(): two paths are 'distinct' when neither contains the
+ # other, with symlinks resolved -- exercised over a small tree of real
+ # directories and symlinks built below (see the ASCII sketch).
+ def testDistinctPaths(self):
+ if debug:
+ return
+ tempDir = self.tempDir('disticts', 'unittest')
+ self.clearDirectory(tempDir)
+ dir1 = tempDir + os.sep + 'abc'
+ dir2 = tempDir + os.sep + 'def'
+ dirLink = tempDir + os.sep + 'link'
+ dirChild = dir1 + os.sep + 'child'
+ dirChildInLink = dirLink + os.sep + 'childInLink'
+ dirLinkLink = dir1 + os.sep + 'linkLink'
+ self.ensureDirectory(dir1)
+ self.ensureDirectory(dir2)
+ self.ensureDirectory(dirChild)
+ os.symlink(dir2, dirLink)
+ os.symlink(dirChildInLink, dirLinkLink)
+ # base/abc
+ # base/abc/child
+ # base/abc/linkInLink -> def
+ # base/def
+ # base/link -> def
+ # base/def/childInLink
+ # base/def/linkLink -> def/childInLink
+ self.assertTrue(base.FileHelper.distinctPaths(dir1, dir2))
+ self.assertTrue(base.FileHelper.distinctPaths(dir2, dir1))
+ self.assertTrue(base.FileHelper.distinctPaths(dirChild, dir2))
+ self.assertTrue(base.FileHelper.distinctPaths(dir2, dirChild))
+ self.assertTrue(base.FileHelper.distinctPaths(dir1, dirLink))
+ self.assertTrue(base.FileHelper.distinctPaths(dirLink, dir1))
+
+ self.assertFalse(base.FileHelper.distinctPaths(dirChild, dir1))
+ self.assertFalse(base.FileHelper.distinctPaths(dir1, dirChild))
+ self.assertFalse(base.FileHelper.distinctPaths(dir2, dirLink))
+ self.assertFalse(base.FileHelper.distinctPaths(dirLink, dir2))
+ self.assertFalse(base.FileHelper.distinctPaths(dir2, dirChildInLink))
+ self.assertFalse(base.FileHelper.distinctPaths(dirChildInLink, dir2))
+ self.assertFalse(base.FileHelper.distinctPaths(dir2, dirLinkLink))
+ self.assertFalse(base.FileHelper.distinctPaths(dirLinkLink, dir2))
+ self.assertFalse(base.FileHelper.distinctPaths(dirChildInLink, dirLinkLink))
+ self.assertFalse(base.FileHelper.distinctPaths(dirLinkLink, dirChildInLink))
+ self.assertFalse(base.FileHelper.distinctPaths(dirLinkLink, dir2))
+ self.assertFalse(base.FileHelper.distinctPaths(dir2, dirLinkLink))
+
+ # fromBytes(): decodes ascii/utf-8/latin-1 correctly; cp850 and arbitrary
+ # byte runs are expected NOT to round-trip to the utf-8 umlaut string.
+ def testFromBytes(self):
+ if False and debug:
+ return
+ self.assertEquals('ascii', base.FileHelper.fromBytes(b'ascii'))
+ self.assertEquals('äöüÖÄÜß', base.FileHelper.fromBytes('äöüÖÄÜß'.encode('utf_8')))
+ line = 'äöüÖÄÜß'.encode('latin-1')
+ self.assertEquals('äöüÖÄÜß', base.FileHelper.fromBytes(line))
+ line = 'äöüÖÄÜß'.encode('cp850')
+ self.assertFalse('äöüÖÄÜß' == base.FileHelper.fromBytes(line))
+ line = b''
+ hex = ''
+ for ix in range(1, 255):
+ hex += "{:02x}".format(ix)
+ line = bytes.fromhex(hex)
+ self.assertFalse('äöüÖÄÜß' == base.FileHelper.fromBytes(line))
+
+ # joinPaths(): resolves '..' segments of a relative path against a base
+ # path; '..' with base None is resolved against the current directory.
+ def testJoinPaths(self):
+ self.assertEquals('family/sister', base.FileHelper.joinPaths('../sister', 'family/brother', self._logger))
+ self.assertEquals('/a/family/sister', base.FileHelper.joinPaths('../../sister', '/a/family/brother/b', self._logger))
+ self.assertEquals('sister', base.FileHelper.joinPaths('../sister', 'brother', self._logger))
+ current = os.curdir
+ self.assertEquals(os.path.dirname(current), base.FileHelper.joinPaths('..', None, self._logger))
+
+ # joinPaths() error cases ('./', absolute, too many '..') return None
+ # and log an error (announced with the '= expecting error' marker).
+ def testJoinPathsErrors(self):
+ self.log('= expecting error')
+ self.assertNone(base.FileHelper.joinPaths('./sister', 'brother', self._logger))
+ self.assertNone(base.FileHelper.joinPaths('/sister', 'brother', self._logger))
+ self.assertNone(base.FileHelper.joinPaths('../../sister', 'brother', self._logger))
+
+ # ensureDirectory(): must yield an existing directory in every starting
+ # state -- already present, missing, occupied by a plain file, or occupied
+ # by a dangling symlink; each case with and without a logger.
+ def testEnsureDir(self):
+ temp = self.tempDir('dir1', 'unittest')
+ # already exists
+ base.FileHelper.ensureDirectory(temp, self._logger)
+ self.assertTrue(os.path.isdir(temp))
+ # does not exist with logger
+ self.ensureFileDoesNotExist(temp)
+ base.FileHelper.ensureDirectory(temp, self._logger)
+ self.assertTrue(os.path.isdir(temp))
+ # does not exist without logger
+ self.ensureFileDoesNotExist(temp)
+ base.FileHelper.ensureDirectory(temp)
+ self.assertTrue(os.path.isdir(temp))
+ # file exists, with logger
+ self.ensureFileDoesNotExist(temp)
+ base.StringUtils.toFile(temp, 'anything')
+ base.FileHelper.ensureDirectory(temp)
+ self.assertTrue(os.path.isdir(temp))
+ # file exists, with logger
+ self.ensureFileDoesNotExist(temp)
+ base.StringUtils.toFile(temp, 'anything')
+ base.FileHelper.ensureDirectory(temp, self._logger)
+ self.assertTrue(os.path.isdir(temp))
+ # invalid link, with logger
+ self.ensureFileDoesNotExist(temp)
+ os.symlink('../does-not-exist', temp)
+ base.FileHelper.ensureDirectory(temp, self._logger)
+ self.assertTrue(os.path.isdir(temp))
+ # invalid link, without logger
+ self.ensureFileDoesNotExist(temp)
+ os.symlink('../does-not-exist2', temp)
+ base.FileHelper.ensureDirectory(temp, self._logger)
+ self.assertTrue(os.path.isdir(temp))
+
+ def testEnsureFileDoesNotExist(self):
+ temp = self.tempDir('file', 'unittest')
+ # directory exists
+ base.FileHelper.ensureFileDoesNotExist(temp, self._logger)
+ self.assertFalse(os.path.exists(temp))
+ # does not exists:
+ base.FileHelper.ensureFileDoesNotExist(temp, self._logger)
+ self.assertFalse(os.path.exists(temp))
+ base.FileHelper.ensureFileDoesNotExist(temp)
+ self.assertFalse(os.path.exists(temp))
+ # file exists
+ base.StringUtils.toFile(temp, 'x')
+ base.FileHelper.ensureFileDoesNotExist(temp, self._logger)
+ self.assertFalse(os.path.exists(temp))
+ base.StringUtils.toFile(temp, 'x')
+ base.FileHelper.ensureFileDoesNotExist(temp)
+ self.assertFalse(os.path.exists(temp))
+ # invalid link exists
+ os.symlink('../invalid-link-source', temp)
+ base.FileHelper.ensureFileDoesNotExist(temp, self._logger)
+ self.assertFalse(os.path.exists(temp))
+ os.symlink('../invalid-link-source', temp)
+ base.FileHelper.ensureFileDoesNotExist(temp)
+ self.assertFalse(os.path.exists(temp))
+
+ def testEnsureSymbolicLink(self):
+ tempDir = self.tempDir('jail', 'unittest')
+ target = tempDir + os.sep + 'parent'
+ # creating base dir and target:
+ self.ensureFileDoesNotExist(tempDir)
+ self.tempDir('sibling', 'unittest')
+ base.FileHelper.ensureSymbolicLink('../../sibling', target)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling', os.readlink(target))
+ # changing link source:
+ self.tempDir('sibling2', 'unittest')
+ base.FileHelper.ensureSymbolicLink('../../sibling2', target, True, self._logger)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling2', os.readlink(target))
+ # removing existing target:
+ self.ensureFileDoesNotExist(target)
+ base.StringUtils.toFile(target, 'anything')
+ base.FileHelper.ensureSymbolicLink('../../sibling2', target, True, self._logger)
+ self.assertTrue(os.path.islink(target))
+ self.assertEquals('../../sibling2', os.readlink(target))
+
+ def testEnsureSymbolicLinkErrors(self):
+ tempDir = self.tempDir('jail', 'unittest')
+ target = tempDir + os.sep + 'parent'
+ self.ensureDirectory(target)
+ # creating base dir and target:
+ self.ensureFileDoesNotExist(tempDir)
+ self.tempDir('sibling', 'unittest')
+ self._logger.log('= expecting error is directory')
+ base.FileHelper.ensureSymbolicLink('../../sibling', target, True, self._logger)
+ self.assertFalse(os.path.exists(target))
+ # must not create parent:
+ self._logger.log('= expecting error missing parent')
+ self.ensureFileDoesNotExist(os.path.dirname(target))
+ base.FileHelper.ensureSymbolicLink('../../sibling', target, False, self._logger)
+ self.assertFalse(os.path.exists(target))
+
+ def testFileClass(self):
+ baseDir = '/usr/share/pyrshell/unittest/data/'
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.zip')
+ self.assertEquals('container', aClass)
+ self.assertEquals('zip', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.tar')
+ self.assertEquals('container', aClass)
+ self.assertEquals('tar', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.tgz')
+ self.assertEquals('container', aClass)
+ self.assertEquals('tar', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.tbz')
+ self.assertEquals('container', aClass)
+ self.assertEquals('tar', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.html')
+ self.assertEquals('text', aClass)
+ self.assertEquals('xml', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.sh')
+ self.assertEquals('text', aClass)
+ self.assertEquals('shell', subClass)
+ aClass, subClass = base.FileHelper.fileClass(baseDir + 'example.txt')
+ self.assertEquals('text', aClass)
+ self.assertEquals('text', subClass)
+
+ def testEnsureFileExists(self):
+ fn = self.tempFile('should.exist.txt', 'unittest')
+ base.FileHelper.ensureFileDoesNotExist(fn, self._logger)
+ base.FileHelper.ensureFileExists(fn, 'Hi world', self._logger)
+ self.assertFileContains('Hi world', fn)
+
+ def testEnsureFileExistsError(self):
+ fn = self.tempDir('blocking.dir', 'unittest')
+ self._logger.log('expectig error: blocking dir')
+ base.FileHelper.ensureFileExists(fn, 'Hi', self._logger)
+ self.assertDirExists(fn)
+
+ def testCopyDirectoryClear(self):
+ source = self.tempDir('src', 'unittest')
+ target = self.tempDir('trg', 'unittest')
+ base.StringUtils.toFile(source + '/hi.txt', 'Hi')
+ os.symlink('hi.txt', source + os.sep + 'hi.link.txt')
+ source2 = self.tempDir('src/dir1', 'unittest')
+ base.StringUtils.toFile(source2 + '/wow.txt', 'Wow')
+ if not os.path.exists(source2 + '/wow.symlink.txt'):
+ os.symlink('wow.txt', source2 + '/wow.symlink.txt')
+ base.FileHelper.copyDirectory(source, target, 'clear', self._logger, 3)
+ self.assertFileContains('Hi', target + '/hi.txt')
+ self.assertDirExists(target + '/dir1')
+ self.assertFileContains('Wow', target + '/dir1/wow.txt')
+ trg2 = target + '/dir1/wow.symlink.txt'
+ self.assertFileContains('Wow', trg2)
+ self.assertTrue(os.path.islink(trg2))
+ fn = target + os.sep + 'hi.link.txt'
+ self.assertFileExists(fn)
+ self.assertEquals('hi.txt', os.readlink(fn))
+
+ def testCopyDirectoryUpdate(self):
+ source = self.tempDir('src', 'unittest')
+ target = self.tempDir('trg', 'unittest')
+ base.StringUtils.toFile(source + '/hi.txt', 'Hi')
+ source2 = self.tempDir('src/dir1', 'unittest')
+ base.StringUtils.toFile(source2 + '/wow.txt', 'Wow')
+ base.FileHelper.copyDirectory(source, target, 'clear', self._logger, 3)
+ time.sleep(1)
+ base.StringUtils.toFile(source + '/hi.txt', 'hi!')
+ base.FileHelper.setModified(source + '/hi.txt', 365*24*3600)
+ base.StringUtils.toFile(source + '/hi2.txt', 'hi!')
+ base.StringUtils.toFile(source2 + '/wow2.txt', 'wow!')
+ base.FileHelper.setModified(source2 + '/wow2.txt', 365*24*3600)
+ base.FileHelper.copyDirectory(source, target, 'update', self._logger)
+ self.assertFileContains('Hi', target + '/hi.txt')
+ self.assertFileContains('hi!', target + '/hi2.txt')
+ self.assertDirExists(target + '/dir1')
+ self.assertFileContains('Wow', target + '/dir1/wow.txt')
+ self.assertFileContains('wow!', target + '/dir1/wow2.txt')
+
+ def testUnpackTgz(self):
+ target = self.tempDir('unittest')
+ fn = target + os.sep + 'dummy'
+ base.StringUtils.toFile(fn, '')
+ base.FileHelper.unpack('/usr/share/pyrshell/unittest/data/etc.work.tgz', target, self._logger, True)
+ self.assertFileNotExists(fn)
+ self.assertFileExists(target + '/etc/passwd')
+ self.assertFileExists(target + '/etc/nginx/sites-available/default')
+
+ def testUnpackZip(self):
+ target = self.tempDir('archive', 'unittest')
+ base.FileHelper.unpack('/usr/share/pyrshell/unittest/data/example.zip', target, self._logger, True)
+ self.assertFileExists(target + '/All.sh')
+
+ def testTempFile(self):
+ fn = base.FileHelper.tempFile('test.txt', 'unittest.2', self._logger)
+ parent = os.path.dirname(fn)
+ self.assertEquals('test.txt', os.path.basename(fn))
+ self.assertEquals('unittest.2', os.path.basename(parent))
+ self.assertFileExists(parent)
+ os.rmdir(parent)
+
+if __name__ == '__main__':
+ # import sys;sys.argv = ['', 'Test.testName']
+ tester = FileHelperTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import os
+from unittest.UnitTestCase import UnitTestCase
+import base.JavaConfig
+import base.StringUtils
+import base.MemoryLogger
+
+# Unit tests for base.JavaConfig: a Java-properties-style configuration
+# reader ('key=value' lines, '#' comments, blank lines ignored).
+class JavaConfigTest(UnitTestCase):
+
+ def testBasic(self):
+ # Keys are case-sensitive ('File' found, 'file' not); unknown keys yield None.
+ logger = base.MemoryLogger.MemoryLogger()
+ fn = self.tempFile('javaconf.conf')
+ base.StringUtils.toFile(fn, '# comment\nabc.def=/dev\n\t\n\tFile = /tmp/x')
+ config = base.JavaConfig.JavaConfig(fn, logger)
+ self.assertEquals('/dev', config.getString('abc.def'))
+ self.assertEquals('/tmp/x', config.getString('File'))
+ self.assertNone(config.getString('file'))
+ self.assertNone(config.getString('unknown'))
+ os.unlink(fn)
+
+ def testSyntaxError(self):
+ # Malformed lines are reported to the logger with file name and line number.
+ fn = self.tempFile('error.conf')
+ base.StringUtils.toFile(fn, '# comment\nabc.def:=/dev\n\t\n\tFile')
+ logger = base.MemoryLogger.MemoryLogger()
+ base.JavaConfig.JavaConfig(fn, logger)
+ self.assertTrue(logger.contains('error.conf line 2: unexpected syntax [expected: <var>=<value>]: abc.def:=/dev'))
+ self.assertTrue(logger.contains('error.conf line 4: unexpected syntax [expected: <var>=<value>]: File'))
+
+ def testIntVar(self):
+ # getInt(): parses integers, returns the default for unknown keys, and
+ # logs + returns the default for non-numeric values.
+ fn = self.tempFile('javaconf.conf')
+ base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
+ logger = base.MemoryLogger.MemoryLogger()
+ config = base.JavaConfig.JavaConfig(fn, logger)
+ self.assertEquals(123, config.getInt('number'))
+ self.assertEquals(456, config.getInt('unknown', 456))
+ self.assertEquals(111, config.getInt('Wrong', 111))
+ # deliberately matches a substring ('avaconf.conf') of the temp file name
+ self.assertTrue(logger.contains('avaconf.conf: variable Wrong is not an integer: zwo'))
+ os.unlink(fn)
+
+ def testGetKeys(self):
+ # getKeys() returns all keys; order shown here is 'Wrong' before 'number'
+ # (uppercase sorts before lowercase in ASCII ordering).
+ fn = self.tempFile('javaconf.conf')
+ base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
+ logger = base.MemoryLogger.MemoryLogger()
+ config = base.JavaConfig.JavaConfig(fn, logger)
+ keys = config.getKeys()
+ self.assertEquals(2, len(keys))
+ self.assertEquals('Wrong', keys[0])
+ self.assertEquals('number', keys[1])
+ os.unlink(fn)
+
+ def testGetKeysRegExpr(self):
+ # getKeys(pattern) filters the key list by a regular expression.
+ fn = self.tempFile('javaconf.conf')
+ base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
+ logger = base.MemoryLogger.MemoryLogger()
+ config = base.JavaConfig.JavaConfig(fn, logger)
+ keys = config.getKeys(r'number|int')
+ self.assertEquals(1, len(keys))
+ self.assertEquals('number', keys[0])
+ os.unlink(fn)
+
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = JavaConfigTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.LinuxUtils
+
+# Unit tests for base.LinuxUtils: system-introspection helpers (disk usage,
+# logged-in users, load average, memory, mdadm RAID status, I/O stress).
+# Most tests only sanity-check shapes/types because real values vary per host.
+class LinuxUtilsTest(UnitTestCase):
+
+ def testDiskFree(self):
+ # diskFree() returns tuples (mountpoint, total, used?, free?) — exact
+ # column semantics not visible here; the test only checks total >= others.
+ infos = base.LinuxUtils.diskFree()
+ # NOTE(review): 'x' is unused — looks like leftover debugging aid
+ x = self.__dict__
+ self.assertTrue(len(infos) >= 1)
+ for info in infos:
+ # an unexpected mountpoint makes this assertEquals fail with a readable message
+ if info[0] not in ['/', '/opt', '/work', '/home'] and not info[0].startswith('/media') and info[0].find('jail') < 0:
+ self.assertEquals('valid path', info[0])
+ self.assertTrue(type(info[1]) == int)
+ self.assertTrue(type(info[2]) == int)
+ self.assertTrue(type(info[3]) == int)
+ self.assertTrue(info[1] >= info[2])
+ self.assertTrue(info[1] >= info[3])
+
+ def testUsers(self):
+ # users() entries: (name, terminal/display, login time 'HH:MM').
+ infos = base.LinuxUtils.users()
+ self.assertTrue(len(infos) >= 1)
+ for info in infos:
+ self.assertMatches(r'[\w]+', info[0])
+ self.assertMatches(r'(:?\d+)(\.\d+)?|([\d.]+)', info[1])
+ self.assertMatches(r'[0-2]?\d+:[0-5]\d', info[2])
+
+ def testLoad(self):
+ # load(): three float load averages plus two ints (running < total processes).
+ info = base.LinuxUtils.load()
+ self.assertEquals(5, len(info))
+ for ix in range(3):
+ self.assertTrue(type(info[ix]) == float)
+ self.assertTrue(type(info[3]) == int)
+ self.assertTrue(type(info[4]) == int)
+ self.assertTrue(int(info[3]) < int(info[4]))
+
+ def testMemoryInfo(self):
+ info = base.LinuxUtils.memoryInfo()
+ self.assertEquals(5, len(info))
+ # TOTAL_RAM, AVAILABLE_RAM, TOTAL_SWAP, FREE_SWAP, BUFFERS
+ for ix in range(len(info)):
+ self.assertTrue(type(info[ix]) == int)
+ self.assertTrue(info[0] >= info[1])
+ self.assertTrue(info[0] >= info[2])
+ self.assertTrue(info[2] >= info[3])
+
+ def checkMdadm(self, name, aType, members, blocks, status, info):
+ # Helper: compares one parsed mdadm entry (name, raid type, member list,
+ # block count, status) against expectations.
+ self.assertEquals(name, info[0])
+ self.assertEquals(aType, info[1])
+ self.assertEquals(members, info[2])
+ self.assertEquals(blocks, info[3])
+ self.assertEquals(status, info[4])
+
+ def testMdadmInfo(self):
+ # mdadmInfo() parses a /proc/mdstat-format file; '[UU]' means status 'OK'.
+ fn = self.tempFile('mdadm.info')
+ with open(fn, "w") as fp:
+ fp.write('''Personalities : [raid1]
+md2 : active raid1 sdc1[0] sdd1[1]
+ 1953378368 blocks super 1.2 [2/2] [UU]
+ bitmap: 0/15 pages [0KB], 65536KB chunk
+
+md1 : active raid1 sda2[0] sdb2[1]
+ 508523520 blocks super 1.2 [2/2] [UU]
+ bitmap: 2/4 pages [8KB], 65536KB chunk
+
+md0 : active raid1 sda1[0] sdb1[1]
+ 242496 blocks super 1.2 [2/2] [UU]
+''')
+ infos = base.LinuxUtils.mdadmInfo(fn)
+ self.assertEquals(3, len(infos))
+ self.checkMdadm('md2', 'raid1', 'sdc1[0] sdd1[1]', 1953378368, 'OK', infos[0])
+ self.checkMdadm('md1', 'raid1', 'sda2[0] sdb2[1]', 508523520, 'OK', infos[1])
+ self.checkMdadm('md0', 'raid1', 'sda1[0] sdb1[1]', 242496, 'OK', infos[2])
+
+ def testMdadmInfoBroken(self):
+ # A degraded array ('[U_]', failed member '(F)') must report status 'broken'.
+ fn = self.tempFile('mdadm.info')
+ with open(fn, "w") as fp:
+ fp.write('''Personalities : [raid1]
+md1 : active raid1 hda14[0] sda11[2](F)
+ 2803200 blocks [2/1] [U_]''')
+ infos = base.LinuxUtils.mdadmInfo(fn)
+ self.assertEquals(1, len(infos))
+ self.checkMdadm('md1', 'raid1', 'hda14[0] sda11[2](F)', 2803200, 'broken', infos[0])
+
+ def testStress(self):
+ # stress(diskPattern, netPattern) returns 7 metrics; patterns are
+ # host-specific device names — environment-bound test.
+ info = base.LinuxUtils.stress(r'^(sda|nvme0n1)$', r'^(enp2s0|wlp4s0)$')
+ self.assertEquals(7, len(info))
+
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = LinuxUtilsTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+from base.Logger import Logger
+import os
+import re
+# import from base.Logger Logger
+
+# Unit tests for base.Logger: file logging with '+++ ' error prefix, error
+# bookkeeping (_firstErrors/_errors), error filters and mirror loggers.
+class LoggerTest(UnitTestCase):
+
+ def testLogger(self):
+ # Basic log/error/debug all land in the file; errors are counted and the
+ # first error text is remembered.
+ logFile = '/tmp/logger.log'
+ if os.path.isfile(logFile):
+ os.remove(logFile)
+ logger = Logger(logFile, True)
+ logger.log('Hi world')
+ logger.error('an expected error')
+ logger.debug('debug message')
+ self.assertFileContains('Hi world', logFile)
+ self.assertFileContains('+++ an expected error', logFile)
+ self.assertFileContains('debug message', logFile)
+ self.assertEquals('an expected error', logger._firstErrors[0])
+ self.assertEquals(1, logger._errors)
+
+ def testTextFilter(self):
+ # setErrorFilter(str): errors containing the plain substring are suppressed
+ # (not written, not counted).
+ logFile = '/tmp/logger.log'
+ if os.path.isfile(logFile):
+ os.remove(logFile)
+ logger = Logger(logFile, True)
+ logger.setErrorFilter('[second]')
+ logger.log('Hi world')
+ logger.error('an expected error')
+ logger.error('a [second] expected error')
+ logger.debug('debug message')
+ self.assertFileContains('Hi world', logFile)
+ self.assertFileContains('+++ an expected error', logFile)
+ self.assertFileNotContains('a [second] expected error', logFile)
+ self.assertFileContains('debug message', logFile)
+ self.assertEquals('an expected error', logger._firstErrors[0])
+ self.assertEquals(1, logger._errors)
+
+ def testRegExprFilter(self):
+ # setErrorFilter(compiled regex): same suppression, pattern-matched.
+ logFile = '/tmp/logger.log'
+ if os.path.isfile(logFile):
+ os.remove(logFile)
+ logger = Logger(logFile, True)
+ logger.setErrorFilter(re.compile('second|third'))
+ logger.log('Hi world')
+ logger.error('an expected error')
+ logger.error('a [second] expected error')
+ logger.debug('debug message')
+ self.assertFileContains('Hi world', logFile)
+ self.assertFileContains('+++ an expected error', logFile)
+ self.assertFileNotContains('a [second] expected error', logFile)
+ self.assertFileContains('debug message', logFile)
+ self.assertEquals('an expected error', logger._firstErrors[0])
+ self.assertEquals(1, logger._errors)
+
+ def testMirror(self):
+ # setMirror(): every message is duplicated into the mirror logger's file,
+ # and the mirror keeps its own error statistics.
+ logFile1 = '/tmp/logger1.log'
+ if os.path.isfile(logFile1):
+ os.remove(logFile1)
+ logger = Logger(logFile1, True)
+
+ logFile2 = '/tmp/logger2.log'
+ if os.path.isfile(logFile2):
+ os.remove(logFile2)
+ loggerMirror = Logger(logFile2, True)
+ logger.setMirror(loggerMirror)
+
+ logger.log('Hi world')
+ logger.error('an expected error')
+ logger.debug('debug message')
+ self.assertFileContains('Hi world', logFile1)
+ self.assertFileContains('+++ an expected error', logFile1)
+ self.assertFileContains('debug message', logFile1)
+ self.assertEquals('an expected error', logger._firstErrors[0])
+ self.assertEquals(1, logger._errors)
+
+ self.assertFileContains('Hi world', logFile2)
+ self.assertFileContains('+++ an expected error', logFile2)
+ self.assertFileContains('debug message', logFile2)
+ self.assertEquals('an expected error', loggerMirror._firstErrors[0])
+ self.assertEquals(1, loggerMirror._errors)
+
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = LoggerTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.MemoryLogger
+
+# Unit tests for base.MemoryLogger: a logger that keeps messages in memory
+# and offers contains() (exact substring) and matches() (regex) queries.
+class MemoryLoggerTest(UnitTestCase):
+
+ def testBase(self):
+ logger = base.MemoryLogger.MemoryLogger()
+ logger.log('Hi world')
+ logger.error('an expected error')
+ logger.debug('debug message')
+ self.assertTrue(logger.contains('Hi world'))
+ self.assertTrue(logger.contains('an expected error'))
+ self.assertTrue(logger.contains('debug message'))
+ # contains() requires the full string to appear — 'Hi world!' does not
+ self.assertFalse(logger.contains('Hi world!'))
+ self.assertTrue(logger.matches(r'Hi\sworld'))
+ self.assertTrue(logger.matches(r'an [a-z]+ error'))
+ # anchored pattern — matches() evidently matches against whole lines
+ self.assertTrue(logger.matches(r'^de.*sage$'))
+
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = MemoryLoggerTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+
+import os
+
+import base.ProcessHelper
+import base.StringUtils
+
+debug = False
+
+def usage(msg=None):
+ # Minimal usage callback required by ApplicationInfo in getOptions().
+ return 'test usage'
+
+# Unit tests for base.ProcessHelper: running external commands and capturing
+# stdout (_output) / stderr (_error).
+# NOTE(review): base.Logger and base.BaseTool are referenced below but not
+# imported in this module's visible import block — presumably re-exported via
+# base.ProcessHelper/base.StringUtils; verify, otherwise this fails at runtime.
+class ProcessHelperTest(UnitTestCase):
+ def __init__(self):
+ UnitTestCase.__init__(self)
+ self._dir = self.tempDir('processtool', 'unittest')
+ self._testFile = self._dir + os.sep + 'simple.file.txt'
+ base.StringUtils.toFile(self._testFile, 'line 1\nline 2\nline 3\n')
+ self._tool = base.ProcessHelper.ProcessHelper(3, base.Logger.Logger('/tmp/processtooltest.log', True))
+
+ def getOptions(self):
+ # Builds minimal GlobalOptions for tools that need them.
+ appInfo = base.BaseTool.ApplicationInfo('processHelper', 'base.WikiTool', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._verboseLevel = 0
+ return rc
+
+ def testExecute(self):
+ # execute(argv, storeOutput): captured stdout lines land in _output.
+ self._tool.execute(['tail', '-n2', self._testFile], True)
+ if self.assertTrue(len(self._tool._output) == 2):
+ self.assertEquals('line 2', self._tool._output[0])
+ self.assertEquals('line 3', self._tool._output[1])
+
+ def testExecuteError(self):
+ # Reading /etc/shadow as non-root: stderr goes to _error, _output stays empty.
+ self._tool._logger.log('expecting an error:')
+ self._tool.execute(['tail', '-n2', '/etc/shadow'], True)
+ self.assertEquals(0, len(self._tool._output))
+ self.assertTrue(self._tool._error[0].startswith("tail: '/etc/shadow'"))
+
+ def testExecuteInput(self):
+ # executeInput(): feeds the given string to the child's stdin.
+ # NOTE(review): '\line222' contains a literal backslash ('\l' is no escape)
+ # — harmless for this grep, but probably '\n' was intended.
+ self._tool.executeInput(['grep', '-o', '[0-9][0-9]*'], True, 'line1\n\line222')
+ self.assertEquals('1', self._tool._output[0])
+ self.assertEquals('222', self._tool._output[1])
+
+ def testExecuteInputError(self):
+ # Unknown executable: the OSError text is captured in _error.
+ self._tool._logger.log('expecting an error:')
+ self._tool.executeInput(['veryUnknownCommand!', '[0-9]+'], True, 'line1\n\line222')
+ self.assertEquals(0, len(self._tool._output))
+ self.assertEquals("[Errno 2] No such file or directory: 'veryUnknownCommand!': 'veryUnknownCommand!'", self._tool._error[0])
+
+ def testExecuteScript(self):
+ # executeScript(): writes the script text to a temp file and runs it with args.
+ # Result assertions are disabled — smoke test only.
+ rc = self._tool.executeScript('#! /bin/bash\n/bin/echo $1', 'getArg1', True, ['Hi world', 'Bye world'])
+ #if self.assertEquals(1, len(rc)):
+ # self.assertEquals('Hi world', rc[0])
+
+ def testExecuteInChain(self):
+ # executeInChain(): pipes one command's stdout into the next ('gzip | zcat').
+ # Result assertions are disabled — smoke test only.
+ fn = self.tempFile('gzip.input', 'unittest')
+ base.StringUtils.toFile(fn, 'Hi')
+ rc = self._tool.executeInChain(['gzip', '-c', fn], None, ['zcat'], '!shell')
+ #if self.assertEquals(1, len(rc)):
+ # self.assertEquals('Hi world', rc[0])
+
+if __name__ == '__main__':
+ # import sys;sys.argv = ['', 'Test.testName']
+ tester = ProcessHelperTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.PythonToTypeScript
+import appl.TextTool
+
+debug = False
+
+def usage(msg=None):
+ # Minimal usage callback required by ApplicationInfo in getOptions().
+ return 'test usage'
+
+# Unit tests for base.PythonToTypeScript: a source-to-source converter that
+# reads Python lines from one TextTool and writes TypeScript into another.
+# NOTE(review): base.BaseTool (getOptions) and base.StringUtils (check) are
+# used without a visible import here — presumably pulled in transitively;
+# verify, otherwise these helpers fail at runtime.
+class PythonToTypeScriptTest(UnitTestCase):
+
+ def __init__(self):
+ UnitTestCase.__init__(self)
+
+ # source holder (Python input) and sink (TypeScript output)
+ self._python = appl.TextTool.TextTool(self.getOptions())
+ self._typeScript = appl.TextTool.TextTool(self.getOptions())
+ self._tool = base.PythonToTypeScript.PythonToTypeScript(self._python, self._typeScript)
+
+ def fill(self, pythonAsString):
+ # Loads the Python source (one string) into the converter's input lines
+ # and clears any previous output.
+ self._python._lines = pythonAsString.split('\n')
+ self._typeScript._lines.clear()
+
+ def getOptions(self):
+ # Builds minimal GlobalOptions; logging goes to stdout ('-').
+ appInfo = base.BaseTool.ApplicationInfo('wiki', 'appl.WikiTool', usage)
+ rc = base.BaseTool.GlobalOptions(appInfo)
+ rc._exampleText = 'log.file=/var/log/local/pythontotypescript.log'
+ rc._logFiles.append('-')
+ return rc
+
+ def check(self, expected):
+ # Runs the conversion and compares the joined output against 'expected';
+ # also dumps both sides to /tmp for manual diffing on failure.
+ self._tool.convert()
+ current = '\n'.join(self._typeScript._lines)
+ base.StringUtils.toFile('/tmp/ucur.txt', current)
+ base.StringUtils.toFile('/tmp/uexp.txt', expected)
+ self.assertEquals(expected, current)
+
+ def testForIfWhile(self):
+ # Conversion of nested while/if/else/for/pass including comment passthrough
+ # and closing-brace annotations ('} // while' etc.).
+ if debug:
+ return
+ self.fill("""
+def fibu(n):
+ # first fun
+ while True:
+ rc = 0
+ # if:
+ if n < 3:
+ #then
+ rc = 1
+ # then end
+ else:
+ # while
+ while False:
+ # pass:
+ pass
+ # for:
+ for x in range(n):
+ # add
+ rc += fibu(n-1) + fibu(n-2)
+ # result
+ break
+ return rc
+ # end fun
+""")
+ self.check("""fibu(n: string): number {
+ // first fun
+ while (true) {
+ let rc = 0;
+ // if:
+ if (n < 3) {
+ //then
+ let rc = 1;
+ // then end
+ } else {
+ // while
+ while (false) {
+ // pass:
+ pass;
+ // for:
+ } // while
+ for (let x in range(n)) {
+ // add
+ rc += fibu(n-1) + fibu(n-2);
+ // result
+ } // for
+ } // else
+ break;
+ } // while
+ return rc;
+ // end fun
+}""")
+
+ def testFor(self):
+ # Imports are dropped, leading comments kept, for-loops translated.
+ if debug:
+ return
+ self.fill("""# first
+import os.path
+# 2nd
+def fibu(n):
+# first fun
+ rc = 0
+ # for:
+ for x in range(n):
+ # add
+ rc += fibu(n-1) + fibu(n-2)
+ # result
+ return rc
+ # end fun
+""")
+ self.check("""// first
+// 2nd
+fibu(n: string): number {
+// first fun
+ let rc = 0;
+ // for:
+ for (let x in range(n)) {
+ // add
+ rc += fibu(n-1) + fibu(n-2);
+ // result
+ } // for
+ return rc;
+ // end fun
+}
+""")
+
+ def testDef(self):
+ # Python docstrings become JSDoc blocks; True/False become true/false;
+ # the return type is inferred as boolean from the returned literals.
+ if debug:
+ return
+ self.fill("""
+import os.path
+def testIt(name):
+ '''Tests whether a given file exists.
+ @param name: the filename
+ @return: True: the file exists
+ '''
+ if os.path.exists(name):
+ return True
+ else:
+ return False
+ return False
+""")
+ self.check("""/**
+ * Tests whether a given file exists.
+ * @param name: the filename
+ * @return: true: the file exists
+ */
+testIt(name: string): boolean {
+ if (os.path.exists(name)) {
+ return true;
+ } else {
+ return false;
+ } // else
+ return false;
+}
+""")
+
+ def testClass(self):
+ # Class conversion: 'class X (base.Y)' -> 'export class X extends Y',
+ # attributes found in __init__ are declared as private fields with
+ # inferred types, __init__ becomes constructor(), self -> this.
+ if False and debug:
+ return
+ self.fill("""
+class Derived (base.BaseClass):
+""\"A derivation of the BaseClass.
+Not realy meaningful.
+""\"
+ def __init__(self):
+ '''Constructor.
+ '''
+ base.BaseClass.__init__(self, 'default')
+ self._count = 1
+ self._stop = False
+ self._dict = dict()
+
+ def fac(n):
+ '''Iterative calculation of faculty
+ @param n: the filename
+ @return: the faculty of n
+ '''
+ self._log.append("fac")
+ rc = 1
+ while n > 1:
+ self._count += 1
+ rc *= n
+ n -= 1
+ return rc
+""")
+ self.check("""/**
+ * A derivation of the BaseClass.
+ * Not realy meaningful.
+ */
+export class Derived extends BaseClass {
+ private _count: number;
+ private _stop: boolean;
+ private _dict: object;
+ private _log: object;
+ /**
+ * Constructor.
+ */
+ constructor() {
+ super('default');
+ this._count = 1;
+ this._stop = false;
+ this._dict = dict();
+ }
+
+ /**
+ * Iterative calculation of faculty
+ * @param n: the filename
+ * @return: the faculty of n
+ */
+ fac(n: string): number {
+ this._log.append("fac");
+ let rc = 1;
+ while (n > 1) {
+ this._count += 1;
+ rc *= n;
+ n -= 1;
+ } // while
+ return rc;
+ }
+}
+""")
+
+if __name__ == '__main__':
+ #import sys;sys.argv = ['', 'Test.testName']
+ tester = PythonToTypeScriptTest()
+ tester.run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.StringUtils
+import os
+import re
+# NOTE(review): duplicate import — base.StringUtils is already imported above
+from base import StringUtils
+# import from base.Logger Logger
+
+# Unit tests for base.StringUtils: string/file helpers, grep utilities and
+# the string-to-float/type conversion functions.
+class StringUtilsTest(UnitTestCase):
+
+ def testJoin(self):
+ # join() stringifies items; None as list yields ''.
+ self.assertEquals('1 2 3', base.StringUtils.join(' ', [1,2,3]))
+ self.assertEquals('1,B,[]', base.StringUtils.join(',', [1, 'B', []]))
+ self.assertEquals('A.B.C', base.StringUtils.join('.', ['A', 'B', 'C']))
+ self.assertEquals('', base.StringUtils.join('.', None))
+
+ def testToFile(self):
+ fn = '/tmp/stringutils.tmp'
+ if os.path.exists(fn):
+ os.unlink(fn)
+ content = 'line1\nline2'
+ base.StringUtils.toFile(fn, content)
+ self.assertTrue(os.path.exists(fn))
+ self.assertFileContains('line1', fn)
+ self.assertFileContains('line2', fn)
+
+ def testFromFile(self):
+ # fromFile() without separator returns the whole content as one string.
+ fn = '/tmp/stringutils.tmp'
+ content = 'xline1\nxline2'
+ base.StringUtils.toFile(fn, content)
+ current = base.StringUtils.fromFile(fn)
+ self.assertEquals(content, current)
+
+ def testFromFileSep(self):
+ # fromFile() with a separator returns the split list instead.
+ fn = '/tmp/stringutils.tmp'
+ content = 'xline1\nxline2'
+ base.StringUtils.toFile(fn, content)
+ current = base.StringUtils.fromFile(fn, '\n')
+ self.assertEquals(content.split('\n'), current)
+
+ def testTailOfWord(self):
+ # tailOfWord(line, prefix): the rest of the whitespace-delimited word
+ # starting with prefix, or None if no word starts with it.
+ self.assertEquals('x', base.StringUtils.tailOfWord('-ax', '-a'))
+ self.assertEquals('x', base.StringUtils.tailOfWord('-b -ax', '-a'))
+ self.assertEquals('x', base.StringUtils.tailOfWord('-ax -b', '-a'))
+ self.assertEquals('x', base.StringUtils.tailOfWord('-c -ax -b', '-a'))
+ self.assertEquals('x', base.StringUtils.tailOfWord('-ax\t -b', '-a'))
+ self.assertEquals('x', base.StringUtils.tailOfWord('y \t-ax\t -b', '-a'))
+
+ # '-a' embedded mid-word must not match
+ self.assertNone(base.StringUtils.tailOfWord('--find-a-ax', '-a'))
+ self.assertNone(base.StringUtils.tailOfWord('-b\t-c -d', '-a'))
+
+ def testFormatSize(self):
+ # formatSize() uses decimal units (1000-based) and 3 decimals above bytes.
+ self.assertEquals('120 Byte', base.StringUtils.formatSize(120))
+ self.assertEquals('123.456 KB', base.StringUtils.formatSize(123456))
+ self.assertEquals('123.456 MB', base.StringUtils.formatSize(123456*1000))
+ self.assertEquals('12.346 MB', base.StringUtils.formatSize(123456*100))
+ self.assertEquals('1.235 MB', base.StringUtils.formatSize(123456*10))
+ self.assertEquals('123.456 GB', base.StringUtils.formatSize(123456*1000*1000))
+ self.assertEquals('123.456 TB', base.StringUtils.formatSize(123456*1000*1000*1000))
+
+ def testHasContent(self):
+ # hasContent(file, commentPrefix='#'): True only if some line is neither
+ # blank nor a comment; missing files count as "no content".
+ filename = self.tempFile('example.txt', 'stringutiltest')
+ base.StringUtils.toFile(filename, '')
+ self.assertFalse(base.StringUtils.hasContent(filename))
+ base.StringUtils.toFile(filename, '# comment')
+ self.assertFalse(base.StringUtils.hasContent(filename))
+ base.StringUtils.toFile(filename, '# comment\n\t \n\n#comment2')
+ self.assertFalse(base.StringUtils.hasContent(filename))
+ self.assertFalse(base.StringUtils.hasContent(filename + '.not.existing'))
+ base.StringUtils.toFile(filename, '\t// comment\n\t \n\n//comment2')
+ self.assertFalse(base.StringUtils.hasContent(filename, '//'))
+
+ # NOTE(review): '\//comment2' carries a stray backslash ('\/' is no
+ # escape) — the line is therefore real content w.r.t. prefix '#'
+ base.StringUtils.toFile(filename, '\t// comment\n\t \n\//comment2')
+ self.assertTrue(base.StringUtils.hasContent(filename, '#'))
+ base.StringUtils.toFile(filename, '# has content!\n\na=3')
+ self.assertTrue(base.StringUtils.hasContent(filename, '#'))
+
+ def testFirstMatch(self):
+ # firstMatch(list, regex[, start]): the first match object at/after 'start'.
+ aList = ['# a=2', '#', 'b=3', '\t name = Jonny Cash ']
+ regExpr = re.compile(r'^\s*(\w+)\s*=\s*(.*?)\s*$')
+ matcher = base.StringUtils.firstMatch(aList, regExpr)
+ self.assertNotNone(matcher)
+ self.assertEquals('b', matcher.group(1))
+ self.assertEquals('3', matcher.group(2))
+
+ matcher = base.StringUtils.firstMatch(aList, regExpr, 3)
+ self.assertNotNone(matcher)
+ self.assertEquals('name', matcher.group(1))
+ self.assertEquals('Jonny Cash', matcher.group(2))
+
+ def testGrepInFile(self):
+ # grepInFile(file, regex[, limit]): matching lines (with '\n'), capped at limit.
+ filename = self.tempFile('grep.txt', 'stringutiltest')
+ base.StringUtils.toFile(filename, """# Test
+a = 1
+# öäü b = 2
+c=333
+""")
+ regExpr = re.compile(r'^\s*(\w+)\s*=\s*(.*?)\s*$')
+ found = base.StringUtils.grepInFile(filename, regExpr)
+ self.assertEquals(2, len(found))
+ self.assertEquals('a = 1\n', found[0])
+ self.assertEquals('c=333\n', found[1])
+
+ found = base.StringUtils.grepInFile(filename, regExpr, 1)
+ self.assertEquals(1, len(found))
+ self.assertEquals("a = 1\n", found[0])
+
+ def testGrepInFileGroup(self):
+ # With a group index, only that capture group is collected per match.
+ filename = self.tempFile('grep.txt', 'stringutiltest')
+ base.StringUtils.toFile(filename, """# Test
+a = 1
+# öäü b = 2
+c=333
+""")
+ regExpr = re.compile(r'^\s*\w+\s*=\s*(.*?)\s*$')
+ found = base.StringUtils.grepInFile(filename, regExpr, None, 1)
+ self.assertEquals(2, len(found))
+ self.assertEquals('1', found[0])
+ self.assertEquals('333', found[1])
+
+ found = base.StringUtils.grepInFile(filename, regExpr, 1)
+ self.assertEquals(1, len(found))
+ self.assertEquals("a = 1\n", found[0])
+
+ def testLimitItemLength_WithoutElipsis(self):
+ # limitItemLength(list, n): items truncated to n chars; source unchanged.
+ source = ['1', '22', '333', '4444']
+ result = base.StringUtils.limitItemLength(source, 2)
+ self.assertEquals(source[0], '1')
+ self.assertEquals(source[3], '4444')
+ self.assertEquals(len(source), len(result))
+ for ix in range(len(source)):
+ self.assertEquals(source[ix][0:2], result[ix])
+ result = base.StringUtils.limitItemLength(source, 0)
+ self.assertEquals('', ''.join(result))
+
+ def testLimitItemLength(self):
+ # For longer items an ellipsis '...' is appended within the length budget.
+ source = ['abcd1', 'abcd22', 'abcd333', 'abcd4444']
+ result = base.StringUtils.limitItemLength(source, 5)
+ self.assertEquals(source[0], 'abcd1')
+ self.assertEquals(source[3], 'abcd4444')
+ self.assertEquals(len(source), len(result))
+ for ix in range(len(source)):
+ if ix == 0:
+ self.assertEquals(source[ix], result[ix])
+ else:
+ self.assertEquals(source[ix][0:2] + '...', result[ix])
+ result = base.StringUtils.limitItemLength(source, 0)
+ self.assertEquals('', ''.join(result))
+
+ # NOTE(review): the expected epoch values below (e.g. 1571781600.0,
+ # -3600.0) correspond to local time UTC+1/UTC+2 — these tests are
+ # timezone-dependent; confirm the runner uses the expected locale.
+ def testToFloatAndTypeDate(self):
+ [value, dataType] = base.StringUtils.toFloatAndType('2019.10.23')
+ self.assertEquals(1571781600.0, value)
+ self.assertEquals('date', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('1970-01-01')
+ self.assertEquals(-3600.0, value)
+ self.assertEquals('date', dataType)
+
+ def testToFloatAndTypeTime(self):
+ # times become seconds since midnight
+ [value, dataType] = base.StringUtils.toFloatAndType('01:02:03')
+ self.assertEquals(1*3600+2*60+3, value)
+ self.assertEquals('time', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('2:17')
+ self.assertEquals(2*3600+17*60, value)
+ self.assertEquals('time', dataType)
+
+ def testToFloatAndTypeDateTime(self):
+ [value, dataType] = base.StringUtils.toFloatAndType('2019.10.23T01:02:03')
+ self.assertEquals(1571785323.0, value)
+ self.assertEquals('datetime', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('1970-01-02 5:17')
+ self.assertEquals(101820.0, value)
+ self.assertEquals('datetime', dataType)
+
+ def testToFloatAndTypeHex(self):
+ # 0x/0X prefixes are parsed as hexadecimal integers
+ [value, dataType] = base.StringUtils.toFloatAndType('0x1234')
+ self.assertEquals(float(0x1234), value)
+ self.assertEquals('int', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('0XABCDEF0123456')
+ self.assertEquals(float(0xABCDEF0123456), value)
+ self.assertEquals('int', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('0Xabcdef0')
+ self.assertEquals(float(0xABCDEF0), value)
+ self.assertEquals('int', dataType)
+
+ def testToFloatAndTypeOct(self):
+ # a leading 0 means octal (C-style), not Python-3-style 0o
+ [value, dataType] = base.StringUtils.toFloatAndType('01234')
+ self.assertEquals(float(0o1234), value)
+ self.assertEquals('int', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('012345670')
+ self.assertEquals(float(0o12345670), value)
+ self.assertEquals('int', dataType)
+
+ def testToFloatAndTypeInt(self):
+ [value, dataType] = base.StringUtils.toFloatAndType('1234')
+ self.assertEquals(1234.0, value)
+ self.assertEquals('int', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('987654321')
+ self.assertEquals(987654321.0, value)
+ self.assertEquals('int', dataType)
+
+ def testToFloatAndTypeFloat(self):
+ [value, dataType] = base.StringUtils.toFloatAndType('1234.0')
+ self.assertEquals(1234.0, value)
+ self.assertEquals('float', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('987654321.0')
+ self.assertEquals(987654321.0, value)
+ self.assertEquals('float', dataType)
+ [value, dataType] = base.StringUtils.toFloatAndType('1.23E+44')
+ self.assertEquals(1.23E+44, value)
+ self.assertEquals('float', dataType)
+
+ def testToFloatAndTypeError(self):
+ # unparsable input: the "value" slot carries the error message, type 'undef'
+ [value, dataType] = base.StringUtils.toFloatAndType('host3')
+ self.assertEquals('float (or int or date(time)) expected, found: host3', value)
+ self.assertEquals('undef', dataType)
+
+ def testToFloatDate(self):
+ # toFloat(): same conversions as toFloatAndType() but value only
+ value = base.StringUtils.toFloat('2019.10.23')
+ self.assertEquals(1571781600.0, value)
+ value = base.StringUtils.toFloat('1970-01-01')
+ self.assertEquals(-3600.0, value)
+
+ def testToFloatTime(self):
+ value = base.StringUtils.toFloat('01:02:03')
+ self.assertEquals(1*3600+2*60+3, value)
+ value = base.StringUtils.toFloat('2:17')
+ self.assertEquals(2*3600+17*60, value)
+
+ def testToFloatDateTime(self):
+ value = base.StringUtils.toFloat('2019.10.23T01:02:03')
+ self.assertEquals(1571785323.0, value)
+ value = base.StringUtils.toFloat('1970-01-02 5:17')
+ self.assertEquals(101820.0, value)
+
+ def testToFloatHex(self):
+ value = base.StringUtils.toFloat('0x1234')
+ self.assertEquals(float(0x1234), value)
+ value = base.StringUtils.toFloat('0XABCDEF0123456')
+ self.assertEquals(float(0xABCDEF0123456), value)
+ value = base.StringUtils.toFloat('0Xabcdef0')
+ self.assertEquals(float(0xABCDEF0), value)
+
+ def testToFloatOct(self):
+ value = base.StringUtils.toFloat('01234')
+ self.assertEquals(float(0o1234), value)
+ value = base.StringUtils.toFloat('012345670')
+ self.assertEquals(float(0o12345670), value)
+
+ def testToFloatInt(self):
+ value = base.StringUtils.toFloat('1234')
+ self.assertEquals(1234.0, value)
+ value = base.StringUtils.toFloat('987654321')
+ self.assertEquals(987654321.0, value)
+
+ def testToFloatFloat(self):
+ value = base.StringUtils.toFloat('1234.0')
+ self.assertEquals(1234.0, value)
+ value = base.StringUtils.toFloat('987654321.0')
+ self.assertEquals(987654321.0, value)
+ value = base.StringUtils.toFloat('1.23E+44')
+ self.assertEquals(1.23E+44, value)
+
+ def testToFloatError(self):
+ value = base.StringUtils.toFloat('host3')
+ self.assertEquals('float (or int or date(time)) expected, found: host3', value)
+
+ def testAsInt(self):
+ self.assertEquals(321, base.StringUtils.asInt('321'))
+ self.assertEquals(0x321, base.StringUtils.asInt('0x321'))
+ self.assertEquals(0o321, base.StringUtils.asInt('0321'))
+ self.assertEquals(77, base.StringUtils.asInt('99x', 77))
+ self.assertEquals(777, base.StringUtils.asInt('x2', 777))
+
+ def testRegExprCompile(self):
+ rexpr = base.StringUtils.regExprCompile('\d', None, None, True)
+ self.assertNotNone(rexpr.match('7'))
+ rexpr = base.StringUtils.regExprCompile('Hi', None, None, False)
+ self.assertNotNone(rexpr.match('hi'))
+
+ def testRegExprCompileError(self):
+ rexpr = base.StringUtils.regExprCompile('*.txt', 'test of wrong pattern', self._logger)
+ self.assertNone(rexpr)
+ self._logger.contains('error in regular expression in test of wrong pattern: nothing to repeat at position 0')
+ rexpr = base.StringUtils.regExprCompile('(*.txt', 'test of wrong pattern')
+ self.assertNone(rexpr)
+
+ def testMinimizeArrayUtfError(self):
+ list = [b'\xffabcdefghijklmnopqrstuvwxyz01234567890', b'abcdefghijklmnopqrstuvwxyz01234567890\xff']
+ rc = base.StringUtils.minimizeArrayUtfError(list, self._logger)
+ self.assertEquals(2, len(rc))
+ self.assertEquals(1, rc[0].find('abcdefghijklmnopqrstuvwxyz01234567890'))
+ self.assertEquals(0, rc[1].find('abcdefghijklmnopqrstuvwxyz01234567890'))
+
if __name__ == '__main__':
    # Run the whole suite when invoked as a script.
    StringUtilsTest().run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+from base.TextProcessor import TextProcessor, IntegerVariable, TextVariable,\
+ DictVariable, ListVariable
+import base.Logger
+from base import StringUtils
+import appl.TextTool
+
+debug = True
+class TextProcessorTest(UnitTestCase):
    def __init__(self):
        """Initializes the test case with a verbose file logger."""
        UnitTestCase.__init__(self)
        # logger shared by all tests of this class
        self._logger = base.Logger.Logger('/tmp/unittest.log', True)
+
+ def checkPos(self, processor, line, col):
+ self.assertEquals(line, processor._currentPos._line)
+ self.assertEquals(col, processor._currentPos._col)
+
+ def checkMark(self, processor, ixMark, line, col):
+ self.assertEquals(line, processor._marks[ixMark]._line)
+ self.assertEquals(col, processor._marks[ixMark]._col)
+
+ def testContentString(self):
+ if debug:
+ return
+ processor = TextProcessor(None, 'line1\nline2', self._logger)
+ self.assertEquals(2, len(processor._lines));
+ self.assertEquals('line1', processor._lines[0]);
+ self.assertEquals('line2', processor._lines[1]);
+
+ def testContentList(self):
+ if debug:
+ return
+ processor = TextProcessor(None, ['xA', 'yB'], self._logger)
+ self.assertEquals(2, len(processor._lines));
+ self.assertEquals('xA', processor._lines[0]);
+ self.assertEquals('yB', processor._lines[1]);
+
+ def testIndexOf(self):
+ if debug:
+ return
+ processor = TextProcessor(None, 'line1\nline2', self._logger)
+ aTuple = processor.indexOf('2')
+ self.assertNotNone(aTuple)
+ (ixLine, col, length, matcher) = aTuple
+ self.assertEquals(1, ixLine);
+ self.assertEquals(4, col);
+ self.assertEquals(1, length);
+ self.assertEquals('2', matcher.group());
+
+ def testIndexOfMultiple(self):
+ if debug:
+ return
+ processor = TextProcessor(None, 'line1\nline2', self._logger)
+ aTuple = processor.indexOfMultiple(['1', '2'])
+ self.assertNotNone(aTuple)
+ (ixLine, col, length, matcher) = aTuple
+ self.assertEquals(1, ixLine);
+ self.assertEquals(4, col);
+ self.assertEquals(1, length);
+ self.assertEquals('2', matcher.group());
+
+ def testIndexOf2AncorsSameLine(self):
+ if debug:
+ return
+ processor = TextProcessor(None, 'abc<a href="abc">', self._logger)
+ aTuple = processor.indexOfMultiple(['"', 'abc'])
+ self.assertNotNone(aTuple)
+ (ixLine, col, length, matcher) = aTuple
+ self.assertEquals(0, ixLine);
+ self.assertEquals(12, col);
+ self.assertEquals(3, length);
+ self.assertEquals('abc', matcher.group());
+ processor = TextProcessor(None, 'abc\nabc<a href="abc">', self._logger)
+ aTuple = processor.indexOfMultiple(['href', 'abc'])
+ self.assertNotNone(aTuple)
+ (ixLine, col, length, matcher) = aTuple
+ self.assertEquals(1, ixLine);
+ self.assertEquals(12, col);
+ self.assertEquals(3, length);
+ self.assertEquals('abc', matcher.group());
+ self.assertTrue(processor._errors == 0)
+
+ def checkTuple(self, expectedline, expectedCol, expectedString, aTuple):
+ self.assertNotNone(aTuple)
+ (ixLine, col, length, matcher) = aTuple
+ self.assertEquals(expectedline, ixLine)
+ self.assertEquals(expectedCol, col)
+ self.assertEquals(len(expectedString), length)
+ self.assertEquals(expectedString, matcher.group())
+
+ def testRIndex(self):
+ if debug:
+ return
+ # ...............................0123456789 123456789
+ processor = TextProcessor(None, 'abc<a href="abc">', self._logger)
+ aTuple = processor.rindexOf('abc')
+ self.checkTuple(0, 12, 'abc', aTuple)
+ aTuple = processor.rindexOf('abc', 0, 15)
+ self.checkTuple(0, 12, 'abc', aTuple)
+ aTuple = processor.rindexOf('abc', 0, 14)
+ self.checkTuple(0, 0, 'abc', aTuple)
+ self.assertTrue(processor._errors == 0)
+
+ def testReplaceOneHit(self):
+ if debug:
+ return
+ processor = TextProcessor(None, '\tabcd\nabc', self._logger)
+ processor.replace('abc', 'xy', 1)
+ self.assertEquals('\txyd', processor._lines[0]);
+ self.assertEquals('abc', processor._lines[1]);
+ self.assertTrue(processor._errors == 0)
+
+ def testReplaceManyHits(self):
+ if debug:
+ return
+ processor = TextProcessor(None, 'abc.def\nabcabc', self._logger)
+ processor.replace('abc', 'xy')
+ self.assertEquals('xy.def', processor._lines[0]);
+ self.assertEquals('xyxy', processor._lines[1]);
+ self.assertTrue(processor._errors == 0)
+
    def testDeleteChar(self):
        """Tests character deletion via the script interpreter (short and long syntax)."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefg\n0123456\nABCD', self._logger)
        # goto + delete N chars; 'd10c' may exceed the line end
        processor.parseAndInterpret('g1;d2;g0 2;delete 3 chars;g2 3;d10c')
        self.assertEquals('abfg\n23456\nABC', '\n'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testDeleteToMarkBackward(self):
        """Deletes the region between the current position and a mark set behind it."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefg\n0123456\nABCD', self._logger)
        # mark at (2,1), position at (0,2): deletion spans backward to the mark
        processor.parseAndInterpret('g2 1;m#0;g0 2; delete#0; ')
        self.assertEquals('abBCD', '\n'.join(processor._lines));
        self.checkPos(processor, 0, 2)
        self.checkMark(processor, 0, 0, 2)
        self.assertTrue(processor._errors == 0)
+
    def testDeleteToMarkforward(self):
        """Deletes the region between a mark and a current position after it."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefg\n0123456\nABCD', self._logger)
        # mark at (0,2), position at (2,1): same result as the backward variant
        processor.parseAndInterpret('g 0 2;m#0;g 2 1;delete #0')
        self.assertEquals('abBCD', '\n'.join(processor._lines));
        self.checkPos(processor, 0, 2)
        self.checkMark(processor, 0, 0, 2)
        self.assertTrue(processor._errors == 0)
+
    def testDeleteLines(self):
        """Tests line-wise deletion in short ('d1l') and long ('delete 1 line') syntax."""
        if debug:
            return
        # NOTE(review): '\6' is an escape (chr(6)), not the literal '6' — confirm intended
        processor = TextProcessor(None, '0\n1\n2\n3\n4\n5\n\6-abc\n7\n8', self._logger)
        processor.parseAndInterpret('g6 2;d1l;goto 4;delete 1 line ; g1; d 2 lines;g+0')
        self.assertEquals('0,3,5,7,8', ','.join(processor._lines));
        self.checkPos(processor, 1, 0)
        self.assertTrue(processor._errors == 0)
+
    def testGotoRelative(self):
        """Tests goto with relative line/column offsets ('g +2', 'g-0-4', ...)."""
        if debug:
            return
        processor = TextProcessor(None, 'A12345678\nB12345678\nC12345678\nD12345678\nE12345678\nF12345678', self._logger)
        # intermediate positions: (3,5) (5,0) (2,0) (2,8) (2,4)
        processor.parseAndInterpret('g3 5;goto +2;g -3;g +0 +8 g-0-4')
        self.checkPos(processor, 2, 4)
        self.assertTrue(processor._errors == 0)
+
    def testFind(self):
        """Tests forward/backward search with different delimiters and the 'i' flag."""
        if debug:
            return
        processor = TextProcessor(None, 'A12345678\nB12345678\nC12345678\nD12345678\nE12345678\nF12345678', self._logger)
        # case-insensitive: '^c' hits line 2
        processor.parseAndInterpret('f/^c/i')
        self.checkPos(processor, 2, 0)
        # long syntax, backward search, '<' as delimiter
        processor.parseAndInterpret('find backwards <78<c;')
        self.checkPos(processor, 1, 7)
        processor.parseAndInterpret('find !123!')
        self.checkPos(processor, 2, 1)
        # short backward syntax 'fb'
        processor.parseAndInterpret('fb/a/i;')
        self.checkPos(processor, 0, 0)
        # chained searches with regular expression classes and repetitions
        processor.parseAndInterpret('f/F/;fbackwards /[567]/i;findbackwards =[234]{3}=')
        self.checkPos(processor, 4, 2)
        self.assertTrue(processor._errors == 0)
+
    def testFindExpand(self):
        """Tests search patterns with meta-escape and variable expansion options."""
        if debug:
            return
        processor = TextProcessor(None, 'A12345678\nB12345678\nC12345678\nD12345678\nE12345678\nF12345678', self._logger)
        # '!x43' expands to hex 0x43 ('C') with meta escape '!'
        processor.parseAndInterpret('f/^!x43/i -e!')
        self.checkPos(processor, 2, 0)
        # variable expansion with prefix '!' and meta escape '^'
        processor.parseAndInterpret('var int eight;$eight=8;find backwards <^x37!eight<c --meta-esc=^ --var-prefix=!')
        self.checkPos(processor, 1, 7)
        # variable expansion with prefix '$' and '{...}' syntax
        processor.parseAndInterpret('var int one;$one=1;find !${one}23! -p$ --meta-esc==')
        self.checkPos(processor, 2, 1)
        self.assertTrue(processor._errors == 0)
+
    def testInsertOneLine(self):
        """Inserts single-line text at the current position ('i' and 'insert')."""
        if debug:
            return
        processor = TextProcessor(None, 'A123\nB123', self._logger)
        processor.parseAndInterpret('g0 2;i "xy" ;g1 1;insert \'lub\'')
        self.assertEquals('A1xy23\nBlub123', '\n'.join(processor._lines));
+
    def testInsertThreeLine(self):
        """Inserts multi-line text: '\\n' in the argument splits into new lines."""
        if debug:
            return
        processor = TextProcessor(None, 'A123\nB123', self._logger)
        processor.parseAndInterpret('g1 0;i <x\\ny\\nz< -e\\ -p!')
        self.assertEquals('A123|x|y|zB123', '|'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testInsertThreeLineLongOpts(self):
        """Same as testInsertThreeLine but using the long option names."""
        if debug:
            return
        processor = TextProcessor(None, 'A123\nB123', self._logger)
        processor.parseAndInterpret('g0 2;i <x\\ny\\nz< --var-prefix=% --meta-esc=\\')
        self.assertEquals('A1x|y|z23|B123', '|'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testInsertWrongOpts(self):
        """Invalid insert options must produce the expected error messages."""
        if debug:
            return
        # NOTE(review): base.MemoryLogger is not imported in this module's header —
        # presumably reachable through the 'base' package; verify.
        logger = base.MemoryLogger.MemoryLogger()
        processor = TextProcessor(None, '', logger)
        processor.parseAndInterpret('g0 2;i "1" --var-prefix=A -e.. -x --dummy-var')
        self.assertTrue(logger.contains('+++ invalid meta-esc (length=1, non alfanum): ..'));
        self.assertTrue(logger.contains('+++ invalid var-prefix (length=1, non alfanum): A'));
        self.assertTrue(logger.contains('+++ unknown short option: x allowed: ep'));
        self.assertTrue(logger.contains('+++ unknown long option: dummy-var allowed: var-prefix meta-esc'));
+
    def testExpandMeta(self):
        """_expandMeta() translates escape sequences like !t, !n, !xNN."""
        if debug:
            return
        processor = TextProcessor(None, '', self._logger)
        # '!x2e' is '.', '!x21' is '!'
        self.assertEquals('\t\n\r. tab:\t!', processor._expandMeta('!t!n!r!x2e tab:!t!x21', '!'));
        # backslash as escape character
        self.assertEquals('x\ty17', processor._expandMeta('x\\ty\x317', '\\'));
        self.assertTrue(processor._errors == 0)
+
    def testExpandVar(self):
        """_expandVar() expands scalar, dict and list variables with a given prefix."""
        if debug:
            return
        processor = TextProcessor(None, '', self._logger)
        # prepare one variable of each supported kind
        intVar = IntegerVariable('number')
        intVar._value = 2
        strVar = TextVariable('txt')
        strVar._value = 'red'
        dictVar = DictVariable('color')
        dictVar._dict = { 'red': 'f00', 'green': '0f0', 'blue': '00f'}
        listVar = ListVariable('arr')
        listVar._list = ['Adam', 'Berta', 'Charly']
        processor._vars['number'] = intVar
        processor._vars['txt'] = strVar
        processor._vars['color'] = dictVar
        processor._vars['arr'] = listVar
        # plain, braced, dict index and list index access with '%' prefix
        self.assertEquals('n: 2 t: red blue: 00f l1: Berta', processor._expandVar('n: %number t: %{txt} blue: %color[blue] l1: %arr[1]', '%'));
        # nested expansion: the index itself is a variable
        self.assertEquals('2red_and_greenf00Charly', processor._expandVar('$number${txt}_and_green$color[$txt]$arr[$number]', '$'));
        self.assertTrue(processor._errors == 0)
+
    def testSubstituteExpandLimit(self):
        """substitute with variable expansion and a hit limit of 1."""
        if debug:
            return
        processor = TextProcessor(None, 'A123\nB123', self._logger)
        processor._vars['x'] = TextVariable('x', '1')
        processor.parseAndInterpret('s/%x/./c -p% ; substitute =2=:\\t= c --meta-esc=\\ --limit=1')
        self.assertEquals('A.:\t3|B.23', '|'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testSubstituteIgnore(self):
        """substitute with the case-ignoring flag 'i'."""
        if debug:
            return
        processor = TextProcessor(None, 'A123\nB123', self._logger)
        processor.parseAndInterpret('s/a/Alfa/i;substitute =b=:\t= i --var-prefix=% --meta-esc=^ --limit=99')
        self.assertEquals('Alfa123|:\t123', '|'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testAssignmentScalar(self):
        """Declaration and assignment of int and text variables."""
        if debug:
            return
        processor = TextProcessor(None, None, self._logger)
        processor.parseAndInterpret('var int num;var text name;$num=3;$name="Joe"')
        self.assertEquals(3, processor._vars['num']._value);
        self.assertEquals('Joe', processor._vars['name']._value);
        self.assertTrue(processor._errors == 0)
+
    def testAssignmentDict(self):
        """Assignment to dict entries; values are stored as strings."""
        if debug:
            return
        processor = TextProcessor(None, None, self._logger)
        processor.parseAndInterpret('var dict color;$color[red]="f00";$color[blue]=15')
        self.assertEquals('f00', processor._vars['color']._dict['red']);
        # numeric literal is stored as its string representation
        self.assertEquals('15', processor._vars['color']._dict['blue']);
        self.assertTrue(processor._errors == 0)
+
    def testAssignmentList(self):
        """List variables: '$x[]' appends, '$x[0]' replaces by index."""
        if debug:
            return
        processor = TextProcessor(None, None, self._logger)
        processor.parseAndInterpret('var list x;$x[]=100;$x[]=200;$x[0]=101')
        self.assertEquals('101', processor._vars['x']._list[0]);
        self.assertEquals('200', processor._vars['x']._list[1]);
        self.assertTrue(processor._errors == 0)
+
    def testLoadMark(self):
        """'load <var> #<mark>' copies the region between position and mark."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', self._logger)
        processor.parseAndInterpret('var text x;v t y; g4 0;mark #1; g 0 2; load x #1;g 1 3; m #2; g 2 2;l y #2')
        self.assertEquals('cdefgh\n012345\n_.:-', processor._vars['x']._value);
        self.assertEquals('345\n_.', processor._vars['y']._value);
        self.assertTrue(processor._errors == 0)
+
    def testLoadChars(self):
        """'load <var> N [chars]' copies N characters, clipped to the line end."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', self._logger)
        processor.parseAndInterpret('var text x;v t y; g1;load x 3; load y 20chars')
        self.assertEquals('012', processor._vars['x']._value);
        # 20 chars requested but the line holds only 6
        self.assertEquals('012345', processor._vars['y']._value);
        self.assertTrue(processor._errors == 0)
+
    def testLoadLines(self):
        """'load <var> N l[ines]' copies whole lines starting at the current position."""
        if debug:
            return
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', self._logger)
        processor.parseAndInterpret('var text x;v t y; g0 1; load x 3l; load y 1 line')
        self.assertEquals('bcdefgh\n012345\n_.:-', processor._vars['x']._value);
        self.assertEquals('bcdefgh\n', processor._vars['y']._value);
        self.assertTrue(processor._errors == 0)
+
    def testPrintMark(self):
        """'print #<mark> -l' writes the region between position and mark to the log."""
        if debug:
            return
        logger = base.MemoryLogger.MemoryLogger()
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', logger)
        processor.parseAndInterpret('g4 0;mark #1; g 0 2; print #1 -l; g 1 3; m #2; g 2 2;p #2 --to-log')
        # NOTE(review): assertEquals is used as a condition — presumably the
        # project assertion returns a bool; verify against UnitTestCase.
        if self.assertEquals(2, len(logger._lines)):
            self.assertEquals('cdefgh\n012345\n_.:-', logger._lines[0])
            self.assertEquals('345\n_.', logger._lines[1])
        self.assertTrue(processor._errors == 0)
+
    def testPrintChars(self):
        """'print N [chars] -l' logs N characters, clipped to the line end."""
        if debug:
            return
        logger = base.MemoryLogger.MemoryLogger()
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', logger)
        processor.parseAndInterpret('g1;print 3 -l; print 20chars -l')
        self.assertEquals('012', logger._lines[0])
        self.assertEquals('012345', logger._lines[1])
        self.assertTrue(processor._errors == 0)
+
    def testPrintLines(self):
        """'print N l[ines] -l' logs whole lines starting at the current position."""
        if debug:
            return
        logger = base.MemoryLogger.MemoryLogger()
        processor = TextProcessor(None, 'abcdefgh\n012345\n_.:-', logger)
        processor.parseAndInterpret('g0 1; print 3l -l;p 1 line -l')
        self.assertEquals('bcdefgh\n012345\n_.:-', logger._lines[0])
        # NOTE(review): unlike testLoadLines the single line carries no trailing
        # '\n' here — confirm that print and load intentionally differ.
        self.assertEquals('bcdefgh', logger._lines[1])
        self.assertTrue(processor._errors == 0)
+
    def testRead(self):
        """'read "<file>"' loads the file content into the processor's lines."""
        if debug:
            return
        processor = TextProcessor(None, None, self._logger)
        fn = self.tempFile('unittest.txt')
        StringUtils.toFile(fn, 'abc')
        # the filename is passed through a text variable
        processor.parseAndInterpret('v t n;$n="{:s}";read "$n" -e^ -p$'.format(fn))
        self.assertEquals('abc', '|'.join(processor._lines));
        self.assertTrue(processor._errors == 0)
+
    def testWrite(self):
        """'write !<file>!' stores the processor's lines into a file."""
        if debug:
            return
        processor = TextProcessor(None, 'write successful', self._logger)
        fn = self.tempFile('unittest.txt')
        processor.parseAndInterpret('v t n;$n=!{:s}!;write !$n! --meta-esc=^ --var-prefix=$'.format(fn))
        content = StringUtils.fromFile(fn)
        # writing appends a final newline
        self.assertEquals('write successful\n', content);
        self.assertTrue(processor._errors == 0)
+
    def testVariables(self):
        """Multiple variables may be declared in one 'var' statement."""
        if debug:
            return
        processor = TextProcessor(None, "x", self._logger)
        processor.parseAndInterpret('var int a b;$a=1;$b=2;insert "$a$b" -p$')
        self.assertEquals('12x', '|'.join(processor._lines));
        processor.parseAndInterpret('var text a2 b2 c2;$a2=5;$b2=6;$c2=7;insert "$a2$b2$c2" -p$')
        self.assertEquals('56712x', '|'.join(processor._lines));
+
    def testIfFind(self):
        """'if find /.../' takes the then-branch on a hit, else-branch otherwise."""
        if debug:
            return
        processor = TextProcessor(None, "x", self._logger)
        processor.parseAndInterpret('if find /x/ then i " OK " ; else i " wrong " ; fi')
        self.assertEquals(' OK x', '|'.join(processor._lines));
        processor.parseAndInterpret('if find /y/ then i " wrong " ; else i " OK " ; fi')
        self.assertEquals(' OK OK x', '|'.join(processor._lines));
+
    def testIfExpr(self):
        """'if "<expr>"' treats a non-empty string as true, an empty one as false."""
        if debug:
            return
        processor = TextProcessor(None, "x", self._logger)
        processor.parseAndInterpret('if "x" then i " OK " ; else i " wrong " ; fi')
        self.assertEquals(' OK x', '|'.join(processor._lines));
        processor.parseAndInterpret('if "" then i " wrong " ; else i " OK " ; fi')
        self.assertEquals(' OK OK x', '|'.join(processor._lines));
+
    def testIfIf(self):
        """Nested if statements: all four then/else combinations are exercised."""
        if debug:
            return
        processor = TextProcessor(None, "x", self._logger)
        # then/then
        processor.parseAndInterpret('if find /x/ then if "T" then i "TT" ; else i "FTwrong" ; fi ; else i " wrong " ; fi')
        self.assertEquals('TTx', '|'.join(processor._lines));
        # then/else
        processor.parseAndInterpret('if find /x/ then if "" then i "TTwrong" ; else i "TF" ; fi ; else i " wrong " ; fi')
        self.assertEquals('TTTFx', '|'.join(processor._lines));
        # else/then
        processor.parseAndInterpret('if find /y/ then if "" then i "wrong" ; else i "wrong" ; fi ; else if "1" then i "ft" ; else i "wrong" ; fi ; fi')
        self.assertEquals('TTftTFx', '|'.join(processor._lines));
        # else/else
        processor.parseAndInterpret('if find /y/ then if "" then i "wrong" ; else i "wrong" ; fi ; else if "" then i "ftwrong" ; else i "FF" ; fi ; fi')
        self.assertEquals('TTFFftTFx', '|'.join(processor._lines));
+
    def testNumAssign(self):
        """Numeric assignment operators: :=, +=, -=, *=, /=, %=."""
        if debug:
            return
        processor = TextProcessor(None, ";", self._logger)
        processor.parseAndInterpret('var int a b c;$a:=1+2;$b:=$a*2;$c:=$b/$a;i "$a,$b,$c" -p$')
        self.assertEquals('3,6,2;', '|'.join(processor._lines));
        processor.parseAndInterpret('$a+=5;$b-=-1;$c*=3;d20;i "$a,$b,$c" -p$')
        self.assertEquals('8,7,6', '|'.join(processor._lines));
        processor.parseAndInterpret('$a/=-1+3;$b%=3*1;$c+=-1;d20;i "$a,$b,$c" -p$')
        self.assertEquals('4,1,5', '|'.join(processor._lines));
+
    def testTextToolPrint(self):
        """Smoke test: the TextTool CLI executes a simple print script."""
        if debug:
            return
        appl.TextTool.main(['any', 'execute', 'print "hi"'])
+
    def testCallMethod(self):
        """Method calls on variables: split() into a list and length()."""
        if debug:
            return
        processor = TextProcessor(None, ";", self._logger)
        processor.parseAndInterpret('var t line; $line="1,2";var list args; $line.split(args, ","); var int c; $c=$line.length()');
        #processor.parseAndInterpret('var text hi; $hi="wow"; var int len; $len=$hi.length();i "$len" -p$')
        self.assertEquals('1|2', '|'.join(processor._vars['args']._list));
        # length of the string "1,2"
        self.assertEquals(3, processor._vars['c']._value);
+
    def testListSize(self):
        """'load $<list>' fills the list with all lines; size() returns the count."""
        if debug:
            return
        processor = TextProcessor(None, "adam\nberta\ncharly", self._logger)
        processor.parseAndInterpret('var list names; load $names; var int size; $size=$names.size()');
        self.assertEquals(3, processor._vars['size']._value);
+
    def testListJoin(self):
        """join() concatenates list items; the default separator is '\\n'."""
        if debug:
            return
        processor = TextProcessor(None, "adam\nberta\ncharly", self._logger)
        processor.parseAndInterpret('var list names; load $names; var text csv; $csv=$names.join(",")');
        self.assertEquals('adam,berta,charly', processor._vars['csv']._value);
        # NOTE(review): the second script joins $names, not the freshly loaded
        # $names2 — both hold the same lines, but confirm this is intended.
        processor.parseAndInterpret('var list names2; load $names2; var text lines; $lines=$names.join()');
        self.assertEquals('adam\nberta\ncharly', processor._vars['lines']._value);
+
+ def testTextLength(self):
+ if debug:
+ return
+ processor = TextProcessor(None, None, self._logger)
+ processor.parseAndInterpret('var text t1 t2; $t1="'"; $t2=/012345/;var int l1 l1; $l1=t1.length(); $l2=$t2.length()");
+ self.assertEquals(0, processor._vars['l1']._value);
+ self.assertEquals(6, processor._vars['l2']._value);
+
if __name__ == '__main__':
    # Run the whole suite when invoked as a script.
    TextProcessorTest().run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+from base.ThreadLogger import ThreadLogger
+import os
+# import from base.Logger Logger
+
class ThreadLoggerTest(UnitTestCase):
    """Tests the thread safe logger variant."""

    def testBase(self):
        """Smoke test: every log level must end up in the log file."""
        logFile = '/tmp/logger.log'
        if os.path.isfile(logFile):
            os.remove(logFile)
        logger = ThreadLogger(logFile, True)
        for method, message in ((logger.log, 'Hi world'),
                                (logger.info, 'Hi readers'),
                                (logger.error, 'an expected error'),
                                (logger.debug, 'debug message')):
            method(message)
        self.assertFileContains('Hi world', logFile)
        self.assertFileContains('Hi readers', logFile)
        # errors are decorated with the '+++' prefix
        self.assertFileContains('+++ an expected error', logFile)
        self.assertFileContains('debug message', logFile)
+
if __name__ == '__main__':
    # Run the whole suite when invoked as a script.
    ThreadLoggerTest().run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.StringUtils
+import base.Zipper
+import appl.ZipTool
+import os
+import re
+import shutil
+import time
+# import from base.Logger Logger
+
+debug=True
+class ZipperTest(UnitTestCase):
+
    def makeTree(self):
        """Builds a test directory tree with compressible text files, three
        subdirectories, a .git directory and a symbolic link.

        @return: the name of the created root directory
        """
        text = 'Dies ist ein langer Text, der hoffentlich ein wenig komprimiert werden kann. Wenn nicht, kann man nichts machen' * 1000
        dirName = self.tempFile('srcdir', 'zippertest')
        os.makedirs(dirName, 0o777, True)
        # resulting layout (note: the 'level' files have no .txt suffix):
        # <root>: file[1-5].txt dir[1-3] .git linkConfig
        # <root>/dirX: levelX subdir1
        # <root>/dirX/subdir1: a.jpg b.txt c.odt
        # <root>/.git: config aa bb
        # <root>/.git/aa: aa123 aa234 aa567  (same pattern for bb)
        for ix in range(1, 5+1):
            fn = dirName + os.sep + 'file{:d}.txt'.format(ix)
            base.StringUtils.toFile(fn, 'no{:d}: {:s}'.format(ix, text))
        for ix in range(1, 3+1):
            subdir = dirName + os.sep + 'dir' + str(ix)
            os.makedirs(subdir, 0o777, True)
            fn = subdir + os.sep + 'level{:d}'.format(ix)
            base.StringUtils.toFile(fn, 'level{:d}: {:s}'.format(ix, text))
            dir2 = subdir + os.sep + 'subdir1'
            os.makedirs(dir2, 0o777, True)
            for node in ['a.jpg', 'b.txt', 'c.odt']:
                fn = dir2 + os.sep + node
                base.StringUtils.toFile(fn, node + ': ' + text)

        gitdir = dirName + os.sep + '.git'
        os.makedirs(gitdir, 0o777, True)
        fn = gitdir + os.sep + 'config'
        base.StringUtils.toFile(fn, '.git: {:s}'.format(text))
        for node in ['aa', 'bb']:
            dir2 = gitdir + os.sep + node
            os.makedirs(dir2, 0o777, True)
            for node2 in ['123', '234', '567']:
                fn = dir2 + os.sep + node + node2
                base.StringUtils.toFile(fn, node + ': ' + text)
        # the symlink may survive from an earlier run
        if not os.path.exists(dirName + os.sep + 'linkConfig'):
            os.symlink('.git/config', dirName + os.sep + 'linkConfig')
        return dirName
+
+ def _finish(self):
+ shutil.rmtree(self.tempFile('zippertest'))
+
+ def checkInfo(self, name, size, compressedSize, info):
+ self.assertEquals(name, info.filename)
+ self.assertEquals(size, info.file_size)
+ self.assertEquals(compressedSize, info.compress_size)
+
    def testAppendDir(self):
        """appendDir() must honor file/dir excludes and the 'already compressed' rule."""
        if debug:
            return
        archive = self.tempFile('test.zip', 'zippertest')
        dirName = self.makeTree()
        # NOTE(review): redundant — makeTree() has already created dirName
        os.makedirs(dirName, 0o777, True)
        if os.path.exists(archive):
            os.unlink(archive)
        zipper = base.Zipper.Zipper(archive, 'w')
        start = len(dirName)
        # exclude file3, dir2 and store .git entries uncompressed
        zipper.appendDir(dirName, start, re.compile('file3'), True, re.compile('^dir2$'), re.compile('^[.]git$'))
        zipper.close()
        self.assertTrue(os.path.exists(archive))
        zipper2 = base.Zipper.Zipper(archive, 'r')
        # file3.txt was excluded: only 4 of the 5 top level files remain
        infos = zipper2.infosOf(re.compile(r'file[1-5]'))
        self.assertEquals(4, len(infos))
        self.checkInfo('file2.txt', 111005, 489, infos[0]);
        self.checkInfo('file5.txt', 111005, 489, infos[1]);
        self.checkInfo('file4.txt', 111005, 489, infos[2]);
        self.checkInfo('file1.txt', 111005, 488, infos[3]);

        # dir1 was not excluded; .jpg/.odt stay uncompressed (size == compress_size)
        infos = zipper2.infosOf(None, 'dir1/subdir1/')
        self.assertEquals(3, len(infos))
        self.checkInfo('dir1/subdir1/a.jpg', 111007, 111007, infos[0]);
        self.checkInfo('dir1/subdir1/b.txt', 111007, 489, infos[1]);
        self.checkInfo('dir1/subdir1/c.odt', 111007, 111007, infos[2]);

        # .git entries are stored, not deflated
        infos = zipper2.infosOf('config')
        self.assertEquals(1, len(infos))
        self.checkInfo('.git/config', 111006, 111006, infos[0]);

        infos = zipper2.infosOf(re.compile(r'[ab]\d3'), re.compile(r'[.]\w'))
        self.assertEquals(2, len(infos))
        self.checkInfo('.git/aa/aa234', 111004, 111004, infos[0]);
        self.checkInfo('.git/bb/bb234', 111004, 111004, infos[1]);
        zipper2.close()
+
    def testCreate(self):
        """The 'create' subcommand must honor all path/filter options."""
        if debug:
            return
        dirName = self.makeTree()
        logFile = self.tempFile('zt.log', 'zippertest')
        archive = self.tempFile('createtest.zip', 'zippertest')
        if os.path.exists(archive):
            os.unlink(archive)
        src = dirName + os.sep
        # /tmp/zippertest/srcdir/dir1: level1 subdir1
        appl.ZipTool.main(['ziptool', '-l' + logFile, 'create', archive,
            '--statistic',
            '--ignored-files=[.]jpg', '--ignored-dirs=aa',
            '--shortest-path', src + 'dir1', src + 'file1.txt', src + 'linkConfig',
            '--store-node-only', src + 'dir3', src + 'file3.txt',
            '--already-compressed-dirs=^[.]git$', src + '.git'])

        zipper = base.Zipper.Zipper(archive, 'r');
        infos = zipper.infosOf(re.compile('file[13].txt'))
        self.assertEquals(2, len(infos))
        # --shortest-path: file1.txt is stored without directory
        self.checkInfo('file1.txt', 111005, 488, infos[0]);
        # --store-node-only: file3.txt keeps its parent directory node
        self.checkInfo('srcdir/file3.txt', 111005, 489, infos[1]);
        infos = zipper.infosOf('level1')
        # --shortest-path: dir1 content is stored without the dir1 prefix
        self.assertEquals(1, len(infos))
        self.checkInfo('level1', 111008, 490, infos[0]);
        infos = zipper.infosOf('level3')
        self.assertEquals(1, len(infos))
        # --store-node-only: dir3 content keeps the dir3 prefix
        self.checkInfo('dir3/level3', 111008, 491, infos[0]);
        # --ignored-files: no .jpg in the archive
        infos = zipper.infosOf(re.compile('[.]jpg'))
        self.assertEquals(0, len(infos))
        # --ignored-dirs: nothing from .git/aa
        infos = zipper.infosOf(None, re.compile('aa'))
        self.assertEquals(0, len(infos))
        # --already-compressed-dirs: .git entries stored uncompressed
        infos = zipper.infosOf('config')
        self.assertEquals(1, len(infos))
        self.checkInfo('.git/config', 111006, 111006, infos[0]);
+
    def testInfo(self):
        """The 'info' subcommand must list entries by name, regexp, wildcard and substring."""
        if debug:
            return
        dirName = self.makeTree()
        logFile = self.tempFile('info.log', 'zippertest')
        archive = self.tempFile('infotest.zip', 'zippertest')
        if os.path.exists(archive):
            os.unlink(archive)
        if os.path.exists(logFile):
            os.unlink(logFile)
        # NOTE(review): bare '-l' here (no logfile) while the info call below
        # logs to logFile — presumably intentional; verify option semantics.
        appl.ZipTool.main(['ziptool', '-l', 'create', archive,
            '--shortest-path', '--already-compressed-dirs=^[.]git$', dirName])

        appl.ZipTool.main(['ziptool', '-l' + logFile, 'info', archive,
            'file2',
            '-1', 'config',
            '-r', '--reg-expr', '--reg-expression', r'file[4]\W\w{3}',
            '-w', '--wildcard', '--wildcards', '*1?[tT][Xx][Tt]',
            '-s', '--substring', 'a23',
            ])

        # 5 info lines plus the empty line behind the trailing newline
        lines = base.StringUtils.fromFile(logFile).split('\n')
        self.assertEquals(6, len(lines))
        self.assertMatches(r'111005\s+0% \d+\.\d+\.\d+-\d+:\d+:\d+\s+file2.txt', lines[0])
        self.assertMatches(r'.git/config', lines[1])
        self.assertMatches(r'^\d+\.\d+\.\d+ \d+:\d+:\d+\s+file4.txt$', lines[2])
        self.assertMatches(r'^\d+\.\d+\.\d+ \d+:\d+:\d+\s+file1.txt$', lines[3])
        self.assertMatches(r'^\d+\.\d+\.\d+ \d+:\d+:\d+\s+.git/aa/aa234$', lines[4])
+
    def testCreateError(self):
        """'create' without an archive argument must return an error message."""
        if debug:
            return
        dirName = self.makeTree()
        logFile = self.tempFile('info.log', 'zippertest')
        archive = self.tempFile('infotest.zip', 'zippertest')
        if os.path.exists(archive):
            os.unlink(archive)
        if os.path.exists(logFile):
            os.unlink(logFile)
        # the archive argument is deliberately missing
        rc = appl.ZipTool.main(['ziptool', '-l', '--quiet', 'create',
            '--shortest-path', '--already-compressed-dirs=^[.]git$', dirName])
        self.assertEquals('missing <archive>', rc)
+
    def testArgFile(self):
        """Arguments read from a file via --args must behave like CLI arguments.

        The expected archive content mirrors testCreate().
        """
        if debug:
            return
        dirName = self.makeTree()
        logFile = self.tempFile('zt.log', 'zippertest')
        archive = self.tempFile('createtest.zip', 'zippertest')
        argFile = self.tempFile('argfile.txt', 'zippertest')
        if os.path.exists(archive):
            os.unlink(archive)
        src = dirName + os.sep
        # one argument per line; '#' starts a comment, empty lines are allowed
        base.StringUtils.toFile(argFile, '''# argument file test
create
{:s}
--ignored-files=[.]jpg
--ignored-dirs=aa
--shortest-path
{:s}dir1
{:s}file1.txt

--store-node-only
{:s}dir3
{:s}file3.txt
--already-compressed-dirs=^[.]git$
{:s}.git'''.format(archive, src, src, src, src, src))
        # /tmp/zippertest/srcdir/dir1: level1 subdir1
        appl.ZipTool.main(['ziptool', '-l' + logFile, '--args={:s}'.format(argFile)])

        zipper = base.Zipper.Zipper(archive, 'r');
        infos = zipper.infosOf(re.compile('file[13].txt'))
        self.assertEquals(2, len(infos))
        # --shortest-path: file1.txt without directory
        self.checkInfo('file1.txt', 111005, 488, infos[0]);
        # --store-node-only: file3.txt keeps the parent node
        self.checkInfo('srcdir/file3.txt', 111005, 489, infos[1]);
        infos = zipper.infosOf('level1')
        # --shortest-path: dir1 content without prefix
        self.assertEquals(1, len(infos))
        self.checkInfo('level1', 111008, 490, infos[0]);
        infos = zipper.infosOf('level3')
        self.assertEquals(1, len(infos))
        # --store-node-only: dir3 prefix kept
        self.checkInfo('dir3/level3', 111008, 491, infos[0]);
        # --ignored-files:
        infos = zipper.infosOf(re.compile('[.]jpg'))
        self.assertEquals(0, len(infos))
        # --ignored-dirs=aa
        infos = zipper.infosOf(None, re.compile('aa'))
        self.assertEquals(0, len(infos))
        # --already-compressed-dirs=^[.]git$
        infos = zipper.infosOf('config')
        self.assertEquals(1, len(infos))
        self.checkInfo('.git/config', 111006, 111006, infos[0]);
+
+ def testExtract(self):
+ if False and debug:
+ return
+ dirName = self.makeTree()
+ logFile = self.tempFile('zt.log', 'zippertest')
+ archive = self.tempFile('extracttest.zip', 'zippertest')
+ destination = self.tempFile('trgdir', 'zippertest')
+ if os.path.exists(archive):
+ os.unlink(archive)
+ src = dirName + os.sep
+ # /tmp/zippertest/srcdir/dir1: levelX.txt subdir1
+ appl.ZipTool.main(['ziptool', '-l' + logFile, 'create', archive,
+ '--shortest-path', src])
+ shutil.rmtree(destination, True)
+ os.mkdir(destination)
+ appl.ZipTool.main(['ziptool', '-l' + logFile, 'extract', archive,
+ '-d' + destination, '--destination=' + destination,
+ '-w',
+ '-u', '--update', '*.txt'])
+
+ oldTime = time.mktime((2000, 1, 1, 2, 33, 44, 0, 0, 0))
+ os.utime(dirName + os.sep + 'file2.txt', (oldTime, oldTime))
+
+ appl.ZipTool.main(['ziptool', '-l' + logFile, 'extract', archive,
+ '-d' + destination, '--destination=' + destination,
+ '-w',
+ '-u', '--update', '*.txt'])
+ zipper = base.Zipper.Zipper(archive, 'r');
+ infos = zipper.infosOf(re.compile('file[13].txt'))
+ self.assertEquals(2, len(infos))
+
if __name__ == '__main__':
    # direct start: run the full zipper test suite
    ZipperTest().run()
--- /dev/null
+PYTHONPATH=/home/ws/py/pyrshell/unittest/base:/opt/ecl-php-2018.12/eclipse/plugins/org.python.pydev.core_7.0.3.201811082356/pysrc:/home/ws/py/pyrshell:/usr/lib/python3.6:/usr/lib/python3.6/lib-dynload:/usr/local/lib/python3.6/dist-packages:/usr/lib/python3/dist-packages:/usr/share/pyrshell:/usr/lib/python36.zip:/home/ws/py/pyrshell/unittest
+SAMPLE__CONFIG=/abc/def
+SAMPLE__APPL=sample
+SAMPLE__LOGFILE=/var/log/local/test.log
+A=B
+C=D
\ No newline at end of file
--- /dev/null
+receivergroup "" {
+ warning: default_warn@hamatoma.de second_warn@hamatoma.de
+ error: default-error@hamatoma.de defaultroot@hamatoma.de
+}
+
+receivergroup "globalGroup" {
+ warning: global_warn@hamatoma.de global_monitor@hamatoma.de
+ error: global_warn@hamatoma.de global_pit@hamatoma.de
+}
+
+smtphost "" {
+ host: smtp.gmx.de
+ port: 587
+ sender: hm.neutral@gmx.de
+ user: hm.neutral@gmx.de
+ code: 1G.e.h.t.H.e.i.m
+ tls: True
+}
+host "" {
+ address: localhost
+ disk "" {
+ used: 81% 91%
+ }
+ disk "/" {
+ receivers: globalGroup
+ used: 70% 80%
+ }
+ load1: 4.0 5.0
+ load5: 3.1 4.1
+ load10: 2.2 3.2
+ processes: 500 1000
+ swap: 50% 100%
+}
\ No newline at end of file
--- /dev/null
+}
+receivergroup "" {
+ warning: warn@hamatoma.de
+ error: error@hamatoma.de root@hamatoma.de
+}
+
+receivergroup "globalgroup" {
+ warning: joe@hamatoma.de monitor@hamatoma.de
+ error: monitor@hamatoma.de pit@hamatoma.de
+ disk "wrongPlace" {
+}
+disk "outside" {
+host "" {
+ receivers: unknownGroup2
+ address: localhost
+ disk "" {
+ used: 81% 91%
+ }
+ disk "/" {
+ used: 70% 80%
+ }
+ load1: 4.0 5.0
+ load5: 3.1 4.1
+ load10: 2.2 3.2
+ processes: 500 1000 unknownGroup1
+ dummyProp: 1 2
+ receivergroup "" {
+ host "dummy" {
+}
+process: 100 50
+host "x" {
+)
--- /dev/null
+receivergroup "group1" {
+ error: critical1@hamatoma.de
+ warning: maybe1@hamatoma.de
+}
+
+host "localhost" {
+ address: 127.0.0.1
+ disk "/" {
+ }
+ disk "/home" {
+ used: 83% 93%
+ receivers: group1
+ }
+ disk "/opt" {
+ used: 84% 94%
+ }
+ receivers: group1
+ load1: 3.3 5.3
+ load5: 2.4 3.4
+ load10: 1.5 2.5 globalGroup
+ processes: 502 1002
+ swap: 10% 90%
+}
\ No newline at end of file
--- /dev/null
+receivergroup "" {
+ warning: warn@hamatoma.de
+ error: error@hamatoma.de root@hamatoma.de
+}
+
+receivergroup "globalgroup" {
+ warning: joe@hamatoma.de monitor@hamatoma.de
+ error: monitor@hamatoma.de pit@hamatoma.de
+}
+
+host "" {
+ address: localhost
+ disk "" {
+ used: 81% 91%
+ }
+ disk "/" {
+ used: 70% 80%
+ }
+ load1: 4.0 5.0
+ load5: 3.1 4.1
+ load10: 2.2 3.2
+ processes: 500 1000
+}
\ No newline at end of file
--- /dev/null
+receivergroup "localgroup" {
+ error: critical@hamatoma.de
+ warning: one@hamatoma.de two@hamatoma.de
+}
+
+site "wiki" {
+ url: https://wiki.hamatoma.de
+}
+host "localhost" {
+ address: 127.0.0.1
+ disk "/" {
+ used: 10% 100%
+ }
+ disk "/home" {
+ used: 10% 11% globalgroup
+ }
+ receivers: localgroup
+ raid "md0" {
+ raidtype: raid1
+ }
+ raid "md1" {
+ raidtype: raid0
+ }
+ load1: 0.01 10.0
+ load5: 0.01 0.02
+ load10: 0.01 10.0
+ processes: 50 100
+ swap: 0% 1%
+}
\ No newline at end of file
--- /dev/null
+server {
+ listen 10116;
+ server_name monitor.infeos.eu;
+ server_name_in_redirect off;
+}
--- /dev/null
+server {
+ #listen 81;
+ listen 80;
+ server_name wiki.hamatoma.de;
+ #server_name wrong
+ root /home/www/freiwiki;
+ location /.well-known {
+ alias /home/www/dromedar/.well-known;
+ allow all;
+ }
+ location / {
+ return 301 https://$server_name$request_uri; # enforce https
+ }
+}
+
+server {
+ listen 443 ssl;
+ server_name wiki.hamatoma.de;
+# server_name wrong
+}
--- /dev/null
+<html><body><p>Hi</p></body></html>
--- /dev/null
+#! /bin/sh
+echo "Hi"
--- /dev/null
+hm@caribou:/home/ws/py/pyrshell/unittest$ ls
+appl configuration __init__.py net __pycache__ UnitTestCase.py UnitTestSuite.py
+base data __init__.pyc PackageTest.py sites-enabled UnitTestCase.pyc
+hm@caribou:/home/ws/py/pyrshell/unittest$ cd data
+hm@caribou:/home/ws/py/pyrshell/unittest/data$ X=$(pwd)
+hm@caribou:/home/ws/py/pyrshell/unittest/data$ cd /etc
+hm@caribou:/etc$ g_rsh
+hm@caribou:/home/ws/py/pyrshell$ zip $X/example.zip *.sh
+ adding: 2biber.sh (deflated 3%)
+ adding: 2bt.sh (deflated 3%)
+ adding: 2dr.sh (deflated 3%)
+ adding: 2heu.sh (deflated 3%)
+ adding: 2indian.sh (deflated 3%)
+ adding: 2merkur.sh (deflated 3%)
+ adding: 2neptun.sh (deflated 3%)
+ adding: 2next.sh (deflated 3%)
+ adding: 2silenus.sh (deflated 3%)
+ adding: 2sky.sh (deflated 3%)
+ adding: 2tom.sh (deflated 3%)
+ adding: All.sh (deflated 21%)
+ adding: Env.sh (deflated 50%)
+ adding: MkTar.sh (deflated 16%)
+ adding: unpackShell.sh (stored 0%)
+ adding: upd-monitor.sh (deflated 20%)
+hm@caribou:/home/ws/py/pyrshell$ tar $X/example.tar *.sh
+tar: Ungültige Option -- „/“
+„tar --help“ oder „tar --usage“ gibt weitere Informationen.
+hm@caribou:/home/ws/py/pyrshell$ tar cf $X/example.tar *.sh
+hm@caribou:/home/ws/py/pyrshell$ tar czf $X/example.tgz *.sh
+hm@caribou:/home/ws/py/pyrshell$ tar cjf $X/example.tbz *.sh
+hm@caribou:/home/ws/py/pyrshell$ cd $X
+hm@caribou:/home/ws/py/pyrshell/unittest/data$ cat >example.html
+<html><body><p>Hi</p></body></html>
+hm@caribou:/home/ws/py/pyrshell/unittest/data$ cat example.sh
+cat: example.sh: Datei oder Verzeichnis nicht gefunden
--- /dev/null
+receivergroup "":
+ error: default-error@hamatoma.de defaultroot@hamatoma.de
+ warning: default_warn@hamatoma.de second_warn@hamatoma.de
+receivergroup "globalGroup":
+ error: global_warn@hamatoma.de global_pit@hamatoma.de
+ warning: global_warn@hamatoma.de global_monitor@hamatoma.de
+receivergroup "group1":
+ error: critical1@hamatoma.de
+ warning: maybe1@hamatoma.de
+smtphost "":
+ code: 1G.e.h.t.H.e.i.m
+ host: smtp.gmx.de
+ port: 587
+ sender: hm.neutral@gmx.de
+ tls: True
+ user: hm.neutral@gmx.de
+===
+host "":
+ address: localhost
+ receivergroup "":
+ error: default-error@hamatoma.de defaultroot@hamatoma.de
+ warning: default_warn@hamatoma.de second_warn@hamatoma.de
+ load1: 4.0 5.0 ""
+ load10: 2.2 3.2 ""
+ load5: 3.1 4.1 ""
+ processes: 500 1000 ""
+ swap: 50 100 ""
+ disk "":
+ receivergroup "":
+ error:
+ warning:
+ used: 81% 91% ""
+ disk "/":
+ receivergroup "globalGroup":
+ error: global_warn@hamatoma.de global_pit@hamatoma.de
+ warning: global_warn@hamatoma.de global_monitor@hamatoma.de
+ used: 70% 80% "globalGroup"
+===
+host "localhost":
+ address: 127.0.0.1
+ receivergroup "group1":
+ error: critical1@hamatoma.de
+ warning: maybe1@hamatoma.de
+ load1: 3.3 5.3 "group1"
+ load10: 1.5 2.5 "globalGroup"
+ load5: 2.4 3.4 "group1"
+ processes: 502 1002 "group1"
+ swap: 10 90 "group1"
+ disk "":
+ receivergroup "":
+ error:
+ warning:
+ used: 81% 91% ""
+ disk "/":
+ receivergroup "globalGroup":
+ error: global_warn@hamatoma.de global_pit@hamatoma.de
+ warning: global_warn@hamatoma.de global_monitor@hamatoma.de
+ used: 70% 80% "globalGroup"
+ disk "/home":
+ receivergroup "group1":
+ error: critical1@hamatoma.de
+ warning: maybe1@hamatoma.de
+ used: 83% 93% ""
+ disk "/opt":
+ receivergroup "":
+ error:
+ warning:
+ used: 84% 94% ""
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import os
+import datetime
+import shutil
+
+from unittest.UnitTestCase import UnitTestCase
+from base.Logger import Logger
+import base.StringUtils
+import net.FileTcpTaskHandler
+
class MockingServerThread:
    """Stand-in for the real server thread used by the task handler tests.

    The last message passed to send()/sendBinary() is recorded in _answer,
    so a test can inspect what the handler replied.
    """

    def __init__(self):
        # last reply as text; None until a send method has been called
        self._answer = None
        # binary payload a test may stage for the handler
        self._firstBlock = None

    def send(self, message):
        """Records the textual message as the current answer."""
        self._answer = message

    def sendBinary(self, data):
        """Records the byte payload (decoded to text) as the current answer."""
        self._answer = data.decode()

    def readBinary(self):
        """Mock behavior: no incoming data is available."""
        return None
+
class FileTcpTaskHandlerTest(UnitTestCase):
    """Unit tests for net.FileTcpTaskHandler: copy, move, delete, mkdir,
    get, put and df tasks, driven through a mocked server thread.
    """

    def __init__(self):
        # owner/group expectations in testPut differ when running as root
        self._isRoot = os.geteuid() == 0
        self._logFile = self.tempFile('logger.log')
        self._logger = Logger(self._logFile, True)
        # two sandbox directories the handler is allowed to operate in
        dir1 = self.tempDir('1', 'handlertest')
        dir2 = self.tempDir('2', 'handlertest')
        self._dirs = [dir1, dir2]
        os.makedirs(dir1, 0o777, True)
        os.makedirs(dir2, 0o777, True)
        # start with a fresh log file
        if os.path.isfile(self._logFile):
            os.remove(self._logFile)
        self._serverThread = MockingServerThread()
        UnitTestCase.__init__(self)

    def _finish(self):
        # cleanup: remove the sandbox created in the constructor
        shutil.rmtree(self.tempDir('handlertest'))

    def hasAnswer(self, expected):
        '''Asserts that the handler's last reply equals the expected text.
        @param expected: the expected answer string
        '''
        self.assertEquals(expected, self._serverThread._answer)

    def testCopy(self):
        """A 'copy' task must duplicate the file content and answer 'OK'."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger)
        source = self._dirs[0] + '/file1.txt'
        target = self._dirs[1] + '/file2.txt'
        content = 'xxxx'
        base.StringUtils.toFile(source, content)
        handler.fulfill(['file', 'test', 'copy', source, target], self._serverThread)
        self.assertTrue(os.path.exists(target))
        self.assertEquals(content, base.StringUtils.fromFile(target))
        self.hasAnswer('OK')

    def testCopyFailure(self):
        """Checks the error answers of 'copy': path outside the sandbox,
        missing target argument and a non-existing source file.
        """
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger)
        # /tmp is not one of the allowed directories
        source = '/tmp/file1.txt'
        target = '/tmp/file2.txt'
        handler.copy([source, target], self._serverThread)
        self.hasAnswer('+++ "file": copy: invalid path')
        # only one argument: the target is missing
        source = self._dirs[0] + '/missing.txt'
        handler.copy([source], self._serverThread)
        self.hasAnswer('+++ "file": copy: missing target')
        # valid arguments but the source file does not exist
        target = self._dirs[1] + '/missing.txt'
        handler.copy([source, target], self._serverThread)
        self.hasAnswer('+++ "file": copy failed No such file or directory')

    def testDelete(self):
        """A 'delete' task must remove the file and answer 'OK'."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger)
        source = self._dirs[0] + '/todelete.txt'
        content = 'yyy'
        base.StringUtils.toFile(source, content)
        self.assertTrue(os.path.exists(source))
        handler.fulfill(['file', 'test', 'delete', source], self._serverThread)
        self.assertFalse(os.path.exists(source))
        self.hasAnswer('OK')

    def testMkDir(self):
        """A 'mkdir' task must create the directory (including the parent)."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger)
        source = self._dirs[0] + '/subdir/dir1'
        # remove leftovers of a previous run
        if os.path.isdir(source):
            os.rmdir(source)
            os.rmdir(os.path.dirname(source))
        self.assertFalse(os.path.isdir(source))
        handler.fulfill(['file', 'test', 'mkdir', source], self._serverThread)
        self.assertTrue(os.path.isdir(source))
        self.hasAnswer('OK')

    def testMove(self):
        """A 'move' task must transfer the file to the target and answer 'OK'."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger)
        source = self._dirs[0] + '/file1.txt'
        target = self._dirs[1] + '/file2.txt'
        content = 'yyy'
        base.StringUtils.toFile(source, content)
        handler.fulfill(['file', 'test', 'move', source, target], self._serverThread)
        self.assertFalse(os.path.exists(source))
        self.assertTrue(os.path.exists(target))
        self.assertEquals(content, base.StringUtils.fromFile(target))
        self.hasAnswer('OK')

    def checkFile(self, name, offset, length, info, expectedContent):
        '''Checks one 'get' answer against the real file attributes.
        @param name: the file whose os.stat() values are compared
        @param offset: the expected block offset (decimal)
        @param length: the expected block length (decimal)
        @param info: the handler's answer: header line + '\n' + content
        @param expectedContent: the expected payload of the block
        '''
        # 'OK' SIZE OFFSET LENGTH MTIME MODE OWNER GROUP '\n'
        x, content = info.split('\n')
        parts = x.split(' ')
        # note: the parameter 'info' is reused for the stat result here
        info = os.stat(name)
        self.assertEquals(expectedContent, content)
        self.assertEquals('OK', parts[0])
        # all numeric header fields are hexadecimal
        self.assertEquals('{:x}'.format(info.st_size), parts[1])
        self.assertEquals('{:x}'.format(offset), parts[2])
        self.assertEquals('{:x}'.format(length), parts[3])
        # mtime in the header: nanoseconds / 1000 == microseconds
        self.assertEquals('{:x}'.format(int(info.st_mtime_ns / 1000)), parts[4])
        self.assertEquals('{:x}'.format(info.st_mode), parts[5])
        self.assertEquals('{:x}'.format(info.st_uid), parts[6])
        self.assertEquals('{:x}'.format(info.st_gid), parts[7])

    def testGet(self):
        """Reads a 36 byte file in blocks of 10 via repeated 'get' tasks."""
        # block size 10 forces four blocks for the 36 byte file
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger, 10)
        source = self._dirs[0] + '/fileGet.txt'
        content = '1234567890abcdefghijklmnopqrstuvwxyz'
        base.StringUtils.toFile(source, content)
        handler.fulfill(['file', 'test', 'get', source, '0'], self._serverThread)
        self.checkFile(source, 0, 10, self._serverThread._answer, '1234567890')
        handler.fulfill(['file', 'test', 'get', source, '10'], self._serverThread)
        self.checkFile(source, 10, 10, self._serverThread._answer, 'abcdefghij')
        handler.fulfill(['file', 'test', 'get', source, '20'], self._serverThread)
        self.checkFile(source, 20, 10, self._serverThread._answer, 'klmnopqrst')
        handler.fulfill(['file', 'test', 'get', source, '30'], self._serverThread)
        # last block: only 6 bytes remain
        self.checkFile(source, 30, 6, self._serverThread._answer, 'uvwxyz')

    def testPut(self):
        """Writes a file via 'put' and checks content, mtime, owner and group."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger, 10)
        source = self._dirs[0] + '/filePut.txt'
        if os.path.exists(source):
            os.unlink(source)
        self.assertFalse(os.path.exists(source))
        content = '1234567890abcdefghijklmnopqrstuvwxyz'
        date = datetime.datetime(2018, 4, 1, 3, 44, 55, 123456)
        # mtime in microseconds
        mtime = int(date.timestamp() * 1000000)
        self._serverThread._firstBlock = content.encode()
        # source SIZE OFFSET LENGTH MTIME MODE OWNER GROUP
        # only root may set foreign owner/group; otherwise use the own ids
        owner = 201 if self._isRoot else os.geteuid()
        group = 202 if self._isRoot else os.getegid()
        handler.fulfill(['file', 'test', 'put', source, '36', '0', '36', hex(mtime), hex(0o664), hex(owner), hex(group)], self._serverThread)
        self.hasAnswer('OK')
        info = os.stat(source)
        # st_mtime_ns is nanoseconds: microseconds * 1000
        self.assertEquals(mtime * 1000, info.st_mtime_ns)
        self.assertEquals(owner, info.st_uid)
        self.assertEquals(group, info.st_gid)
        current = base.StringUtils.fromFile(source)
        self.assertEquals(content, current)

    def testDiskFree(self):
        """A 'df' task must answer 'OK' plus tab separated filesystem data."""
        handler = net.FileTcpTaskHandler.FileTcpTaskHandler('test', self._dirs, self._logger, 10)
        handler.fulfill(['file', 'test', 'df'], self._serverThread)
        lines = self._serverThread._answer.split('\n')
        self.assertEquals('OK', lines[0])
        self.assertMatches(r'^\S+\t\d+\t\d$', lines[1])
+
if __name__ == '__main__':
    # direct start: run the tests of this module
    FileTcpTaskHandlerTest().run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+from unittest.UnitTestCase import UnitTestCase
+import base.Logger
+import net.HttpClient
+
class HttpClientTest(UnitTestCase):
    """Tests for net.HttpClient against a real HTTPS site.

    NOTE(review): these tests need network access to wiki.hamatoma.de.
    """

    def __init__(self):
        UnitTestCase.__init__(self)

    def testGetHead(self):
        """A HEAD request of the known site must report an HTML content type."""
        log = base.Logger.Logger('/tmp/unittest.log', True)
        httpClient = net.HttpClient.HttpClient(3, log)
        contentType = httpClient.getHead('https://wiki.hamatoma.de', 5)
        self.assertEquals('text/html; charset=UTF-8', contentType)

    def testGetContent(self):
        """The body of the known site must start with the HTML5 doctype."""
        log = base.Logger.Logger('/tmp/unittest.log', True)
        httpClient = net.HttpClient.HttpClient(3, log)
        body = httpClient.getContent('https://wiki.hamatoma.de', 5).decode()
        self.assertTrue(body.startswith('<!DOCTYPE html>'))
        httpClient.close()
+
if __name__ == '__main__':
    # direct start: run the HTTP client tests
    HttpClientTest().run()
--- /dev/null
+'''
+Created on 12.04.2018
+
+@author: hm
+'''
+import os
+from unittest.UnitTestCase import UnitTestCase
+from base.Logger import Logger
+import net.LinuxTcpTaskHandler
+
class MockingServerThread:
    """Mock replacement of the server thread: captures the handler's reply.

    Whatever is handed to send()/sendBinary() last is stored in _answer
    for inspection by the tests.
    """

    def __init__(self):
        # last reply as text; None until something was sent
        self._answer = None
        # binary payload a test may prepare for the handler
        self._firstBlock = None

    def send(self, message):
        """Keeps the textual message as the latest answer."""
        self._answer = message

    def sendBinary(self, data):
        """Keeps the byte payload (decoded) as the latest answer."""
        self._answer = data.decode()

    def readBinary(self):
        """Mock behavior: there is never incoming data."""
        return None
+
class LinuxTcpTaskHandlerTest(UnitTestCase):
    """Unit tests for net.LinuxTcpTaskHandler ('diskfree' and 'users' tasks).

    Fix: removed a stray C-style trailing semicolon after assertMatches()
    in testDiskFree; added documentation.
    """

    def __init__(self):
        # root status is stored though the current tests do not depend on it
        self._isRoot = os.geteuid() == 0
        self._logFile = '/tmp/logger.log'
        self._logger = Logger(self._logFile, True)
        self._serverThread = MockingServerThread()
        UnitTestCase.__init__(self)

    def hasAnswer(self, expected):
        '''Asserts that the handler's last reply equals the expected text.
        @param expected: the expected answer string
        '''
        self.assertEquals(expected, self._serverThread._answer)

    def testDiskFree(self):
        """'diskfree' must answer 'OK' plus one bracketed entry per filesystem."""
        handler = net.LinuxTcpTaskHandler.LinuxTcpTaskHandler(self._logger)
        handler.fulfill(['diskfree'], self._serverThread)
        lines = self._serverThread._answer.split('\n')
        self.assertEquals('OK', lines[0])
        # each entry looks like ['<name>', <num>, <num>, <num>]
        self.assertMatches(r"(\t?\['[^']+', \d+, \d+, \d+\])+", lines[1])

    def testUsers(self):
        """'users' must answer 'OK' plus tab separated user info lines."""
        handler = net.LinuxTcpTaskHandler.LinuxTcpTaskHandler(self._logger)
        handler.fulfill(['users'], self._serverThread)
        lines = self._serverThread._answer.split('\n')
        self.assertEquals('OK', lines[0])
        # name, address-like column and two time-like columns
        self.assertMatches(r'^\S+\t[:.\d]+(\t[\d:]+){2}', lines[1])
+
if __name__ == '__main__':
    # direct start: run the Linux task handler tests
    LinuxTcpTaskHandlerTest().run()
--- /dev/null
+'''
+Created on 22.04.2018
+
+@author: hm
+'''
+
+import unittest.PackageTest
+import unittest.net.FileTcpTaskHandlerTest
+import unittest.net.LinuxTcpTaskHandlerTest
+import unittest.net.HttpClientTest
+
def main():
    """Runs every unit test class of the 'net' package and prints the summary."""
    suite = unittest.PackageTest.PackageTest('NetTester')
    suite.run(unittest.net.FileTcpTaskHandlerTest.FileTcpTaskHandlerTest)
    suite.run(unittest.net.LinuxTcpTaskHandlerTest.LinuxTcpTaskHandlerTest)
    suite.run(unittest.net.HttpClientTest.HttpClientTest)
    suite.finish()
+
if __name__ == '__main__':
    # started directly: run all tests of the net package
    main()
\ No newline at end of file