From 3005872aefd245bdb9a5161825cb6ff45310ce83 Mon Sep 17 00:00:00 2001
From: Shane Canon
Date: Mon, 14 Feb 2022 22:53:05 +0000
Subject: [PATCH] Initial version

---
 Dockerfile           |  31 ++++
 LICENSE.md           |   7 +
 README.md            |   5 +
 biokbase/__init__.py |   0
 biokbase/auth.py     | 339 +++++++++++++++++++++++++++++++++++++++
 biokbase/log.py      | 368 +++++++++++++++++++++++++++++++++++++++++++
 requirements.txt     |   5 +
 7 files changed, 755 insertions(+)
 create mode 100644 Dockerfile
 create mode 100644 LICENSE.md
 create mode 100644 README.md
 create mode 100644 biokbase/__init__.py
 create mode 100644 biokbase/auth.py
 create mode 100644 biokbase/log.py
 create mode 100644 requirements.txt

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..61aafe7
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,31 @@
+FROM ubuntu:20.04
+
+RUN \
+    apt-get -y update && \
+    export DEBIAN_FRONTEND=noninteractive && \
+    export TZ=Etc/UTC && \
+    apt-get -y install gcc make curl git openjdk-8-jre
+
+# Copy in the SDK
+COPY --from=kbase/kb-sdk:20180808 /src /sdk
+RUN sed -i 's|/src|/sdk|g' /sdk/bin/*
+
+RUN \
+    V=py38_4.10.3 && \
+    curl -o conda.sh -s https://repo.anaconda.com/miniconda/Miniconda3-${V}-Linux-x86_64.sh && \
+    sh ./conda.sh -b -p /opt/conda3 && \
+    rm conda.sh
+
+ENV PATH=/opt/conda3/bin:$PATH:/sdk/bin
+
+# Install mamba from conda-forge
+RUN \
+    conda install -c conda-forge mamba
+
+ADD ./requirements.txt /tmp/
+RUN \
+    pip install -r /tmp/requirements.txt
+
+# Add in some legacy modules
+ADD biokbase /opt/conda3/lib/python3.8/site-packages/biokbase
+
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000..6329683
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,7 @@
+Copyright (c) 2022 The KBase Project and its Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..d40ee74
--- /dev/null
+++ b/README.md
@@ -0,0 +1,5 @@
+# SDK Base Python Image
+
+A minimal base Python image for building KBase SDK apps.
+
+
diff --git a/biokbase/__init__.py b/biokbase/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/biokbase/auth.py b/biokbase/auth.py
new file mode 100644
index 0000000..46d82d5
--- /dev/null
+++ b/biokbase/auth.py
@@ -0,0 +1,339 @@
+"""
+KBase wrappers around the Globus Online Nexus client libraries. We wrap the
+Nexus libraries to provide an API similar to the Perl Bio::KBase::Auth*
+libraries.
+
+In this module we follow the standard Python idiom of raising exceptions for
+failure states (the Perl modules returned error states in an error_msg field).
+"""
+from biokbase.nexus.client import NexusClient  # provided by the separate biokbase.nexus package
+from configparser import ConfigParser
+import os
+from urllib.parse import urlparse
+from pprint import pformat
+import requests
+import re
+
+"""
+Package "globals"
+    kb_config
+    trust_token_signers
+    attrs
+    authdata
+    conf
+    tokenenv
+    AuthSvcHost
+    RoleSvcURL
+    nexusconfig
+"""
+__version__ = "0.9"
+
+kb_config = os.environ.get('KB_DEPLOYMENT_CONFIG', os.environ['HOME'] + "/.kbase_config")
+
+trust_token_signers = ['https://nexus.api.globusonline.org/goauth/keys']
+attrs = ['user_id', 'token', 'client_secret', 'keyfile',
+         'keyfile_passphrase', 'password', 'sshagent_keys',
+         'sshagent_keyname']
+
+# authdata stores the configuration key/values from any configuration file
+authdata = dict()
+if os.path.exists(kb_config):
+    try:
+        conf = ConfigParser()
+        conf.read(kb_config)
+        # strip down whatever we read to only the attributes we recognize
+        for x in attrs:
+            authdata[x] = conf.get('authentication', x) if conf.has_option('authentication', x) else None
+    except Exception as e:
+        print("Error while reading INI file %s: %s" % (kb_config, e))
+tokenenv = authdata.get('tokenvar', 'KB_AUTH_TOKEN')
+# Yes, some variables are camel cased and others are all lower case. We keep the
+# attribute names from the Perl version, which was a mishmash too.
+AuthSvcHost = authdata.get('servicehost', "https://nexus.api.globusonline.org/")
+# Copied from perl libs for reference, not used here
+#ProfilePath = authdata.get('authpath', "/goauth/token")
+RoleSvcURL = authdata.get('rolesvcurl', "https://kbase.us/services/authorization/Roles")
+nexusconfig = {'cache': {'class': 'biokbase.nexus.token_utils.InMemoryCache',
+                         'args': [],
+                         },
+               'server': urlparse(AuthSvcHost).netloc,
+               'verify_ssl': False,
+               'client': None,
+               'client_secret': None}
+# Compile a regex for parsing user_id's out of tokens
+token_userid = re.compile(r'(?<=^un=)\w+')
+
+
+def LoadConfig():
+    """
+    Load configuration from the INI style file named in kb_config.
+    """
+    global kb_config, authdata, tokenenv, AuthSvcHost
+    global RoleSvcURL, nexusconfig, conf
+
+    kb_config = os.environ.get('KB_DEPLOYMENT_CONFIG', os.environ['HOME'] + "/.kbase_config")
+
+    if os.path.exists(kb_config):
+        try:
+            conf = ConfigParser()
+            conf.read(kb_config)
+            # strip down whatever we read to only the attributes we recognize
+            for x in attrs:
+                authdata[x] = conf.get('authentication', x) if conf.has_option('authentication', x) else None
+        except Exception as e:
+            print("Error while reading INI file %s: %s" % (kb_config, e))
+    tokenenv = authdata.get('tokenvar', 'KB_AUTH_TOKEN')
+    AuthSvcHost = authdata.get('servicehost', "https://nexus.api.globusonline.org/")
+    RoleSvcURL = authdata.get('rolesvcurl', "https://kbase.us/services/authorization/Roles")
+    nexusconfig = {'cache': {'class': 'biokbase.nexus.token_utils.InMemoryCache',
+                             'args': [],
+                             },
+                   'server': urlparse(AuthSvcHost).netloc,
+                   'verify_ssl': False,
+                   'client': None,
+                   'client_secret': None}
+
+
+def SetConfigs(configs):
+    """
+    Set configuration directives in the INI file named in kb_config.
+
+    Takes a dictionary of config directives for the authentication section
+    that should be set or unset. If a dictionary entry has the value None,
+    the corresponding config setting is deleted.
+    """
+    global kb_config, authdata, tokenenv, AuthSvcHost
+    global RoleSvcURL, nexusconfig, conf
+
+    conf = ConfigParser()
+    if os.path.exists(kb_config):
+        conf.read(kb_config)
+    if not conf.has_section('authentication'):
+        conf.add_section('authentication')
+    for key in configs.keys():
+        if configs[key] is not None:
+            conf.set('authentication', key, str(configs[key]))
+        else:
+            conf.remove_option('authentication', key)
+    with open(kb_config, 'w') as configfile:
+        conf.write(configfile)
+    LoadConfig()
+
+
+class AuthCredentialsNeeded(Exception):
+    """
+    Raised when there are not enough credentials to attempt authentication,
+    which is distinct from having bad or bogus credentials.
+    """
+    pass
+
+
+class AuthFail(Exception):
+    """
+    Raised when the credentials we do have are rejected.
+    """
+    pass
+
+
+class Token:
+    """
+    Handles token requests and validation. This is basically a wrapper around
+    the biokbase.nexus.client.NexusClient class from Globus Online that provides
+    an API similar to the Perl Bio::KBase::AuthToken module. For KBase purposes
+    the base Globus Online classes have been modified to also support ssh-agent
+    based authentication.
+
+    In-memory caching is provided by the underlying biokbase.nexus.client
+    implementation.
+
+    Instance Attributes:
+    user_id
+    password
+    token
+    keyfile
+    client_secret
+    keyfile_passphrase
+    sshagent_keyname
+    """
+
+    def __init__(self, **kwargs):
+        """
+        The constructor accepts these optional keyword arguments to initialize
+        the object:
+
+        user_id, password, token, keyfile, client_secret, keyfile_passphrase, sshagent_keyname
+
+        If user_id is provided among the initializers, the get() method is called
+        at the end of initialization to attempt to fetch a token from the service
+        defined in AuthSvcHost. If there are not enough credentials to
+        authenticate, that exception is ignored; if there are enough credentials
+        and they fail to authenticate, the exception is re-raised.
+
+        If there is a ~/.kbase_config INI file, it is used to fill in values that
+        were not given to the constructor - this can be short circuited by
+        passing ignore_kbase_config=True among the initialization params.
+        """
+        global nexusconfig
+        attrs = ['keyfile', 'keyfile_passphrase', 'user_id', 'password', 'token',
+                 'client_secret', 'sshagent_keyname']
+        for attr in attrs:
+            setattr(self, attr, kwargs.get(attr, None))
+        self.nclient = NexusClient(nexusconfig)
+        self.nclient.user_key_file = self.keyfile
+
+        if "agent_keys" in self.nclient.__dict__:
+            self.sshagent_keys = self.nclient.agent_keys
+        else:
+            self.sshagent_keys = dict()
+
+        # Flag marking whether the .kbase_config file provided any default values
+        defattr = any(authdata.get(attr) is not None for attr in attrs)
+        # if we have a user_id defined, try to get a token with whatever else was
+        # given; if that fails due to missing credentials, fall back to any values
+        # from ~/.kbase_config
+        if self.user_id:
+            try:
+                self.get()
+            except AuthCredentialsNeeded:
+                pass
+        elif os.environ.get(tokenenv):
+            self.token = os.environ[tokenenv]
+        elif defattr and not kwargs.get('ignore_kbase_config'):
+            for attr in attrs:
+                if authdata.get(attr) is not None:
+                    setattr(self, attr, authdata[attr])
+            try:
+                self.get()
+            except AuthCredentialsNeeded:
+                pass
+        if self.user_id is None and self.token:
+            # parse the user_id out of the token and set it
+            match = token_userid.search(self.token)
+            if match:
+                self.user_id = match.group(0)
+
+    def validate(self, token=None):
+        """
+        Validate the contents of self.token (or the token passed in) against the
+        authentication service backend. Results are cached, so the first
+        validation pays the network round trip and later validations of the same
+        token return quickly.
+
+        A successfully validated token returns the user_id. Invalid tokens raise
+        a ValueError.
+        """
+        if token is not None:
+            res = self.nclient.validate_token(token)
+        else:
+            res = self.nclient.validate_token(self.token)
+        self.user_id = res[0]
+        return self.user_id
+
+    def get(self, **kwargs):
+        """
+        Use either explicit parameters or the current instance attributes to
+        authenticate and retrieve a token from Globus Online (or whatever service
+        is defined in AuthSvcHost).
+
+        The following parameters are optional, and are assigned to the instance
+        attributes before attempting to fetch a token:
+        keyfile, keyfile_passphrase, user_id, password, client_secret, sshagent_keyname
+
+        A user_id plus any one of the following is enough to attempt authentication:
+        keyfile, keyfile_passphrase, password, sshagent_keyname
+
+        If there are not enough credentials, an AuthCredentialsNeeded exception is
+        raised. If the underlying Globus libraries fail to authenticate, their
+        exception is passed up. On success, self is returned with the token
+        attribute containing a good token; an AuthFail exception is raised if the
+        credentials are rejected by Globus Online.
+
+        Note: authentication with an explicit RSA client_secret is not currently
+        supported.
+        """
+        # attributes that may be passed in via kwargs
+        attrs = ['keyfile', 'keyfile_passphrase', 'user_id', 'password', 'token',
+                 'client_secret', 'sshagent_keyname']
+        for attr in attrs:
+            if attr in kwargs:
+                setattr(self, attr, kwargs[attr])
+        # override the user_key_file default in the nclient object
+        self.nclient.user_key_file = self.keyfile
+        # as in the Perl libraries: if we have a user_id, no other credentials, and
+        # a single key available from ssh-agent, default to using that key for auth
+        if (self.user_id and not (self.password or self.sshagent_keyname or self.keyfile)
+                and len(self.sshagent_keys) == 1):
+            self.sshagent_keyname = list(self.sshagent_keys.keys())[0]
+        if not (self.user_id and (self.password or self.sshagent_keyname or self.keyfile)):
+            raise AuthCredentialsNeeded(
+                "Need user_id and one of keyfile, password or sshagent_keyname to be defined.")
+        if self.user_id and self.keyfile:
+            passphrase = kwargs.get("keyfile_passphrase", self.keyfile_passphrase)
+            res = self.nclient.request_client_credential(self.user_id, lambda: passphrase)
+        elif self.user_id and self.password:
+            res = self.nclient.request_client_credential(self.user_id, self.password)
+        elif self.user_id and self.sshagent_keyname:
+            res = self.nclient.request_client_credential_sshagent(self.user_id, self.sshagent_keyname)
+        else:
+            raise AuthCredentialsNeeded(
+                "Authentication with an explicit client_secret is not supported - "
+                "please put the key in a file or in ssh-agent")
+        if 'access_token' in res:
+            self.token = res['access_token']
+        else:
+            raise AuthFail('Could not authenticate with values: ' + pformat(self.__dict__))
+        return self
+
+    def get_sessDB_token(self):
+        pass
+
+
+class User:
+    top_attrs = {"user_id": "username",
+                 "verified": "email_validated",
+                 "opt_in": "opt_in",
+                 "name": "fullname",
+                 "email": "email",
+                 "system_admin": "system_admin"}
+
+    def __init__(self, **kwargs):
+        """
+        The constructor accepts these optional keyword arguments to initialize
+        the object:
+
+        user_id, token, enabled, groups, name, email, verified
+
+        If a token is provided among the initializers, the get() method is called
+        at the end of initialization to fetch the user profile from Globus Online.
+
+        The ~/.kbase_config file is only indirectly supported - use it to get a
+        token, and then pass that token to this constructor to fetch a profile.
+        """
+        global nexusconfig
+        attrs = ['user_id', 'enabled', 'groups', 'name', 'email', 'verified']
+        for attr in attrs:
+            setattr(self, attr, kwargs.get(attr, None))
+        self.token = kwargs.get('token', None)
+        if self.token:
+            self.authToken = Token(token=self.token)
+            self.token = self.authToken.token
+            self.get()
+
+    def get(self, **kwargs):
+        if 'token' in kwargs:
+            self.authToken = Token(token=kwargs['token'])
+            self.token = self.authToken.token
+        if not self.token:
+            raise AuthCredentialsNeeded("Authentication token required")
+        p = {'custom_fields': '*',
+             'fields': 'groups,username,email_validated,fullname,email'}
+        headers = {'Authorization': 'Globus-Goauthtoken ' + self.token}
+        resp = requests.get(AuthSvcHost + "users/" + self.authToken.user_id,
+                            params=p, headers=headers)
+        profile = resp.json()
+        for attr, go_attr in self.top_attrs.items():
+            setattr(self, attr, profile.get(go_attr))
+        # pull the name field out of the groups dict entries and store the list in groups
+        setattr(self, 'groups', [x['name'] for x in profile.get('groups', [])])
+        if 'custom_fields' in profile:
+            for attr in profile['custom_fields'].keys():
+                setattr(self, attr, profile['custom_fields'][attr])
+        return self
+
+    def update(self, **kwargs):
+        pass
diff --git a/biokbase/log.py b/biokbase/log.py
new file mode 100644
index 0000000..5626ac0
--- /dev/null
+++ b/biokbase/log.py
@@ -0,0 +1,368 @@
+"""
+NAME
+    log
+
+DESCRIPTION
+    A library for sending logging messages to syslog.
+
+METHODS
+    log(string subsystem, hashref constraints): Initializes log. You
+    should call this at the beginning of your program. Constraints are
+    optional.
+
+    log_message(int level, string message): sends log message to syslog.
+
+    * level: (0-9) The logging level for this message is compared to
+        the logging level that has been set in log. If it is <=
+        the set logging level, the message will be sent to syslog,
+        otherwise it will be ignored. Logging level is set to 6
+        if the control API cannot be reached and the user does
+        not set the log level. Log level can also be entered as
+        a string (e.g. 'DEBUG')
+
+    * message: This is the log message.
+
+    get_log_level(): Returns the current log level as an integer.
+
+    set_log_level(integer level): Sets the log level. Only use this if you
+    wish to override the log levels that are defined by the control API.
+    Can also be entered as a string (e.g. 'DEBUG')
+
+    * level : priority
+
+    * 0 : EMERG - system is unusable
+
+    * 1 : ALERT - component must be fixed immediately
+
+    * 2 : CRIT - secondary component must be fixed immediately
+
+    * 3 : ERR - non-urgent failure
+
+    * 4 : WARNING - warning that an error will occur if no action is taken
+
+    * 5 : NOTICE - unusual but safe conditions
+
+    * 6 : INFO - normal operational messages
+
+    * 7 : DEBUG - lowest level of debug
+
+    * 8 : DEBUG2 - second level of debug
+
+    * 9 : DEBUG3 - highest level of debug
+
+    set_log_msg_check_count(integer count): used to set the number of
+    messages that log will log before querying the control API for the
+    log level (default is 100 messages).
+
+    set_log_msg_check_interval(integer seconds): used to set the interval,
+    in seconds, that will be allowed to pass before log will query the
+    control API for the log level (default is 300 seconds).
+
+    update_api_log_level(): Checks the control API for the currently set
+    log level.
+
+    use_api_log_level(): Removes the user-defined log level and tells log
+    to use the control API-defined log level.
+""" + +import json as _json +import urllib.request as _urllib2 +import syslog as _syslog +import platform as _platform +import inspect as _inspect +import os as _os +import getpass as _getpass +import warnings as _warnings +from configparser import ConfigParser as _ConfigParser +import time + +MLOG_ENV_FILE = 'MLOG_CONFIG_FILE' +_GLOBAL = 'global' +MLOG_LOG_LEVEL = 'mlog_log_level' +MLOG_API_URL = 'mlog_api_url' +MLOG_LOG_FILE = 'mlog_log_file' + +DEFAULT_LOG_LEVEL = 6 +#MSG_CHECK_COUNT = 100 +#MSG_CHECK_INTERVAL = 300 # 300s = 5min +MSG_FACILITY = _syslog.LOG_LOCAL1 +EMERG_FACILITY = _syslog.LOG_LOCAL0 + +EMERG = 0 +ALERT = 1 +CRIT = 2 +ERR = 3 +WARNING = 4 +NOTICE = 5 +INFO = 6 +DEBUG = 7 +DEBUG2 = 8 +DEBUG3 = 9 +_MLOG_TEXT_TO_LEVEL = {'EMERG': EMERG, + 'ALERT': ALERT, + 'CRIT': CRIT, + 'ERR': ERR, + 'WARNING': WARNING, + 'NOTICE': NOTICE, + 'INFO': INFO, + 'DEBUG': DEBUG, + 'DEBUG2': DEBUG2, + 'DEBUG3': DEBUG3, + } +_MLOG_TO_SYSLOG = [_syslog.LOG_EMERG, _syslog.LOG_ALERT, _syslog.LOG_CRIT, + _syslog.LOG_ERR, _syslog.LOG_WARNING, _syslog.LOG_NOTICE, + _syslog.LOG_INFO, _syslog.LOG_DEBUG, _syslog.LOG_DEBUG, + _syslog.LOG_DEBUG] +#ALLOWED_LOG_LEVELS = set(_MLOG_TEXT_TO_LEVEL.values()) +_MLOG_LEVEL_TO_TEXT = {} +for k, v in _MLOG_TEXT_TO_LEVEL.items(): + _MLOG_LEVEL_TO_TEXT[v] = k +LOG_LEVEL_MIN = min(_MLOG_LEVEL_TO_TEXT.keys()) +LOG_LEVEL_MAX = max(_MLOG_LEVEL_TO_TEXT.keys()) +del k, v + + +class log(object): + """ + This class contains the methods necessary for sending log messages. + """ + + def __init__(self, subsystem, constraints=None, config=None, logfile=None, + ip_address=False, authuser=False, module=False, + method=False, call_id=False, changecallback=None): + if not subsystem: + raise ValueError("Subsystem must be supplied") + + self.user = _getpass.getuser() + self.parentfile = _os.path.abspath(_inspect.getfile( + _inspect.stack()[1][0])) + self.ip_address = ip_address + self.authuser = authuser + self.module = module + self.method = method + self.call_id = call_id + noop = lambda: None + self._callback = changecallback or noop + self._subsystem = str(subsystem) + self._mlog_config_file = config + if not self._mlog_config_file: + self._mlog_config_file = _os.environ.get(MLOG_ENV_FILE, None) + if self._mlog_config_file: + self._mlog_config_file = str(self._mlog_config_file) + self._user_log_level = -1 + self._config_log_level = -1 + self._user_log_file = logfile + self._config_log_file = None + self._api_log_level = -1 + self._msgs_since_config_update = 0 + self._time_at_config_update = time.time() + self.msg_count = 0 + self._recheck_api_msg = 100 + self._recheck_api_time = 300 # 5 mins + self._log_constraints = {} if not constraints else constraints + + self._init = True + self.update_config() + self._init = False + + def _get_time_since_start(self): + time_diff = time.time() - self._time_at_config_update + return time_diff + + def get_log_level(self): + if(self._user_log_level != -1): + return self._user_log_level + elif(self._config_log_level != -1): + return self._config_log_level + elif(self._api_log_level != -1): + return self._api_log_level + else: + return DEFAULT_LOG_LEVEL + + def _get_config_items(self, cfg, section): + cfgitems = {} + if cfg.has_section(section): + for k, v in cfg.items(section): + cfgitems[k] = v + return cfgitems + + def update_config(self): + loglevel = self.get_log_level() + logfile = self.get_log_file() + + self._api_log_level = -1 + self._msgs_since_config_update = 0 + self._time_at_config_update = time.time() + + # Retrieving the control API 
defined log level + api_url = None + if self._mlog_config_file and _os.path.isfile(self._mlog_config_file): + cfg = _ConfigParser() + cfg.read(self._mlog_config_file) + cfgitems = self._get_config_items(cfg, _GLOBAL) + cfgitems.update(self._get_config_items(cfg, self._subsystem)) + if MLOG_LOG_LEVEL in cfgitems: + try: + self._config_log_level = int(cfgitems[MLOG_LOG_LEVEL]) + except: + _warnings.warn( + 'Cannot parse log level {} from file {} to int'.format( + cfgitems[MLOG_LOG_LEVEL], self._mlog_config_file) + + '. Keeping current log level.') + if MLOG_API_URL in cfgitems: + api_url = cfgitems[MLOG_API_URL] + if MLOG_LOG_FILE in cfgitems: + self._config_log_file = cfgitems[MLOG_LOG_FILE] + elif self._mlog_config_file: + _warnings.warn('Cannot read config file ' + self._mlog_config_file) + + if (api_url): + subsystem_api_url = api_url + "/" + self._subsystem + try: + data = _json.load(_urllib2.urlopen(subsystem_api_url, + timeout=5)) + except _urllib2.URLError as e: + code_ = None + if hasattr(e, 'code'): + code_ = ' ' + str(e.code) + _warnings.warn( + 'Could not connect to mlog api server at ' + + '{}:{} {}. Using default log level {}.'.format( + subsystem_api_url, code_, str(e.reason), + str(DEFAULT_LOG_LEVEL))) + else: + max_matching_level = -1 + for constraint_set in data['log_levels']: + level = constraint_set['level'] + constraints = constraint_set['constraints'] + if level <= max_matching_level: + continue + + matches = 1 + for constraint in constraints: + if constraint not in self._log_constraints: + matches = 0 + elif (self._log_constraints[constraint] != + constraints[constraint]): + matches = 0 + + if matches == 1: + max_matching_level = level + + self._api_log_level = max_matching_level + if ((self.get_log_level() != loglevel or + self.get_log_file() != logfile) and not self._init): + self._callback() + + def _resolve_log_level(self, level): + if(level in _MLOG_TEXT_TO_LEVEL): + level = _MLOG_TEXT_TO_LEVEL[level] + elif(level not in _MLOG_LEVEL_TO_TEXT): + raise ValueError('Illegal log level') + return level + + def set_log_level(self, level): + self._user_log_level = self._resolve_log_level(level) + self._callback() + + def get_log_file(self): + if self._user_log_file: + return self._user_log_file + if self._config_log_file: + return self._config_log_file + return None + + def set_log_file(self, filename): + self._user_log_file = filename + self._callback() + + def set_log_msg_check_count(self, count): + count = int(count) + if count < 0: + raise ValueError('Cannot check a negative number of messages') + self._recheck_api_msg = count + + def set_log_msg_check_interval(self, interval): + interval = int(interval) + if interval < 0: + raise ValueError('interval must be positive') + self._recheck_api_time = interval + + def clear_user_log_level(self): + self._user_log_level = -1 + self._callback() + + def _get_ident(self, level, user, parentfile, ip_address, authuser, module, + method, call_id): + infos = [self._subsystem, _MLOG_LEVEL_TO_TEXT[level], + repr(time.time()), user, parentfile, str(_os.getpid())] + if self.ip_address: + infos.append(str(ip_address) if ip_address else '-') + if self.authuser: + infos.append(str(authuser) if authuser else '-') + if self.module: + infos.append(str(module) if module else '-') + if self.method: + infos.append(str(method) if method else '-') + if self.call_id: + infos.append(str(call_id) if call_id else '-') + return "[" + "] [".join(infos) + "]" + + def _syslog(self, facility, level, ident, message): + _syslog.openlog(ident, facility) + if 
isinstance(message, str): + _syslog.syslog(_MLOG_TO_SYSLOG[level], message) + else: + try: + for m in message: + _syslog.syslog(_MLOG_TO_SYSLOG[level], m) + except TypeError: + _syslog.syslog(_MLOG_TO_SYSLOG[level], str(message)) + _syslog.closelog() + + def _log(self, ident, message): + ident = ' '.join([str(time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime())), + _platform.node(), ident + ': ']) + try: + with open(self.get_log_file(), 'a') as log: + if isinstance(message, str): + log.write(ident + message + '\n') + else: + try: + for m in message: + log.write(ident + m + '\n') + except TypeError: + log.write(ident + str(message) + '\n') + except Exception as e: + err = 'Could not write to log file ' + str(self.get_log_file()) + \ + ': ' + str(e) + '.' + _warnings.warn(err) + + def log_message(self, level, message, ip_address=None, authuser=None, + module=None, method=None, call_id=None): +# message = str(message) + level = self._resolve_log_level(level) + + self.msg_count += 1 + self._msgs_since_config_update += 1 + + if(self._msgs_since_config_update >= self._recheck_api_msg + or self._get_time_since_start() >= self._recheck_api_time): + self.update_config() + + ident = self._get_ident(level, self.user, self.parentfile, ip_address, + authuser, module, method, call_id) + # If this message is an emergency, send a copy to the emergency + # facility first. + if(level == 0): + self._syslog(EMERG_FACILITY, level, ident, message) + + if(level <= self.get_log_level()): + self._syslog(MSG_FACILITY, level, ident, message) + if self.get_log_file(): + self._log(ident, message) + +if __name__ == '__main__': + pass diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..5027e83 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +requests +coverage +nose +sphinx +jsonrpcbase
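
Example of consuming the image (illustrative only): the tag below is a local
build tag rather than a published image name, and the /kb/module layout with a
"make all" step follows the usual kb-sdk module convention rather than anything
defined in this patch.

    # build the base image from this repository
    docker build -t sdkbase-python:local .

    # Dockerfile of a downstream SDK app that builds on it
    FROM sdkbase-python:local
    COPY . /kb/module
    WORKDIR /kb/module
    RUN make all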
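
The vendored biokbase.log module is self-contained, so a short sketch of how an
app might call it may be useful. The subsystem name "MyModule" and the log file
path are made-up example values, and syslog must be available in the container
for the syslog half of the output.

    from biokbase.log import log, INFO, DEBUG

    logger = log('MyModule', logfile='/tmp/mymodule.log')
    logger.set_log_level(DEBUG)
    logger.log_message(INFO, 'starting run')
    logger.log_message(DEBUG, 'input validated, launching job')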