Skip to content

Commit

Permalink
Merged
Browse files Browse the repository at this point in the history
  • Loading branch information
gvasold committed Feb 7, 2020
2 parents 21a20b7 + bdc35cb commit e8b3995
Show file tree
Hide file tree
Showing 7 changed files with 241 additions and 30 deletions.
2 changes: 1 addition & 1 deletion bin/validate_factoids.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def run(jsonfile, quiet=False, permissive=False, spec_file=None):
"{} ... invalid\n\t{}".format(
f_id, validator.make_readable_validation_msg(err)
),
flush=True,
flush=True
)
if not quiet:
print(
Expand Down
31 changes: 31 additions & 0 deletions src/papilotte/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from papilotte.resolver import PapiResolver
from . import util
import papilotte
from logging.config import dictConfig

__version__ = "0.1.4.dev1"

Expand All @@ -16,10 +17,40 @@
print("Papilotte requires Python version 3.5 or higher.")
sys.exit(1)

def get_logging_configuration(options):
    """Build a ``logging.config.dictConfig`` compatible dictionary.

    :param options: configuration options; uses 'log_level' and -- for the
        file handler -- 'log_file'.
    :type options: dict
    :return: a dictConfig dictionary describing console/file/syslog handlers
    :rtype: dict
    """
    # dictConfig resolves the "class" entry from a dotted-path *string*;
    # passing the class object itself fails at configuration time.
    log_level = options.get("log_level", logging.INFO)
    cfg = {
        "version": 1,
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "stream": sys.stdout,
            },
            "file": {
                "class": "logging.handlers.RotatingFileHandler",
                "filename": options.get("log_file"),
                "maxBytes": 100000,
                "backupCount": 3,
                "level": log_level,
            },
            "syslog": {
                "class": "logging.handlers.SysLogHandler",
                "level": log_level,
            },
        },
        "root": {
            "handlers": ["syslog"],
        },
    }

    return cfg


def create_app(**cli_options):
"""Create the app object."""
papilotte.options = util.get_options(**cli_options)
#dictConfig(get_logging_configuration(papilotte.options))
# Only used for json connector
util.validate_json(papilotte.options)
app = connexion.FlaskApp(
Expand Down
6 changes: 4 additions & 2 deletions src/papilotte/config/default_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@ debug: false

# --- logging ---------------------------------
log_level: info
# console, file, syslog
log_handler: console
# Set one or more space separated values from: console, file, syslog
log_handlers: console
# Only required for 'file' log handler
log_file: NULL


# --- api -------------------------------------
Expand Down
4 changes: 4 additions & 0 deletions src/papilotte/errors.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
class DeletionError(Exception):
    """Raised by connectors when deleting a resource fails."""

class ConfigurationError(Exception):
    """Raised when bad configuration values are detected."""
31 changes: 31 additions & 0 deletions src/papilotte/resolver.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""A custom resolver which maps operationIds to functions in modules.
A request to eg. `GET /api/factoids` will be mapped to
`api.factoids.get_factoids` because the operationId of this operation
is `getFactoids`.
This could also be done using qualified operationIds but I try to
keep packages/modules out of the spec.
"""
import re
from connexion.resolver import RestyResolver

class PapiResolver(RestyResolver):
    """Resolver that maps camelCase operationIds to snake_case functions."""

    def resolve_operation_id(self, operation):
        """
        Resolve the operationId in snake_case using a mechanism similar to RestyResolver.
        eg. `GET /api/factoids` is resolved to `api.factoids.get_factoids()` because the operationId is `getFactoids`.
        Uses RestyResolver as fallback for missing operationIds.
        """
        if not operation.operation_id:
            # No explicit operationId: fall back to RestyResolver's semantics.
            return self.resolve_operation_id_using_rest_semantics(operation)

        resolved_id = super().resolve_operation_id(operation)
        # getFactoids -> get_factoids
        snake_case_id = re.sub(
            r'([A-Z])', lambda match: '_' + match.group(1).lower(), resolved_id
        )
        path_match = re.search(
            r'^/?(?P<resource_name>([\w\-](?<!/))*)(?P<trailing_slash>/*)(?P<extended_path>.*)$',
            operation.path,
        )
        return "{}.{}.{}".format(
            self.default_module_name,
            path_match.group('resource_name'),
            snake_case_id,
        )
119 changes: 94 additions & 25 deletions src/papilotte/util.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,26 @@
import json
import logging
import os
import sys
from collections import ChainMap

from jsonschema.exceptions import ValidationError
from pkg_resources import resource_filename

import yaml
import json
from papilotte import validator
from jsonschema.exceptions import ValidationError
from papilotte.connectors.json.reader import read_json_file

from papilotte.errors import ConfigurationError


options = {}

LOG_LEVELS = {
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG
"error": logging.ERROR,
"warning": logging.WARNING,
"info": logging.INFO,
"debug": logging.DEBUG,
}


Expand All @@ -24,9 +31,9 @@ def transform_cli_options(**cli_params):
# only used with the built-in server (Flask)
"""
options = {}
ignore_options = ('config_file', 'debug')
if cli_params.get('debug', False):
options['log_level'] = 'debug'
ignore_options = ("config_file", "debug")
if cli_params.get("debug", False):
options["log_level"] = "debug"
for param, value in cli_params.items():
if value is not None and param not in ignore_options:
options[param] = value
Expand All @@ -45,25 +52,23 @@ def get_options(**cli_params):
:rtype: dict
"""
cli_options = transform_cli_options(**cli_params)
default_cfg_file = resource_filename(__name__,
'config/default_config.yml')
default_spec_file = resource_filename(__name__, 'openapi/ipif.yml')
default_cfg_file = resource_filename(__name__, "config/default_config.yml")
default_spec_file = resource_filename(__name__, "openapi/ipif.yml")
with open(default_cfg_file) as file_:
default_config = yaml.safe_load(file_)
default_config['spec_file'] = default_spec_file
default_config["spec_file"] = default_spec_file

custom_config = {}
if cli_params.get('config_file') is not None:
with open(cli_params['config_file']) as file_:
if cli_params.get("config_file") is not None:
with open(cli_params["config_file"]) as file_:
custom_config = yaml.safe_load(file_)

options = dict(ChainMap(cli_options, custom_config, default_config))
# cast to expected data types
options['log_level'] = LOG_LEVELS[options['log_level']]
options["log_level"] = LOG_LEVELS[options["log_level"]]
for key, value in options.items():
if value is not None:
if key in ('default_size', 'max_size', 'compliance_level',
'port'):
if key in ("default_size", "max_size", "compliance_level", "port"):
options[key] = int(value)
return options

Expand All @@ -76,16 +81,80 @@ def validate_json(options):
IPIF OpenAPI spec.
:return: None
"""
if options['connector'] == 'papilotte.connectors.json':
spec_file = options.get('spec_file')
json_file = options.get('json_file')
strict_validation = options.get('strict_validation')
if options["connector"] == "papilotte.connectors.json":
spec_file = options.get("spec_file")
json_file = options.get("json_file")
strict_validation = options.get("strict_validation")
factoids = read_json_file(json_file)
try:
for factoid in factoids:
validator.validate(factoid, strict_validation, spec_file)
except ValidationError as err:
msg = ("'{}' contains invalid factoids:\n{}"
"\nUse the 'validate_factoids.py' script to validate your data."
).format(json_file, validator.make_readable_validation_msg(err))
msg = (
"'{}' contains invalid factoids:\n{}"
"\nUse the 'validate_factoids.py' script to validate your data."
).format(json_file, validator.make_readable_validation_msg(err))
raise validator.JSONValidationError(msg)


def validate_log_options(options, log_handlers):
    """Validate logging specific options.

    :param options: a dictionary of configuration options
    :type options: dict
    :param log_handlers: a list of log handler types
    :type log_handlers: list of strings
    :raises: errors.ConfigurationError if an unknown handler is requested or
        the 'file' handler is requested without a usable 'log_file' value
    """
    ALLOWED_HANDLERS = ('console', 'file', 'syslog')
    # minimal validation for supported log_handlers
    for hdl in log_handlers:
        if hdl not in ALLOWED_HANDLERS:
            raise ConfigurationError(
                "'{}' is not an allowed value for log_handlers!".format(hdl))

    # file log handler needs a log_file value
    if 'file' in log_handlers:
        if options.get('log_file') is None:
            raise ConfigurationError(
                "When using the log_handler 'file', you must specify a value for 'log_file'!")
        log_dir = os.path.abspath(os.path.dirname(options['log_file']))
        if not os.path.isdir(log_dir):
            raise ConfigurationError(
                "Log directory '{}' does not exist.".format(log_dir))


def get_logging_configuration(options):
    """Build a ``logging.config.dictConfig`` compatible dictionary.

    :param options: configuration options; uses 'log_level', the optional
        space separated string 'log_handlers' (defaults to 'console') and
        -- for the file handler -- 'log_file'.
    :type options: dict
    :return: a dictConfig dictionary with one entry per requested handler
    :rtype: dict
    :raises: errors.ConfigurationError via validate_log_options()
    """
    # 'log_handlers' is a space separated string like "console syslog";
    # str.split() without arguments already discards surrounding whitespace.
    if options.get("log_handlers"):
        log_handlers = options["log_handlers"].split()
    else:
        log_handlers = ["console"]
    validate_log_options(options, log_handlers)

    handlers = {}
    for hdl in log_handlers:
        if hdl == "console":
            handlers["console"] = {
                "class": "logging.StreamHandler",
                "level": options["log_level"],
                "stream": sys.stdout,
            }
        elif hdl == "syslog":
            handlers["syslog"] = {
                "class": "logging.handlers.SysLogHandler",
                "level": options["log_level"],
            }
        elif hdl == "file":
            handlers["file"] = {
                "class": "logging.handlers.RotatingFileHandler",
                "filename": options["log_file"],
                "maxBytes": 100000,
                "backupCount": 3,
                "level": options["log_level"],
            }
    # Attach the handlers to the root logger; without a 'root' entry
    # dictConfig would create the handlers but never use them.
    return {
        "version": 1,
        "handlers": handlers,
        "root": {
            "level": options["log_level"],
            "handlers": log_handlers,
        },
    }
78 changes: 76 additions & 2 deletions tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
import pytest

import yaml
from papilotte.util import get_options, transform_cli_options, validate_json
from papilotte.util import get_options, transform_cli_options, validate_json, validate_log_options, get_logging_configuration
from papilotte.validator import JSONValidationError
from papilotte.errors import ConfigurationError


def test_transform_cli_options_debug():
Expand Down Expand Up @@ -246,4 +247,77 @@ def test_validate_strict(minimal_factoid):
}
with pytest.raises(JSONValidationError):
validate_json(options)
os.unlink(data_file)
os.unlink(data_file)


def test_validate_log_options():
    "Test validation of logging options."
    # handlers that require no further options must validate silently
    validate_log_options({}, ["console"])
    validate_log_options({}, ["console", "syslog"])

    # an unsupported handler name must be rejected
    with pytest.raises(ConfigurationError):
        validate_log_options({}, ["foo"])

    # the file handler is valid as long as log_file is set
    validate_log_options({"log_file": "/tmp/papilotte.log"}, ["file"])

    # a missing or None log_file value must raise an exception
    for broken_options in ({}, {"log_file": None}):
        with pytest.raises(ConfigurationError):
            validate_log_options(broken_options, ["file"])

    # the directory part of log_file must exist
    with pytest.raises(ConfigurationError):
        validate_log_options({"log_file": "/dasfgdahgfld/papilotte.log"}, ["file"])


def test_get_logging_configuration_console():
    "Test creation of a logging.config.dictConfig dictionary."
    # 'console' is the implicit default handler, so it must show up both
    # without log_handlers and with an explicitly set value of "console".
    for options in ({"log_level": logging.WARNING},
                    {"log_level": logging.WARNING, "log_handlers": "console"}):
        cfg = get_logging_configuration(options)
        assert "console" in cfg["handlers"]
        assert cfg["handlers"]["console"]["level"] == logging.WARNING


def test_get_logging_configuration_syslog():
    "Test creation of a logging.config.dictConfig dictionary."
    cfg = get_logging_configuration(
        {"log_level": logging.ERROR, "log_handlers": "syslog"}
    )
    handler_cfg = cfg["handlers"]
    assert "syslog" in handler_cfg
    assert handler_cfg["syslog"]["level"] == logging.ERROR


def test_get_logging_configuration_file():
    "Test creation of a logging.config.dictConfig dictionary."
    cfg = get_logging_configuration({
        "log_level": logging.ERROR,
        "log_handlers": "file",
        "log_file": "/tmp/papilotte.log",
    })
    assert "file" in cfg["handlers"]
    file_cfg = cfg["handlers"]["file"]
    assert file_cfg["level"] == logging.ERROR
    assert file_cfg["filename"] == "/tmp/papilotte.log"

0 comments on commit e8b3995

Please sign in to comment.