This repository has been archived by the owner on Aug 11, 2022. It is now read-only.

Commit

Update logging setup
ms-christensen committed Aug 25, 2019
1 parent 49ab205 commit ca0ddf5
Showing 2 changed files with 42 additions and 5 deletions.
29 changes: 29 additions & 0 deletions app.py
@@ -1,22 +1,51 @@
import logging
import json
import os
import subprocess

from logging.config import dictConfig
from flask import Flask, request, jsonify
from flask.logging import default_handler

import lbp_print.core as lbp_print
import lbp_print.config as lbp_config

from processor import handle_job

dictConfig(
{
"version": 1,
"formatters": {
"default": {
"format": "[%(asctime)s] %(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
"default": {
"level": "DEBUG",
"formatter": "default",
"class": "logging.handlers.RotatingFileHandler",
"filename": "test.log",
"maxBytes": 5000000,
"backupCount": 5,
}
},
"root": {"level": logging.DEBUG, "handlers": ["default"]},
}
)

# App version
__VERSION__ = subprocess.check_output("git describe --tags", shell=True).decode()

app = Flask(__name__, instance_path=os.getcwd())

logger = logging.getLogger()
logger.addHandler(default_handler)


@app.route("/api/v1/resource")
def process_resource():
logger.debug(f"Received request with args: {request.args}")
resource_id = request.args.get("id")
resource_url = request.args.get("url")
if not resource_id and not resource_url:
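The remainder of the route is collapsed in the diff above. For context, the dictConfig call added here sends every record that reaches the root logger to a size-rotating file handler, so module-level loggers elsewhere in the project (such as the one added to processor.py below) need no handler setup of their own. The following is a minimal, self-contained sketch of that propagation pattern; the example.log filename and the size limits are placeholders rather than values from the commit:

import logging
from logging.handlers import RotatingFileHandler

# Configure the root logger once with a rotating file handler.
# (Illustrative values; the commit uses test.log, 5 MB and 5 backups.)
handler = RotatingFileHandler("example.log", maxBytes=1_000_000, backupCount=3)
handler.setFormatter(
    logging.Formatter("[%(asctime)s] %(name)s - %(levelname)s - %(message)s")
)
root = logging.getLogger()
root.setLevel(logging.DEBUG)
root.addHandler(handler)

# A logger in any other module propagates to the root handler by default,
# so no per-module handler or level configuration is required.
child = logging.getLogger("processor")
child.debug("This record ends up in example.log via the root handler.")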
18 changes: 13 additions & 5 deletions processor.py
@@ -9,7 +9,11 @@
import lbp_print.config as lbp_config
from lbp_print.exceptions import SaxonError


logger = logging.getLogger()
logger.warning("Child This is a warning")
logger.error("Child This is an error")


lbp_config.cache_dir = "cache"

@@ -18,8 +22,10 @@

def handle_job(resource_value: str, resource_type: str) -> dict:
try:
logger.debug(f"Checking for job with the id {resource_value}")
job = Job.fetch(resource_value, connection=Redis())
except NoSuchJobError:
logger.debug(f"No existing job. Starting one up ...")
job = q.enqueue(
convert_resource,
resource_value,
@@ -30,16 +36,18 @@ def handle_job(resource_value: str, resource_type: str) -> dict:
)
return {"Status": f"Started processing {resource_value}"}

+    status = job.meta["progress"]
+
     if job.result:
         response = {"Status": "Finished", "url": job.result}
+        logger.debug(f"Job was finished. Result: {job.result}")
     elif job.is_failed:
-        response = {
-            "Status": "Failed. Resubmit to retry.",
-            "error": job.meta["progress"],
-        }
+        response = {"Status": "Failed. Resubmit to retry.", "error": status}
+        logger.warning(f"Job failed. {status}")
         job.delete()
     else:
-        response = {"Status": job.meta["progress"]}
+        response = {"Status": status}
+        logger.debug(f"Job running. Status: {status}")
     return response


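handle_job implements a simple poll-style contract: the first call for a resource enqueues the conversion and returns a "Started processing …" status, subsequent calls report the job's progress, a finished job returns the result URL, and a failed job is deleted so that resubmitting the request retries it. Below is a hedged sketch of a caller built on that contract; wait_for_conversion, the poll interval and the timeout are illustrative additions, not part of the repository:

import time

from processor import handle_job


def wait_for_conversion(
    resource_value: str, resource_type: str, poll_seconds: float = 2.0, timeout: float = 300.0
) -> dict:
    """Illustrative polling loop around handle_job (not part of the commit)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        response = handle_job(resource_value, resource_type)
        # Per the diff: finished jobs carry a "url", failed jobs carry an "error".
        if "url" in response or "error" in response:
            return response
        time.sleep(poll_seconds)
    return {"Status": "Timed out waiting for the job"}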
