
Commit 1c4c05d
Refactor and setup log handling more cleanly
ms-christensen committed Aug 11, 2019
1 parent ab584c5 commit 1c4c05d
Showing 3 changed files with 58 additions and 64 deletions.
processor.py: 52 changes (36 additions, 16 deletions)
@@ -1,26 +1,46 @@
-import logging
-from logging import handlers
-
-from rq import get_current_job
+from redis import Redis
+from rq import get_current_job, Queue
+from rq.job import Job
+from rq.exceptions import NoSuchJobError
 
 import lbp_print.core as lbp_print
 import lbp_print.config as lbp_config
 from lbp_print.exceptions import SaxonError
 
+from utils import setup_logger
+
 lbp_config.cache_dir = "cache"
 
-logger = logging.getLogger()
-stream_handler = logging.StreamHandler()
-stream_handler.setLevel(logging.INFO)
-file_handler = handlers.RotatingFileHandler(
-    "logs/service.log", maxBytes=1024 * 1000, backupCount=5
-)
-file_handler.setLevel(logging.DEBUG)
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-stream_handler.setFormatter(formatter)
-file_handler.setFormatter(formatter)
-logger.addHandler(stream_handler)
-logger.addHandler(file_handler)
+logger = setup_logger("print_api")
+
+q = Queue(connection=Redis())
+
+
+def handle_job(resource_value: str, resource_type: str) -> dict:
+    try:
+        job = Job.fetch(resource_value, connection=Redis())
+    except NoSuchJobError:
+        job = q.enqueue(
+            convert_resource,
+            resource_value,
+            resource_type,
+            job_id=resource_value,
+            job_timeout="1h",
+            result_ttl=30,
+        )
+        return {"Status": f"Started processing {resource_value}"}
+
+    if job.result:
+        response = {"Status": "Finished", "url": job.result}
+    elif job.is_failed:
+        response = {
+            "Status": "Failed. Resubmit to retry.",
+            "error": job.meta["progress"],
+        }
+        job.delete()
+    else:
+        response = {"Status": job.meta["progress"]}
+    return response
 
 
 def update_status(message, job):
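The job id doubles as an idempotency key: the first request for a resource enqueues a conversion, later requests with the same id report progress from job.meta, and a finished job returns its result URL. A minimal polling sketch (hypothetical resource id and type; assumes a local Redis, a running RQ worker, and convert_resource defined further down in processor.py):

    from processor import handle_job

    # First call: Job.fetch raises NoSuchJobError, so a new job is enqueued.
    handle_job("resource-123", "scta")
    # -> {"Status": "Started processing resource-123"}

    # Later calls with the same id poll the existing job:
    # -> {"Status": <job.meta["progress"]>} while the worker runs,
    # -> {"Status": "Finished", "url": <job.result>} once it completes.
    handle_job("resource-123", "scta")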
service.py: 51 changes (3 additions, 48 deletions)
@@ -1,66 +1,21 @@
 import json
-import logging
 import os
 import subprocess
-from logging import handlers
 
 from flask import Flask, request, jsonify
-from redis import Redis
-from rq import Queue
-from rq.job import Job
-from rq.exceptions import NoSuchJobError
-
-import lbp_print.core as lbp_print
-import lbp_print.config as lbp_config
 
-from processor import convert_resource
+from processor import handle_job
+from utils import setup_logger
 
 # App version
 __VERSION__ = subprocess.check_output("git describe --tags", shell=True).decode()
 
 app = Flask(__name__, instance_path=os.getcwd())
 
-logger = logging.getLogger()
-stream_handler = logging.StreamHandler()
-stream_handler.setLevel(logging.INFO)
-file_handler = handlers.RotatingFileHandler(
-    "logs/service.log", maxBytes=1024 * 1000, backupCount=5
-)
-file_handler.setLevel(logging.DEBUG)
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-stream_handler.setFormatter(formatter)
-file_handler.setFormatter(formatter)
-logger.addHandler(stream_handler)
-logger.addHandler(file_handler)
-
-q = Queue(connection=Redis())
-
-
-def handle_job(resource_value: str, resource_type: str) -> dict:
-    try:
-        job = Job.fetch(resource_value, connection=Redis())
-    except NoSuchJobError:
-        job = q.enqueue(
-            convert_resource,
-            resource_value,
-            resource_type,
-            job_id=resource_value,
-            job_timeout="1h",
-            result_ttl=30,
-        )
-        return {"Status": "Started"}
-
-    if job.result:
-        response = {"Status": "Finished", "url": job.result}
-    elif job.is_failed:
-        response = {
-            "Status": "Failed. Resubmit to retry.",
-            "error": job.meta["progress"],
-        }
-        job.delete()
-    else:
-        response = {"Status": "Working"}
-    return response
+logger = setup_logger("print_api")
 
 
 @app.route("/api/v1/resource")
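handle_job only enqueues work; a separate RQ worker process must be listening on the same Redis instance for jobs to execute. A sketch of a worker entry point, assuming the default queue and a local Redis (the project may instead launch one from the command line with rq worker):

    # worker.py (hypothetical helper, not part of this commit)
    from redis import Redis
    from rq import Queue, Worker

    if __name__ == "__main__":
        conn = Redis()
        # Listen on the default queue, matching Queue(connection=Redis()) in processor.py.
        Worker([Queue(connection=conn)], connection=conn).work()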
utils.py (new file): 19 changes (19 additions, 0 deletions)
@@ -0,0 +1,19 @@
+import logging
+from logging import handlers
+
+
+def setup_logger(name):
+    wz = logging.getLogger("werkzeug")
+
+    logger = logging.getLogger(name)
+    file_handler = handlers.RotatingFileHandler(
+        "logs/service.log", maxBytes=1024 * 1000, backupCount=5
+    )
+    file_handler.setLevel(logging.DEBUG)
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+    file_handler.setFormatter(formatter)
+    logger.addHandler(file_handler)
+    wz.addHandler(file_handler)
+    return logger
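setup_logger replaces the handler blocks previously duplicated in service.py and processor.py, and also attaches the rotating file handler to Flask's "werkzeug" logger so HTTP request logs land in the same file. Note it sets a level on the handler but not on the logger itself, so the logger inherits the root level (WARNING by default). A usage sketch (assumes a logs/ directory exists):

    import logging

    from utils import setup_logger

    logger = setup_logger("print_api")
    # The handler accepts DEBUG, but the logger still filters at the root's
    # WARNING level unless raised explicitly:
    logger.setLevel(logging.DEBUG)
    logger.info("service starting")  # appended to logs/service.log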
