Skip to content

Commit

Permalink
Merge pull request #469 from yoshitomo-matsubara/dev
Browse files Browse the repository at this point in the history
Disable an auto-configuration for def_logger
  • Loading branch information
yoshitomo-matsubara committed May 25, 2024
2 parents dd17646 + 027d688 commit d55508a
Show file tree
Hide file tree
Showing 6 changed files with 20 additions and 10 deletions.
3 changes: 2 additions & 1 deletion examples/hf_transformers/text_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
from torchdistill.core.training import get_training_box
from torchdistill.datasets import util
from torchdistill.datasets.registry import register_collate_func
from torchdistill.misc.log import setup_log_file, SmoothedValue, MetricLogger
from torchdistill.misc.log import set_basic_log_config, setup_log_file, SmoothedValue, MetricLogger

logger = def_logger.getChild(__name__)

Expand Down Expand Up @@ -206,6 +206,7 @@ def predict_private(model, dataset_dict, label_names_dict, is_regression, accele


def main(args):
set_basic_log_config()
log_file_path = args.run_log
if is_main_process() and log_file_path is not None:
setup_log_file(os.path.expanduser(log_file_path))
Expand Down
3 changes: 2 additions & 1 deletion examples/torchvision/image_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from torchdistill.core.distillation import get_distillation_box
from torchdistill.core.training import get_training_box
from torchdistill.datasets.util import build_data_loader
from torchdistill.misc.log import setup_log_file, SmoothedValue, MetricLogger
from torchdistill.misc.log import set_basic_log_config, setup_log_file, SmoothedValue, MetricLogger
from torchdistill.models.official import get_image_classification_model
from torchdistill.models.registry import get_model

Expand Down Expand Up @@ -158,6 +158,7 @@ def train(teacher_model, student_model, dataset_dict, src_ckpt_file_path, dst_ck


def main(args):
set_basic_log_config()
log_file_path = args.run_log
if is_main_process() and log_file_path is not None:
setup_log_file(os.path.expanduser(log_file_path))
Expand Down
3 changes: 2 additions & 1 deletion examples/torchvision/object_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from torchdistill.core.distillation import get_distillation_box
from torchdistill.core.training import get_training_box
from torchdistill.datasets.util import build_data_loader
from torchdistill.misc.log import setup_log_file, SmoothedValue, MetricLogger
from torchdistill.misc.log import set_basic_log_config, setup_log_file, SmoothedValue, MetricLogger
from torchdistill.models.official import get_object_detection_model
from torchdistill.models.registry import get_model

Expand Down Expand Up @@ -198,6 +198,7 @@ def train(teacher_model, student_model, dataset_dict, src_ckpt_file_path, dst_ck


def main(args):
set_basic_log_config()
log_file_path = args.run_log
if is_main_process() and log_file_path is not None:
setup_log_file(os.path.expanduser(log_file_path))
Expand Down
3 changes: 2 additions & 1 deletion examples/torchvision/semantic_segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from torchdistill.core.distillation import get_distillation_box
from torchdistill.core.training import get_training_box
from torchdistill.datasets.util import build_data_loader
from torchdistill.misc.log import setup_log_file, SmoothedValue, MetricLogger
from torchdistill.misc.log import set_basic_log_config, setup_log_file, SmoothedValue, MetricLogger
from torchdistill.models.official import get_semantic_segmentation_model
from torchdistill.models.registry import get_model
from utils.eval import SegEvaluator
Expand Down Expand Up @@ -151,6 +151,7 @@ def train(teacher_model, student_model, dataset_dict, src_ckpt_file_path, dst_ck


def main(args):
set_basic_log_config()
log_file_path = args.run_log
if is_main_process() and log_file_path is not None:
setup_log_file(os.path.expanduser(log_file_path))
Expand Down
6 changes: 0 additions & 6 deletions torchdistill/common/constant.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,4 @@

SELF_MODULE_PATH = '.'
LOGGING_FORMAT = '%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s'

logging.basicConfig(
format=LOGGING_FORMAT,
datefmt='%Y/%m/%d %H:%M:%S',
level=logging.INFO,
)
def_logger = logging.getLogger()
12 changes: 12 additions & 0 deletions torchdistill/misc/log.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import datetime
import logging
import time
from collections import defaultdict, deque
from logging import FileHandler, Formatter
Expand All @@ -13,6 +14,17 @@
logger = def_logger.getChild(__name__)


def set_basic_log_config():
    """
    Applies torchdistill's default root-logger configuration.

    Installs the tab-separated log format (``LOGGING_FORMAT``), a
    ``YYYY/MM/DD HH:MM:SS`` timestamp style, and ``INFO`` level via
    :func:`logging.basicConfig`. Call this explicitly (e.g. at the top of
    an example script's ``main``) since importing torchdistill no longer
    configures logging as a side effect.
    """
    basic_config_kwargs = {
        'format': LOGGING_FORMAT,
        'datefmt': '%Y/%m/%d %H:%M:%S',
        'level': logging.INFO
    }
    logging.basicConfig(**basic_config_kwargs)


def setup_log_file(log_file_path):
"""
Sets a file handler with ``log_file_path`` to write a log file.
Expand Down

0 comments on commit d55508a

Please sign in to comment.