Celery logging in the same console as Django

I am using Celery to run tasks inside a Django API. The problem is that the Django logs and the Celery logs end up in two separate consoles, and I want all of them to show up in the Django console.

This is the logging config in settings.py:

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "()": "colorlog.ColoredFormatter",
            "format": "%(log_color)s %(levelname)-8s %(asctime)s %(request_id)s  %(process)s --- "
            "%(lineno)-8s [%(name)s] %(funcName)-24s : %(message)s",
            "log_colors": {
                "DEBUG": "blue",
                "INFO": "white",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "bold_red",
            },
        },
        "aws": {
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "filters": {
        "request_id": {"()": "log_request_id.filters.RequestIDFilter"},
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "verbose",
            "filters": ["request_id"],
        }
    },
    "loggers": {
        # Default logger for any logger name
        "": {
            "level": "INFO",
            "handlers": [
                "console",
            ],
            "propagate": False,
        },
    },
}
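
Since the empty string names the root logger, every named logger in the process (including Celery's task loggers) funnels into the console handler above. A minimal sketch to check that routing by hand, run somewhere the LOGGING dict above is in scope (it assumes colorlog and django-log-request-id are installed, as the config requires):

import logging
from logging.config import dictConfig

# Apply the dict above, then emit through an arbitrary named logger; the
# record should come out of the "console" handler in the "verbose" format.
dictConfig(LOGGING)
logging.getLogger("celery.task.demo").info("routed via the root logger")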

This is the Celery config, together with an example task:

import json
import os

import boto3
import requests
from botocore.config import Config
from celery import Celery, shared_task
from celery.signals import setup_logging
from celery.utils.log import get_task_logger
from django.conf import settings

# send_files_email is a project-local helper; its import is not shown here.

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "green_navigation_back.settings")

app = Celery("green_navigation_back")
app.conf.update(
    worker_hijack_root_logger=False,  # Celery >= 4 name; CELERYD_HIJACK_ROOT_LOGGER is the deprecated alias
)
app.config_from_object("django.conf:settings", namespace="CELERY")

@setup_logging.connect
def config_loggers(*args, **kwargs):
    # Re-apply Django's LOGGING config inside the worker process so Celery
    # does not install its own handlers on top of it.
    from logging.config import dictConfig
    from django.conf import settings

    dictConfig(settings.LOGGING)

app.autodiscover_tasks()
logger = get_task_logger(__name__)

@shared_task(ignore_result=True, time_limit=3600)
def optimization_email_task(request_data, user_email):
    logger.info("Preparing optimization")
    if "arn:aws" in settings.LAMBDA_URL:
        # Timeout must be lower than time_limit
        client_config = Config(connect_timeout=1800, read_timeout=1800)
        lambda_client = boto3.client(
            "lambda",
            region_name=settings.AWS_REGION,
            config=client_config,
        )
        lambda_response = lambda_client.invoke(
            FunctionName=settings.LAMBDA_URL,
            InvocationType="RequestResponse",
            Payload=json.dumps(request_data),
        )
        lambda_response = json.loads(lambda_response["Payload"].read())
    else:
        lambda_response = requests.post(
            settings.LAMBDA_URL, json=request_data  # , timeout=timeout + 300
        ).json()

    ship_name = "+".join([ship["name"] for ship in request_data["ship_data"]])

    logger.info("Sending email")
    send_files_email(
        user_email=user_email,
        csv_string=lambda_response["body"]["csv"],
        report_base64=lambda_response["body"]["report"],
        ship_name=ship_name,
    )
    logger.info("Email sent")

Note: in the future I will run the Celery worker detached (-D), and I'm not sure how that will affect this. I want to fix this problem first, at least.
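
For the detached case specifically: a worker started with -D has no attached console, so its output stays visible only if it is sent somewhere explicit, for example via the worker's --logfile option or an extra file handler in LOGGING. A sketch of the latter, where the handler name and filename are hypothetical:

# Hypothetical extra entry for LOGGING["handlers"]; listing "shared_file"
# in the root logger's "handlers" alongside "console" would make both the
# Django process and a detached worker append to the same file.
"shared_file": {
    "class": "logging.FileHandler",
    "filename": "app.log",
    "formatter": "aws",
},

Two processes appending to one file can interleave lines, so a per-process filename (or a centralized log collector) is the more robust variant of this idea.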
