Skip to content

Commit

Permalink
Merge branch 'update'
Browse files Browse the repository at this point in the history
  • Loading branch information
djccnt15 committed Jun 8, 2024
2 parents 35ce7dc + e2c8997 commit d57eefe
Show file tree
Hide file tree
Showing 12 changed files with 129 additions and 94 deletions.
2 changes: 1 addition & 1 deletion docs/blog/index.md
Original file line number Diff line number Diff line change
@@ -1 +1 @@
# Blog
# Note
2 changes: 1 addition & 1 deletion docs/blog/posts/2022-01-30-numpy_reshape.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ description: >
categories:
- Data Analysis
tags:
- numpy
- NumPy
- array
---

Expand Down
2 changes: 1 addition & 1 deletion docs/blog/posts/2022-07-17-sympy_tutorial.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ description: >
categories:
- Mathematics
tags:
- sympy
- SymPy
---

symbolic computation을 다루는 SymPy 튜토리얼
Expand Down
4 changes: 2 additions & 2 deletions docs/blog/posts/2023-06-10-fastapi_orm.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ categories:
tags:
- FastAPI
- ORM
- SQLAlchemy
- Alembic
- sqlalchemy
- alembic
---

SQLAlchemy와 Alembic을 활용한 데이터베이스 FastAPI 서버 ORM 활용
Expand Down
2 changes: 1 addition & 1 deletion docs/blog/posts/2023-09-19-tensorflow_install_note.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ categories:
- AI
tags:
- AI
- tensorflow
- TensorFlow
---

TensorFlow 설치 관련 주의할 점들에 대한 기록들
Expand Down
100 changes: 58 additions & 42 deletions docs/blog/posts/2023-09-21-python_logging.md
Original file line number Diff line number Diff line change
Expand Up @@ -135,14 +135,14 @@ Python이 기본 제공하는 다양한 Log Handler 중에 [TimedRotatingFileHan
로그 필터를 직접 만들어 주입해주면 **특정 정보의 로그**만 필터링 해줄 수 있다.

```python title="src/log/filter.py"
import logging
from logging import Filter, LogRecord


class MyFilter(logging.Filter): # (1)!
class MyFilter(Filter): # (1)!
def __init__(self, levels: list[int]):
self.__level = levels

def filter(self, logRecord):
def filter(self, logRecord: LogRecord):
return logRecord.levelno in self.__level
```

Expand Down Expand Up @@ -200,12 +200,12 @@ class MyFilter(logging.Filter): # (1)!
```python title="src/log/formatter.py"
import datetime as dt
import json
import logging
from logging import Formatter, LogRecord

KST = dt.timezone(dt.timedelta(hours=9))
KST = dt.timezone(offset=dt.timedelta(hours=9))


class JsonFormatter(logging.Formatter):
class JsonFormatter(Formatter):
def __init__(
self,
*,
Expand All @@ -215,11 +215,11 @@ class MyFilter(logging.Filter): # (1)!
self.fmt_keys = fmt_keys if fmt_keys is not None else {}

# override
def format(self, record: logging.LogRecord) -> str:
def format(self, record: LogRecord) -> str:
message = self._prepare_log(record=record)
return json.dumps(message, default=str)
return json.dumps(obj=message, default=str)

def _prepare_log(self, record: logging.LogRecord):
def _prepare_log(self, record: LogRecord):
always_fields = {
"message": record.getMessage(),
"timestamp": (
Expand All @@ -234,9 +234,11 @@ class MyFilter(logging.Filter): # (1)!
always_fields["stack_info"] = self.formatStack(record.stack_info)

message = {
key: msg_val
if (msg_val := always_fields.pop(val, None)) is not None
else getattr(record, val)
key: (
msg_val
if (msg_val := always_fields.pop(val, None)) is not None
else getattr(record, val)
)
for key, val in self.fmt_keys.items()
}
message.update(always_fields)
Expand All @@ -251,13 +253,13 @@ class MyFilter(logging.Filter): # (1)!
```python title="src/log/formatter.py"
import datetime as dt
import json
import logging
from logging import Formatter, LogRecord
from typing import override

KST = dt.timezone(dt.timedelta(hours=9))


class JsonFormatter(logging.Formatter):
class JsonFormatter(Formatter):
def __init__(
self,
*,
Expand All @@ -267,15 +269,15 @@ class MyFilter(logging.Filter): # (1)!
self.fmt_keys = fmt_keys if fmt_keys is not None else {}

@override
def format(self, record: logging.LogRecord) -> str:
def format(self, record: LogRecord) -> str:
message = self._prepare_log(record=record)
return json.dumps(message, default=str)

def _prepare_log(self, record: logging.LogRecord):
def _prepare_log(self, record: LogRecord):
always_fields = {
"message": record.getMessage(),
"timestamp": (
dt.datetime.fromtimestamp(record.created, tz=KST) # (1)!
dt.datetime.fromtimestamp(timestamp=record.created, tz=KST) # (1)!
).isoformat(),
}

Expand All @@ -286,17 +288,19 @@ class MyFilter(logging.Filter): # (1)!
always_fields["stack_info"] = self.formatStack(record.stack_info)

message = {
key: msg_val
if (msg_val := always_fields.pop(val, None)) is not None
else getattr(record, val)
key: (
msg_val
if (msg_val := always_fields.pop(val, None)) is not None
else getattr(record, val)
)
for key, val in self.fmt_keys.items()
}
message.update(always_fields)

return message
```

1. UTC 기준으로 로그를 생성하고 싶다면 `dt.datetime.fromtimestamp(record.created, tz=dt.timezone.utc)`으로 만들면 된다.
1. UTC 기준으로 로그를 생성하고 싶다면 `dt.datetime.fromtimestamp(timestamp=record.created, tz=dt.timezone.utc)`으로 만들면 된다.

## Best Practice

Expand All @@ -305,7 +309,7 @@ class MyFilter(logging.Filter): # (1)!
```python title="src/log/config.py"
import logging
import queue
from logging import StreamHandler
from logging import Formatter, StreamHandler
from logging.handlers import QueueHandler, QueueListener, TimedRotatingFileHandler
from pathlib import Path

Expand All @@ -319,22 +323,22 @@ except FileExistsError:
...

# create Logger instance
logger = logging.getLogger("logger")
logger.setLevel(logging.DEBUG)
logger = logging.getLogger(name="logger")
logger.setLevel(level=logging.DEBUG)

# set log format
simple_formatter = logging.Formatter(
simple_formatter = Formatter(
fmt="%(asctime)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%dT%H:%M:%S%z",
)
detailed_formatter = logging.Formatter(
detailed_formatter = Formatter(
fmt="%(asctime)s - %(levelname)s - [%(module)s:%(lineno)d] %(message)s",
datefmt="%Y-%m-%dT%H:%M:%S%z",
)

# StreamHandler
stream_handler = StreamHandler()
stream_handler.setFormatter(simple_formatter)
stream_handler.setFormatter(fmt=simple_formatter)

# TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(
Expand All @@ -343,7 +347,7 @@ file_handler = TimedRotatingFileHandler(
backupCount=3, # define number of log files, 0 to save all log files
encoding="utf-8",
)
file_handler.setFormatter(detailed_formatter)
file_handler.setFormatter(fmt=detailed_formatter)

# JsonlHandler
json_handler = TimedRotatingFileHandler(
Expand All @@ -360,7 +364,7 @@ fmt_keys = {
"function": "funcName",
"line": "lineno",
}
json_handler.setFormatter(formatter.JsonFormatter(fmt_keys=fmt_keys))
json_handler.setFormatter(fmt=formatter.JsonFormatter(fmt_keys=fmt_keys))

# DebugHandler
debug_handler = TimedRotatingFileHandler(
Expand All @@ -369,15 +373,21 @@ debug_handler = TimedRotatingFileHandler(
backupCount=3,
encoding="utf-8",
)
debug_handler.setFormatter(detailed_formatter)
debug_handler.setFormatter(fmt=detailed_formatter)
debug_handler.addFilter(
filter.MyFilter([logging.DEBUG, logging.ERROR, logging.CRITICAL])
filter=filter.MyFilter(
levels=[
logging.DEBUG,
logging.ERROR,
logging.CRITICAL,
]
)
)

# QueueHandler
log_queue = queue.Queue() # (1)!
queue_handler = QueueHandler(log_queue)
logger.addHandler(queue_handler)
queue_handler = QueueHandler(queue=log_queue)
logger.addHandler(hdlr=queue_handler)

# QueueListener
log_listener = QueueListener(
Expand Down Expand Up @@ -409,7 +419,7 @@ def main():
try:
raise Exception
except Exception as e:
logger.exception(e) # log for error catch
logger.exception(e) # logging error traceback

log_listener.stop()

Expand Down Expand Up @@ -520,24 +530,30 @@ from pathlib import Path

from src.log import filter

logger = logging.getLogger("logger")
logger = logging.getLogger(name="logger")


def set_logger():
log_config_file = Path(r"config\log_config.json")
with open(log_config_file, encoding="utf-8") as f:
log_config = json.load(f)
logging.config.dictConfig(log_config)
with open(file=log_config_file, encoding="utf-8") as f:
log_config = json.load(fp=f)
logging.config.dictConfig(config=log_config)

queue_handler = logging.getHandlerByName("queue_handler")
queue_handler = logging.getHandlerByName(name="queue_handler")
if queue_handler is not None:
queue_handler.listener.start()
atexit.register(queue_handler.listener.stop)
atexit.register(func=queue_handler.listener.stop)

debug_handler = logging.getHandlerByName("debug_handler")
debug_handler = logging.getHandlerByName(name="debug_handler")
if debug_handler is not None:
debug_handler.addFilter(
filter.MyFilter([logging.DEBUG, logging.ERROR, logging.CRITICAL])
filter=filter.MyFilter(
levels=[
logging.DEBUG,
logging.ERROR,
logging.CRITICAL,
]
)
)
```

Expand Down
2 changes: 1 addition & 1 deletion docs/blog/posts/2023-12-16-redis_basic.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ categories:
- Data Engineering
tags:
- in-memory database
- redis
- Redis
---

Redis의 입문을 위한 간단 정리
Expand Down
18 changes: 5 additions & 13 deletions docs/blog/posts/2024-05-05-sqlalchemy_alembic.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,10 @@ class User(Base):
매핑 Entity에 속성을 부여하는 방법은 여러 가지가 있는데, 아래와 같이 추상 테이블 객체에 칼럼을 만들어두면, 해당 추상 테이블을 상속 받는 테이블들에는 해당 칼럼이 기본적으로 생성되게 된다.

```python title="src/db/entity.py"
from datetime import datetime
from enum import IntEnum

from sqlalchemy.orm import DeclarativeBase, mapped_column
from sqlalchemy.schema import Column
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.types import BigInteger, DateTime, String


Expand All @@ -65,7 +65,7 @@ class BaseEntity(DeclarativeBase): ...
class BigintIdEntity(BaseEntity):
__abstract__ = True # (1)!

id = mapped_column(
id: Mapped[int] = mapped_column(
type_=BigInteger,
primary_key=True,
autoincrement=True,
Expand All @@ -76,11 +76,8 @@ class BigintIdEntity(BaseEntity):
class UserEntity(BigintIdEntity):
__tablename__ = "user"

name = Column(
String(length=UserEntityEnum.NAME.value), # (3)!
nullable=False,
)
created_at = Column(DateTime, nullable=False)
name: Mapped[str] = mapped_column(String(length=UserEntityEnum.NAME.value)) # (3)!
created_at: Mapped[datetime] = mapped_column(DateTime)
```

1. 추상 테이블 표시를 통해 Alembic으로 테이블을 관리할 때 테이블 생성에서 제외
Expand All @@ -89,11 +86,6 @@ class UserEntity(BigintIdEntity):

`BaseEntity`에 속성을 부여하면 모든 테이블이 해당 속성을 사용하게 된다. 특정 테이블들만 속성을 공유하도록 하려면 중간에 추상 테이블을 별도로 생성해야 한다.

!!! tip
개인적으로는 더 직관적으로 보이는 `Column` 객체 사용을 더 선호하는데, `Column` 객체는 `sort_order` 속성이 없어서 추상 테이블에서 선언한 칼럼의 순서를 설정해줄 수 없다.

기존 테이블에 ORM을 통해 연결할 경우 문제가 없지만, Alembic을 통해 테이블을 생성하고 관리하게 된다면 상속 받은 칼럼이 나중에 생성되어 칼럼 순서가 직관적이지 않은 문제가 발생한다.

!!! warning
위 예시처럼 칼럼 길이를 Enum으로 별도 관리할 때는 SQLAlchemy Entity에 입력할 때는 반드시 `.value` 까지 입력해서 값만 불러오도록 해야한다. Alembic이 Enum 객체를 제대로 인식하지 못해 revision 생성 시 칼럼 길이에 값 대신 Enum 객체를 입력해버리는 문제가 있다.

Expand Down
Loading

0 comments on commit d57eefe

Please sign in to comment.