# Files
# GLASSES_AI_SERVER/rest/app/database/crud.py
#
# 301 lines
# 10 KiB
# Python
# -*- coding: utf-8 -*-
"""
@File: crud.py
@Date: 2020-09-14
@author: A2TEC
@section MODIFYINFO 수정정보
- 수정자/수정일 : 수정내역
- 2022-01-14/hsj100@a2tec.co.kr : refactoring
@brief: CRUD
"""
import math
from datetime import datetime
from dateutil.relativedelta import relativedelta
from sqlalchemy import func, desc
from fastapi import APIRouter, Depends, Body
from sqlalchemy.orm import Session
from rest.app import models as M
from rest.app.database.conn import Base, db
from rest.app.database.schema import Users, UserLog
from rest.app.utils.extra import query_to_groupby, query_to_groupby_date
def get_month_info_list(start: datetime, end: datetime):
    """
    Build one info dict per calendar month covered by the period [start, end).

    Each entry contains:
      - 'period_title': 'YYYY-MM' label of the month
      - 'year' / 'month': integer components of that month
      - 'start': the month's first day (time-of-day carried over from `start`)
      - 'end': first day of the following month (exclusive upper bound)

    :param start: period start
    :param end: period end
    :return: list of month-info dicts, oldest first (empty when end <= start)
    """
    delta = relativedelta(end, start)
    # Number of month buckets; a partial trailing month counts as one more.
    # NOTE(review): when delta.days == 0 but start/end sit in different
    # calendar months (e.g. Jan 15 -> Feb 15) the last month is not emitted;
    # confirm this end-exclusive behaviour is intended.
    month_delta = 12 * delta.years + delta.months + (1 if delta.days > 0 else 0)
    month_list = list()
    for i in range(month_delta):
        # hoisted: the original recomputed start + relativedelta(months=i)
        # three times per iteration
        month_start = start + relativedelta(months=i)
        month_info = dict()
        month_info['period_title'] = month_start.strftime('%Y-%m')
        month_info['year'] = month_start.year
        month_info['month'] = month_start.month
        month_info['start'] = month_start.replace(day=1)
        month_info['end'] = (start + relativedelta(months=i + 1)).replace(day=1)
        month_list.append(month_info)
    return month_list
def request_parser(request_data: dict = None) -> dict:
    """
    Normalize a raw request payload into a plain dict.

    Keys whose value is None are dropped entirely, while the literal
    string 'null' is converted into a real None.

    :param request_data: raw request payload (may be None or empty)
    :return: filtered dict (empty when the input is falsy)
    """
    if not request_data:
        return dict()
    return {
        field: (None if value == 'null' else value)
        for field, value in request_data.items()
        if value is not None
    }
def dict_to_filter_fmt(dict_data, get_attributes_callback=None):
    """
    Translate a {field[__op]: value} dict into SQLAlchemy filter criteria.

    Supported suffixes after '__': gt, gte, lt, lte, in, like; a bare key
    yields an equality comparison. Fields the callback resolves to None are
    silently skipped, as are unrecognized suffixes.

    :param dict_data: search parameters, e.g. {'age__gte': 20}
    :param get_attributes_callback: maps a field name to a column object
    :return: list of SQL expressions suitable for Query.filter(*criterion)
    :raises Exception: when no callback is given or a key splits into >2 parts
    """
    if get_attributes_callback is None:
        raise Exception('invalid get_attributes_callback')
    # suffix -> comparison builder
    operations = {
        'gt': lambda col, value: col > value,
        'gte': lambda col, value: col >= value,
        'lt': lambda col, value: col < value,
        'lte': lambda col, value: col <= value,
        'in': lambda col, value: col.in_(value),
        'like': lambda col, value: col.like(value),
    }
    criterion = list()
    for raw_key, value in dict_data.items():
        parts = raw_key.split('__')
        if len(parts) > 2:
            raise Exception('length of split(key) should be no more than 2.')
        column = get_attributes_callback(parts[0])
        if column is None:
            continue
        if len(parts) == 1:
            criterion.append(column == value)
        elif parts[1] in operations:
            criterion.append(operations[parts[1]](column, value))
    return criterion
async def table_select(accessor_info, target_table, request_body_info, response_model, response_model_data):
    """
    table_read

    Generic SELECT helper: validates the call, runs either a day/mac-grouped
    UserLog login search or a plain single-table filter, then applies
    optional in-memory paging to the result list.

    NOTE(review): SOURCE arrived with all indentation stripped; the block
    structure below was reconstructed from statement order -- in particular
    the placement of ``request_body_info = request_body_info.search`` inside
    the ``hasattr`` branch should be confirmed against the original file.

    :param accessor_info: authenticated accessor (only checked for truthiness)
    :param target_table: table accessor exposing .filter(**kwargs)
    :param request_body_info: request model; may carry .paging / .search
    :param response_model: response class; also used for .set_error(msg)
    :param response_model_data: per-row model built via .from_orm(row)
    :return: response_model(data=..., paging=...) or an error response
    """
    try:
        # parameter
        if not accessor_info:
            raise Exception('invalid accessor')
        if not target_table:
            raise Exception(f'invalid table_name:{target_table}')
        # if not request_body_info:
        #     raise Exception('invalid request_body_info')
        if not response_model:
            raise Exception('invalid response_model')
        if not response_model_data:
            raise Exception('invalid response_model_data')
        # paging - request: unwrap {paging, search} envelopes
        paging_request = None
        if request_body_info:
            if hasattr(request_body_info, 'paging'):
                paging_request = request_body_info.paging
                request_body_info = request_body_info.search
        # request
        # NOTE(review): .dict() raises AttributeError when request_body_info
        # is None (the guard above is commented out); the except clause below
        # converts that into an error response.
        request_info = request_parser(request_body_info.dict())
        # search
        criterion = None
        if isinstance(request_body_info, M.UserLogDaySearchReq):
            # UserLog search
            # request (re-parse; same result as the parse above)
            request_info = request_parser(request_body_info.dict())
            # search UserLog
            def get_attributes_callback(key: str):
                # map a request field name onto the UserLog column
                return getattr(UserLog, key)
            criterion = dict_to_filter_fmt(request_info, get_attributes_callback)
            # search: successful logins ('ok' on /api/auth/login) that carry
            # a mac address, newest first
            session = next(db.session())
            search_info = session.query(UserLog)\
                .filter(UserLog.mac.isnot(None)
                        , UserLog.api == '/api/auth/login'
                        , UserLog.type == M.UserLogMessageType.info
                        , UserLog.message == 'ok'
                        , *criterion)\
                .order_by(desc(UserLog.created_at), desc(UserLog.updated_at))\
                .all()
            if not search_info:
                raise Exception('not found data')
            # bucket the log rows by calendar day of updated_at
            group_by_day = query_to_groupby_date(search_info, 'updated_at')
            # result: one row per (day, mac) -- the first entry per mac wins
            result_info = list()
            for day, info_list in group_by_day.items():
                info_by_mac = query_to_groupby(info_list, 'mac', first=True)
                for log_info in info_by_mac.values():
                    result_info.append(response_model_data.from_orm(log_info))
        else:
            # basic search (single table)
            # request (re-parse; same result as the parse above)
            request_info = request_parser(request_body_info.dict())
            # search
            search_info = target_table.filter(**request_info).all()
            if not search_info:
                raise Exception('not found data')
            # result
            result_info = list()
            for purchase_info in search_info:
                result_info.append(response_model_data.from_orm(purchase_info))
        # response - paging (in-memory slice of the already-built result list)
        paging_response = None
        if paging_request:
            total_contents_num = len(result_info)
            total_page_num = math.ceil(total_contents_num / paging_request.page_contents_num)
            # pages are 1-based
            start_contents_index = (paging_request.start_page - 1) * paging_request.page_contents_num
            end_contents_index = start_contents_index + paging_request.page_contents_num
            if end_contents_index < total_contents_num:
                result_info = result_info[start_contents_index: end_contents_index]
            else:
                result_info = result_info[start_contents_index:]
            paging_response = M.PagingRes()
            paging_response.total_page_num = total_page_num
            paging_response.total_contents_num = total_contents_num
            paging_response.start_page = paging_request.start_page
            paging_response.search_contents_num = len(result_info)
        return response_model(data=result_info, paging=paging_response)
    except Exception as e:
        # any failure is reported through the response model, never raised
        return response_model.set_error(str(e))
async def table_update(accessor_info, target_table, request_body_info, response_model, response_model_data=None):
    """
    Generic UPDATE helper: validate the call, build a filter from the
    request's search_info, and bulk-update the matching rows with its
    update_info. Failures never raise; they are reported through
    response_model.set_error().

    :param accessor_info: authenticated accessor (only checked for truthiness)
    :param target_table: table accessor exposing .filter(**kwargs)
    :param request_body_info: model carrying .search_info and .update_info
    :param response_model: response class; .set_error(msg) on failure
    :param response_model_data: unused (kept for signature symmetry)
    :return: response_model instance
    """
    query_handle = None
    try:
        # parameter validation -- order and messages preserved
        for required, message in (
            (accessor_info, 'invalid accessor'),
            (target_table, f'invalid table_name:{target_table}'),
            (request_body_info, 'invalid request_body_info'),
            (response_model, 'invalid response_model'),
        ):
            if not required:
                raise Exception(message)
        # request: WHERE-side conditions
        if not request_body_info.search_info:
            raise Exception('invalid request_body: search_info')
        where_conditions = request_parser(request_body_info.search_info.dict())
        if not where_conditions:
            raise Exception('invalid request_body: search_info')
        # request: SET-side values
        if not request_body_info.update_info:
            raise Exception('invalid request_body: update_info')
        new_values = request_parser(request_body_info.update_info.dict())
        if not new_values:
            raise Exception('invalid request_body: update_info')
        # search + process: bulk update of every matching row
        query_handle = target_table.filter(**where_conditions)
        query_handle.update(auto_commit=True, synchronize_session=False, **new_values)
        # result
        return response_model()
    except Exception as e:
        if query_handle:
            query_handle.close()
        return response_model.set_error(str(e))
async def table_delete(accessor_info, target_table, request_body_info, response_model, response_model_data=None):
    """
    Generic DELETE helper: delete every row of `target_table` matching the
    request body, then -- when the rows carry a `uuid` column (license
    table) -- recount the remaining rows per uuid and store the count back
    into that table's `num` column.

    :param accessor_info: authenticated accessor (only checked for truthiness)
    :param target_table: table accessor exposing .filter(**kwargs)
    :param request_body_info: pydantic model with the search conditions
    :param response_model: response class; .set_error(msg) on failure
    :param response_model_data: unused (kept for signature symmetry)
    :return: response_model instance
    """
    search_info = None
    try:
        # request
        if not accessor_info:
            raise Exception('invalid accessor')
        if not target_table:
            raise Exception(f'invalid table_name:{target_table}')
        if not request_body_info:
            raise Exception('invalid request_body_info')
        if not response_model:
            raise Exception('invalid response_model')
        # if not response_model_data:
        #     raise Exception('invalid response_model_data')
        # request
        request_search_info = request_parser(request_body_info.dict())
        if not request_search_info:
            raise Exception('invalid request_body')
        # search -- snapshot the matching rows before deleting them
        search_info = target_table.filter(**request_search_info)
        temp_search = search_info.all()
        # process
        search_info.delete(auto_commit=True, synchronize_session=False)
        # update license num
        # BUGFIX: the original tested hasattr(temp_search, 'uuid') -- i.e.
        # the result LIST, which never has a 'uuid' attribute -- so this
        # recount loop was dead code. Test the row (_license) instead.
        uuid_list = list()
        for _license in temp_search:
            if not hasattr(_license, 'uuid'):
                # case: not the license table -- nothing to recount
                break
            if _license.uuid not in uuid_list:
                uuid_list.append(_license.uuid)
                license_num = target_table.filter(uuid=_license.uuid).count()
                target_table.filter(uuid=_license.uuid).update(auto_commit=True, synchronize_session=False, num=license_num)
        # result
        return response_model()
    except Exception as e:
        if search_info:
            search_info.close()
        return response_model.set_error(str(e))