Compare commits
38 Commits
9c036d7cb7
...
103ee99d26
Author | SHA1 | Date |
---|---|---|
|
103ee99d26 | 12 months ago |
|
171f891d63 | 1 year ago |
|
1c04b32028 | 1 year ago |
|
a034aa4bf6 | 1 year ago |
|
6931e7914e | 1 year ago |
|
baef5ee8a9 | 1 year ago |
|
734ba9818c | 1 year ago |
|
af2d037577 | 1 year ago |
|
7a765dd1ac | 1 year ago |
|
9fe7ff8382 | 1 year ago |
|
5c524390b9 | 1 year ago |
|
ada8cb2053 | 1 year ago |
|
7a44899f2f | 1 year ago |
|
bd92e72d5f | 1 year ago |
|
7a4212140f | 1 year ago |
|
bc77bc5ce9 | 1 year ago |
|
b82bccc95d | 1 year ago |
|
c1f4dc7778 | 1 year ago |
|
85b1e044d0 | 1 year ago |
|
469b04438c | 1 year ago |
|
1bb705c5c4 | 1 year ago |
|
6a4100d612 | 1 year ago |
|
6b10daf0fe | 1 year ago |
|
46e8e308f2 | 1 year ago |
|
ead7b8fc94 | 1 year ago |
|
809ef6f7b4 | 1 year ago |
|
85daf8c320 | 1 year ago |
|
48c14a9651 | 1 year ago |
|
a2abb09955 | 1 year ago |
|
c5e4d44827 | 1 year ago |
|
b7a3c0fb94 | 1 year ago |
|
57ce3d896e | 1 year ago |
|
ae4ac605c0 | 1 year ago |
|
9f25d21303 | 1 year ago |
|
7e77fbe67b | 1 year ago |
|
933b1878af | 1 year ago |
|
eee871a2da | 1 year ago |
|
2406ef5c95 | 1 year ago |
@ -0,0 +1,55 @@
|
||||
FROM 192.168.10.94:5000/generalai:v1

# Set timezone
ENV TZ=Asia/Shanghai

# Install the DB/redis client tools used by the entrypoint init script.
RUN apt-get update && apt-get install -y \
    default-mysql-client \
    redis-tools \
    && apt-get clean

# One layer instead of three separate RUN mkdir layers.
RUN mkdir -p /app/lemon /app/fileupload /app/log

# Set working directory
WORKDIR /app/lemon

# Copy application files
COPY website/ ./website/
COPY dist/ ./dist/

# Copy supervisor configuration
COPY supervisor.conf /etc/supervisor/conf.d/lemon.conf

# Normalize CRLF line endings and make the entrypoint executable
# (single layer instead of two).
COPY entrypoint.sh /
RUN sed -i 's/\r//' /entrypoint.sh && chmod +x /entrypoint.sh

# Expose port
EXPOSE 80

# The start command is supplied by docker-compose; keep entrypoint empty.
ENTRYPOINT []
|
@ -0,0 +1,74 @@
|
||||
version: "3.3"

services:
  web:
    working_dir: /app/lemon/website
    image: lemon:latest
    # Was split across two lines ("tty:" / "true") in the original;
    # same value, normalized form.
    tty: true
    container_name: lemon_web
    privileged: true
    volumes:
      - /data/app/log:/app/log
      - /data/app/fileupload:/app/fileupload
    environment:
      - TZ=Asia/Shanghai
    networks:
      - lemon_network
    depends_on:
      - mysql
      - redis
    command: sh -c "/entrypoint.sh && /usr/bin/supervisord -n -c /etc/supervisor/supervisord.conf"

  nginx:
    image: nginx:latest
    container_name: lemon_nginx
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/conf.d/lemon.conf
      - ./dist:/app/lemon/dist
    depends_on:
      - web
    networks:
      - lemon_network

  mysql:
    image: mysql:8.0
    container_name: lemon_mysql
    ports:
      - "13306:3306"
    environment:
      # NOTE(review): plaintext credentials committed to the repo — move to
      # an env file or a secrets store.
      MYSQL_ROOT_PASSWORD: SghjdA887#
      MYSQL_DATABASE: aiplatform
    volumes:
      - /data/mysql_data:/var/lib/mysql
      - ./mysql_auth.sql:/docker-entrypoint-initdb.d/auth.sql
      - ./mysql_app.sql:/docker-entrypoint-initdb.d/app.sql
    networks:
      - lemon_network

  redis:
    image: redis:latest
    container_name: lemon_redis
    ports:
      - "16379:6379"
    # NOTE(review): redis password is also committed in plaintext.
    command: redis-server --requirepass hgkiYY87
    volumes:
      - /data/redis_data:/data
    networks:
      - lemon_network

networks:
  lemon_network:
    driver: bridge
|
@ -0,0 +1,6 @@
|
||||
USE mysql;
-- Keep the root password from ever expiring.
-- (The original statement also reset the password to the literal string
-- 'password' here, leaving a weak transient credential until the next
-- statement ran; PASSWORD EXPIRE NEVER does not require IDENTIFIED BY.)
ALTER USER 'root'@'%' PASSWORD EXPIRE NEVER;
-- Use the legacy auth plugin so older clients/drivers can connect.
ALTER USER 'root'@'%' IDENTIFIED WITH mysql_native_password BY 'SghjdA887#';
-- Reload the privilege tables.
FLUSH PRIVILEGES;
|
@ -0,0 +1,16 @@
|
||||
#!/bin/bash

# Wait for MySQL, but give up after ~120s instead of hanging forever
# (the original loop had no timeout).
echo "Waiting for MySQL to be ready..."
attempts=0
while ! mysqladmin ping -h"lemon_mysql" --silent; do
    attempts=$((attempts + 1))
    if [ "$attempts" -ge 120 ]; then
        echo "MySQL did not become ready in time" >&2
        exit 1
    fi
    sleep 1
done

# Load the application schema/data into the aiplatform database.
# NOTE(review): credentials are hard-coded here and in docker-compose.
echo "Initializing MySQL database..."
mysql -h lemon_mysql -u root -pSghjdA887# aiplatform < /app/lemon/mysql_app.sql

echo "MySQL initialization completed."

# Hand off to whatever command was passed in (no-op when run without args).
exec "$@"
|
@ -0,0 +1,18 @@
|
||||
# Serve the built front-end bundle and proxy /api to the tornado backend.
server {
    listen 80;
    server_name 192.168.10.94;

    # Static SPA bundle mounted into the nginx container (see compose file).
    root /app/lemon/dist;

    # API traffic goes to the web container (tornado app on 8989).
    location ^~ /api {
        proxy_pass http://lemon_web:8989;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # SPA fallback: unknown paths resolve to index.html.
    location / {
        try_files $uri $uri/ /index.html;
    }
}
|
@ -0,0 +1,10 @@
|
||||
; Run the tornado application under supervisord (one process, port 8989).
[program:lemon]
directory=/app/lemon/website
command=python /app/lemon/website/app.py --port=8989
numprocs=1
process_name=%(program_name)s_%(process_num)02d
; Restart on crash and start automatically with supervisord.
autorestart=true
autostart=true
startsecs=5
; Merge stderr into stdout and write per-process log files.
redirect_stderr=true
stdout_logfile=/app/log/app_%(process_num)02d.log
|
@ -0,0 +1,230 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
import os.path
|
||||
import sys
|
||||
import time
|
||||
|
||||
import redis
|
||||
import tornado.escape
|
||||
import tornado.ioloop
|
||||
import tornado.options
|
||||
import tornado.web
|
||||
# import tornado.websocket
|
||||
# import torndb
|
||||
import importlib
|
||||
# from confluent_kafka import Producer
|
||||
# from rediscluster import StrictRedisCluster
|
||||
# from redis import sentinel
|
||||
# from rediscluster import RedisCluster
|
||||
# from redis.sentinel import Sentinel
|
||||
# from tornado.options import define, options
|
||||
from tornado.options import options, define as _define, parse_command_line
|
||||
# from elasticsearch import Elasticsearch
|
||||
# from tornado_swagger import swagger
|
||||
|
||||
|
||||
|
||||
def define(name, default=None, type=None, help=None, metavar=None,
           multiple=False, group=None, callback=None):
    """Idempotent wrapper around ``tornado.options.define``.

    Tornado raises if the same option is defined twice (e.g. on module
    re-import); this wrapper silently skips names that already exist.
    """
    if name in options._options:
        return None
    return _define(name, default, type, help, metavar,
                   multiple, group, callback)
|
||||
|
||||
tornado.options.define = define
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
define("port", default=8888, help="run on the given port", type=int)
|
||||
define("debug", default=0)
|
||||
|
||||
_ROOT = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
importlib.reload(sys)
|
||||
# sys.setdefaultencoding('utf-8')
|
||||
|
||||
try:
|
||||
import website
|
||||
except ImportError:
|
||||
print("app package import error and try to config sys.path")
|
||||
logging.info("app import error")
|
||||
|
||||
# sys.path.append(os.path.join(_ROOT, "../.."))
|
||||
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "..")))
|
||||
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "../..")))
|
||||
|
||||
from website import settings
|
||||
from website.db_mysql import app_engine
|
||||
from website.handler import APIErrorHandler
|
||||
from website.urls import handlers, page_handlers
|
||||
# from website.urls import handlers_v2
|
||||
# print(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# class Connection(torndb.Connection):
|
||||
# def __init__(self,
|
||||
# host,
|
||||
# database,
|
||||
# user=None,
|
||||
# password=None,
|
||||
# max_idle_time=7 * 3600,
|
||||
# connect_timeout=500,
|
||||
# time_zone="+0:00"):
|
||||
# self.host = host
|
||||
# self.database = database
|
||||
# self.max_idle_time = float(max_idle_time)
|
||||
#
|
||||
# args = dict(conv=torndb.CONVERSIONS,
|
||||
# use_unicode=True,
|
||||
# charset="utf8",
|
||||
# db=database,
|
||||
# init_command=('SET time_zone = "%s";' %
|
||||
# time_zone),
|
||||
# connect_timeout=connect_timeout,
|
||||
# sql_mode="TRADITIONAL")
|
||||
# if user is not None:
|
||||
# args["user"] = user
|
||||
# if password is not None:
|
||||
# args["passwd"] = password
|
||||
#
|
||||
# # We accept a path to a MySQL socket file or a host(:port) string
|
||||
# if "/" in host:
|
||||
# args["unix_socket"] = host
|
||||
# else:
|
||||
# self.socket = None
|
||||
# pair = host.split(":")
|
||||
# if len(pair) == 2:
|
||||
# args["host"] = pair[0]
|
||||
# args["port"] = int(pair[1])
|
||||
# else:
|
||||
# args["host"] = host
|
||||
# args["port"] = 3306
|
||||
#
|
||||
# self._db = None
|
||||
# self._db_args = args
|
||||
# self._last_use_time = time.time()
|
||||
# try:
|
||||
# self.reconnect()
|
||||
# except Exception:
|
||||
# logging.error("Cannot connect to MySQL on %s",
|
||||
# self.host,
|
||||
# exc_info=True)
|
||||
|
||||
|
||||
# class NoCacheStaticFileHandler(tornado.web.StaticFileHandler):
|
||||
# def set_extra_headers(self, path):
|
||||
# self.set_header("Cache-control", "no-cache")
|
||||
|
||||
|
||||
class Application(tornado.web.Application):
    """Web application wiring: URL routes, settings, MySQL engine, Redis."""

    def __init__(self):
        # API routes get the configured prefix; page routes are mounted
        # as-is; a catch-all turns every unknown path into an API error.
        routes = [
            ("%s%s" % (settings.api_prefix, spec[0]), spec[1])
            for spec in handlers
        ]
        routes += [(spec[0], spec[1]) for spec in page_handlers]
        routes.append((r".*", APIErrorHandler))

        app_settings = dict(
            debug=options.debug,
            login_url="",
            cookie_secret=settings.cookie_secret,
            template_path=os.path.join(_ROOT, "templates"),
            static_path=os.path.join(_ROOT, "static"),
            xsrf_cookies=False,
            autoescape=None,
        )

        # SQLAlchemy engine for the application database.
        self.app_mysql = app_engine
        # Plain Redis client (cluster/sentinel variants were removed).
        self.r_app = redis.Redis(*settings.redis_app, decode_responses=True)

        tornado.web.Application.__init__(self, routes, **app_settings)
|
||||
|
||||
|
||||
def sig_handler(signum, frame):
    # Stop the IOLoop so the process can exit cleanly on a signal.
    tornado.ioloop.IOLoop.instance().stop()
|
||||
|
||||
|
||||
class PwdFilter(logging.Filter):
    """Logging filter hook intended for scrubbing passwords from records.

    NOTE(review): the original implementation only ``print()``-ed every log
    record to stdout (leftover debugging) and never modified or suppressed
    anything; the debug prints are removed here. Actual scrubbing of
    sensitive fields is still TODO.
    """

    def filter(self, record):
        # Always let the record through; this filter currently does not
        # suppress or rewrite anything.
        return True
|
||||
|
||||
|
||||
def main():
    """Parse command-line options, configure logging, start the HTTP server."""
    # tornado.httpserver is referenced below but is not among this module's
    # visible imports; import it locally so HTTPServer resolves.
    import tornado.httpserver

    tornado.options.parse_command_line()

    # Custom log format applied to every root handler installed by
    # parse_command_line.
    formatter = logging.Formatter(
        '[%(levelname)1.1s %(asctime)s.%(msecs)d '
        '%(module)s:%(funcName)s:%(lineno)d] %(message)s',
        "%Y-%m-%d %H:%M:%S"
    )
    for handler in logging.getLogger().handlers:
        handler.setFormatter(formatter)

    app = Application()

    # Allow request bodies up to ~300 MB (large file uploads).
    server = tornado.httpserver.HTTPServer(app, max_buffer_size=104857600 * 3)
    server.listen(options.port)

    logging.info("start app server...")
    tornado.ioloop.IOLoop.instance().start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -0,0 +1,29 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func
|
||||
from website.db_mysql import get_session
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
"""
|
||||
设备分类表
|
||||
"""
|
||||
class DeviceClassification(Base):
    """ORM model for the device classification table (设备分类表)."""

    __tablename__ = 'device_classification'

    id = Column(Integer, primary_key=True)
    # Display name of the classification.
    name = Column(String(255), default='', comment='名称')
    # Short uuid used as the public identifier.
    suid = Column(String(10), default='', comment='short uuid')
    # Soft-delete flag; the DB column is named "del" (reserved word).
    delete = Column("del", Integer, default=0)
    create_time = Column(DateTime, default=func.now())
|
||||
|
||||
class DeviceClassificationReporitory(object):
    """Query helpers for DeviceClassification rows.

    NOTE(review): the class name contains a typo ("Reporitory"); kept
    unchanged because callers reference it by this name.
    """

    def get_row_by_id(self, cid):
        """Return the classification row with primary key *cid*, or None."""
        with get_session() as session:
            query = session.query(DeviceClassification)
            return query.filter_by(id=cid).first()

    def get_row_by_suid(self, suid):
        """Return the classification row with short uuid *suid*, or None."""
        with get_session() as session:
            query = session.query(DeviceClassification)
            return query.filter_by(suid=suid).first()
|
@ -0,0 +1,136 @@
|
||||
from website.handler import BaseHandler
|
||||
from sqlalchemy import text
|
||||
from typing import Any
|
||||
from website.db_mysql import get_session, get_async_session, to_json, to_json_list
|
||||
import json
|
||||
|
||||
|
||||
# Per-enterprise model count.
# NOTE(review): dead code — this stub is shadowed by the later definition
# of get_enterprise_model_count in this module; kept only so this edit is
# behavior-neutral. TODO remove. (Parameter shadows builtin ``id``.)
def get_enterprise_model_count(id: int) -> int:
    return 0
|
||||
|
||||
|
||||
# Per-enterprise device count.
# NOTE(review): dead code — shadowed by the later definition of
# get_enterprise_device_count in this module. TODO remove.
def get_enterprise_device_count(id: int) -> int:
    return 0
|
||||
|
||||
|
||||
# Total number of enterprise entities across the platform.
def get_enterprise_entity_count(engine: Any) -> int:
    """Return the row count of the enterprise table (0 when no row comes back)."""
    with engine.connect() as conn:
        row = conn.execute(text("select count(*) from enterprise ")).fetchone()
    return row[0] if row else 0
|
||||
|
||||
|
||||
# Count of base models attached to one enterprise, looked up by id or suid.
# NOTE: this definition shadows the earlier stub of the same name.
def get_enterprise_model_count(entity_id: int = 0, entity_suid: str = "") -> int:
    """Return len(base_models) of the matching enterprise_busi_model row.

    Returns 0 when no identifier is supplied or no row matches.
    """
    # Guard: without a filter the original built SQL ending in a bare
    # "where ", which is invalid.
    if not entity_id and not entity_suid:
        return 0

    with get_session() as session:
        sql = "select base_models from enterprise_busi_model where "
        param = {}
        if entity_id:
            sql += "entity_id = :entity_id"
            param["entity_id"] = entity_id
        else:
            sql += "entity_suid = :entity_suid"
            param["entity_suid"] = entity_suid

        cur = session.execute(text(sql), param)
        res = to_json(cur)
        if res:
            # NOTE(review): assumes to_json yields the JSON text of the
            # base_models column — confirm against db_mysql.to_json.
            base_model_list = json.loads(res)
            return len(base_model_list)

    return 0
|
||||
|
||||
|
||||
# Count of devices attached to one enterprise (all devices when neither
# entity_id nor entity_suid is given — no WHERE clause is added).
def get_enterprise_device_count(entity_id: int = 0, entity_suid: str = "") -> int:
    """Return the enterprise_device row count for the given filter."""
    sql = "select count(id) as device_count from enterprise_device "
    param = {}
    if entity_id:
        sql += "where entity_id = :entity_id"
        param = {"entity_id": entity_id}
    elif entity_suid:
        sql += "where entity_suid = :entity_suid"
        param = {"entity_suid": entity_suid}

    with get_session() as session:
        result = to_json(session.execute(text(sql), param))

    return result["device_count"] if result else 0
|
||||
|
||||
|
||||
async def get_enterprise_model_and_device_count(
    entity_id: int = 0, entity_suid: str = ""
) -> (int, int):
    """Return (distinct base-model count, device count) for one enterprise.

    The enterprise is selected by entity_id or, failing that, entity_suid.
    Returns (0, 0) when neither identifier is supplied or nothing matches.
    """
    # Guard: with no identifier the formatted WHERE clause would be empty
    # and the SQL invalid (original had no such guard).
    if not entity_id and not entity_suid:
        return 0, 0

    if entity_id:
        where_clause = "entity_id = :entity_id"
        params = {"entity_id": entity_id}
    else:
        where_clause = "entity_suid = :entity_suid"
        params = {"entity_suid": entity_suid}

    async with get_async_session() as session:
        sql_device_count = (
            "SELECT COUNT(*) AS device_count FROM enterprise_device "
            "WHERE {where_clause} ".format(where_clause=where_clause)
        )
        result_device_count = await session.execute(text(sql_device_count), params)
        # Guard the dict access: original indexed to_json(...)["device_count"]
        # unconditionally and crashed when no row came back.
        row = to_json(result_device_count)
        device_count = row["device_count"] if row else 0

        sql_base_model = (
            "SELECT base_models FROM enterprise_busi_model "
            "WHERE {where_clause} ".format(where_clause=where_clause)
        )
        result_base_model = await session.execute(text(sql_base_model), params)

        # base_models is a JSON array per row; count distinct model ids
        # across all matching rows. (Original shadowed the loop variable
        # name "base_models"; renamed for clarity.)
        base_model_ids = set()
        for item in to_json_list(result_base_model):
            for base_model in json.loads(item["base_models"]):
                base_model_ids.add(base_model["id"])

    return len(base_model_ids), device_count
|
@ -0,0 +1,156 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
from typing import List, Union, Optional, Tuple
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func, text
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from website.db_mysql import get_session, to_json, to_json_list, Row
|
||||
|
||||
"""
|
||||
CREATE TABLE `enterprise_busi_model_node_device` (
|
||||
`id` int NOT NULL,
|
||||
`suid` varchar(10) DEFAULT NULL,
|
||||
`entity_suid` varchar(10) DEFAULT NULL,
|
||||
`node_id` int DEFAULT NULL,
|
||||
`node_suid` varchar(10) DEFAULT NULL,
|
||||
`busi_model_id` int DEFAULT NULL,
|
||||
`busi_model_suid` varchar(10) DEFAULT NULL,
|
||||
`device_id` int DEFAULT NULL,
|
||||
`device_suid` varchar(10) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='节点-业务模型-设备关联表';
|
||||
"""
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
class EnterpriseBusiModelNodeDevice(Base):
    """ORM row linking a node, a business model and a device
    (see the CREATE TABLE snippet above: 节点-业务模型-设备关联表)."""

    __tablename__ = "enterprise_busi_model_node_device"

    id = Column(Integer, primary_key=True)
    # Short uuid of this link row.
    suid = Column(String(10))
    entity_suid = Column(String(10))
    node_id = Column(Integer)
    node_suid = Column(String(10))
    busi_model_id = Column(Integer)
    busi_model_suid = Column(String(10))
    device_id = Column(Integer)
    device_suid = Column(String(10))
    # Auto-maintained timestamp; also bumped on update (ON UPDATE in DDL).
    create_time = Column(
        DateTime, default=func.current_timestamp(), onupdate=func.current_timestamp()
    )

    def __repr__(self):
        return f"EnterpriseBusiModelNodeDevice(id={self.id}, suid='{self.suid}')"
|
||||
|
||||
|
||||
class EnterpriseBusiModelNodeDeviceRepository(object):
    """Data-access helpers for the node / busi-model / device link table."""

    def insert_record(self, records: List[EnterpriseBusiModelNodeDevice]):
        """Insert ORM instances one by one and commit once."""
        with get_session() as session:
            for record in records:
                session.add(record)
            session.commit()

    def batch_insert_record(self, records: List[dict]):
        """Bulk-insert plain dicts (faster than per-instance add)."""
        with get_session() as session:
            session.bulk_insert_mappings(EnterpriseBusiModelNodeDevice, records)
            session.commit()

    def delete_by_busi_model_and_node(self, node_id: int, busi_model_id: int) -> bool:
        """Delete all links for (node_id, busi_model_id); True on success."""
        try:
            with get_session() as session:
                sql = text(
                    "delete from enterprise_busi_model_node_device "
                    "where node_id=:node_id and busi_model_id=:busi_model_id"
                )
                session.execute(
                    sql, {"node_id": node_id, "busi_model_id": busi_model_id}
                )
                session.commit()
        except Exception as e:
            # Was logging.info in the original; a failed delete is an error.
            logging.error("delete_by_busi_model_and_node error: %s", e)
            return False

        return True

    def check_deployed(self, node_id: int, busi_model_id: int) -> int:
        """Return 1 if any link exists for the pair, else 0."""
        with get_session() as session:
            count = (
                session.query(EnterpriseBusiModelNodeDevice)
                .filter(
                    EnterpriseBusiModelNodeDevice.node_id == node_id,
                    EnterpriseBusiModelNodeDevice.busi_model_id == busi_model_id,
                )
                .count()
            )
            return 1 if count > 0 else 0

    def get_one_by_busimodel(self, busi_model_id: int):
        """Return one (node_id, busi_model_id) link for the model, or None."""
        with get_session() as session:
            sql = text(
                "select node_id, busi_model_id from enterprise_busi_model_node_device "
                "where busi_model_id=:busi_model_id limit 1"
            )
            res = session.execute(sql, {"busi_model_id": busi_model_id})
            return to_json(res)

    def get_device_ids(self, node_id: int, busi_model_id: int) -> list:
        """Return the device_id rows linked to the (node, busi model) pair."""
        with get_session() as session:
            sql = text(
                "select device_id from enterprise_busi_model_node_device "
                "where node_id=:node_id and busi_model_id=:busi_model_id"
            )
            res = session.execute(
                sql, {"node_id": node_id, "busi_model_id": busi_model_id}
            )
            return to_json_list(res)

    def get_busi_model_by_device(self, device_id: int = 0, device_suid: str = "", pagination: bool = False,
                                 page_no: int = 0,
                                 page_size: int = 0) -> Union[list, Tuple[Optional[List[Row]], int]]:
        """List busi models bound to a device, with optional pagination.

        Returns [] when neither device_id nor device_suid is given,
        otherwise a (rows, total_count) tuple.
        """
        if not device_id and not device_suid:
            logging.error("get_busi_model_by_device error: device_id and device_suid is null")
            return []

        count = 0
        with get_session() as session:
            sql = """
                select d.busi_model_id, d.node_id, m.name, m.base_models
                from enterprise_busi_model_node_device d, enterprise_busi_model m
                where d.busi_model_id=m.id"""
            p = {}

            sql_count = """
                select count(1) from enterprise_busi_model_node_device d, enterprise_busi_model m where d.busi_model_id=m.id
            """
            p_count = {}

            if device_id:
                sql += " and d.device_id=:device_id"
                p.update({"device_id": device_id})

                sql_count += " and d.device_id=:device_id"
                p_count.update({"device_id": device_id})

            if device_suid:
                sql += " and d.device_suid=:device_suid"
                p.update({"device_suid": device_suid})

                sql_count += " and d.device_suid=:device_suid"
                p_count.update({"device_suid": device_suid})

            if pagination:
                sql += " order by d.id desc"

                if page_no > 0:
                    sql += " limit :pageno, :pagesize"
                    p.update({"pageno": (page_no - 1) * page_size, "pagesize": page_size})

            count = session.execute(text(sql_count), p_count).scalar()

            res = session.execute(text(sql), p)
            res = to_json_list(res)
            return res, count
|
@ -0,0 +1,385 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import logging
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from website import consts
|
||||
from website import errors
|
||||
from website.db.device_classification.device_classification import DeviceClassification
|
||||
from website.db.enterprise_entity.enterprise_entity import EnterpriseEntityRepository
|
||||
from website.db.enterprise_node.enterprise_node import EnterpriseNodeRepository
|
||||
from website.db_mysql import get_session
|
||||
from website.util import shortuuid
|
||||
|
||||
|
||||
def row2dict(row):
    """Convert an ORM row to a {column_name: str(value)} dict."""
    return {
        column.name: str(getattr(row, column.name))
        for column in row.__table__.columns
    }
|
||||
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
"""
|
||||
CREATE TABLE `enterprise_device` (
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`suid` varchar(10) DEFAULT NULL COMMENT '设备suid',
|
||||
`entity_id` int NOT NULL COMMENT '企业id',
|
||||
`entity_suid` varchar(10) NOT NULL COMMENT '企业suid',
|
||||
`node_id` int NOT NULL COMMENT '节点id',
|
||||
`node_suid` varchar(10) NOT NULL COMMENT '节点suid',
|
||||
`classification` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT '' COMMENT '分类suid',
|
||||
`name` varchar(255) NOT NULL,
|
||||
`addr` varchar(255) DEFAULT '',
|
||||
`device_model` varchar(255) DEFAULT '' COMMENT '设备型号',
|
||||
`param` varchar(255) DEFAULT '',
|
||||
`comment` varchar(255) DEFAULT '',
|
||||
`del` int DEFAULT '0',
|
||||
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
|
||||
`update_time` datetime DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='企业设备表';
|
||||
"""
|
||||
|
||||
|
||||
class EnterpriseDevice(Base):
    """ORM model for the enterprise_device table (企业设备表)."""

    __tablename__ = "enterprise_device"

    id = Column(Integer, primary_key=True)
    # Public short uuid of the device.
    suid = Column(String(length=10), default="")
    entity_id = Column(Integer)
    entity_suid = Column(String)
    node_id = Column(Integer)
    node_suid = Column(String)
    # Holds the suid of a DeviceClassification row.
    classification = Column(String)
    name = Column(String)
    addr = Column(String)
    device_model = Column(String)
    param = Column(String)
    comment = Column(String)
    # NOTE(review): "status" does not appear in the CREATE TABLE snippet
    # above — confirm the column exists in the live schema.
    status = Column(Integer)
    # Soft-delete flag; the DB column is named "del" (reserved word).
    delete = Column("del", Integer, default=0)
    create_time = Column(DateTime, default=func.now())
    update_time = Column(DateTime, default=func.now())
|
||||
|
||||
|
||||
class EnterpriseDeviceRepository(object):
|
||||
|
||||
def add_device(self, device):
|
||||
entity_id = device["entity_id"]
|
||||
node_id = device["node_id"]
|
||||
entity_suid = EnterpriseEntityRepository().get_entity_suid(entity_id)
|
||||
node = EnterpriseNodeRepository().get_node_by_id(node_id)
|
||||
node_suid = node["suid"]
|
||||
|
||||
device["entity_suid"] = entity_suid
|
||||
device["node_suid"] = node_suid
|
||||
device["suid"] = shortuuid.ShortUUID().random(10)
|
||||
name = device["name"]
|
||||
|
||||
with get_session() as session:
|
||||
existing_device = (
|
||||
session.query(EnterpriseDevice)
|
||||
.filter_by(node_id=node_id, name=name, delete=0)
|
||||
.first()
|
||||
)
|
||||
if existing_device:
|
||||
logging.error(
|
||||
f"Failed to add device: device with node_id={node_id} and name={name} already exists"
|
||||
)
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "设备已存在")
|
||||
|
||||
new_device = EnterpriseDevice(**device)
|
||||
try:
|
||||
# session.execute(
|
||||
# text(
|
||||
# """INSERT INTO enterprise_device
|
||||
# (suid, entity_id, entity_suid, node_id, node_suid, classification, name, addr, device_model, param, comment)
|
||||
# values (:suid, :entity_id, :entity_suid, :node_id, :node_suid, :classification, :name, :addr, :device_model, :param, :comment)"""
|
||||
# ),
|
||||
# device)
|
||||
session.add(new_device)
|
||||
except Exception as e:
|
||||
logging.error("Failed to add device")
|
||||
raise e
|
||||
|
||||
return
|
||||
|
||||
def edit_device(self, device):
|
||||
device_id = device["id"]
|
||||
with get_session() as session:
|
||||
try:
|
||||
# valid_columns = {col.name for col in EnterpriseDevice.__table__.columns}
|
||||
# filtered_data = {key: value for key, value in device.items() if key in valid_columns}
|
||||
session.query(EnterpriseDevice).filter(
|
||||
EnterpriseDevice.id == device_id
|
||||
).update(device)
|
||||
except Exception as e:
|
||||
logging.error("Failed to edit device")
|
||||
raise e
|
||||
|
||||
return
|
||||
|
||||
def delete_device(self, device_id):
|
||||
with get_session() as session:
|
||||
try:
|
||||
session.query(EnterpriseDevice).filter(
|
||||
EnterpriseDevice.id == device_id
|
||||
).update({"delete": 1})
|
||||
except Exception as e:
|
||||
logging.error("Failed to delete device")
|
||||
raise e
|
||||
|
||||
return
|
||||
|
||||
def list_devices(self, node_id: int, pageNo: int, pageSize: int) -> dict:
|
||||
with get_session() as session:
|
||||
try:
|
||||
total_count = (
|
||||
session.query(EnterpriseDevice)
|
||||
.filter(
|
||||
EnterpriseDevice.node_id == node_id,
|
||||
EnterpriseDevice.delete != 1,
|
||||
)
|
||||
.count()
|
||||
)
|
||||
devices = (
|
||||
session.query(
|
||||
EnterpriseDevice,
|
||||
DeviceClassification.name.label("classification_name"),
|
||||
)
|
||||
.join(
|
||||
DeviceClassification,
|
||||
EnterpriseDevice.classification == DeviceClassification.suid,
|
||||
)
|
||||
.filter(
|
||||
EnterpriseDevice.node_id == node_id,
|
||||
EnterpriseDevice.delete != 1,
|
||||
)
|
||||
.order_by(EnterpriseDevice.id.desc())
|
||||
.limit(pageSize)
|
||||
.offset((pageNo - 1) * pageSize)
|
||||
.all()
|
||||
)
|
||||
except Exception as e:
|
||||
logging.error("Failed to list devices")
|
||||
raise e
|
||||
|
||||
device_dicts = []
|
||||
for device, classification_name in devices:
|
||||
device_dict = {
|
||||
"id": device.id,
|
||||
"suid": device.suid,
|
||||
"entity_id": device.entity_id,
|
||||
"entity_suid": device.entity_suid,
|
||||
"node_id": device.node_id,
|
||||
"node_suid": device.node_suid,
|
||||
"classification": classification_name,
|
||||
"name": device.name,
|
||||
"addr": device.addr,
|
||||
"device_model": device.device_model,
|
||||
"param": device.param,
|
||||
"comment": device.comment,
|
||||
"delete": device.delete,
|
||||
"create_time": device.create_time.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"update_time": str(device.update_time),
|
||||
}
|
||||
device_dicts.append(device_dict)
|
||||
# for row in devices:
|
||||
# logging.info(row.name)
|
||||
logging.info(device_dicts)
|
||||
|
||||
return {"devices": device_dicts, "total_count": total_count}
|
||||
|
||||
def list_simple_devices(self, node_id: int) -> list:
    """Fetch minimal info for every non-deleted device under a node.

    Returns:
        list of dicts with keys "id", "name", "classification_name".

    Raises:
        Exception: re-raised after logging when the query fails.
    """
    with get_session() as session:
        try:
            rows = (
                session.query(
                    EnterpriseDevice,
                    DeviceClassification.name.label("classification_name"),
                )
                .join(
                    DeviceClassification,
                    EnterpriseDevice.classification == DeviceClassification.suid,
                )
                .filter(
                    EnterpriseDevice.node_id == node_id,
                    EnterpriseDevice.delete != 1,
                )
                .all()
            )
        except Exception as e:
            logging.error("Failed to list devices")
            raise e

        return [
            {"id": dev.id, "name": dev.name, "classification_name": label}
            for dev, label in rows
        ]
|
||||
|
||||
def get_device(self, device_id: int) -> dict:
    """Look up a single non-deleted device by primary key.

    Returns:
        dict of the device's columns, or {} when no matching row exists.

    Raises:
        Exception: re-raised after logging when the query fails.
    """
    with get_session() as session:
        try:
            row = (
                session.query(EnterpriseDevice)
                .filter(
                    EnterpriseDevice.id == device_id, EnterpriseDevice.delete != 1
                )
                .first()
            )
        except Exception as e:
            logging.error("Failed to get device")
            raise e

        if row is None:
            return {}

        return {
            "id": row.id,
            "entity_id": row.entity_id,
            "entity_suid": row.entity_suid,
            "node_id": row.node_id,
            "node_suid": row.node_suid,
            "suid": row.suid,
            "name": row.name,
            "addr": row.addr,
            "device_model": row.device_model,
            "param": row.param,
            "comment": row.comment,
            "classification": row.classification,
        }
|
||||
|
||||
def get_devices(self, device_ids: list) -> list:
    """Fetch device dicts for every non-deleted device whose id is in
    *device_ids*.

    Returns:
        list of dicts (database order, not the order of *device_ids*).

    Raises:
        Exception: re-raised after logging when the query fails.
    """
    with get_session() as session:
        try:
            rows = (
                session.query(EnterpriseDevice)
                .filter(
                    EnterpriseDevice.id.in_(device_ids),
                    EnterpriseDevice.delete != 1,
                )
                .all()
            )
        except Exception as e:
            logging.error("Failed to get devices")
            raise e

        return [
            {
                "id": row.id,
                "entity_id": row.entity_id,
                "entity_suid": row.entity_suid,
                "node_id": row.node_id,
                "node_suid": row.node_suid,
                "suid": row.suid,
                "name": row.name,
                "addr": row.addr,
                "device_model": row.device_model,
                "param": row.param,
                "comment": row.comment,
                "classification": row.classification,
            }
            for row in rows
        ]
|
||||
|
||||
def get_all_device_count(self) -> int:
    """Return the total number of devices not flagged as deleted.

    Unlike the other query helpers in this class, errors are swallowed
    (logged) and 0 is returned — best-effort counter semantics.
    """
    with get_session() as session:
        try:
            return (
                session.query(EnterpriseDevice)
                .filter(EnterpriseDevice.delete != 1)
                .count()
            )
        except Exception as e:
            logging.error("Failed to get all device count, error: {}".format(e))

        return 0
|
||||
|
||||
def list_entity_devices(self, entity_id: int, pageno: int, pagesize: int, classification: str = "",
                        status: int = 0, group: int = 0) -> dict:
    """Page through an enterprise's devices with optional filters.

    Args:
        entity_id: owning enterprise id.
        pageno: 1-based page number.
        pagesize: rows per page.
        classification: optional classification filter (exact match).
        status: optional device status; applied only when it is a known
            status in consts.device_status_map and not the "all" default.
        group: optional node id to restrict the listing to one node.

    Returns:
        dict with "count" (total matching rows) and "devices" (the page's
        (EnterpriseDevice, classification_name) row tuples, newest first).

    Raises:
        Exception: re-raised after logging when a query fails.
    """
    logging.info(status)
    with get_session() as session:
        try:
            count_query = (
                session.query(EnterpriseDevice)
                .filter(
                    EnterpriseDevice.entity_id == entity_id,
                    EnterpriseDevice.delete != 1,
                )
            )
            if classification:
                count_query = count_query.filter(EnterpriseDevice.classification == classification)
            if status in consts.device_status_map and status != consts.device_status_default:
                logging.info("filter status")
                count_query = count_query.filter(EnterpriseDevice.status == status)
            if group:
                count_query = count_query.filter(EnterpriseDevice.node_id == group)
            count = count_query.count()

            # NOTE(review): this outer join matches DeviceClassification.id,
            # while the other methods in this file join on
            # DeviceClassification.suid — confirm which key
            # EnterpriseDevice.classification actually stores.
            device_query = (
                session.query(EnterpriseDevice, DeviceClassification.name.label("classification_name"))
                .join(DeviceClassification, DeviceClassification.id == EnterpriseDevice.classification,
                      isouter=True)
                .filter(
                    EnterpriseDevice.entity_id == entity_id,
                    EnterpriseDevice.delete != 1,
                )
            )
            if classification:
                device_query = device_query.filter(EnterpriseDevice.classification == classification)
            if status in consts.device_status_map and status != consts.device_status_default:
                device_query = device_query.filter(EnterpriseDevice.status == status)
            if group:
                # BUG FIX: the original did
                #   session_device = session_count.filter(...)
                # so a group filter silently replaced the device query with
                # the count query, dropping the classification join and the
                # query the pagination below was meant to run on.
                device_query = device_query.filter(EnterpriseDevice.node_id == group)

            devices = device_query.order_by(EnterpriseDevice.id.desc()).limit(pagesize).offset(
                (pageno - 1) * pagesize).all()

        except Exception as e:
            logging.error("Failed to list devices")
            raise e

        logging.info(devices)

        return {"count": count, "devices": devices}
|
||||
|
||||
def status_count(self, entity_id: int, classification: str = ""):
    """Count an enterprise's non-deleted devices, grouped by status.

    Args:
        entity_id: owning enterprise id.
        classification: optional classification filter.

    Returns:
        dict mapping status value -> number of devices in that status.

    Raises:
        Exception: re-raised after logging when the query fails.
    """
    with get_session() as session:
        try:
            grouped = (
                session.query(
                    EnterpriseDevice.status,
                    func.count(EnterpriseDevice.id).label("count")
                )
                .filter(
                    EnterpriseDevice.entity_id == entity_id,
                    EnterpriseDevice.delete != 1,
                )
                .group_by(EnterpriseDevice.status)
            )

            if classification:
                grouped = grouped.filter(EnterpriseDevice.classification == classification)

            rows = grouped.all()
        except Exception as e:
            logging.error("Failed to get status count")
            raise e

        # Each row is a (status, count) tuple, so dict() builds the mapping.
        return dict(rows)
|
@ -0,0 +1,43 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import logging
|
||||
from website.db_mysql import get_session, to_json
|
||||
from sqlalchemy import text
|
||||
|
||||
"""
|
||||
CREATE TABLE `enterprise` (
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`suid` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT '',
|
||||
`name` varchar(255) NOT NULL,
|
||||
`province` varchar(50) NOT NULL,
|
||||
`city` varchar(50) NOT NULL,
|
||||
`addr` varchar(255) NOT NULL,
|
||||
`industry` int NOT NULL,
|
||||
`contact` varchar(100) NOT NULL,
|
||||
`phone` varchar(50) NOT NULL,
|
||||
`summary` varchar(255) DEFAULT NULL,
|
||||
`logo` text,
|
||||
`account` varchar(20) DEFAULT NULL,
|
||||
`pwd` varchar(100) DEFAULT NULL,
|
||||
`del` int DEFAULT '0',
|
||||
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (`id`),
|
||||
KEY `idx_suid` (`suid`)
|
||||
) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='企业信息表';
|
||||
"""
|
||||
|
||||
|
||||
class EnterpriseEntityRepository(object):
    """Read-side helpers for the `enterprise` table (schema above)."""

    def __init__(self):
        pass

    def get_entity_suid(self, entity_id: int) -> str:
        """Return the suid of enterprise *entity_id*, or "" when absent."""
        with get_session() as session:
            cursor = session.execute(
                text("select suid from enterprise where id=:id"),
                {"id": entity_id},
            )
            row = to_json(cursor)

        if not row:
            return ""
        return row["suid"]
|
||||
|
||||
|
@ -0,0 +1,194 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from typing import List, Dict, Any, Union
|
||||
|
||||
from sqlalchemy import text
|
||||
|
||||
from website.db_mysql import get_session, to_json_list, to_json
|
||||
|
||||
"""
|
||||
CREATE TABLE `enterprise_node` (
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`entity_suid` int NOT NULL COMMENT '企业uuid',
|
||||
`name` varchar(255) DEFAULT '' COMMENT '企业name',
|
||||
`parent` int DEFAULT NULL,
|
||||
`addr` varchar(255) DEFAULT '',
|
||||
`lola` varchar(255) DEFAULT '',
|
||||
`contact` varchar(255) DEFAULT '',
|
||||
`phone` varchar(255) DEFAULT '',
|
||||
`comment` varchar(255) DEFAULT '',
|
||||
`del` int DEFAULT '0',
|
||||
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='企业节点表'
|
||||
"""
|
||||
|
||||
|
||||
class EnterpriseNodeRepository(object):
    """Raw-SQL helpers for the `enterprise_node` tree (schema above).

    Nodes form a tree via the `parent` column; parent=0 marks a root.
    Deletion is soft (`del=1`).
    """

    def insert_node(self, node: dict) -> int:
        """Insert one node row.

        Args:
            node: dict supplying every named bind parameter of the INSERT.

        Returns:
            Always 0 — the new row id is not reported back (see the
            commented-out LAST_INSERT_ID experiment below).
        """
        with get_session() as session:
            # NOTE(review): no explicit commit here — presumably get_session()
            # commits on exit (delete_node below commits explicitly); verify.
            session.execute(
                text(
                    "insert into "
                    "enterprise_node(suid, entity_id, entity_suid, name, parent, addr, lola, contact, phone, comment) "
                    "values (:suid, :entity_id, :entity_suid, :name, :parent, :addr, :lola, :contact, :phone, :comment)"
                ),
                node,
            )

            # last_insert_id = session.execute(text("SELECT LAST_INSERT_ID()")).scalar()
            # logging.info(f"last_insert_id: {last_insert_id}")
            return 0

    def update_node(self, node: dict) -> int:
        """Update the editable columns of node ``node["id"]``.  Returns 0."""
        with get_session() as session:
            sql = (
                "update enterprise_node "
                "set name=:name, parent=:parent, addr=:addr, lola=:lola, contact=:contact, phone=:phone, comment=:comment where id=:id"
            )
            param = {
                "id": node["id"],
                "name": node["name"],
                "parent": node["parent"],
                "addr": node["addr"],
                "lola": node["lola"],
                "contact": node["contact"],
                "phone": node["phone"],
                "comment": node["comment"],
            }
            session.execute(text(sql), param)
            return 0

    def select_tree(self, entity_id: int, name: str = "") -> List[Dict[str, Any]]:
        """Build the full node tree of an enterprise.

        Roots are rows with parent=0; children are collected recursively by
        build_tree() using the same session.  When *name* is given, every
        level is additionally filtered with a LIKE match on the node name.

        Returns:
            list of root dicts, each with a nested "children" list.
        """
        roots = []
        with get_session() as session:
            # sql = "select id, name, suid, parent from enterprise_node where entity_id=:entity_id and del=0 and parent=0 "

            # Self-join pulls the parent's name alongside each node.
            sql = (
                """
                SELECT
                    n.id, n.name, n.parent, p.name AS parent_name, n.suid
                FROM enterprise_node n
                LEFT JOIN enterprise_node p ON n.parent = p.id
                WHERE n.entity_id=:entity_id AND n.del=0 and n.parent=0
                """
            )
            param = {"entity_id": entity_id}

            if name:
                # Appended after the triple-quoted SQL; bound, not interpolated.
                sql += " and n.name like :name"
                param["name"] = f"%{name}%"

            res = session.execute(text(sql), param)
            node_list = to_json_list(res)
            # Legacy "cond and a or b" idiom: normalise a falsy result to [].
            node_list = node_list and node_list or []
            for node in node_list:
                root = {
                    "id": node["id"],
                    "name": node["name"],
                    "suid": node["suid"],
                    "parent": node["parent"],
                    "parent_name": node["parent_name"],
                    # Recurse while the session is still open.
                    "children": self.build_tree(session, node, name),
                }
                roots.append(root)
            # return node_list
            return roots

    def build_tree(
            self, session: Any, node: Dict[str, Any], name: str = ""
    ) -> List[Any]:
        """Recursively collect the children of *node*, depth-first.

        Args:
            session: the open session passed down from select_tree().
            node: parent node dict; only node["id"] is read.
            name: optional LIKE filter applied at every level.
        """
        # sql = (
        #     "select id, name, suid, parent from enterprise_node where del=0 and parent=:parent"
        # )

        sql = (
            """
            SELECT
                n.id, n.name, n.parent, p.name AS parent_name, n.suid
            FROM enterprise_node n
            LEFT JOIN enterprise_node p ON n.parent = p.id
            WHERE n.parent=:parent AND n.del=0
            """
        )

        param = {"parent": node["id"]}

        if name:
            sql += " and n.name like :name"
            param["name"] = f"%{name}%"

        res = session.execute(text(sql), param)
        node_list = to_json_list(res)
        node_list = node_list and node_list or []
        children = []
        # NOTE: the loop variable shadows the `node` parameter; the parameter
        # is not read after this point, so behaviour is unaffected.
        for node in node_list:
            child = {
                "id": node["id"],
                "name": node["name"],
                "suid": node["suid"],
                "parent": node["parent"],
                "parent_name": node["parent_name"],
                "children": self.build_tree(session, node, name),
            }
            children.append(child)

        return children

    def select_node(self, node_id: int) -> Dict[str, Any]:
        """Return one live node row (with its parent's name), or None."""
        with get_session() as session:
            # sql = (
            #     "select id, name, parent, addr, lola, contact, phone, comment, suid from enterprise_node "
            #     "where id=:id and del=0"
            # )
            sql = (
                """
                SELECT
                    n.id, n.name, n.parent, p.name AS parent_name, n.addr, n.lola, n.contact, n.phone, n.comment, n.suid
                FROM enterprise_node n
                LEFT JOIN enterprise_node p ON n.parent = p.id
                WHERE n.id=:id AND n.del=0
                """
            )
            param = {"id": node_id}
            res = session.execute(text(sql), param)
            node_list = to_json_list(res)
            node_list = node_list and node_list or []
            return node_list[0] if node_list else None

    def delete_node(self, node_id: int) -> int:
        """Soft-delete a node (set del=1).  Returns 0."""
        with get_session() as session:
            sql = "update enterprise_node set del=1 where id=:id"
            param = {"id": node_id}
            session.execute(text(sql), param)
            # Explicit commit (other writers in this class rely on the
            # session context manager instead).
            session.commit()
            return 0

    def get_node_by_id(self, node_id: int) -> dict:
        """Return {suid, name} for *node_id* (no del filter — soft-deleted
        rows are returned too)."""
        with get_session() as session:
            sql = "select suid, name from enterprise_node where id=:id"
            param = {"id": node_id}
            res = session.execute(text(sql), param)
            node = to_json(res)
            return node

    def get_entity_suid_by_node_id(self, node_id: int) -> dict:
        """Return {suid, entity_suid} for *node_id*."""
        with get_session() as session:
            res = session.execute(
                text("select suid, entity_suid from enterprise_node where id=:id"),
                {"id": node_id},
            )
            entity = to_json(res)

            return entity

    def simple_list(self, entity_id: int) -> List[Any]:
        """Return [{id, name}, ...] for all live nodes of an enterprise."""
        with get_session() as session:
            sql = (
                "select id, name from enterprise_node where del=0 and entity_id=:entity_id"
            )
            param = {"entity_id": entity_id}
            res = session.execute(text(sql), param)
            node_list = to_json_list(res)
            return node_list
|
@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from typing import List, Dict, Union
|
||||
|
||||
from sqlalchemy import text
|
||||
|
||||
from website.db_mysql import get_session, to_json_list, to_json
|
||||
|
||||
|
||||
class EnterpriseNodeAlertRepository(object):
    """Raw-SQL access to the `enterprise_alert` table."""

    def get_all(self, enterprise_node_id: int = None) -> List[Dict]:
        """Return every alert config row for a node, newest first.

        Args:
            enterprise_node_id: node id to filter on.  BUG FIX: the original
                method took no argument and read ``self.enterprise_node_id``,
                which nothing in this class ever assigns, so every call
                raised AttributeError.  The attribute is still consulted as a
                fallback for backward compatibility.
        """
        if enterprise_node_id is None:
            enterprise_node_id = getattr(self, "enterprise_node_id", None)
        with get_session() as session:
            sql = text(
                """SELECT * FROM `enterprise_alert` WHERE `enterprise_node_id`=:enterprise_node_id ORDER BY `created_at` DESC;"""
            )
            cursor = session.execute(
                sql, {"enterprise_node_id": enterprise_node_id}
            )
            return to_json_list(cursor)

    def get_one(
            self, entity_suid: str, node_id: int
    ) -> Union[Dict, None]:
        """Return the alert config for (entity_suid, node_id), or None."""
        with get_session() as session:
            sql = text(
                """SELECT * FROM `enterprise_alert` WHERE `entity_suid`=:entity_suid and `node_id`=:node_id;"""
            )
            cursor = session.execute(
                sql, {"entity_suid": entity_suid, "node_id": node_id}
            )
            return to_json(cursor)

    def update_or_inert(self, data: Dict) -> None:
        """Upsert one alert config row (INSERT ... ON DUPLICATE KEY UPDATE).

        Args:
            data: dict supplying every named bind parameter below.

        The method name keeps the historical "inert" typo because existing
        callers may use it; prefer the `update_or_insert` alias.
        """
        with get_session() as session:
            sql = text(
                """
                INSERT INTO `enterprise_alert` (`entity_suid`, `node_id`, `node_suid`, `is_sms`, `sms_to`, `is_email`, `email_to`, `freq`)
                VALUES (:entity_suid, :node_id, :node_suid, :is_sms, :sms_to, :is_email, :email_to, :freq)
                ON DUPLICATE KEY UPDATE
                `node_suid`=:node_suid, `is_sms`=:is_sms, `sms_to`=:sms_to, `is_email`=:is_email, `email_to`=:email_to, `freq`=:freq
                """
            )
            session.execute(sql, data)
            session.commit()

    # Correctly-spelled alias for new callers; same bound method.
    update_or_insert = update_or_inert
|
@ -0,0 +1,61 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from website.db.alg_model import alg_model as DB_alg_model
|
||||
from website.db.enterprise_entity.enterprise_entity import EnterpriseEntityRepository
|
||||
from website.db.enterprise_node import enterprise_node as DB_Node
|
||||
from website.db_mysql import get_session
|
||||
from website.util import shortuuid
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class EnterpriseNodeDeviceBMCusConf(Base):
    """Custom base-model configuration for an enterprise device.

    One row per (device, business model, base model) combination, storing
    the overridden config as a file path and/or an inline string.
    (Translated from the original Chinese docstring:
    企业设备关联业务模型下的基础模型自定义配置.)
    """
    __tablename__ = 'enterprise_node_device_bm_cus_conf'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning enterprise, by numeric id and short uuid (per column naming).
    entity_id = Column(Integer, nullable=True)
    entity_suid = Column(String(255), nullable=True)
    # Node within the enterprise tree.
    node_id = Column(Integer, nullable=True)
    node_suid = Column(String(255), nullable=True)
    # Device the configuration applies to.
    device_id = Column(Integer, nullable=True)
    device_suid = Column(String(255), nullable=True)
    # Business model, and the base model it builds on.
    busi_model_id = Column(Integer, nullable=True)
    busi_model_suid = Column(String(255), nullable=True)
    base_model_id = Column(Integer, nullable=True)
    base_model_suid = Column(String(255), nullable=True)
    # Business-level config: file path and/or inline string.
    busi_conf_file = Column(String(255), nullable=True)
    busi_conf_str = Column(String(255), nullable=True)
    # Model-hub image plus model-level config (file path and/or inline).
    model_hub_image = Column(String(255), nullable=True)
    model_conf_file = Column(String(255), nullable=True)
    model_conf_str = Column(String(255), nullable=True)
    # NOTE(review): onupdate=func.now() makes this behave like a
    # "last modified" timestamp despite the name — confirm intent.
    create_time = Column(DateTime, server_default=func.now(), onupdate=func.now())

    def __repr__(self):
        return f"EnterpriseNodeDeviceBMCusConf(id={self.id}, entity_id={self.entity_id})"
|
||||
|
||||
|
||||
class EnterpriseNodeDeviceBMCusConfRepository():
    """CRUD helpers for EnterpriseNodeDeviceBMCusConf rows."""

    def create_busi_model_custom_config(self, data: Dict[str, Any]):
        """Persist one new custom-config row built from *data* columns."""
        with get_session() as session:

            record = EnterpriseNodeDeviceBMCusConf(**data)
            session.add(record)
            session.commit()
        return

    def get_busi_model_custom_config(self, node_id: int, device_id: int, busi_model_id: int, base_model_id: int):
        """Return the first config row matching all four ids, or None."""
        with get_session() as session:
            criteria = (
                EnterpriseNodeDeviceBMCusConf.node_id == node_id,
                EnterpriseNodeDeviceBMCusConf.device_id == device_id,
                EnterpriseNodeDeviceBMCusConf.busi_model_id == busi_model_id,
                EnterpriseNodeDeviceBMCusConf.base_model_id == base_model_id,
            )
            return (
                session.query(EnterpriseNodeDeviceBMCusConf)
                .filter(*criteria)
                .first()
            )
|
@ -0,0 +1,62 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from typing import List, Dict, Any, Type
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func, text
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from website.db_mysql import get_session, to_json_list, to_json, dict_to_obj
|
||||
|
||||
|
||||
"""
|
||||
CREATE TABLE `files` (
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`filename` varchar(255) NOT NULL DEFAULT '',
|
||||
`filepath` varchar(255) NOT NULL DEFAULT '',
|
||||
`md5_str` varchar(32) NOT NULL DEFAULT '',
|
||||
`filesize` int DEFAULT NULL,
|
||||
`filetype` varchar(50) DEFAULT '',
|
||||
`user` int DEFAULT '0',
|
||||
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='文件表';
|
||||
"""
|
||||
|
||||
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class File(Base):
    """ORM mapping for the `files` upload table (schema above)."""
    __tablename__ = 'files'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Original name of the uploaded file.
    filename = Column(String(255), nullable=False, default='')
    # Storage location on disk.
    filepath = Column(String(255), nullable=False, default='')
    # 32-char hex MD5 of the content (looked up by FileRepository).
    md5_str = Column(String(32), nullable=False, default='')
    # Size in bytes; may be NULL.
    filesize = Column(Integer, nullable=True)
    filetype = Column(String(50), nullable=True, default='')
    # Uploading user's id; 0 when unknown.
    user = Column(Integer, nullable=True, default=0)
    create_time = Column(DateTime, nullable=False, default=func.now())

    def __repr__(self):
        return f"File(id={self.id}, filename='{self.filename}', filepath='{self.filepath}')"
|
||||
|
||||
|
||||
class FileRepository(object):
    """Query helpers for the `files` table."""

    def get_file_by_md5(self, md5_list: List[str]) -> List[File]:
        """Fetch files whose md5_str is in *md5_list*, as detached objects.

        Args:
            md5_list: MD5 hex digests to look up.

        Returns:
            list of plain objects (via dict_to_obj) exposing each matching
            row's columns as attributes, usable after the session closes.
        """
        resp = []
        with get_session() as session:
            files = session.query(File).filter(File.md5_str.in_(md5_list)).all()
            for file in files:
                # BUG FIX: the original deleted '_sa_instance_state' from
                # file.__dict__ directly, mutating the live ORM instance's
                # dict and corrupting SQLAlchemy's bookkeeping.  Copy first.
                # (Also dropped: a leftover debug print of every row.)
                obj_dict = dict(file.__dict__)
                obj_dict.pop('_sa_instance_state', None)
                resp.append(dict_to_obj(obj_dict))

        return resp
|
||||
|
||||
|
@ -0,0 +1,71 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
from tornado import escape
|
||||
from tornado.web import HTTPError
|
||||
|
||||
# HTTP status code — standard codes reused directly as API result codes.
HTTP_OK = 200
ERROR_BAD_REQUEST = 400
ERROR_UNAUTHORIZED = 401
ERROR_FORBIDDEN = 403
ERROR_NOT_FOUND = 404
ERROR_METHOD_NOT_ALLOWED = 405
ERROR_INTERNAL_SERVER_ERROR = 500
# Custom error code — application-level codes outside the HTTP range.
ERROR_WARNING = 1001
ERROR_DEPRECATED = 1002
ERROR_MAINTAINING = 1003
ERROR_UNKNOWN_ERROR = 9999
# Licensing codes (9xxx): product not activated vs. licence fully expired.
ERROR_LICENSE_NOT_ACTIVE = 9000
ERROR_LICENSE_EXPIRE_ATALL = 9003
|
||||
|
||||
|
||||
|
||||
class HTTPAPIError(HTTPError):
    """Exception whose str() is the JSON error payload for API responses.

    The API server always returns formatted JSON to the client, even for
    internal server errors; handlers raise this and the framework
    serialises it via __str__.
    """

    def __init__(self, status_code=ERROR_UNKNOWN_ERROR, message=None,
                 error=None, data=None, log_message=None, *args):
        assert isinstance(data, dict) or data is None
        message = message or ""
        assert isinstance(message, str)

        super(HTTPAPIError, self).__init__(int(status_code),
                                           log_message, *args)

        if error:
            self.error = error
        else:
            # Fall back to the canonical short name for this status code.
            self.error = _error_types.get(self.status_code, _unknow_error)
        self.message = message
        self.data = {} if data is None else data

    def __str__(self):
        payload = {"meta": {"code": self.status_code, "error": self.error}}

        if self.data:
            payload["data"] = self.data

        if self.message:
            payload["meta"]["message"] = self.message

        return escape.json_encode(payload)
|
||||
|
||||
|
||||
# default errors
# Canonical short names for each error code, used by HTTPAPIError when the
# caller supplies none.  NOTE(review): "unknow_error" (sic) is a runtime
# string clients may already match on — do not "fix" the spelling.
_unknow_error = "unknow_error"
_error_types = {400: "bad_request",
                401: "unauthorized",
                403: "forbidden",
                404: "not_found",
                405: "method_not_allowed",
                500: "internal_server_error",
                1001: "warning",
                1002: "deprecated",
                1003: "maintaining",
                9000: "license_not_active",
                9003: "license_expire_at_all",
                9999: _unknow_error}
|
@ -0,0 +1,593 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import ast
|
||||
import functools
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
import traceback
|
||||
import urllib
|
||||
from typing import Any
|
||||
# import urlparse
|
||||
from urllib.parse import parse_qs, unquote
|
||||
|
||||
# from urllib import unquote
|
||||
import tornado
|
||||
from sqlalchemy import text
|
||||
from tornado import escape
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||
from tornado.options import options
|
||||
# from tornado.web import RequestHandler as BaseRequestHandler, HTTPError, asynchronous
|
||||
from tornado.web import RequestHandler as BaseRequestHandler, HTTPError
|
||||
|
||||
from website import errors
|
||||
from website import settings
|
||||
from website.service.license import get_license_status
|
||||
|
||||
# from torndb import Row
|
||||
|
||||
if settings.enable_curl_async_http_client:
|
||||
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
|
||||
else:
|
||||
AsyncHTTPClient.configure(None, max_clients=settings.max_clients)
|
||||
|
||||
REMOVE_SLASH_RE = re.compile(".+/$")
|
||||
|
||||
|
||||
class Row(dict):
    """Dict subclass whose keys are also readable as attributes.

    Missing keys raise AttributeError (not KeyError) on attribute access,
    matching normal object semantics.
    """

    def __getattr__(self, name):
        if name in self:
            return self[name]
        raise AttributeError(name)
|
||||
|
||||
|
||||
def _callback_wrapper(callback):
    """Wrap *callback* so remote-call transport errors raise HTTPAPIError."""

    def _wrapper(response):
        if not response.error:
            callback(response)
            return
        logging.error("call remote api err: %s" % response)
        if isinstance(response.error, tornado.httpclient.HTTPError):
            raise errors.HTTPAPIError(response.error.code, "网络连接失败")
        raise errors.HTTPAPIError(errors.ERROR_UNKNOWN_ERROR, "未知错误")

    return _wrapper
|
||||
|
||||
|
||||
class BaseHandler(BaseRequestHandler):
|
||||
# def __init__(self, application, request, **kwargs):
|
||||
# super(BaseHandler, self).__init__(application, request, **kwargs)
|
||||
# self.xsrf_form_html()
|
||||
|
||||
def set_default_headers(self):
    """Emit permissive CORS headers on every response."""
    cors = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Headers": (
            "DNT,token,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,"
            "Content-Type"
        ),
        "Access-Control-Allow-Methods": "POST, GET, OPTIONS",
    }
    for header, value in cors.items():
        self.set_header(header, value)
|
||||
|
||||
# def _request_summary(self):
|
||||
#
|
||||
# return "%s %s %s(@%s)" % (self.request.method, self.request.uri, self.request.body.decode(),
|
||||
# self.request.remote_ip)
|
||||
|
||||
def get(self, *args, **kwargs):
    """Honour GET only when delegation to POST is enabled; otherwise 405."""
    if not settings.app_get_to_post:
        raise HTTPError(405)
    self.post(*args, **kwargs)
|
||||
|
||||
def render(self, template_name, **kwargs):
    """Render a template, injecting the logged-in user's context by default.

    username / current_userid / role are filled from the current user only
    when the caller did not pass them explicitly.
    """
    user = self.current_user
    if user:
        for key, attr in (("username", "name"),
                          ("current_userid", "id"),
                          ("role", "role")):
            # Lazy lookup: only touch the user attribute when needed.
            if key not in kwargs:
                kwargs[key] = getattr(user, attr)
    self.set_header("Cache-control", "no-cache")
    return super(BaseHandler, self).render(template_name, **kwargs)
|
||||
|
||||
# @property
|
||||
# def db_app(self):
|
||||
# return self.application.db_app
|
||||
|
||||
@property
def app_mysql(self):
    # Application-wide MySQL handle shared by all handlers.
    return self.application.app_mysql

@property
def r_app(self):
    # Application-wide redis client (sessions, rate limiting).
    return self.application.r_app

@property
def kafka_producer(self):
    # Shared Kafka producer owned by the Application object.
    return self.application.kafka_producer

@property
def es(self):
    # Shared Elasticsearch client owned by the Application object.
    return self.application.es
|
||||
|
||||
# @property
|
||||
# def nsq(self):
|
||||
# return self.application.nsq
|
||||
|
||||
def _call_api(self, url, headers, body, callback, method, callback_wrapper=_callback_wrapper):
    """Fire an async HTTP request to *url*, handing the response to *callback*.

    Args:
        url: remote endpoint.
        headers: headers to forward.
        body: request body (may be None for bodiless methods).
        callback: response handler; wrapped by *callback_wrapper* (default
            _callback_wrapper) so transport errors raise HTTPAPIError.
        method: HTTP verb.
    """
    start = 0
    if callback_wrapper:
        callback = callback_wrapper(callback)

    try:
        start = time.time()
        # Legacy callback-style fetch; timeouts come from settings and
        # redirects are left to the caller (follow_redirects=False).
        AsyncHTTPClient().fetch(HTTPRequest(url=url,
                                            method=method,
                                            body=body,
                                            headers=headers,
                                            allow_nonstandard_methods=True,
                                            connect_timeout=settings.remote_connect_timeout,
                                            request_timeout=settings.remote_request_timeout,
                                            follow_redirects=False),
                                callback)
    except tornado.httpclient.HTTPError:
        # ("requet" typo kept — log string only.)  Error is swallowed here;
        # the wrapped callback path handles response-level errors instead.
        logging.error("requet from %s, take time: %s" % (url, (time.time() - start) * 1000))
        # if hasattr(x, "response") and x.response:
        #     callback(x.response)
        # else:
        #     logging.error("Tornado signalled HTTPError %s", x)
        #     raise x
|
||||
|
||||
# @asynchronous
|
||||
async def call_api(self, url, headers=None, body=None, callback=None, method="POST"):
    """Forward a request to *url*, defaulting to this request's data.

    When an explicit *body* is supplied, Content-Type is forced to
    form-encoding so the remote end treats it as a POST payload.
    """
    callback = self.call_api_callback if callback is None else callback
    headers = self.request.headers if headers is None else headers

    if body is None:
        body = self.request.body
    else:
        # Explicit body: make sure it is sent as a form-encoded post.
        headers["Content-Type"] = "application/x-www-form-urlencoded"

    self._call_api(url, headers, body, callback, method)
|
||||
|
||||
def get_current_user(self, token_body=None):
    """Resolve the logged-in user from the signed "token" request header.

    Args:
        token_body: optional explicit token overriding the header value.

    Returns:
        Row with the cached user's fields, or None when the token is
        missing, fails signature verification, or the session expired.
    """
    # jid = self.get_secure_cookie(settings.cookie_key)
    token = self.request.headers.get("token")
    if token_body:
        token = token_body

    if not token:
        return None
    token = unquote(token)
    # The token is a tornado signed-cookie value carrying the session id.
    jid = tornado.web.decode_signed_value(
        settings.cookie_secret,
        settings.secure_cookie_name,
        token
    )

    # decode_signed_value yields bytes, or None on a bad signature.
    jid = jid and str(jid, encoding="utf-8") or ""
    key = settings.session_key_prefix % jid
    user = self.r_app.get(key)
    if user:
        if "/user/info" in self.request.path:
            # Presumably so /user/info polling does not keep the session
            # alive artificially — confirm.
            pass
        else:
            # Sliding expiry: any other request refreshes the session TTL.
            self.r_app.expire(key, settings.session_ttl)
        user = str(user, encoding='utf8') if isinstance(user, bytes) else user
        # The cached blob is a python-literal dict; parsed without eval.
        # return Row(ast.literal_eval(str(user, encoding="utf-8")))
        return Row(ast.literal_eval(user))
    else:
        return None
|
||||
|
||||
def md5compare(self, name):
    """Validate a URL-quoted "num1|num2|checksum" token.

    The third "|"-separated field must equal the uppercase hex MD5 of the
    first two fields concatenated.

    Args:
        name: the (possibly percent-encoded) token string.

    Returns:
        True when the checksum matches, otherwise False.  Malformed tokens
        with fewer than three fields return False (the original raised
        IndexError for those).
    """
    token = unquote(name)
    parts = token.split("|")
    if len(parts) < 3:
        return False

    payload = parts[0] + parts[1]
    # BUG FIX: hashlib.md5() requires bytes; the original passed the str
    # directly, which raises TypeError on Python 3.
    md5string = hashlib.md5(payload.encode("utf-8")).hexdigest().upper()

    return md5string == parts[2]
|
||||
|
||||
def tostr(self, src):
    """Return *src* decoded to str when it is bytes; unchanged otherwise."""
    if isinstance(src, bytes):
        return str(src, encoding='utf8')
    return src
|
||||
|
||||
def prepare(self):
    """Per-request hook: normalise the URL, set up context, merge JSON body."""
    self.remove_slash()
    self.prepare_context()
    self.set_default_jsonbody()
    # Rate limiting is currently disabled.
    # self.traffic_threshold()
|
||||
|
||||
def set_default_jsonbody(self):
    """Merge a JSON request body into self.request.arguments.

    Lets the rest of the handler use the regular get_argument() API
    regardless of whether the client sent form data or JSON.  Lists extend
    the existing argument values, dicts are stored as-is, and scalars are
    appended as utf-8 bytes (mimicking tornado's form-argument encoding).

    NOTE(review): the exact Content-Type match misses variants such as
    "application/json; charset=UTF-8" — the commented line below suggests
    this was already known; confirm intended behaviour.
    """
    if self.request.headers.get('Content-Type') == 'application/json' and self.request.body:
        # logging.info(self.request.headers.get('Content-Type'))
        # if self.request.headers.get('Content-Type') == 'application/json; charset=UTF-8':
        json_body = tornado.escape.json_decode(self.request.body)
        for key, value in json_body.items():
            if value is not None:
                if type(value) is list:
                    self.request.arguments.setdefault(key, []).extend(value)
                elif type(value) is dict:
                    self.request.arguments[key] = value
                else:
                    # Scalars are wrapped to look like form-encoded values.
                    self.request.arguments.setdefault(key, []).extend([bytes(str(value), 'utf-8')])
|
||||
|
||||
def traffic_threshold(self):
    """Per-user rate limit: settings.api_count_in_ten_second requests per
    10-second window, tracked in redis, keyed by user id (or remote IP for
    anonymous requests).

    Raises:
        errors.HTTPAPIError: ERROR_METHOD_NOT_ALLOWED when over quota.
    """
    # if self.request.uri in ["/api/download"]:
    #     return
    if not self.current_user:
        user_id = self.request.remote_ip
    else:
        user_id = self.current_user.id

    # BUG FIX: the original used "int(time.time()) / 10", which on Python 3
    # is float division — the bucket key changed every second instead of
    # every ten, breaking the window.  Floor-divide to get a stable bucket.
    bucket = int(time.time()) // 10
    freq_key = "API:FREQ:%s:%s" % (user_id, bucket)
    send_count = self.r_app.incr(freq_key)
    if send_count > settings.api_count_in_ten_second:
        # Pre-load the next bucket so the client stays throttled.
        # NOTE(review): argument order follows the original call; redis-py
        # >=3.0 expects setex(name, time, value) — verify which client
        # version this deploys against.
        next_key = "API:FREQ:%s:%s" % (user_id, bucket + 1)
        self.r_app.setex(next_key, send_count, 10)
        raise errors.HTTPAPIError(
            errors.ERROR_METHOD_NOT_ALLOWED, "请勿频繁操作")
    if send_count == 1:
        # First hit in this window: make the counter expire with the window.
        self.r_app.expire(freq_key, 10)
|
||||
|
||||
def prepare_context(self):
    """Request-context hook; currently a no-op (stats publishing disabled)."""
    # self.nsq.pub(settings.nsq_topic_stats, escape.json_encode(self._create_stats_msg()))
    pass
|
||||
|
||||
def remove_slash(self):
    """Redirect GET /path/ to /path, preserving the query string."""
    if self.request.method != "GET":
        return
    if not REMOVE_SLASH_RE.match(self.request.path):
        return

    target = self.request.path.rstrip("/")
    if self.request.query:
        target = "%s?%s" % (target, self.request.query)

    self.redirect(target)
|
||||
|
||||
# def get_json_argument(self, name, default=BaseRequestHandler._ARG_DEFAULT):
|
||||
def get_json_argument(self, name, default=object()):
    """Fetch *name* from the JSON request body.

    str values are returned UTF-8 encoded (bytes); other types unchanged.
    NOTE(review): when the key is missing and no default is passed, the
    module-level sentinel object is returned instead of raising (stock
    tornado raises 400 via _ARG_DEFAULT) — confirm callers expect that.
    """
    json_body = tornado.escape.json_decode(self.request.body)
    value = json_body.get(name, default)
    return escape.utf8(value) if isinstance(value, str) else value
|
||||
|
||||
def get_int_json_argument(self, name, default=0):
|
||||
try:
|
||||
return int(self.get_json_argument(name, default))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def get_escaped_json_argument(self, name, default=None):
|
||||
if default is not None:
|
||||
return self.escape_string(self.get_json_argument(name, default))
|
||||
else:
|
||||
return self.get_json_argument(name, default)
|
||||
|
||||
# def get_argument(self, name, default=BaseRequestHandler._ARG_DEFAULT, strip=True):
|
||||
def get_argument(self, name, default=object(), strip=True):
|
||||
value = super(BaseHandler, self).get_argument(name, default, strip)
|
||||
return escape.utf8(value) if isinstance(value, str) else value
|
||||
|
||||
# def get_int_argument(self, name, default=0):
|
||||
def get_int_argument(self, name: Any, default: int = 0) -> int:
|
||||
try:
|
||||
return int(self.get_argument(name, default))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def get_float_argument(self, name, default=0.0):
|
||||
try:
|
||||
return float(self.get_argument(name, default))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def get_uint_arg(self, name, default=0):
|
||||
try:
|
||||
return abs(int(self.get_argument(name, default)))
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
def unescape_string(self, s):
|
||||
return escape.xhtml_unescape(s)
|
||||
|
||||
def escape_string(self, s):
|
||||
return escape.xhtml_escape(s)
|
||||
|
||||
def get_escaped_argument(self, name, default=None):
|
||||
if default is not None:
|
||||
return self.escape_string(self.get_argument(name, default))
|
||||
else:
|
||||
return self.get_argument(name, default)
|
||||
|
||||
def get_page_url(self, page, form_id=None, tab=None):
|
||||
if form_id:
|
||||
return "javascript:goto_page('%s',%s);" % (form_id.strip(), page)
|
||||
path = self.request.path
|
||||
query = self.request.query
|
||||
# qdict = urlparse.parse_qs(query)
|
||||
qdict = parse_qs(query)
|
||||
for k, v in qdict.items():
|
||||
if isinstance(v, list):
|
||||
qdict[k] = v and v[0] or ''
|
||||
qdict['page'] = page
|
||||
if tab:
|
||||
qdict['tab'] = tab
|
||||
return path + '?' + urllib.urlencode(qdict)
|
||||
|
||||
def find_all(self, target, substring):
|
||||
current_pos = target.find(substring)
|
||||
while current_pos != -1:
|
||||
yield current_pos
|
||||
current_pos += len(substring)
|
||||
current_pos = target.find(substring, current_pos)
|
||||
|
||||
|
||||
class WebHandler(BaseHandler):
|
||||
def finish(self, chunk=None, message=None):
|
||||
callback = escape.utf8(self.get_argument("callback", None))
|
||||
if callback:
|
||||
self.set_header("Content-Type", "application/x-javascript")
|
||||
|
||||
if isinstance(chunk, dict):
|
||||
chunk = escape.json_encode(chunk)
|
||||
|
||||
self._write_buffer = [callback, "(", chunk, ")"] if chunk else []
|
||||
super(WebHandler, self).finish()
|
||||
else:
|
||||
self.set_header("Cache-control", "no-cache")
|
||||
super(WebHandler, self).finish(chunk)
|
||||
|
||||
def write_error(self, status_code, **kwargs):
|
||||
try:
|
||||
exc_info = kwargs.pop('exc_info')
|
||||
e = exc_info[1]
|
||||
|
||||
if isinstance(e, errors.HTTPAPIError):
|
||||
pass
|
||||
elif isinstance(e, HTTPError):
|
||||
if e.status_code == 401:
|
||||
self.redirect("/", permanent=True)
|
||||
return
|
||||
e = errors.HTTPAPIError(e.status_code)
|
||||
else:
|
||||
e = errors.HTTPAPIError(errors.ERROR_INTERNAL_SERVER_ERROR)
|
||||
|
||||
exception = "".join([ln for ln
|
||||
in traceback.format_exception(*exc_info)])
|
||||
|
||||
if status_code == errors.ERROR_INTERNAL_SERVER_ERROR \
|
||||
and not options.debug:
|
||||
self.send_error_mail(exception)
|
||||
|
||||
if options.debug:
|
||||
e.data["exception"] = exception
|
||||
|
||||
self.clear()
|
||||
# always return 200 OK for Web errors
|
||||
self.set_status(errors.HTTP_OK)
|
||||
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||
self.finish(str(e))
|
||||
except Exception:
|
||||
logging.error(traceback.format_exc())
|
||||
return super(WebHandler, self).write_error(status_code, **kwargs)
|
||||
|
||||
def send_error_mail(self, exception):
|
||||
pass
|
||||
|
||||
|
||||
class APIHandler(BaseHandler):
|
||||
def finish(self, chunk=None, message=None):
|
||||
if chunk is None:
|
||||
chunk = {}
|
||||
|
||||
if isinstance(chunk, dict):
|
||||
chunk = {"meta": {"code": errors.HTTP_OK}, "data": chunk}
|
||||
|
||||
if message:
|
||||
chunk["message"] = message
|
||||
callback = escape.utf8(self.get_argument("callback", None))
|
||||
if callback:
|
||||
self.set_header("Content-Type", "application/x-javascript")
|
||||
|
||||
if isinstance(chunk, dict):
|
||||
chunk = escape.json_encode(chunk)
|
||||
|
||||
self._write_buffer = [callback, "(", chunk, ")"] if chunk else []
|
||||
super(APIHandler, self).finish()
|
||||
else:
|
||||
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||
super(APIHandler, self).finish(chunk)
|
||||
|
||||
def write_error(self, status_code, **kwargs):
|
||||
try:
|
||||
exc_info = kwargs.pop('exc_info')
|
||||
e = exc_info[1]
|
||||
|
||||
if isinstance(e, errors.HTTPAPIError):
|
||||
pass
|
||||
elif isinstance(e, HTTPError):
|
||||
e = errors.HTTPAPIError(e.status_code)
|
||||
else:
|
||||
e = errors.HTTPAPIError(errors.ERROR_INTERNAL_SERVER_ERROR)
|
||||
|
||||
exception = "".join([ln for ln
|
||||
in traceback.format_exception(*exc_info)])
|
||||
|
||||
if status_code == errors.ERROR_INTERNAL_SERVER_ERROR \
|
||||
and not options.debug:
|
||||
self.send_error_mail(exception)
|
||||
|
||||
if options.debug:
|
||||
e.data["exception"] = exception
|
||||
|
||||
self.clear()
|
||||
# always return 200 OK for API errors
|
||||
self.set_status(errors.HTTP_OK)
|
||||
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||
self.finish(str(e))
|
||||
except Exception:
|
||||
logging.error(traceback.format_exc())
|
||||
return super(APIHandler, self).write_error(status_code, **kwargs)
|
||||
|
||||
def send_error_mail(self, exception):
|
||||
"""Override to implement custom error mail"""
|
||||
pass
|
||||
|
||||
|
||||
class ErrorHandler(BaseHandler):
|
||||
"""Default 404: Not Found handler."""
|
||||
|
||||
def prepare(self):
|
||||
super(ErrorHandler, self).prepare()
|
||||
raise HTTPError(errors.ERROR_NOT_FOUND)
|
||||
|
||||
|
||||
class APIErrorHandler(APIHandler):
|
||||
"""Default API 404: Not Found handler."""
|
||||
|
||||
def prepare(self):
|
||||
super(APIErrorHandler, self).prepare()
|
||||
raise errors.HTTPAPIError(errors.ERROR_NOT_FOUND)
|
||||
|
||||
|
||||
def authenticated(method):
|
||||
"""Decorate methods with this to require that the user be logged in.
|
||||
|
||||
Just raise 401
|
||||
or be avaliable
|
||||
"""
|
||||
|
||||
@functools.wraps(method)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
if not self.current_user:
|
||||
# raise HTTPError(401)
|
||||
raise errors.HTTPAPIError(errors.ERROR_UNAUTHORIZED, "登录失效")
|
||||
return method(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def authenticated_admin(method):
|
||||
@functools.wraps(method)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
if int(self.current_user.role) != 1001:
|
||||
# raise HTTPError(403)
|
||||
raise errors.HTTPAPIError(errors.ERROR_FORBIDDEN, "permission denied")
|
||||
return method(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def operation_log(primary_menu, sub_menu, ope_type, content, comment):
|
||||
"""
|
||||
Add logging to a function. level is the logging
|
||||
level, name is the logger name, and message is the
|
||||
log message. If name and message aren't specified,
|
||||
they default to the function's module and name.
|
||||
"""
|
||||
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
with self.app_mysql.connect() as conn:
|
||||
conn.execute(text(
|
||||
"insert into sys_log(user, ip, primary_menu, sub_menu, op_type, content, comment) "
|
||||
"values(:user, :ip, :primary_menu, :sub_menu, :op_type, :content, :comment)"
|
||||
),
|
||||
{"user": self.current_user.name,
|
||||
"ip": self.request.headers[
|
||||
"X-Forwarded-For"] if "X-Forwarded-For" in self.request.headers else self.request.remote_ip,
|
||||
"primary_menu": primary_menu,
|
||||
"sub_menu": sub_menu,
|
||||
"op_type": ope_type,
|
||||
"content": content,
|
||||
"comment": comment
|
||||
}
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def permission(codes):
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
rows = self.db_app.query(
|
||||
"select rp.permission from role_permission rp, user_role ur where rp.role=ur.role and ur.userid=%s",
|
||||
self.current_user.id
|
||||
)
|
||||
permissions = [item["permission"] for item in rows]
|
||||
for code in codes:
|
||||
if code not in permissions:
|
||||
raise errors.HTTPAPIError(errors.ERROR_FORBIDDEN, "permission denied")
|
||||
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def license_validate(codes):
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
license_cache = self.r_app.get("system:license")
|
||||
license_info = {}
|
||||
if license_cache:
|
||||
license_info = json.loads(self.tostr(license_cache))
|
||||
else:
|
||||
row = self.db_app.get("select syscode, expireat from license limit 1")
|
||||
if row:
|
||||
self.r_app.set("system:license",
|
||||
json.dumps({"syscode": row["syscode"], "expireat": row["expireat"]}))
|
||||
license_info = row
|
||||
|
||||
license_status = get_license_status(license_info)
|
||||
# logging.info("license status is : {}, need : {}".format(license_status, codes))
|
||||
if license_status not in codes:
|
||||
raise errors.HTTPAPIError(errors.ERROR_LICENSE_NOT_ACTIVE, "系统License未授权")
|
||||
# if not license_info:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_LICENSE_NOT_ACTIVE, "License未授权")
|
||||
# expireat = int(license_info["expireat"])
|
||||
# local_time = int(time.time())
|
||||
# if local_time >= expireat:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_LICENSE_NOT_ACTIVE, "License授权过期")
|
||||
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def userlog(method):
|
||||
@functools.wraps(method)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
logging.info("[ip]:%s [user]:%s" % (self.request.remote_ip, self.current_user.id))
|
||||
return method(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
@ -0,0 +1,17 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
class Model(object):
|
||||
_dbs = {}
|
||||
|
||||
@classmethod
|
||||
def setup_dbs(cls, dbs):
|
||||
cls._dbs = dbs
|
||||
|
||||
@property
|
||||
def db_app(self):
|
||||
return self._dbs.get("db_app", None)
|
||||
|
||||
@property
|
||||
def r_app(self):
|
||||
return self._dbs.get("r_app", None)
|
@ -0,0 +1,26 @@
|
||||
from website.handlers.alg_model import handler
|
||||
|
||||
handlers = [
|
||||
# ("/", handler.Handler),
|
||||
("/model/classification/add", handler.ClassificationAddHandler),
|
||||
("/model/classification/list", handler.ClassificationListHandler),
|
||||
("/model/classification/delete", handler.ClassificationDeleteHandler),
|
||||
|
||||
("/model/list", handler.ListHandler),
|
||||
("/model/list/simple", handler.ListSimpleHandler),
|
||||
("/model/add", handler.AddHandler),
|
||||
("/model/edit", handler.EditHandler),
|
||||
("/model/info", handler.InfoHandler),
|
||||
("/model/delete", handler.DeleteHandler),
|
||||
|
||||
("/model/version/add", handler.VersionAddHandler),
|
||||
("/model/version/edit", handler.VersionEditHandler),
|
||||
("/model/version/list", handler.VersionListHandler),
|
||||
("/model/version/info", handler.VersionInfoHandler),
|
||||
("/model/version/setdefault", handler.VersionSetDefaultHandler),
|
||||
("/model/version/delete", handler.VersionDeleteHandler),
|
||||
]
|
||||
|
||||
page_handlers = [
|
||||
|
||||
]
|
@ -0,0 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from website.handlers.alg_model_hub import handler
|
||||
|
||||
handlers = [
|
||||
# ("/", handler.Handler),
|
||||
("/model/hub/list", handler.ListHandler),
|
||||
("/model/hub/sync", handler.SyncHandler),
|
||||
("/model/hub/add", handler.AddHandler),
|
||||
("/model/hub/edit", handler.EditHandler),
|
||||
("/model/hub/info", handler.InfoHandler),
|
||||
("/model/hub/delete", handler.DeleteHandler),
|
||||
]
|
||||
|
||||
page_handlers = [
|
||||
]
|
@ -0,0 +1,13 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from website.handlers.enterprise_busi_model import handler
|
||||
|
||||
|
||||
handlers = [
|
||||
("/enterprise/model/deployment/list", handler.ListHandler),
|
||||
("/enterprise/model/deployment/add", handler.AddHandler),
|
||||
("/enterprise/model/deployment/info", handler.InfoHandler),
|
||||
("/enterprise/model/deployment/edit", handler.EditHandler),
|
||||
("/enterprise/model/delete", handler.DeleteHandler),
|
||||
]
|
||||
page_handlers = []
|
@ -0,0 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from website.handlers.enterprise_device import handler
|
||||
|
||||
handlers = [
|
||||
("/enterprise/device/classification/add", handler.DeviceClassificationAddHandler),
|
||||
("/enterprise/device/classification", handler.DeviceClassificationHandler),
|
||||
(
|
||||
"/enterprise/device/classification/delete",
|
||||
handler.DeviceClassificationDeleteHandler,
|
||||
),
|
||||
("/enterprise/entity/nodes/device/add", handler.DeviceAddHandler),
|
||||
("/enterprise/entity/nodes/device/edit", handler.DeviceEditHandler),
|
||||
("/enterprise/entity/nodes/device/delete", handler.DeviceDeleteHandler),
|
||||
("/enterprise/entity/nodes/device/list", handler.DeviceListHandler),
|
||||
("/enterprise/entity/nodes/device/list/simple", handler.DeviceListSimpleHandler),
|
||||
("/enterprise/entity/nodes/device/info", handler.DeviceInfoHandler),
|
||||
("/enterprise/entity/nodes/device/basemodel/list", handler.DeviceBasemodelListHandler),
|
||||
(
|
||||
"/enterprise/entity/nodes/device/basemodel/custom/config",
|
||||
handler.DeviceBaseModelCustomConfigHandler,
|
||||
),
|
||||
("/enterprise/device/status/list", handler.StatusListHandler),
|
||||
("/enterprise/device/status/group", handler.StatusGroupHandler),
|
||||
("/enterprise/device/status/info", handler.StatusInfoHandler),
|
||||
("/enterprise/device/status/log", handler.StatusLogHandler),
|
||||
]
|
||||
page_handlers = []
|
@ -0,0 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from website.handlers.enterprise_entity import handler
|
||||
|
||||
handlers = [
|
||||
("/enterprise/entity/index", handler.EntityIndexHandler),
|
||||
("/enterprise/entity/index/basecount", handler.EntityIndexBasecountHandler),
|
||||
("/enterprise/entity/add", handler.EntityAddHandler),
|
||||
("/enterprise/entity/edit", handler.EntityEditHandler),
|
||||
("/enterprise/entity/info", handler.EntityInfoHandler),
|
||||
("/enterprise/entity/models", handler.ModelsHandler),
|
||||
("/enterprise/entity/delete", handler.EntityDeleteHandler),
|
||||
("/enterprise/entity/pwdcheck", handler.EntityPwdcheckHandler),
|
||||
("/enterprise/industrymap", handler.IndustryMapHandler),
|
||||
]
|
||||
|
||||
page_handlers = []
|
@ -0,0 +1,17 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from website.handlers.enterprise_node import handler
|
||||
|
||||
handlers = [
|
||||
("/enterprise/entity/nodes/add", handler.AddHandler),
|
||||
("/enterprise/entity/nodes/edit", handler.EditHandler),
|
||||
("/enterprise/entity/nodes", handler.TreeHandler),
|
||||
("/enterprise/entity/nodes/info", handler.InfoHandler),
|
||||
("/enterprise/entity/nodes/delete", handler.DeleteHandler),
|
||||
("/enterprise/entity/nodes/busimodel", handler.BusimodelHandler),
|
||||
("/enterprise/entity/nodes/busimodel/info", handler.BusimodelInfoHandler),
|
||||
("/enterprise/entity/nodes/busimodel/deploy", handler.BusimodelDeployHandler),
|
||||
("/enterprise/entity/nodes/alert", handler.AlertHandler),
|
||||
("/enterprise/entity/nodes/alert/config", handler.AlertConfigHandler),
|
||||
]
|
||||
|
||||
page_handlers = []
|
@ -0,0 +1,14 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from website.handlers.enterprise_server import handler
|
||||
|
||||
handlers = [
|
||||
("/enterprise/server/add", handler.AddHandler),
|
||||
("/enterprise/server/edit", handler.EditHandler),
|
||||
("/enterprise/server/list", handler.ListHandler),
|
||||
("/enterprise/server/info", handler.InfoHandler),
|
||||
("/enterprise/server/log", handler.LogHandler),
|
||||
("/enterprise/server/delete", handler.DeleteHandler)
|
||||
]
|
||||
|
||||
page_handlers = []
|
@ -0,0 +1,129 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import aiofiles
|
||||
from sqlalchemy import text
|
||||
|
||||
from website import db_mysql
|
||||
from website import errors
|
||||
from website import settings
|
||||
from website.handler import APIHandler, authenticated
|
||||
|
||||
|
||||
class UploadHandler(APIHandler):
|
||||
@authenticated
|
||||
async def post(self):
|
||||
file_metas = self.request.files.get('file', None)
|
||||
if not file_metas:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "请选择文件")
|
||||
|
||||
filename = file_metas[0].filename
|
||||
punctuation = """!"#$%&'()*+,/:;<=>?@[\]^`{|}~ """
|
||||
regex = re.compile('[%s]' % re.escape(punctuation))
|
||||
filename = regex.sub("", filename.replace('..', ''))
|
||||
file_size = len(file_metas[0].body)
|
||||
|
||||
logging.info("file_size: %s", file_size)
|
||||
|
||||
if file_size > 300 * 1024 * 1024:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, 'Exceed 300M size limit')
|
||||
|
||||
filetype = filename.split(".") and filename.split(".")[-1] or ""
|
||||
|
||||
file_upload_dir = settings.file_upload_dir
|
||||
os.makedirs(file_upload_dir, exist_ok=True)
|
||||
|
||||
md5_str = hashlib.md5(file_metas[0].body).hexdigest()
|
||||
|
||||
row = None
|
||||
with self.app_mysql.connect() as conn:
|
||||
sql = text("select id from files where md5_str=:md5_str")
|
||||
cur = conn.execute(sql, {"md5_str": md5_str})
|
||||
row = cur.fetchone()
|
||||
|
||||
if not row:
|
||||
filepath = os.path.join(settings.file_upload_dir, md5_str + '_' + filename)
|
||||
if not os.path.exists(filepath):
|
||||
for meta in file_metas:
|
||||
# filename = meta['filename']
|
||||
async with aiofiles.open(filepath, 'wb') as f:
|
||||
await f.write(meta['body'])
|
||||
|
||||
sql_insert = text(
|
||||
"""insert into files(filename, filepath, md5_str, filesize, filetype, user)
|
||||
values(:filename, :filepath, :md5_str, :file_size, :filetype, :user)"""
|
||||
)
|
||||
conn.execute(sql_insert, {"filename": filename, "filepath": filepath, "md5_str": md5_str,
|
||||
"file_size": int(file_size / 1024 / 1024), "filetype": filetype,
|
||||
"user": self.current_user.id})
|
||||
conn.commit()
|
||||
|
||||
self.finish({"result": md5_str})
|
||||
|
||||
|
||||
class DeleteHandler(APIHandler):
|
||||
@authenticated
|
||||
def post(self):
|
||||
md5_str = self.get_escaped_argument("file_md5", "")
|
||||
if not md5_str:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "file md5 is required")
|
||||
logging.info("md5_str: %s", md5_str)
|
||||
row = None
|
||||
with self.app_mysql.connect() as conn:
|
||||
sql = text("select filepath from files where md5_str=:md5_str")
|
||||
cur = conn.execute(sql, {"md5_str": md5_str})
|
||||
row = db_mysql.to_json(cur)
|
||||
|
||||
if not row:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "file not found")
|
||||
|
||||
filepath = row["filepath"]
|
||||
if os.path.exists(filepath):
|
||||
os.remove(filepath)
|
||||
|
||||
sql_del = text("delete from files where md5_str=:md5_str")
|
||||
conn.execute(sql_del, {"md5_str": md5_str})
|
||||
conn.commit()
|
||||
|
||||
self.finish()
|
||||
|
||||
|
||||
class BigFileUploadHandler(APIHandler):
|
||||
async def post(self):
|
||||
file_metas = self.request.files.get('file', None)
|
||||
if not file_metas:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "请选择文件")
|
||||
|
||||
filename = file_metas[0].filename
|
||||
punctuation = """!"#$%&'()*+,/:;<=>?@[\]^`{|}~ """
|
||||
regex = re.compile('[%s]' % re.escape(punctuation))
|
||||
filename = regex.sub("", filename.replace('..', ''))
|
||||
file_size = len(file_metas[0].body)
|
||||
|
||||
logging.info("file_size: %s", file_size)
|
||||
|
||||
if file_size > 300 * 1024 * 1024:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, 'Exceed 300M size limit')
|
||||
|
||||
filetype = filename.split(".") and filename.split(".")[-1] or ""
|
||||
|
||||
file_upload_dir = settings.file_upload_dir
|
||||
os.makedirs(file_upload_dir, exist_ok=True)
|
||||
|
||||
# md5_str = hashlib.md5(file_metas[0].body).hexdigest()
|
||||
|
||||
filepath = os.path.join(settings.file_upload_dir, filename)
|
||||
if not os.path.exists(filepath):
|
||||
for meta in file_metas:
|
||||
# filename = meta['filename']
|
||||
# with open(filepath, 'wb') as f:
|
||||
# f.write(meta['body'])
|
||||
|
||||
async with aiofiles.open(filepath, 'wb') as f:
|
||||
await f.write(meta['body'])
|
||||
|
||||
self.finish()
|
@ -0,0 +1,12 @@
|
||||
from website.handlers.file import handler
|
||||
|
||||
handlers = [
|
||||
# ("/", handler.Handler),
|
||||
("/file/upload", handler.UploadHandler),
|
||||
("/file/delete", handler.DeleteHandler),
|
||||
("/bigfile/upload", handler.BigFileUploadHandler),
|
||||
]
|
||||
|
||||
page_handlers = [
|
||||
|
||||
]
|
@ -0,0 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from website.handlers.system import handler
|
||||
|
||||
handlers = [
|
||||
# ("/", handler.Handler),
|
||||
("/system/version", handler.VersionHandler),
|
||||
("/system/identifycode", handler.IdentifycodeHandler),
|
||||
("/system/license/upload", handler.LicenseUploadHandler),
|
||||
("/system/activate/info", handler.ActivateInfoHandler),
|
||||
("/system/info", handler.InfoHandler),
|
||||
|
||||
("/system/log", handler.LogHandler),
|
||||
]
|
||||
|
||||
page_handlers = [
|
||||
]
|
@ -0,0 +1,189 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from io import BytesIO
|
||||
|
||||
from sqlalchemy import text
|
||||
|
||||
from website import db_mysql
|
||||
from website import errors
|
||||
from website import settings
|
||||
from website.handler import APIHandler, authenticated
|
||||
from website.util import aes
|
||||
from website.util import shortuuid
|
||||
from website.util.captcha import create_validate_code
|
||||
|
||||
|
||||
class CaptchaHandler(APIHandler):
|
||||
def get(self):
|
||||
self.set_header("Content-Type", "image/png")
|
||||
image, image_str = create_validate_code()
|
||||
c = uuid.uuid4().hex
|
||||
token = self.create_signed_value("logc", c)
|
||||
self.r_app.set("logincaptcha:%s" % c, image_str, ex=120)
|
||||
|
||||
buffered = BytesIO()
|
||||
# 保存验证码图片
|
||||
image.save(buffered, 'png')
|
||||
img_b64 = base64.b64encode(buffered.getvalue())
|
||||
|
||||
# for line in buffered.getvalue():
|
||||
# self.write(line)
|
||||
# output.close()
|
||||
self.finish({"token": self.tostr(token), "captcha": self.tostr(img_b64)})
|
||||
|
||||
|
||||
class LogoutHandler(APIHandler):
|
||||
def get(self):
|
||||
if self.current_user:
|
||||
# self.db_app.insert(
|
||||
# "insert into system_log(user, ip, first_module, second_module, op_type, op_content, description) "
|
||||
# "values(%s, %s, %s, %s, %s, %s, %s)",
|
||||
# self.current_user.name, self.request.remote_ip, "平台管理中心", "账号管理", "登出", "系统登出", "系统登出"
|
||||
# )
|
||||
|
||||
self.r_app.delete(settings.session_key_prefix % self.current_user.uuid)
|
||||
self.finish()
|
||||
|
||||
|
||||
class LoginHandler(APIHandler):
|
||||
def post(self):
|
||||
# suid = shortuuid.ShortUUID().random(10)
|
||||
# logging.info(suid)
|
||||
|
||||
username = self.get_escaped_argument("username")
|
||||
password = self.get_escaped_argument("pwd")
|
||||
# captcha = self.get_escaped_argument("captcha", "")
|
||||
# captcha_token = self.get_escaped_argument("captcha_token", "")
|
||||
|
||||
# wrong_time_lock = self.r_app.get("pwd:wrong:time:%s:lock" % self.tostr(username))
|
||||
# if wrong_time_lock:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "账号处于冷却期,请稍后再试")
|
||||
# return
|
||||
|
||||
if not username or not password:
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "请输入用户名和密码")
|
||||
|
||||
# if not captcha:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "请输入验证码")
|
||||
# if not captcha_token:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "缺少参数")
|
||||
|
||||
# c = tornado.web.decode_signed_value(
|
||||
# settings.cookie_secret,
|
||||
# "logc",
|
||||
# self.tostr(captcha_token)
|
||||
# )
|
||||
# code = self.r_app.get("logincaptcha:%s" % self.tostr(c))
|
||||
# 清除校验码缓存
|
||||
# self.r_app.delete("logincaptcha:%s" % c)
|
||||
# if not code:
|
||||
# raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "验证码已过期")
|
||||
# 判断验证码与缓存是否一致
|
||||
# if self.tostr(captcha).lower() != self.tostr(code).lower():
|
||||
# raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "验证码错误")
|
||||
|
||||
username = self.tostr(username)
|
||||
password = self.tostr(password)
|
||||
|
||||
pwd_enc = aes.encrypt(settings.pwd_aes_key, password)
|
||||
|
||||
row = {}
|
||||
|
||||
with self.app_mysql.connect() as conn:
|
||||
cur = conn.execute(
|
||||
text("select id, uid, available from sys_user where name=:name and pwd=:pwd"),
|
||||
{"name": username, "pwd": pwd_enc}
|
||||
)
|
||||
# keys = list(cur.keys())
|
||||
#
|
||||
# one = cur.fetchone()
|
||||
# row = dict(zip(keys, one))
|
||||
# logging.info(db.Row(itertools.zip_longest(keys, one)))
|
||||
|
||||
row = db_mysql.to_json(cur)
|
||||
|
||||
cur.close()
|
||||
# data = [dict(zip(keys, res)) for res in cur.fetchall()]
|
||||
|
||||
if not row:
|
||||
# wrong_time = self.r_app.get("pwd:wrong:time:%s" % username)
|
||||
# logging.info(wrong_time)
|
||||
# logging.info(settings.pwd_error_limit - 1)
|
||||
# if wrong_time and int(wrong_time) > settings.pwd_error_limit - 1:
|
||||
# self.r_app.set("pwd:wrong:time:%s:lock" % username, 1, ex=3600)
|
||||
# self.r_app.delete("pwd:wrong:time:%s" % username)
|
||||
# else:
|
||||
# self.r_app.incr("pwd:wrong:time:%s" % username)
|
||||
raise errors.HTTPAPIError(errors.ERROR_BAD_REQUEST, "用户名或者密码错误")
|
||||
return
|
||||
if row["available"] == 0:
|
||||
raise errors.HTTPAPIError(errors.ERROR_FORBIDDEN, "当前用户被禁用")
|
||||
return
|
||||
|
||||
# row_role = self.db_app.get("select role from user_role where userid=%s", row["id"])
|
||||
# user_role = row_role["role"]
|
||||
|
||||
userId = row["id"]
|
||||
jsessionid = row["uid"]
|
||||
|
||||
# create sign value admin_login_sign
|
||||
secure_cookie = self.create_signed_value(settings.secure_cookie_name, str(jsessionid))
|
||||
|
||||
self.r_app.set(
|
||||
settings.session_key_prefix % jsessionid,
|
||||
json.dumps({
|
||||
"id": userId,
|
||||
"name": username,
|
||||
"uuid": row["uid"],
|
||||
# "role": user_role
|
||||
}),
|
||||
ex=settings.session_ttl
|
||||
)
|
||||
|
||||
# self.db_app.insert(
|
||||
# "insert into system_log(user, ip, first_module, second_module, op_type, op_content, description) "
|
||||
# "values(%s, %s, %s, %s, %s, %s, %s)",
|
||||
# username, self.request.remote_ip, "平台管理中心", "账号管理", "登录", "系统登录", "系统登录"
|
||||
# )
|
||||
|
||||
# license_row = self.db_app.get(
|
||||
# "select expireat from license limit 1"
|
||||
# )
|
||||
|
||||
# system_status = get_license_status(license_row)
|
||||
|
||||
render_data = {
|
||||
"token": str(secure_cookie, encoding="utf-8"),
|
||||
# "role": user_role,
|
||||
"username": username,
|
||||
# "system_status": system_status, # 9000/未激活, 9001/已激活, 9002/过期可查看, 9003/完全过期
|
||||
}
|
||||
|
||||
self.finish(render_data)
|
||||
|
||||
|
||||
class UserInfoHandler(APIHandler):
|
||||
def post(self):
|
||||
# token = self.get_argument("token")
|
||||
# user = self.get_current_user(token_body=self.tostr(token))
|
||||
user = self.get_current_user()
|
||||
if not user:
|
||||
raise errors.HTTPAPIError(errors.ERROR_UNAUTHORIZED)
|
||||
|
||||
self.finish({"name": user.name, "avtar": ""})
|
||||
|
||||
class UserListHandler(APIHandler):
|
||||
@authenticated
|
||||
def post(self):
|
||||
with self.app_mysql.connect() as conn:
|
||||
cur = conn.execute(
|
||||
text(
|
||||
"select name from sys_user"
|
||||
)
|
||||
)
|
||||
res = db_mysql.to_json_list(cur)
|
||||
names = [row["name"] for row in res]
|
||||
self.finish({"data": names})
|
@ -0,0 +1,17 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from website.handlers.user import handler
|
||||
|
||||
handlers = [
|
||||
# ("/user/list", handler.UserListHandler),
|
||||
# ("/captcha", handler.CaptchaHandler),
|
||||
# ("/bodyargument", handler.BodyHandler),
|
||||
# ("/user/info", handler.UserInfoHandler),
|
||||
("/login", handler.LoginHandler),
|
||||
("/logout", handler.LogoutHandler),
|
||||
("/user/info", handler.UserInfoHandler),
|
||||
("/users", handler.UserListHandler),
|
||||
]
|
||||
|
||||
page_handlers = [
|
||||
|
||||
]
|
@ -0,0 +1,32 @@
|
||||
import time
|
||||
import datetime
|
||||
import logging
|
||||
from website import consts, settings
|
||||
|
||||
def get_license_status(license):
|
||||
status = consts.system_status_not_active
|
||||
# if not license:
|
||||
# pass
|
||||
if license:
|
||||
now = datetime.datetime.now()
|
||||
timestamp_now = int(now.timestamp())
|
||||
|
||||
expireat = int(license["expireat"])
|
||||
expireat_datetime = datetime.datetime.fromtimestamp(expireat)
|
||||
expireat_next30days = expireat_datetime + datetime.timedelta(days=30)
|
||||
expireat_next30days_timestamp = int(expireat_next30days.timestamp())
|
||||
|
||||
time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
|
||||
|
||||
# logging.info(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(expireat)))
|
||||
# logging.info(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(expireat_next30days_timestamp)))
|
||||
# logging.info(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp_now)))
|
||||
|
||||
if timestamp_now >= expireat_next30days_timestamp:
|
||||
status = consts.system_status_expire_atall
|
||||
elif timestamp_now >= expireat and timestamp_now < expireat_next30days_timestamp:
|
||||
status = consts.system_status_expire_but_ok
|
||||
elif timestamp_now < expireat:
|
||||
status = consts.system_status_activated
|
||||
|
||||
return status
|
@ -0,0 +1,17 @@
|
||||
import os
|
||||
import sys
|
||||
import importlib
|
||||
|
||||
handlers = []
|
||||
# handlers_v2 = []
|
||||
page_handlers = []
|
||||
|
||||
handlers_path = os.path.join(os.getcwd(), "handlers")
|
||||
sys.path.append(handlers_path)
|
||||
|
||||
handlers_dir = os.listdir(handlers_path)
|
||||
for item in handlers_dir:
|
||||
if os.path.isdir(os.path.join(handlers_path, item)):
|
||||
hu = importlib.import_module("{}.url".format(item))
|
||||
handlers.extend(hu.handlers)
|
||||
page_handlers.extend(hu.page_handlers)
|
@ -0,0 +1,10 @@
|
||||
import re
|
||||
|
||||
def md5_validate(v: str) -> bool:
|
||||
md5_pattern = re.compile(r'^[a-fA-F0-9]{32}$')
|
||||
# 匹配字符串
|
||||
if md5_pattern.match(v):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
@ -0,0 +1,22 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from M2Crypto import RSA
|
||||
from M2Crypto import BIO
|
||||
from binascii import a2b_hex, b2a_hex
|
||||
|
||||
def load_pub_key_string(string):
|
||||
bio = BIO.MemoryBuffer(string)
|
||||
return RSA.load_pub_key_bio(bio)
|
||||
|
||||
def block_data(texts, block_size):
    """Yield consecutive chunks of *texts*, each at most *block_size* long."""
    start = 0
    total = len(texts)
    while start < total:
        yield texts[start:start + block_size]
        start += block_size
||||
|
||||
def decrypt(publick_key, texts):
    """Decrypt a hex-encoded RSA ciphertext with a public key.

    The hex input is decoded to bytes, split into 256-byte blocks, and
    each block is public-decrypted with PKCS#1 padding; the recovered
    plaintext pieces are concatenated and returned as bytes.
    """
    block_size = 256
    pieces = [
        publick_key.public_decrypt(chunk, RSA.pkcs1_padding)
        for chunk in block_data(a2b_hex(texts), block_size)
    ]
    return b"".join(pieces)
@ -0,0 +1,137 @@
|
||||
"""Concise UUID generation."""
|
||||
|
||||
import math
|
||||
import secrets
|
||||
import uuid as _uu
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def int_to_string(
    number: int, alphabet: List[str], padding: Optional[int] = None
) -> str:
    """
    Convert a number to a string, using the given alphabet.

    The output has the most significant digit first; when *padding* is
    given, the result is left-filled with the alphabet's zero symbol.
    """
    base = len(alphabet)
    digits = []  # least-significant digit first
    while number:
        number, rem = divmod(number, base)
        digits.append(alphabet[rem])
    if padding:
        digits.extend([alphabet[0]] * max(padding - len(digits), 0))
    return "".join(reversed(digits))
||||
|
||||
|
||||
def string_to_int(string: str, alphabet: List[str]) -> int:
    """
    Convert a string to a number, using the given alphabet.

    The input is assumed to have the most significant digit first.
    Raises ValueError (from ``list.index``) on symbols outside the alphabet.
    """
    result = 0
    base = len(alphabet)
    for symbol in string:
        result = result * base + alphabet.index(symbol)
    return result
||||
|
||||
|
||||
class ShortUUID(object):
    """Encode/decode UUIDs as short strings over a configurable alphabet."""

    def __init__(self, alphabet: Optional[str] = None) -> None:
        # Default alphabet omits visually ambiguous glyphs (0/O, 1/I/l).
        default = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
        self.set_alphabet(default if alphabet is None else alphabet)

    @property
    def _length(self) -> int:
        """Return the necessary length to fit the entire UUID given the current alphabet."""
        return int(math.ceil(math.log(2**128, self._alpha_len)))

    def encode(self, uuid: _uu.UUID, pad_length: Optional[int] = None) -> str:
        """
        Encode a UUID into a string (LSB first) according to the alphabet.

        If leftmost (MSB) bits are 0, the string might be shorter.
        """
        if not isinstance(uuid, _uu.UUID):
            raise ValueError("Input `uuid` must be a UUID object.")
        pad = self._length if pad_length is None else pad_length
        return int_to_string(uuid.int, self._alphabet, padding=pad)

    def decode(self, string: str, legacy: bool = False) -> _uu.UUID:
        """
        Decode a string according to the current alphabet into a UUID.

        Raises ValueError when encountering illegal characters or a too-long
        string. If the string is too short, the leftmost (MSB) bits are
        filled with 0. Pass `legacy=True` if your UUID was encoded with a
        ShortUUID version prior to 1.0.0.
        """
        if not isinstance(string, str):
            raise ValueError("Input `string` must be a str.")
        # Pre-1.0.0 encodings stored the digits in the opposite order.
        decodable = string[::-1] if legacy else string
        return _uu.UUID(int=string_to_int(decodable, self._alphabet))

    def uuid(self, name: Optional[str] = None, pad_length: Optional[int] = None) -> str:
        """
        Generate and return a UUID.

        If the name parameter is provided, set the namespace to the provided
        name and generate a UUID.
        """
        if pad_length is None:
            pad_length = self._length

        # Random UUID when no name is given; namespaced UUIDv5 otherwise
        # (URL namespace for web addresses, DNS namespace for anything else).
        if name is None:
            generated = _uu.uuid4()
        elif name.lower().startswith(("http://", "https://")):
            generated = _uu.uuid5(_uu.NAMESPACE_URL, name)
        else:
            generated = _uu.uuid5(_uu.NAMESPACE_DNS, name)
        return self.encode(generated, pad_length)

    def random(self, length: Optional[int] = None) -> str:
        """Generate and return a cryptographically secure short random string of `length`."""
        size = self._length if length is None else length
        return "".join(secrets.choice(self._alphabet) for _ in range(size))

    def get_alphabet(self) -> str:
        """Return the current alphabet used for new UUIDs."""
        return "".join(self._alphabet)

    def set_alphabet(self, alphabet: str) -> None:
        """Set the alphabet to be used for new UUIDs."""
        # Deduplicate and sort so equal alphabets always behave identically.
        unique_symbols = sorted(set(alphabet))
        if len(unique_symbols) <= 1:
            raise ValueError("Alphabet with more than one unique symbols required.")
        self._alphabet = unique_symbols
        self._alpha_len = len(self._alphabet)

    def encoded_length(self, num_bytes: int = 16) -> int:
        """Return the string length of the shortened UUID."""
        factor = math.log(256) / math.log(self._alpha_len)
        return int(math.ceil(factor * num_bytes))
||||
|
||||
|
||||
# For backwards compatibility
# Module-level convenience API delegating to one shared instance, mirroring
# the upstream `shortuuid` package's top-level functions.
_global_instance = ShortUUID()
encode = _global_instance.encode
decode = _global_instance.decode
uuid = _global_instance.uuid
random = _global_instance.random
get_alphabet = _global_instance.get_alphabet
set_alphabet = _global_instance.set_alphabet
|
@ -0,0 +1,86 @@
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import subprocess
|
||||
import uuid
|
||||
|
||||
|
||||
def get_cpu_id():
    """Return CPU IDs reported by `dmidecode -t 4`.

    Each "ID: xxxx" output line becomes a single-entry dict {"ID": "xxxx"}.
    Returns [] when dmidecode is unavailable or emits nothing (e.g. when
    not running as root).

    Fixes vs. the original: uses communicate() so the pipe is fully
    drained and the process reaped (the readline loop could leak the
    handle), and tolerates malformed lines instead of raising ValueError
    from dict([...split(': ')]).
    """
    # NOTE: dmidecode requires root; shell=True is kept for the grep filter
    # on a fixed (non-user-supplied) command string.
    proc = subprocess.Popen(
        ["dmidecode -t 4 | grep ID"],
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, _ = proc.communicate()

    records = []
    for raw in out.decode("utf-8").splitlines():
        raw = raw.strip()
        if not raw:
            # Original implementation stopped at the first blank line.
            break
        # Split only on the first ": " so values containing colons survive.
        key, sep, value = raw.partition(": ")
        if sep:
            records.append({key: value})
    return records
||||
|
||||
|
||||
def get_board_serialnumber():
    """Return board serial numbers reported by `dmidecode -t 2`.

    Each "Serial Number: xxx" output line becomes a single-entry dict
    {"Serial Number": "xxx"}. Returns [] when dmidecode is unavailable or
    emits nothing (e.g. when not running as root).

    Fixes vs. the original: uses communicate() so the pipe is fully
    drained and the process reaped, and tolerates malformed lines instead
    of raising ValueError from dict([...split(': ')]).
    """
    # NOTE: dmidecode requires root; shell=True is kept for the grep filter
    # on a fixed (non-user-supplied) command string.
    proc = subprocess.Popen(
        ["dmidecode -t 2 | grep Serial"],
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, _ = proc.communicate()

    records = []
    for raw in out.decode("utf-8").splitlines():
        raw = raw.strip()
        if not raw:
            # Original implementation stopped at the first blank line.
            break
        key, sep, value = raw.partition(": ")
        if sep:
            records.append({key: value})
    return records
||||
|
||||
|
||||
def get_identify_code():
    """Derive a machine fingerprint (MD5 hex digest) from the MAC address,
    hostname, CPU IDs and board serial numbers."""
    node_hex = uuid.UUID(int=uuid.getnode()).hex[-12:]
    mac_addr = ":".join(node_hex[i:i + 2] for i in range(0, 11, 2))

    host_name = socket.getfqdn(socket.gethostname())
    cpu_ids = get_cpu_id()
    serialnumbers = get_board_serialnumber()

    parts = []
    if mac_addr:
        parts.append(mac_addr)
    if host_name:
        parts.append(host_name)
    # assumes every dmidecode record dict carries exactly these keys —
    # TODO confirm against get_cpu_id / get_board_serialnumber output
    if cpu_ids:
        for cpu in cpu_ids:
            parts.append(cpu["ID"])
    if serialnumbers:
        for number in serialnumbers:
            parts.append(number["Serial Number"])
    s = "".join(parts)
    logging.info(s)

    return hashlib.new('md5', s.encode("utf8")).hexdigest()
||||
|
||||
|
||||
def get_system_uuid():
    """Read the hardware product UUID from the DMI sysfs entry.

    The value comes from the host firmware, so even inside a Docker
    container it identifies the host machine rather than the container.
    """
    with open("/sys/class/dmi/id/product_uuid", "r") as fh:
        return fh.read().strip()
||||
|
||||
|
||||
def get_docker_container_id():
    """Return the current Docker container id from /proc/self/cgroup.

    Falls back to the placeholder "abc" when no cgroup line is available
    (same fallback value as the original implementation).

    Fixes vs. the original: the popen handle is closed deterministically,
    and empty output is guarded before indexing — rests[-1] on an empty
    list raised IndexError before the emptiness check could run.
    """
    cmd = "cat /proc/self/cgroup"
    with os.popen(cmd) as output:
        rests = output.readlines()
    if not rests or not rests[-1].strip():
        return "abc"
    # Inside a container the last cgroup line ends with "docker/<id>";
    # on a bare host there is no "docker/" and the whole line is returned.
    return rests[-1].strip().split("docker/")[-1]
@ -0,0 +1,103 @@
|
||||
# -*- coding:utf8 -*-
|
||||
|
||||
import io
|
||||
|
||||
import pandas as pd
|
||||
import tornado.gen as gen
|
||||
import xlwt
|
||||
|
||||
|
||||
class Excel(object):
    """Build xls/xlsx payloads in memory (BytesIO) for download handlers."""

    @gen.coroutine
    def generate_excel(self, head, rows):
        """Build a single-sheet .xls workbook and return it as BytesIO.

        head: list of (index, title) pairs, eg: [(0, u"编号"), (1, u"地址")]
        rows: list of row value lists, eg: [[0, "XXX"], ...]
        """
        workbook = xlwt.Workbook(encoding='utf-8')
        worksheet = workbook.add_sheet("sheet1")

        # Header row (row 0). enumerate fixes the original head.index(item)
        # lookup, which wrote duplicate titles into the first match's column.
        for col_num, item in enumerate(head):
            worksheet.write(0, col_num, item[1])

        # Data rows start at row 1, below the header.
        for row_num, row in enumerate(rows, start=1):
            for col_num, col in enumerate(row):
                worksheet.write(row_num, col_num, col)

        sio = io.BytesIO()
        workbook.save(sio)

        raise gen.Return(sio)

    @gen.coroutine
    def generate_excel_pd(self, pd_data):
        """Build a multi-sheet .xlsx with a line chart per sheet via pandas.

        pd_data: iterable of dicts with keys "data", "index", "columns"
        and "sheet_name" (one dict per sheet).
        """
        # BUGFIX: the xlsxwriter engine emits binary data, so the target
        # buffer must be BytesIO — the original io.StringIO() fails at save.
        sio = io.BytesIO()
        writer = pd.ExcelWriter(sio, engine='xlsxwriter')

        for data in pd_data:
            df = pd.DataFrame(data=data["data"], index=data["index"], columns=data["columns"])
            sheet_name = data["sheet_name"]

            df.to_excel(writer, sheet_name=sheet_name)

            workbook = writer.book
            worksheet = writer.sheets[sheet_name]

            chart = workbook.add_chart({'type': 'line'})

            # +1 accounts for the header row written by to_excel.
            max_row = len(df) + 1

            # One series per data column; spreadsheet column 0 holds the index.
            for i in range(len(data['columns'])):
                col = i + 1
                chart.add_series({
                    'name': [sheet_name, 0, col],
                    'categories': [sheet_name, 1, 0, max_row, 0],
                    'values': [sheet_name, 1, col, max_row, col],
                    'line': {'width': 1.00},
                })

            chart.set_x_axis({'name': 'Date', 'date_axis': True})
            chart.set_y_axis({'name': 'Statistics', 'major_gridlines': {'visible': False}})

            chart.set_legend({'position': 'top'})

            worksheet.insert_chart('H2', chart)

        writer.save()

        raise gen.Return(sio)
|
Loading…
Reference in New Issue