crypto_quant/market_monitor_main.py

import json
import logging
import os
import re
from datetime import datetime, timedelta

import pandas as pd

from config import MONITOR_CONFIG, MYSQL_CONFIG
from core.biz.market_monitor import create_metrics_report
from core.db.db_market_monitor import DBMarketMonitor
from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
from core.wechat import Wechat
from huge_volume_main import HugeVolumeMain
from market_data_main import MarketDataMain

logging.basicConfig(
    level=logging.INFO, format="%(asctime)s %(levelname)s: %(message)s"
)
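
# Realtime market monitor: fetches recent klines, computes technical indicators and
# huge-volume flags, pushes a markdown report to WeCom, and persists the report to MySQL.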

class MarketMonitorMain:
    def __init__(self):
        self.market_data_main = MarketDataMain()
        self.huge_volume_main = HugeVolumeMain()
        self.wechat = Wechat()
        self.monitor_config = MONITOR_CONFIG
        self.window_size = 100
        self.start_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
            "initial_date", "2025-05-01 00:00:00"
        )
        self.latest_record_file_path = "./output/latest_record.json"
        self.latest_record = self.get_latest_record()
        self.output_folder = "./output/report/market_monitor/"
        os.makedirs(self.output_folder, exist_ok=True)
        mysql_user = MYSQL_CONFIG.get("user", "xch")
        mysql_password = MYSQL_CONFIG.get("password", "")
        if not mysql_password:
            raise ValueError("MySQL password is not set")
        mysql_host = MYSQL_CONFIG.get("host", "localhost")
        mysql_port = MYSQL_CONFIG.get("port", 3306)
        mysql_database = MYSQL_CONFIG.get("database", "okx")
        self.db_url = (
            f"mysql+pymysql://{mysql_user}:{mysql_password}"
            f"@{mysql_host}:{mysql_port}/{mysql_database}"
        )
        self.db_market_monitor = DBMarketMonitor(self.db_url)

    def get_latest_record(self):
        """
        Load the latest monitored record from disk; create an empty record file if it does not exist.
        """
        if os.path.exists(self.latest_record_file_path):
            with open(self.latest_record_file_path, "r", encoding="utf-8") as f:
                return json.load(f)
        else:
            # Make sure the output directory exists before the first write.
            os.makedirs(os.path.dirname(self.latest_record_file_path), exist_ok=True)
            with open(self.latest_record_file_path, "w", encoding="utf-8") as f:
                json.dump({}, f, ensure_ascii=False, indent=4)
            return {}
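
    # self.latest_record maps symbol -> bar -> {"timestamp": <ms>}, e.g.
    # {"BTC-USDT": {"5m": {"timestamp": 1748736000000}}} (values here are illustrative only).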
    def monitor_realtime_market(
        self,
        symbol: str,
        bar: str,
        only_output_huge_volume: bool = False,
        only_output_rise: bool = False,
    ):
        """
        Monitor the latest market data.
        For speed, data is fetched directly from the API for now instead of going through the database.
        """
        real_time_data = self.market_data_main.market_data.get_realtime_kline_data(
            symbol=symbol,
            bar=bar,
            end_time=None,
            limit=50,
        )
        if real_time_data is None or len(real_time_data) == 0:
            logging.error(f"Failed to fetch realtime market data: {symbol}, {bar}")
            return
        latest_realtime_timestamp = real_time_data["timestamp"].iloc[-1]
        latest_record_timestamp = (
            self.latest_record.get(symbol, {}).get(bar, {}).get("timestamp", 0)
        )
        latest_realtime_datetime = timestamp_to_datetime(latest_realtime_timestamp)
        latest_record_datetime = timestamp_to_datetime(latest_record_timestamp)
        if (
            latest_record_timestamp is not None
            and latest_realtime_timestamp <= latest_record_timestamp
        ):
            logging.info(
                f"Latest realtime timestamp {latest_realtime_datetime} is not newer than "
                f"the last recorded timestamp {latest_record_datetime}; skipping"
            )
            return
        else:
            # Update only the current bar so records for other bars of the same symbol are kept.
            self.latest_record.setdefault(symbol, {})[bar] = {
                "timestamp": latest_realtime_timestamp
            }
            with open(self.latest_record_file_path, "w", encoding="utf-8") as f:
                json.dump(self.latest_record, f, ensure_ascii=False, indent=4)
            logging.info(
                f"Latest realtime data time {latest_realtime_datetime}, "
                f"previous recorded time {latest_record_datetime}"
            )
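        # Enrich the realtime klines: add derived columns, compute technical
        # indicators, then flag huge-volume bars over a rolling window.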
        real_time_data = self.market_data_main.add_new_columns(real_time_data)
        logging.info(f"Calculating technical indicators: {symbol} {bar}")
        real_time_data = self.market_data_main.calculate_metrics(real_time_data)
        logging.info(
            f"Detecting huge volume: {symbol} {bar} window size: {self.window_size}"
        )
        real_time_data = self.huge_volume_main.huge_volume.detect_huge_volume(
            data=real_time_data,
            window_size=self.window_size,
            threshold=self.huge_volume_main.threshold,
            check_price=True,
            only_output_huge_volume=only_output_huge_volume,
            output_excel=False,
        )
        if real_time_data is None or len(real_time_data) == 0:
            logging.error(
                f"Huge volume detection failed: {symbol} {bar} window size: {self.window_size}"
            )
            return
        report = create_metrics_report(real_time_data, only_output_rise)
        text_length = len(report.encode("utf-8"))
        logging.info(f"Sending report to WeCom, byte size: {text_length}")
        self.wechat.send_markdown(report)
        self.latest_record[symbol][bar]["timestamp"] = latest_realtime_timestamp
        with open(self.latest_record_file_path, "w", encoding="utf-8") as f:
            json.dump(self.latest_record, f, ensure_ascii=False, indent=4)
        # Remove punctuation from latest_realtime_datetime so it can be used in a file name.
        latest_realtime_datetime = re.sub(r"[\:\-\s]", "", latest_realtime_datetime)
        report_file_name = (
            f"{symbol}_{bar}_{self.window_size}_{latest_realtime_datetime}.md"
        )
        report_file_path = os.path.join(self.output_folder, report_file_name)
        with open(report_file_path, "w", encoding="utf-8") as f:
            f.write(report.replace(":", "_"))
        report_file_byte_size = os.path.getsize(report_file_path)
        report_data = {
            "symbol": symbol,
            "bar": bar,
            "window_size": self.window_size,
            "timestamp": latest_realtime_timestamp,
            "date_time": latest_realtime_datetime,
            "report": report,
            "report_file_path": report_file_path,
            "report_file_name": report_file_name,
            "report_file_byte_size": report_file_byte_size,
        }
        report_data = pd.DataFrame([report_data])
        logging.info("Inserting report data into the database")
        self.db_market_monitor.insert_data_to_mysql(report_data)
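
    # Run one monitoring pass for every configured symbol/bar combination.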
    def batch_monitor_realtime_market(
        self,
        only_output_huge_volume: bool = True,
        only_output_rise: bool = False,
    ):
        for symbol in self.market_data_main.symbols:
            for bar in self.market_data_main.bars:
                self.monitor_realtime_market(
                    symbol,
                    bar,
                    only_output_huge_volume,
                    only_output_rise,
                )
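

# Minimal usage sketch (an assumed entry point, not confirmed by the listing above):
#
#     if __name__ == "__main__":
#         monitor = MarketMonitorMain()
#         monitor.batch_monitor_realtime_market(
#             only_output_huge_volume=True, only_output_rise=False
#         )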