# crypto_quant/market_monitor_main.py
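"""
Realtime market monitor entry point.

Pulls the latest kline data for each configured symbol/bar, computes technical
metrics and huge-volume signals, flags price anomalies, builds a markdown
report, pushes it to WeCom, stores it in MySQL, and tracks the last reported
timestamp per symbol/bar in a local JSON file.
"""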

from market_data_main import MarketDataMain
from huge_volume_main import HugeVolumeMain
from core.biz.market_monitor import create_metrics_report
from core.db.db_market_monitor import DBMarketMonitor
from core.wechat import Wechat
from config import MONITOR_CONFIG, MYSQL_CONFIG
from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
import core.logger as logging
import os
import pandas as pd
from datetime import datetime, timedelta
import json
import re

logger = logging.logger


class MarketMonitorMain:
    def __init__(self):
        self.market_data_main = MarketDataMain()
        self.huge_volume_main = HugeVolumeMain()
        self.wechat = Wechat()
        self.monitor_config = MONITOR_CONFIG
        self.window_size = 100
        self.start_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
            "initial_date", "2025-05-01 00:00:00"
        )
        self.latest_record_file_path = "./output/record/latest_record.json"
        self.latest_record = self.get_latest_record()
        self.output_folder = "./output/report/market_monitor/"
        os.makedirs(self.output_folder, exist_ok=True)
        mysql_user = MYSQL_CONFIG.get("user", "xch")
        mysql_password = MYSQL_CONFIG.get("password", "")
        if not mysql_password:
            raise ValueError("MySQL password is not set")
        mysql_host = MYSQL_CONFIG.get("host", "localhost")
        mysql_port = MYSQL_CONFIG.get("port", 3306)
        mysql_database = MYSQL_CONFIG.get("database", "okx")
        self.db_url = (
            f"mysql+pymysql://{mysql_user}:{mysql_password}"
            f"@{mysql_host}:{mysql_port}/{mysql_database}"
        )
        self.db_market_monitor = DBMarketMonitor(self.db_url)
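
    # A minimal sketch of the configuration keys read above; this is not the
    # authoritative config.py, and the real MONITOR_CONFIG / MYSQL_CONFIG may
    # carry additional entries:
    #
    #     MONITOR_CONFIG = {"volume_monitor": {"initial_date": "2025-05-01 00:00:00"}}
    #     MYSQL_CONFIG = {"user": "xch", "password": "...", "host": "localhost",
    #                     "port": 3306, "database": "okx"}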

    def get_latest_record(self):
        """
        Load the record of the last report generated per symbol/bar.
        """
        os.makedirs(os.path.dirname(self.latest_record_file_path), exist_ok=True)
        try:
            if os.path.exists(self.latest_record_file_path):
                with open(self.latest_record_file_path, "r", encoding="utf-8") as f:
                    return json.load(f)
            else:
                with open(self.latest_record_file_path, "w", encoding="utf-8") as f:
                    json.dump({}, f, ensure_ascii=False, indent=4)
                return {}
        except Exception as e:
            logger.error(f"Failed to load the latest report record: {e}")
            return {}
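
    # Shape of latest_record.json as maintained by monitor_realtime_market()
    # (symbol, bar, and timestamp values below are illustrative):
    #
    #     {"BTC-USDT": {"5m": {"timestamp": 1735689600000}}}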

    def monitor_realtime_market(
        self,
        symbol: str,
        bar: str,
        only_output_huge_volume: bool = False,
        only_output_over_mean_volume: bool = False,
        only_output_rise: bool = False,
    ):
        """
        Monitor the latest market data for one symbol/bar.
        For speed, data is fetched directly from the API rather than the database.
        """
        # Current time string, used as both the query end time and the report time
        now_datetime = datetime.now()
        now_datetime_str = now_datetime.strftime("%Y-%m-%d %H:%M:%S")
        end_time = transform_date_time_to_timestamp(now_datetime_str)
        real_time_data = self.market_data_main.market_data.get_realtime_kline_data(
            symbol=symbol,
            bar=bar,
            end_time=end_time,
            limit=100,
        )
        if real_time_data is None or len(real_time_data) == 0:
            logger.error(f"Failed to fetch the latest market data: {symbol}, {bar}")
            return
        latest_realtime_timestamp = int(real_time_data["timestamp"].iloc[-1])
        latest_record_timestamp = (
            self.latest_record.get(symbol, {}).get(bar, {}).get("timestamp", None)
        )
        latest_realtime_datetime = timestamp_to_datetime(latest_realtime_timestamp)
        if latest_record_timestamp is not None:
            latest_record_timestamp = int(latest_record_timestamp)
            latest_record_datetime = timestamp_to_datetime(latest_record_timestamp)
            if latest_realtime_timestamp <= latest_record_timestamp:
                logger.info(
                    f"Latest market data timestamp {latest_realtime_datetime} is not newer than "
                    f"the last recorded timestamp {latest_record_datetime}; skipping monitoring"
                )
                return
            logger.info(
                f"Latest market data time {latest_realtime_datetime}, "
                f"last recorded time {latest_record_datetime}"
            )
        else:
            logger.info(
                f"Latest market data time {latest_realtime_datetime}, no previous record"
            )
        real_time_data = self.market_data_main.add_new_columns(real_time_data)
        logger.info(f"Calculating technical metrics: {symbol} {bar}")
        real_time_data = self.market_data_main.calculate_metrics(real_time_data)
        logger.info(
            f"Detecting huge volume: {symbol} {bar}, window size: {self.window_size}"
        )
        real_time_data = self.huge_volume_main.huge_volume.detect_huge_volume(
            data=real_time_data,
            window_size=self.window_size,
            threshold=self.huge_volume_main.threshold,
            check_price=True,
            only_output_huge_volume=False,
            output_excel=False,
        )
        if real_time_data is None or len(real_time_data) == 0:
            logger.error(
                f"Huge volume detection failed: {symbol} {bar}, window size: {self.window_size}"
            )
            return
        realtime_row = real_time_data.iloc[-1]
        # Annotate the latest row with the price_anomaly flag
        realtime_row = self.calculate_price_anomaly(real_time_data, realtime_row)
        if only_output_huge_volume:
            if realtime_row["huge_volume"] == 1:
                logger.info(
                    f"Huge volume detected: {symbol} {bar}, window size: {self.window_size}"
                )
                if only_output_over_mean_volume:
                    # Mean volume_ratio across bars where huge_volume == 1
                    mean_huge_volume_ratio = real_time_data[
                        real_time_data["huge_volume"] == 1
                    ]["volume_ratio"].mean()
                    if realtime_row["volume_ratio"] >= mean_huge_volume_ratio:
                        logger.info(
                            f"Huge volume above the mean ratio: {symbol} {bar}, "
                            f"window size: {self.window_size}"
                        )
                    else:
                        logger.info(
                            f"Huge volume below the mean ratio: {symbol} {bar}, "
                            f"window size: {self.window_size}; skipping this run"
                        )
                        return
            else:
                logger.info(
                    f"No huge volume: {symbol} {bar}, window size: {self.window_size}; "
                    f"skipping this run"
                )
                return
        if only_output_rise:
            if realtime_row["pct_chg"] > 0:
                logger.info(
                    f"Price rise detected: {symbol} {bar}, window size: {self.window_size}"
                )
            else:
                logger.info(
                    f"Price drop detected: {symbol} {bar}, "
                    f"window size: {self.window_size}; skipping this run"
                )
                return
        next_bar_row = self.get_other_realtime_data(symbol, bar, end_time, next=True)
        if "BTC-USDT" in symbol:
            btc_bar_row = None
        else:
            btc_bar_row = self.get_other_realtime_data(
                "BTC-USDT", bar, end_time, next=False
            )
        report = create_metrics_report(
            realtime_row,
            next_bar_row,
            btc_bar_row,
            real_time_data,
            only_output_huge_volume,
            only_output_rise,
            now_datetime_str,
        )
        text_length = len(report.encode("utf-8"))
        logger.info(f"Sending report to WeCom, size in bytes: {text_length}")
        self.wechat.send_markdown(report)
        # Strip punctuation from the datetime for use in the file name
        file_datetime = re.sub(r"[\:\-\s]", "", latest_realtime_datetime)
        report_file_name = f"{symbol}_{bar}_{self.window_size}_{file_datetime}.md"
        report_file_path = os.path.join(self.output_folder, report_file_name)
        with open(report_file_path, "w", encoding="utf-8") as f:
            f.write(report)
        report_file_byte_size = os.path.getsize(report_file_path)
        report_data = {
            "symbol": symbol,
            "bar": bar,
            "window_size": self.window_size,
            "timestamp": latest_realtime_timestamp,
            "date_time": latest_realtime_datetime,
            "report": report,
            "report_file_path": report_file_path,
            "report_file_name": report_file_name,
            "report_file_byte_size": report_file_byte_size,
        }
        report_data = pd.DataFrame([report_data])
        logger.info("Inserting report data into the database")
        self.db_market_monitor.insert_data_to_mysql(report_data)
        # Update the per-symbol/bar record of the last reported timestamp
        if self.latest_record.get(symbol, None) is None:
            self.latest_record[symbol] = {bar: {"timestamp": latest_realtime_timestamp}}
        elif self.latest_record.get(symbol, {}).get(bar, None) is None:
            self.latest_record[symbol][bar] = {"timestamp": latest_realtime_timestamp}
        else:
            self.latest_record[symbol][bar]["timestamp"] = latest_realtime_timestamp
        with open(self.latest_record_file_path, "w", encoding="utf-8") as f:
            json.dump(self.latest_record, f, ensure_ascii=False, indent=4)
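
    # Illustrative only: a pure restatement of the only_output_* gating applied
    # in monitor_realtime_market(), factored out so it can be reasoned about or
    # unit-tested in isolation. The name and signature are assumptions, not part
    # of the original module, and edge cases (e.g. a NaN mean ratio) are not
    # handled here.
    @staticmethod
    def _should_report(
        row: pd.Series,
        mean_huge_volume_ratio: float,
        only_output_huge_volume: bool,
        only_output_over_mean_volume: bool,
        only_output_rise: bool,
    ) -> bool:
        if only_output_huge_volume:
            if row["huge_volume"] != 1:
                return False
            if (
                only_output_over_mean_volume
                and row["volume_ratio"] < mean_huge_volume_ratio
            ):
                return False
        if only_output_rise and row["pct_chg"] <= 0:
            return False
        return True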

    def calculate_price_anomaly(self, data: pd.DataFrame, realtime_row: pd.Series):
        """
        Flag the latest bar as a price anomaly when its pct_chg falls outside
        a k-sigma band around the mean of the preceding bars.
        """
        k = 2
        # Mean and standard deviation over all bars except the latest one
        data = data.iloc[:-1].copy()
        pct_chg = realtime_row["pct_chg"]
        pct_chg_mean = data["pct_chg"].mean()
        pct_chg_std = data["pct_chg"].std()
        pct_chg_upper_bound = pct_chg_mean + k * pct_chg_std
        pct_chg_lower_bound = pct_chg_mean - k * pct_chg_std
        # Work on a copy so we do not mutate a view of the original DataFrame
        realtime_row = realtime_row.copy()
        if pct_chg > pct_chg_upper_bound or pct_chg < pct_chg_lower_bound:
            realtime_row["price_anomaly"] = True
        else:
            realtime_row["price_anomaly"] = False
        return realtime_row
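
    # Example with illustrative numbers: if pct_chg_mean = 0.1, pct_chg_std = 0.5
    # and k = 2, the band is [-0.9, 1.1], so a latest pct_chg of 1.5 would be
    # flagged as a price anomaly while 0.8 would not.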

    def get_other_realtime_data(
        self, symbol: str, bar: str, end_time: int, next: bool = True
    ):
        """
        Fetch the latest realtime row for `symbol`. When `next` is True, use the
        next (longer) timeframe after `bar`; otherwise use `bar` itself.
        """
        if next:
            # Index of `bar` within self.market_data_main.bars
            bar_index = self.market_data_main.bars.index(bar)
            if bar_index == len(self.market_data_main.bars) - 1:
                logger.error(f"Already the last bar: {bar}")
                return None
            # Move to the next (longer) bar
            bar = self.market_data_main.bars[bar_index + 1]
        # Realtime data for the selected bar
        data = self.market_data_main.market_data.get_realtime_kline_data(
            symbol=symbol, bar=bar, end_time=end_time, limit=100
        )
        if data is None or len(data) == 0:
            logger.error(f"Failed to fetch realtime data: {symbol}, {bar}")
            return None
        data = self.market_data_main.add_new_columns(data)
        logger.info(f"Calculating technical metrics: {symbol} {bar}")
        data = self.market_data_main.calculate_metrics(data)
        row = data.iloc[-1]
        return row

    def batch_monitor_realtime_market(
        self,
        only_output_huge_volume: bool = True,
        only_output_over_mean_volume: bool = True,
        only_output_rise: bool = True,
    ):
        for symbol in self.market_data_main.symbols:
            for bar in self.market_data_main.bars:
                logger.info(
                    f"Start monitoring: {symbol} {bar}, window size: {self.window_size}"
                )
                try:
                    self.monitor_realtime_market(
                        symbol,
                        bar,
                        only_output_huge_volume,
                        only_output_over_mean_volume,
                        only_output_rise,
                    )
                except Exception as e:
                    logger.error(
                        f"Monitoring failed: {symbol} {bar}, "
                        f"window size: {self.window_size}: {e}"
                    )
                    continue


if __name__ == "__main__":
    market_monitor_main = MarketMonitorMain()
    market_monitor_main.monitor_realtime_market(
        symbol="PUMP-USDT",
        bar="5m",
        only_output_huge_volume=False,
        only_output_rise=False,
    )
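
    # Alternatively, monitor every configured symbol/bar pair in one pass; the
    # defaults of batch_monitor_realtime_market() keep only huge-volume bars
    # above the mean ratio that also closed higher:
    # market_monitor_main.batch_monitor_realtime_market()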