Support getting US stock data

This commit is contained in:
blade 2025-08-31 11:20:59 +08:00
parent 69aff37c24
commit a8a310ecf0
23 changed files with 822 additions and 103 deletions

View File

@@ -48,10 +48,9 @@ TIME_CONFIG = {
     "kline_limit": 100,  # K线数据条数
 }
 
-MONITOR_CONFIG = {
+OKX_MONITOR_CONFIG = {
     "volume_monitor":{
-        "symbols": ["XCH-USDT", "BONK-USDT", "PENGU-USDT",
-                    "CFX-USDT", "PUMP-USDT", "SOL-USDT",
+        "symbols": ["XCH-USDT","SOL-USDT",
                     "BTC-USDT", "ETH-USDT", "DOGE-USDT"],
         "bars": ["5m", "15m", "30m", "1H"],
         "initial_date": "2025-05-15 00:00:00"
@@ -71,6 +70,14 @@ MONITOR_CONFIG = {
     }
 }
 
+US_STOCK_MONITOR_CONFIG = {
+    "volume_monitor":{
+        "symbols": ["QQQ", "TQQQ", "MSFT", "AAPL", "GOOG", "NVDA", "META", "AMZN", "TSLA", "AVGO"],
+        "bars": ["5m"],
+        "initial_date": "2015-08-31 00:00:00"
+    }
+}
+
 WINDOW_SIZE = {"window_sizes":[50, 80, 100, 120]}
 
 BAR_THRESHOLD = {
@@ -93,3 +100,5 @@ MYSQL_CONFIG = {
 WECHAT_CONFIG = {
     "key": "11e6f7ac-efa9-418a-904c-9325a9f5d324"
 }
+
+ITICK_API_KEY = "dfd4bc0caed148d6bc03b960224754ffb5356349e389431f828702b3a27e8a2b"

View File

@@ -4,7 +4,8 @@ from typing import Optional
 import pandas as pd
 import okx.MarketData as Market
 import okx.TradingData as TradingData
-from core.utils import transform_date_time_to_timestamp
+from core.utils import transform_date_time_to_timestamp, timestamp_to_datetime
+from core.biz.market_data_from_itick import MarketDataFromItick
 import core.logger as logging
 
 logger = logging.logger
@@ -14,7 +15,8 @@ class MarketData:
                  api_key: str,
                  secret_key: str,
                  passphrase: str,
-                 sandbox: bool = True):
+                 sandbox: bool = True,
+                 is_us_stock: bool = False):
         self.flag = "1" if sandbox else "0"  # 0:实盘环境 1:沙盒环境
         self.api_key = api_key
         self.secret_key = secret_key
@@ -23,6 +25,8 @@ class MarketData:
             api_key=api_key, api_secret_key=secret_key, passphrase=passphrase,
             flag=self.flag
         )
+        self.is_us_stock = is_us_stock
         # self.trade_api = TradingData.TradingDataAPI(
         #     api_key=api_key, api_secret_key=secret_key, passphrase=passphrase,
         #     flag=flag
@@ -107,7 +111,11 @@ class MarketData:
         while start_time < end_time:
             try:
                 # after真实逻辑是获得指定时间之前的数据
-                response = self.get_historical_candlesticks_from_api(symbol, bar, end_time, limit)
+                if self.is_us_stock:
+                    market_data_from_itick = MarketDataFromItick(symbol=symbol, bar=bar, end_time=end_time, limit=limit)
+                    response = market_data_from_itick.get_historical_candlesticks_from_api()
+                else:
+                    response = self.get_historical_candlesticks_from_api(symbol, bar, end_time, limit)
                 if response is None:
                     logger.warning(f"请求失败,请稍后再试")
                     break
@@ -115,9 +123,15 @@ class MarketData:
                     logger.warning(f"请求失败或无数据: {response.get('msg', 'No message')}")
                     break
                 candles = response["data"]
-                from_time = int(candles[-1][0])
-                to_time = int(candles[0][0])
+                if len(candles) == 0:
+                    logger.warning(f"请求失败或无数据: {response.get('msg', 'No message')}")
+                    break
+                if self.is_us_stock:
+                    from_time = int(candles[-1]["timestamp"])
+                    to_time = int(candles[0]["timestamp"])
+                else:
+                    from_time = int(candles[-1][0])
+                    to_time = int(candles[0][0])
                 if latest_timestamp == -1:
                     latest_timestamp = from_time
                 else:
@@ -126,14 +140,25 @@ class MarketData:
                     else:
                         logger.warning(f"上一次数据最早时间戳 {latest_timestamp} 小于等于 from_time {from_time} 停止获取数据")
                         break
-                from_time_str = pd.to_datetime(from_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
-                to_time_str = pd.to_datetime(to_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
+                if self.is_us_stock:
+                    from_time_str = pd.to_datetime(from_time, unit='ms', utc=True).tz_convert('America/New_York')
+                    to_time_str = pd.to_datetime(to_time, unit='ms', utc=True).tz_convert('America/New_York')
+                else:
+                    from_time_str = pd.to_datetime(from_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
+                    to_time_str = pd.to_datetime(to_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
                 logger.info(f"已获取{symbol}, 周期:{bar} {len(candles)} 条数据,从: {from_time_str} 到: {to_time_str}")
                 if from_time < start_time:
-                    start_time_str = pd.to_datetime(start_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
+                    if self.is_us_stock:
+                        start_time_str = pd.to_datetime(start_time, unit='ms', utc=True).tz_convert('America/New_York')
+                    else:
+                        start_time_str = pd.to_datetime(start_time, unit='ms', utc=True).tz_convert('Asia/Shanghai')
                     logger.warning(f"本轮获取{symbol}, {bar} 数据最早时间 {from_time_str} 早于 此次数据获取起始时间 {start_time_str} 停止获取数据")
                     # candles中仅保留start_time之后的数据
-                    candles = [candle for candle in candles if int(candle[0]) >= start_time]
+                    if self.is_us_stock:
+                        if to_time > start_time:
+                            candles = [candle for candle in candles if int(candle["timestamp"]) >= start_time]
+                    else:
+                        candles = [candle for candle in candles if int(candle[0]) >= start_time]
                     if len(candles) > 0:
                         candles_pd = pd.DataFrame(candles, columns=columns)
                         all_data.append(candles_pd)
@@ -141,6 +166,9 @@ class MarketData:
                     else:
                         break
                 else:
+                    if end_time + 1 == from_time or end_time == from_time:
+                        logger.warning(f"本轮获取{symbol}, {bar} 数据最早时间 {from_time_str} 等于 此次数据获取结束时间, 停止获取数据")
+                        break
                     if len(candles) > 0:
                         candles_pd = pd.DataFrame(candles, columns=columns)
                         all_data.append(candles_pd)
@@ -161,6 +189,8 @@ class MarketData:
                 df[col] = pd.to_numeric(df[col], errors='coerce')
             dt_series = pd.to_datetime(df['timestamp'].astype(int), unit='ms', utc=True, errors='coerce').dt.tz_convert('Asia/Shanghai')
             df['date_time'] = dt_series.dt.strftime('%Y-%m-%d %H:%M:%S')
+            dt_us_series = pd.to_datetime(df['timestamp'].astype(int), unit='ms', utc=True, errors='coerce').dt.tz_convert('America/New_York')
+            df['date_time_us'] = dt_us_series.dt.strftime('%Y-%m-%d %H:%M:%S')
             # 将timestamp转换为整型
             df['timestamp'] = df['timestamp'].astype(int)
             # 添加虚拟货币名称列,内容为symbol
@@ -169,14 +199,35 @@ class MarketData:
             df['bar'] = bar
             df['create_time'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+            if self.is_us_stock:
+                # 如果是美股数据,则仅保留date_time_us字段中在开盘时间内的数据,即开盘时间为美国时间9:30到16:00
+                # 将date_time_us转换为datetime对象,然后判断是否在开盘时间内,如果是则保留,否则删除
+                df['date_time_us'] = pd.to_datetime(df['date_time_us'], errors='coerce')
+                # 使用 .loc 避免 SettingWithCopyWarning
+                mask = (df['date_time_us'].dt.hour >= 9) & (df['date_time_us'].dt.hour <= 16)
+                df = df.loc[mask].copy()
+                # 对于9点,只保留9:35与之后的数据
+                mask_9 = ~((df['date_time_us'].dt.hour == 9) & (df['date_time_us'].dt.minute <= 30))
+                df = df.loc[mask_9].copy()
+                # 对于16点,只保留16:00之前的数据
+                mask_16 = ~((df['date_time_us'].dt.hour == 16) & (df['date_time_us'].dt.minute > 0))
+                df = df.loc[mask_16].copy()
+                # 将date_time_us转换为字符串
+                df.loc[:, 'date_time_us'] = df['date_time_us'].dt.strftime('%Y-%m-%d %H:%M:%S')
             # 获取df中date_time的最早时间与最新时间 并保存到df中
-            df = df[['symbol', 'bar', 'timestamp', 'date_time', 'open', 'high', 'low', 'close', 'volume', 'volCcy', 'volCCyQuote', 'create_time']]
+            df = df[['symbol', 'bar', 'timestamp', 'date_time', 'date_time_us', 'open', 'high', 'low', 'close', 'volume', 'volCcy', 'volCCyQuote', 'create_time']]
             df.sort_values('timestamp', inplace=True)
             df.reset_index(drop=True, inplace=True)
             logger.info(f"总计获取 {len(df)} 条 K 线数据仅confirm=1")
             # 获取df中date_time的最早时间与最新时间
-            earliest_time = df['date_time'].min()
-            latest_time = df['date_time'].max()
+            if self.is_us_stock:
+                earliest_time = df['date_time_us'].min()
+                latest_time = df['date_time_us'].max()
+            else:
+                earliest_time = df['date_time'].min()
+                latest_time = df['date_time'].max()
             logger.info(f"本轮更新{symbol}, {bar} 数据最早时间: {earliest_time} 最新时间: {latest_time}")
             return df
         else:
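The regular-trading-hours filter added above is easy to lose inside the diff, so here is a minimal standalone sketch of the same idea, assuming a `date_time_us` string column and 5-minute bars stamped at bar close (the demo frame and the function name are illustrative, not part of the repository):

import pandas as pd

def filter_us_rth(df: pd.DataFrame) -> pd.DataFrame:
    # keep only bars whose New York close time falls in 09:35-16:00
    ts = pd.to_datetime(df["date_time_us"], errors="coerce")
    in_session = (ts.dt.hour >= 9) & (ts.dt.hour <= 16)
    not_premarket = ~((ts.dt.hour == 9) & (ts.dt.minute <= 30))
    not_postclose = ~((ts.dt.hour == 16) & (ts.dt.minute > 0))
    return df.loc[in_session & not_premarket & not_postclose].copy()

demo = pd.DataFrame({
    "date_time_us": ["2025-08-29 09:30:00", "2025-08-29 09:35:00",
                     "2025-08-29 16:00:00", "2025-08-29 16:05:00"],
    "close": [1.0, 2.0, 3.0, 4.0],
})
print(filter_us_rth(demo)["date_time_us"].tolist())
# ['2025-08-29 09:35:00', '2025-08-29 16:00:00']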

View File

@ -0,0 +1,138 @@
import requests
import pandas as pd
from datetime import datetime, timedelta
import time
import json
from typing import Optional
import mplfinance as mpf
import random
from core.utils import transform_date_time_to_timestamp, datetime_to_timestamp, timestamp_to_datetime
import core.logger as logging
from config import ITICK_API_KEY
logger = logging.logger
class MarketDataFromItick:
"""用于从 Yahoo Finance 下载 K 线数据的类,支持分段下载和数据处理。"""
def __init__(self, symbol='QQQ', bar='5m', end_time: str=None, limit: int=1000):
"""
初始化下载器
参数:
ticker (str): 股票或 ETF 代码 'QQQ'
start_date (datetime): 开始日期默认为 1 年前
end_date (datetime): 结束日期默认为当前日期
bar (str): K 线间隔 '5m'5 分钟
segment_days (int): 每次下载的天数默认为 10
"""
self.symbol = symbol
self.time_stamp_unit = 0
if bar == '1m':
self.ktype = '1'
self.time_stamp_unit = 1000 * 60
elif bar == '5m':
self.ktype = '2'
self.time_stamp_unit = 1000 * 60 * 5
elif bar == '15m':
self.ktype = '3'
self.time_stamp_unit = 1000 * 60 * 15
elif bar == '30m':
self.ktype = '4'
self.time_stamp_unit = 1000 * 60 * 30
elif bar == '1H':
self.ktype = '5'
self.time_stamp_unit = 1000 * 60 * 60
elif bar == '2H':
self.ktype = '6'
self.time_stamp_unit = 1000 * 60 * 60 * 2
elif bar == '4H':
self.ktype = '7'
self.time_stamp_unit = 1000 * 60 * 60 * 4
elif bar == '1D':
self.ktype = '8'
elif bar == '1W':
self.ktype = '9'
elif bar == '1M':
self.ktype = '10'
else:
self.ktype = '2'
self.limit = limit
# 设置默认时间范围
if end_time is None:
end_time = int(datetime.now().timestamp())
if isinstance(end_time, str):
end_time = int(datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S').timestamp())
self.end_time = end_time
self.headers = {
"accept": "application/json",
"token": ITICK_API_KEY
}
# 初始化空 DataFrame
self.data = pd.DataFrame()
# 重试配置
self.max_retries = 3
self.base_delay = 5 # 基础延迟秒数
self.max_delay = 60 # 最大延迟秒数
def get_historical_candlesticks_from_api(self):
response = None
count = 0
end_time_str = timestamp_to_datetime(self.end_time)
logger.info(f"请求数据: {self.symbol} {self.ktype} {end_time_str} {self.limit}")
while True:
try:
url = f"https://api.itick.org/stock/kline?region=US&code={self.symbol}&kType={self.ktype}&et={self.end_time}&limit={self.limit}"
response = requests.get(url, headers=self.headers)
if response:
response = json.loads(response.text)
if response["code"] != 0:
raise Exception(f"请求出错: {response['msg']}")
data_list = response["data"]
"""
{
"tu": 1292317671.363,
"c": 560.78,
"t": 1752858600000,
"v": 2305857,
"h": 560.79,
"l": 560.06,
"o": 560.45
}
"""
result_list = []
# itick的时间分钟数据与市面上的分钟数据有1个周期的差距即itick的时间是市面上的时间的前1个周期
# 因此需要将itick的时间加上1个周期的单位
for data in data_list:
result_list.append({
"timestamp": int(data["t"]) + self.time_stamp_unit,
"open": float(data["o"]),
"high": float(data["h"]),
"low": float(data["l"]),
"close": float(data["c"]),
"volume": int(data["v"]),
"volCcy": 0,
"volCCyQuote": 0,
"confirm": "1"
})
# result_list按照timestamp降序排列
result_list = sorted(result_list, key=lambda x: x["timestamp"], reverse=True)
response["data"] = result_list
response["code"] = "0"
response["msg"] = "success"
break
except Exception as e:
count += 1
logger.error(f"请求出错: {e}, 第{count}次重试30秒后重试")
if count > 3:
break
time.sleep(30)
logger.info(f"请求成功等待12秒后返回")
time.sleep(12)
return response
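A hypothetical usage sketch for the fetcher above, assuming config.ITICK_API_KEY is valid and api.itick.org is reachable; the response mirrors the OKX shape, with "data" holding dicts sorted newest-first and each timestamp already shifted forward by one bar interval, as the comment in the class notes:

from core.biz.market_data_from_itick import MarketDataFromItick

fetcher = MarketDataFromItick(symbol="QQQ", bar="5m",
                              end_time="2025-08-29 16:00:00", limit=100)
resp = fetcher.get_historical_candlesticks_from_api()
if resp is not None and resp.get("code") == "0":
    bars = resp["data"]
    # newest bar first; keys match the columns expected by MarketData
    print(len(bars), bars[0]["timestamp"], bars[0]["close"])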

View File

@@ -18,6 +18,7 @@ class DBMarketData:
             "bar",
             "timestamp",
             "date_time",
+            "date_time_us",
             "open",
             "high",
             "low",

View File

@@ -11,7 +11,7 @@ from openpyxl.drawing.image import Image
 import openpyxl
 from openpyxl.styles import Font
 from PIL import Image as PILImage
-from config import MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
+from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
 from core.db.db_market_data import DBMarketData
 from core.db.db_huge_volume_data import DBHugeVolumeData
 from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
@@ -35,10 +35,10 @@ class PriceVolumeStats:
         self.db_url = f"mysql+pymysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_database}"
         self.db_market_data = DBMarketData(self.db_url)
         self.db_huge_volume_data = DBHugeVolumeData(self.db_url)
-        self.symbol_list = MONITOR_CONFIG.get("volume_monitor", {}).get("symbols", [])
-        self.bars_list = MONITOR_CONFIG.get("volume_monitor", {}).get("bars", [])
+        self.symbol_list = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get("symbols", [])
+        self.bars_list = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get("bars", [])
         self.windows_list = WINDOW_SIZE.get("window_sizes", [])
-        self.initial_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        self.initial_date = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "initial_date", "2025-05-15 00:00:00"
         )
         self.end_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

Binary file not shown.

View File

@@ -12,7 +12,7 @@ from openpyxl.drawing.image import Image
 import openpyxl
 from openpyxl.styles import Font
 from PIL import Image as PILImage
-from config import MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
+from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
 from core.db.db_market_data import DBMarketData
 from core.db.db_huge_volume_data import DBHugeVolumeData
 from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
@@ -44,10 +44,10 @@ class MaBreakStatistics:
         self.db_url = f"mysql+pymysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_database}"
         self.db_market_data = DBMarketData(self.db_url)
         self.db_huge_volume_data = DBHugeVolumeData(self.db_url)
-        self.symbols = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        self.symbols = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "symbols", ["XCH-USDT"]
         )
-        self.bars = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        self.bars = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "bars", ["5m", "15m", "30m", "1H"]
         )
         self.stats_output_dir = "./output/trade_sandbox/ma_strategy/excel/"

View File

@@ -11,7 +11,7 @@ from openpyxl.drawing.image import Image
 import openpyxl
 from openpyxl.styles import Font
 from PIL import Image as PILImage
-from config import MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
+from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
 import core.logger as logging
 from core.db.db_merge_market_huge_volume import DBMergeMarketHugeVolume
 from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp

core/trade/orb_trade.py (new file, 403 lines)
View File

@ -0,0 +1,403 @@
import yfinance as yf
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import core.logger as logging
from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
from core.db.db_market_data import DBMarketData
from core.db.db_huge_volume_data import DBHugeVolumeData
from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
# seaborn支持中文
plt.rcParams["font.family"] = ["SimHei"]
logger = logging.logger
class ORBStrategy:
def __init__(
self,
initial_capital=25000,
max_leverage=4,
risk_per_trade=0.01,
commission_per_share=0.0005,
):
"""
初始化ORB策略参数
:param initial_capital: 初始账户资金美元
:param max_leverage: 最大杠杆倍数默认4倍符合FINRA规定
:param risk_per_trade: 单次交易风险比例默认1%
:param commission_per_share: 每股交易佣金美元默认0.0005
"""
logger.info(f"初始化ORB策略参数初始账户资金={initial_capital},最大杠杆倍数={max_leverage},单次交易风险比例={risk_per_trade},每股交易佣金={commission_per_share}")
self.initial_capital = initial_capital
self.max_leverage = max_leverage
self.risk_per_trade = risk_per_trade
self.commission_per_share = commission_per_share
self.data = None # 存储K线数据
self.trades = [] # 存储交易记录
self.equity_curve = None # 存储账户净值曲线
mysql_user = MYSQL_CONFIG.get("user", "xch")
mysql_password = MYSQL_CONFIG.get("password", "")
if not mysql_password:
raise ValueError("MySQL password is not set")
mysql_host = MYSQL_CONFIG.get("host", "localhost")
mysql_port = MYSQL_CONFIG.get("port", 3306)
mysql_database = MYSQL_CONFIG.get("database", "okx")
self.db_url = f"mysql+pymysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_database}"
self.db_market_data = DBMarketData(self.db_url)
def fetch_intraday_data(self, symbol, start_date, end_date, interval="5m"):
"""
获取日内5分钟K线数据需yfinance支持部分数据可能有延迟
:param ticker: 股票代码如QQQTQQQ
:param start_date: 起始日期格式YYYY-MM-DD
:param end_date: 结束日期格式YYYY-MM-DD
:param interval: K线周期默认5分钟
"""
logger.info(f"开始获取{symbol}数据:{start_date}{end_date},间隔{interval}")
# data = yf.download(
# symbol, start=start_date, end=end_date, interval=interval, progress=False
# )
data = self.db_market_data.query_market_data_by_symbol_bar(
symbol, interval, start=start_date, end=end_date
)
data = pd.DataFrame(data)
data.sort_values(by="date_time", inplace=True)
# 保留核心列:开盘价、最高价、最低价、收盘价、成交量
data["Open"] = data["open"]
data["High"] = data["high"]
data["Low"] = data["low"]
data["Close"] = data["close"]
data["Volume"] = data["volume"]
# 将data["date_time"]从字符串类型转换为日期
data["date_time"] = pd.to_datetime(data["date_time"])
# data["Date"]为日期不包括时分秒即date_time如果是2025-01-01 10:00:00则Date为2025-01-01
data["Date"] = data["date_time"].dt.date
# 将Date转换为datetime64[ns]类型以确保类型一致
data["Date"] = pd.to_datetime(data["Date"])
self.data = data[["Date", "date_time", "Open", "High", "Low", "Close", "Volume"]].copy()
logger.info(f"成功获取{symbol}数据:{len(self.data)}{interval}K线")
def calculate_shares(self, account_value, entry_price, stop_price):
"""
根据ORB公式计算交易股数
:param account_value: 当前账户价值美元
:param entry_price: 交易entry价格第二根5分钟K线开盘价
:param stop_price: 止损价格多头=第一根K线最低价空头=第一根K线最高价
:return: 整数股数Shares
"""
logger.info(f"开始计算交易股数:账户价值={account_value}entry价格={entry_price},止损价格={stop_price}")
# 计算单交易风险金额($R
risk_per_trade_dollar = abs(entry_price - stop_price) # 风险金额取绝对值
if risk_per_trade_dollar <= 0:
return 0 # 无风险时不交易
# 公式1基于风险预算的最大股数风险控制优先
shares_risk = (account_value * self.risk_per_trade) / risk_per_trade_dollar
# 公式2基于杠杆限制的最大股数杠杆约束
shares_leverage = (self.max_leverage * account_value) / entry_price
# 取两者最小值(满足风险和杠杆双重约束)
max_shares = min(shares_risk, shares_leverage)
# 扣除佣金影响(简化计算:假设佣金从可用资金中扣除)
commission_cost = max_shares * self.commission_per_share
if (account_value - commission_cost) < 0:
return 0 # 扣除佣金后资金不足,不交易
return int(max_shares) # 股数取整
def generate_orb_signals(self):
"""
生成ORB策略信号每日仅1次交易机会
- 第一根5分钟K线确定开盘区间High1, Low1
- 第二根5分钟K线根据第一根K线方向生成多空信号
"""
logger.info("开始生成ORB策略信号")
if self.data is None:
raise ValueError("请先调用fetch_intraday_data获取数据")
signals = []
# 按日期分组处理每日数据
for date, daily_data in self.data.groupby("Date"):
daily_data = daily_data.sort_index() # 按时间排序
if len(daily_data) < 2:
continue # 当日K线不足2根跳过
# 第一根5分钟K线开盘区间
first_candle = daily_data.iloc[0]
high1 = first_candle["High"]
low1 = first_candle["Low"]
open1 = first_candle["Open"]
close1 = first_candle["Close"]
# 第二根5分钟K线entry信号
second_candle = daily_data.iloc[1]
entry_price = second_candle["Open"] # entry价格=第二根K线开盘价
entry_time = second_candle.date_time # entry时间
# 生成信号第一根K线方向决定多空排除十字星open1 == close1
if open1 < close1:
# 第一根K线收涨→多头信号
signal = "Long"
stop_price = low1 # 多头止损=第一根K线最低价
elif open1 > close1:
# 第一根K线收跌→空头信号
signal = "Short"
stop_price = high1 # 空头止损=第一根K线最高价
else:
# 十字星→无信号
signal = "None"
stop_price = None
signals.append(
{
"Date": date,
"EntryTime": entry_time,
"Signal": signal,
"EntryPrice": entry_price,
"StopPrice": stop_price,
"High1": high1,
"Low1": low1,
}
)
# 将信号合并到原始数据
signals_df = pd.DataFrame(signals)
# 确保Date列类型一致将Date转换为datetime64[ns]类型
signals_df['Date'] = pd.to_datetime(signals_df['Date'])
# 使用merge而不是join来合并数据,根据signals_df的EntryTime与self.data的date_time进行匹配
# TODO: 这里需要优化
self.data = self.data.merge(signals_df, left_on="date_time", right_on="EntryTime", how="left")
# 将Date_x和Date_y合并为Date
self.data["Date"] = self.data["Date_x"].combine_first(self.data["Date_y"])
# 删除Date_x和Date_y
self.data.drop(columns=["Date_x", "Date_y"], inplace=True)
logger.info(
f"生成信号完成:共{len(signals_df)}个交易日,其中多头{sum(signals_df['Signal']=='Long')}次,空头{sum(signals_df['Signal']=='Short')}"
)
def backtest(self, profit_target_multiple=10):
"""
回测ORB策略
:param profit_target_multiple: 盈利目标倍数默认10倍$R即10R
"""
logger.info(f"开始回测ORB策略盈利目标倍数={profit_target_multiple}")
if "Signal" not in self.data.columns:
raise ValueError("请先调用generate_orb_signals生成策略信号")
account_value = self.initial_capital # 初始账户价值
current_position = None # 当前持仓None=空仓Long/Short=持仓)
equity_history = [account_value] # 净值历史
trade_id = 0 # 交易ID
# 按时间遍历数据每日仅处理第二根K线后的信号
for date, daily_data in self.data.groupby("Date"):
daily_data = daily_data.sort_index()
if len(daily_data) < 2:
continue
# 获取当日信号第二根K线的信号
signal_row = (
daily_data[~pd.isna(daily_data["Signal"])].iloc[0]
if sum(~pd.isna(daily_data["Signal"])) > 0
else None
)
if signal_row is None:
# 无信号→当日不交易,净值保持不变
equity_history.append(account_value)
continue
# 提取信号参数
signal = signal_row["Signal"]
if pd.isna(signal):
continue
entry_price = signal_row["EntryPrice"]
stop_price = signal_row["StopPrice"]
high1 = signal_row["High1"]
low1 = signal_row["Low1"]
risk_assumed = abs(entry_price - stop_price) # 计算$R
profit_target = (
entry_price + (risk_assumed * profit_target_multiple)
if signal == "Long"
else entry_price - (risk_assumed * profit_target_multiple)
)
# 计算交易股数
shares = self.calculate_shares(account_value, entry_price, stop_price)
if shares == 0:
# 股数为0→不交易
equity_history.append(account_value)
continue
# 计算佣金(买入/卖出各收一次)
total_commission = shares * self.commission_per_share * 2 # 往返佣金
# 模拟日内持仓:寻找止损/止盈触发点,或当日收盘平仓
daily_prices = daily_data[
daily_data.date_time > signal_row.date_time
] # 从entry时间开始遍历
exit_price = None
exit_time = None
exit_reason = None
for idx, (time, row) in enumerate(daily_prices.iterrows()):
high = row["High"]
low = row["Low"]
close = row["Close"]
# 检查止损/止盈条件
if signal == "Long":
# 多头:跌破止损→止损;突破止盈→止盈
if low <= stop_price:
exit_price = stop_price
exit_reason = "Stop Loss"
exit_time = time
break
elif high >= profit_target:
exit_price = profit_target
exit_reason = "Profit Target (10R)"
exit_time = time
break
elif signal == "Short":
# 空头:突破止损→止损;跌破止盈→止盈
if high >= stop_price:
exit_price = stop_price
exit_reason = "Stop Loss"
exit_time = time
break
elif low <= profit_target:
exit_price = profit_target
exit_reason = "Profit Target (10R)"
exit_time = time
break
# 若未触发止损/止盈,当日收盘平仓
if exit_price is None:
exit_price = daily_prices.iloc[-1]["Close"]
exit_reason = "End of Day (EoD)"
exit_time = daily_prices.iloc[-1].date_time
# 计算盈亏
if signal == "Long":
profit_loss = (exit_price - entry_price) * shares - total_commission
else: # Short
profit_loss = (entry_price - exit_price) * shares - total_commission
# 更新账户价值
account_value += profit_loss
account_value = max(account_value, 0) # 账户价值不能为负
# 记录交易
self.trades.append(
{
"TradeID": trade_id,
"Date": date,
"Signal": signal,
"EntryTime": signal_row.date_time,
"EntryPrice": entry_price,
"ExitTime": exit_time,
"ExitPrice": exit_price,
"Shares": shares,
"RiskAssumed": risk_assumed,
"ProfitLoss": profit_loss,
"ExitReason": exit_reason,
"AccountValueAfter": account_value,
}
)
# 记录净值
equity_history.append(account_value)
trade_id += 1
# 生成净值曲线
self.equity_curve = pd.Series(
equity_history,
index=pd.date_range(
start=self.data["Date"].iloc[0], periods=len(equity_history), freq="D"
),
)
# 输出回测结果
trades_df = pd.DataFrame(self.trades)
total_return = (
(account_value - self.initial_capital) / self.initial_capital * 100
)
win_rate = (
(trades_df["ProfitLoss"] > 0).sum() / len(trades_df) * 100
if len(trades_df) > 0
else 0
)
logger.info("\n" + "=" * 50)
logger.info("ORB策略回测结果")
logger.info("=" * 50)
logger.info(f"初始资金:${self.initial_capital:,.2f}")
logger.info(f"最终资金:${account_value:,.2f}")
logger.info(f"总收益率:{total_return:.2f}%")
logger.info(f"总交易次数:{len(trades_df)}")
logger.info(f"胜率:{win_rate:.2f}%")
if len(trades_df) > 0:
logger.info(f"平均每笔盈亏:${trades_df['ProfitLoss'].mean():.2f}")
logger.info(f"最大单笔盈利:${trades_df['ProfitLoss'].max():.2f}")
logger.info(f"最大单笔亏损:${trades_df['ProfitLoss'].min():.2f}")
def plot_equity_curve(self):
"""
绘制账户净值曲线
"""
logger.info("开始绘制账户净值曲线")
if self.equity_curve is None:
raise ValueError("请先调用backtest进行回测")
# seaborn风格设置
sns.set_theme(style="whitegrid")
# plt.rcParams['font.family'] = "SimHei"
plt.rcParams["font.sans-serif"] = ["SimHei"] # 也可直接用字体名
plt.rcParams["font.size"] = 11 # 设置字体大小
plt.rcParams["axes.unicode_minus"] = False # 解决负号显示问题
plt.figure(figsize=(12, 6))
plt.plot(
self.equity_curve.index,
self.equity_curve.values,
label="ORB策略净值",
color="blue",
)
plt.axhline(
y=self.initial_capital, color="red", linestyle="--", label="初始资金"
)
plt.title("ORB策略账户净值曲线", fontsize=14)
plt.xlabel("日期", fontsize=12)
plt.ylabel("账户价值(美元)", fontsize=12)
plt.legend()
plt.grid(True, alpha=0.3)
plt.show()
# ------------------- 策略示例:回测ORB策略(使用数据库中的ETH-USDT 5分钟数据) -------------------
if __name__ == "__main__":
# 初始化ORB策略
orb_strategy = ORBStrategy(
initial_capital=25000,
max_leverage=4,
risk_per_trade=0.01,
commission_per_share=0.0005,
)
# 1. 从数据库获取ETH-USDT的5分钟日内数据(2025-05-15至2025-08-20)
orb_strategy.fetch_intraday_data(
symbol="ETH-USDT", start_date="2025-05-15", end_date="2025-08-20", interval="5m"
)
# 2. 生成ORB策略信号
orb_strategy.generate_orb_signals()
# 3. 回测策略盈利目标10R
orb_strategy.backtest(profit_target_multiple=10)
# 4. 绘制净值曲线
orb_strategy.plot_equity_curve()
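The calculate_shares rule above caps position size by both the per-trade risk budget and the 4x leverage limit; a short worked example with illustrative numbers (not taken from any backtest):

account_value, entry_price, stop_price = 25_000, 560.45, 560.06
risk_per_trade, max_leverage = 0.01, 4

risk_per_share = abs(entry_price - stop_price)                  # $R ≈ 0.39 per share
shares_risk = account_value * risk_per_trade / risk_per_share   # 250 / 0.39 ≈ 641 shares
shares_leverage = max_leverage * account_value / entry_price    # 100000 / 560.45 ≈ 178 shares
shares = int(min(shares_risk, shares_leverage))                 # leverage cap binds: 178
print(risk_per_share, int(shares_risk), int(shares_leverage), shares)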

View File

@@ -6,7 +6,7 @@ from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
 from market_data_main import MarketDataMain
 from core.wechat import Wechat
 import core.logger as logging
-from config import MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
+from config import OKX_MONITOR_CONFIG, US_STOCK_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
 from datetime import datetime, timedelta
 import pandas as pd
 import os
@@ -16,7 +16,7 @@ logger = logging.logger
 
 class HugeVolumeMain:
-    def __init__(self, threshold: float = 2.0):
+    def __init__(self, threshold: float = 2.0, is_us_stock: bool = False):
         mysql_user = MYSQL_CONFIG.get("user", "xch")
         mysql_password = MYSQL_CONFIG.get("password", "")
         if not mysql_password:
@@ -29,7 +29,7 @@ class HugeVolumeMain:
         self.huge_volume = HugeVolume()
         self.db_market_data = DBMarketData(self.db_url)
         self.db_huge_volume_data = DBHugeVolumeData(self.db_url)
-        self.market_data_main = MarketDataMain()
+        self.market_data_main = MarketDataMain(is_us_stock=is_us_stock)
         self.threshold = threshold
         self.output_folder = "./output/huge_volume_statistics/"
@@ -41,9 +41,14 @@ class HugeVolumeMain:
         for symbol in self.market_data_main.symbols:
             for bar in self.market_data_main.bars:
                 if start is None:
-                    start = MONITOR_CONFIG.get("volume_monitor", {}).get(
-                        "initial_date", "2025-05-15 00:00:00"
-                    )
+                    if self.is_us_stock:
+                        start = US_STOCK_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                            "initial_date", "2015-08-30 00:00:00"
+                        )
+                    else:
+                        start = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                            "initial_date", "2025-05-15 00:00:00"
+                        )
                 data = self.detect_volume_spike(
                     symbol,
                     bar,
@@ -68,9 +73,15 @@ class HugeVolumeMain:
         is_update: bool = False,
     ):
         if start is None:
-            start = MONITOR_CONFIG.get("volume_monitor", {}).get(
-                "initial_date", "2025-05-01 00:00:00"
-            )
+            if self.is_us_stock:
+                start = US_STOCK_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                    "initial_date", "2015-08-31 00:00:00"
+                )
+            else:
+                start = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                    "initial_date", "2025-05-01 00:00:00"
+                )
         if end is None:
             end = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         logger.info(
@@ -169,7 +180,7 @@ class HugeVolumeMain:
                 logger.info(f"此次更新巨量交易数据为空")
         except Exception as e:
             logger.error(
-                f"更新巨量交易数据失败: {symbol} {bar} 窗口大小: {window_size} {earliest_date_time} {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}: {e}"
+                f"更新巨量交易数据失败: {symbol} {bar} 窗口大小: {window_size} {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}: {e}"
             )
 
     def get_seconds_by_bar(self, bar: str):
@@ -226,7 +237,7 @@ class HugeVolumeMain:
         periods: list = [3, 5],
     ):
         if start is None:
-            start = MONITOR_CONFIG.get("volume_monitor", {}).get(
+            start = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
                 "initial_date", "2025-05-01 00:00:00"
             )
         if end is None:
@@ -286,7 +297,7 @@ class HugeVolumeMain:
     def send_huge_volume_data_to_wechat(self, start: str = None, end: str = None):
         if start is None:
-            start = MONITOR_CONFIG.get("volume_monitor", {}).get(
+            start = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
                 "initial_date", "2025-05-01 00:00:00"
             )
         if end is None:
@@ -395,7 +406,7 @@ class HugeVolumeMain:
         output_excel: bool = False,
     ):
         if start is None:
-            start = MONITOR_CONFIG.get("volume_monitor", {}).get(
+            start = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
                 "initial_date", "2025-05-01 00:00:00"
            )
         if end is None:
@@ -464,7 +475,7 @@ def batch_initial_detect_volume_spike(threshold: float = 2.0):
     ):
         window_sizes = [50, 80, 100, 120]
     huge_volume_main = HugeVolumeMain(threshold)
-    start_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
+    start_date = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
         "initial_date", "2025-05-15 00:00:00"
     )
     for window_size in window_sizes:
@@ -474,7 +485,7 @@ def batch_initial_detect_volume_spike(threshold: float = 2.0):
         )
 
-def batch_update_volume_spike(threshold: float = 2.0):
+def batch_update_volume_spike(threshold: float = 2.0, is_us_stock: bool = False):
     window_sizes = WINDOW_SIZE.get("window_sizes", None)
     if (
         window_sizes is None
@@ -482,7 +493,7 @@ def batch_update_volume_spike(threshold: float = 2.0):
         or len(window_sizes) == 0
     ):
         window_sizes = [50, 80, 100, 120]
-    huge_volume_main = HugeVolumeMain(threshold)
+    huge_volume_main = HugeVolumeMain(threshold, is_us_stock)
     for window_size in window_sizes:
         huge_volume_main.batch_update_volume_spike(window_size=window_size)
@@ -501,7 +512,7 @@ def test_send_huge_volume_data_to_wechat():
 
 if __name__ == "__main__":
     # test_send_huge_volume_data_to_wechat()
     # batch_initial_detect_volume_spike(threshold=2.0)
-    batch_update_volume_spike(threshold=2.0)
+    batch_update_volume_spike(threshold=2.0, is_us_stock=True)
     # huge_volume_main = HugeVolumeMain(threshold=2.0)
     # huge_volume_main.batch_next_periods_rise_or_fall(output_excel=True)
     # data_file_path = "./output/huge_volume_statistics/next_periods_rise_or_fall_stat_20250731200304.xlsx"
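A hypothetical driver for the new US-stock path through this module, assuming HugeVolumeMain also stores the flag as self.is_us_stock (that assignment is not visible in this hunk) and that the underlying market data has already been backfilled:

from huge_volume_main import HugeVolumeMain
from config import WINDOW_SIZE

main = HugeVolumeMain(threshold=2.0, is_us_stock=True)
for window_size in WINDOW_SIZE.get("window_sizes", [50, 80, 100, 120]):
    # per-window spike detection over the US_STOCK_MONITOR_CONFIG watchlist
    main.batch_update_volume_spike(window_size=window_size)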

View File

@ -0,0 +1,60 @@
import requests
import core.logger as logging
from datetime import datetime, timedelta
from futu import KLType
logger = logging.logger
def main():
# 配置参数
symbol = "QQQ"
start_date = "2025-08-01 00:00:00"
end_date = "2025-08-20 00:00:00"
interval = "5m"
segment_days = 5 # 减少每段天数,降低单次请求的数据量
print(f"开始下载 {symbol} 数据")
print(f"时间范围: {start_date}{end_date}")
print(f"数据间隔: {interval}")
print(f"分段天数: {segment_days}")
print("-" * 50)
try:
market_data_from_futu = MarketDataFromAlphaVantage(
symbol=symbol,
start_date=start_date,
end_date=end_date,
interval=interval,
segment_days=segment_days,
)
# 下载数据
market_data_from_futu.download_all()
# 处理数据
processed_data = market_data_from_futu.process_data()
if not processed_data.empty:
logger.info(f"成功下载 {len(processed_data)} 条数据")
# 保存数据
filename = f"{symbol}_{interval}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv"
market_data_from_futu.save_to_csv(filename)
# 显示数据统计
market_data_from_futu.show_head()
market_data_from_futu.show_stats()
else:
logger.warning("未获取到任何数据")
except Exception as e:
logger.error(f"下载过程中发生错误: {e}")
print(f"错误详情: {e}")
if __name__ == "__main__":
main()

View File

@@ -16,7 +16,8 @@ from config import (
     SECRET_KEY,
     PASSPHRASE,
     SANDBOX,
-    MONITOR_CONFIG,
+    OKX_MONITOR_CONFIG,
+    US_STOCK_MONITOR_CONFIG,
     MYSQL_CONFIG,
     BAR_THRESHOLD,
 )
@@ -25,22 +26,34 @@ logger = logging.logger
 
 class MarketDataMain:
-    def __init__(self):
+    def __init__(self, is_us_stock: bool = False):
         self.market_data = MarketData(
             api_key=API_KEY,
             secret_key=SECRET_KEY,
             passphrase=PASSPHRASE,
             sandbox=SANDBOX,
+            is_us_stock=is_us_stock,
         )
-        self.symbols = MONITOR_CONFIG.get("volume_monitor", {}).get(
-            "symbols", ["XCH-USDT"]
-        )
-        self.bars = MONITOR_CONFIG.get("volume_monitor", {}).get(
-            "bars", ["5m", "15m", "1H", "1D"]
-        )
-        self.initial_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
-            "initial_date", "2025-07-01 00:00:00"
-        )
+        if is_us_stock:
+            self.symbols = US_STOCK_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "symbols", ["QQQ"]
+            )
+            self.bars = US_STOCK_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "bars", ["5m"]
+            )
+            self.initial_date = US_STOCK_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "initial_date", "2015-08-30 00:00:00"
+            )
+        else:
+            self.symbols = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "symbols", ["XCH-USDT"]
+            )
+            self.bars = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "bars", ["5m", "15m", "1H", "1D"]
+            )
+            self.initial_date = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
+                "initial_date", "2025-07-01 00:00:00"
+            )
         mysql_user = MYSQL_CONFIG.get("user", "xch")
         mysql_password = MYSQL_CONFIG.get("password", "")
         if not mysql_password:
@@ -52,6 +65,7 @@ class MarketDataMain:
         self.db_url = f"mysql+pymysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_database}"
         self.db_market_data = DBMarketData(self.db_url)
         self.trade_data_main = TradeDataMain()
+        self.is_us_stock = is_us_stock
 
     def initial_data(self):
         """
@@ -96,7 +110,15 @@ class MarketDataMain:
         # 获取数据直到end_time_ts
         threshold = None
         if bar in ["5m", "15m", "30m"]:
-            threshold = 86400000
+            if self.is_us_stock:
+                if bar == "5m":
+                    threshold = 86400000 * 4
+                elif bar == "15m":
+                    threshold = 86400000 * 4 * 3
+                elif bar == "30m":
+                    threshold = 86400000 * 4 * 6
+            else:
+                threshold = 86400000
         elif bar in ["1H", "4H"]:
             threshold = 432000000
         elif bar == "1D":
@@ -105,7 +127,7 @@ class MarketDataMain:
         get_data = False
         min_start_time_ts = start_time_ts
         while start_time_ts < end_time_ts:
-            current_start_time_ts = end_time_ts - threshold
+            current_start_time_ts = int(end_time_ts - threshold)
             if current_start_time_ts < start_time_ts:
                 current_start_time_ts = start_time_ts
             start_date_time = timestamp_to_datetime(current_start_time_ts)
@@ -113,46 +135,21 @@ class MarketDataMain:
             logger.info(
                 f"获取行情数据: {symbol} {bar},从{start_date_time}到{end_date_time}"
             )
+            if self.is_us_stock:
+                limit = 1000
+            else:
+                limit = 100
             data = self.market_data.get_historical_kline_data(
                 symbol=symbol,
                 start=current_start_time_ts,
                 bar=bar,
                 end_time=end_time_ts,
+                limit=limit,
             )
             if data is not None and len(data) > 0:
                 data["buy_sz"] = -1
                 data["sell_sz"] = -1
-                # 根据交易数据设置buy_sz和sell_sz
-                # 比特币的数据获取过慢,暂时不获取交易数据
-                # if not symbol.endswith("-SWAP"):
-                #     # trade_data的end_time需要比market_data的end_time大一个周期
-                #     trade_data = self.trade_data_main.get_trade_data(
-                #         symbol=symbol, start_time=current_start_time_ts, end_time=end_time_ts
-                #     )
-                #     for index, row in data.iterrows():
-                #         try:
-                #             current_from_time = int(row["timestamp"])
-                #             if index == len(data) - 1:
-                #                 current_to_time = current_from_time + BAR_THRESHOLD[bar]
-                #             else:
-                #                 current_to_time = int(data.iloc[index + 1]["timestamp"])
-                #             current_trade_data = trade_data[
-                #                 (trade_data["ts"] >= current_from_time)
-                #                 & (trade_data["ts"] <= current_to_time)
-                #             ]
-                #             if current_trade_data is not None and len(current_trade_data) > 0:
-                #                 current_buy_sz = current_trade_data[
-                #                     current_trade_data["side"] == "buy"
-                #                 ]["sz"].sum()
-                #                 current_sell_sz = current_trade_data[
-                #                     current_trade_data["side"] == "sell"
-                #                 ]["sz"].sum()
-                #                 data.loc[index, "buy_sz"] = current_buy_sz
-                #                 data.loc[index, "sell_sz"] = current_sell_sz
-                #         except Exception as e:
-                #             logger.error(f"设置buy_sz和sell_sz失败: {e}")
-                #             continue
             if data is not None and len(data) > 0:
                 data = data[
                     [
@@ -160,6 +157,7 @@ class MarketDataMain:
                         "bar",
                         "timestamp",
                         "date_time",
+                        "date_time_us",
                         "open",
                         "high",
                         "low",
@@ -174,13 +172,22 @@ class MarketDataMain:
                 ]
                 data = self.add_new_columns(data)
                 self.db_market_data.insert_data_to_mysql(data)
-                current_min_start_time_ts = data["timestamp"].min()
+                current_min_start_time_ts = int(data["timestamp"].min())
                 if current_min_start_time_ts < min_start_time_ts:
                     min_start_time_ts = current_min_start_time_ts
                 get_data = True
+            else:
+                logger.warning(f"获取行情数据为空: {symbol} {bar},从{start_date_time}到{end_date_time}")
+                break
             if current_start_time_ts == start_time_ts:
                 break
-            end_time_ts = current_start_time_ts
+            if current_min_start_time_ts < current_start_time_ts:
+                end_time_ts = current_min_start_time_ts
+            else:
+                end_time_ts = current_start_time_ts
         if min_start_time_ts is not None and get_data:
             # 补充技术指标数据
             # 获得min_start_time_ts之前30条数据
@@ -374,7 +381,7 @@ class MarketDataMain:
         批量计算技术指标
         """
         logger.info("开始批量计算技术指标")
-        start_date_time = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        start_date_time = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "initial_date", "2025-05-15 00:00:00"
         )
         start_timestamp = transform_date_time_to_timestamp(start_date_time)
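The loop above pages backwards from end_time_ts in windows of `threshold` milliseconds (for example four days of wall-clock time for US 5m bars), moving the right edge of each request to the oldest timestamp actually returned. A compact restatement with the fetch call abstracted away (`fetch` is a stand-in, not a repository function):

def backfill(start_time_ts: int, end_time_ts: int, threshold: int, fetch):
    """fetch(start_ms, end_ms) -> list of bar timestamps in ms, [] when empty."""
    while start_time_ts < end_time_ts:
        current_start = max(int(end_time_ts - threshold), start_time_ts)
        bars = fetch(current_start, end_time_ts)
        if not bars:
            break                              # empty page: stop, as in the diff
        oldest = min(bars)
        if current_start == start_time_ts:
            break                              # reached the requested left edge
        # next page ends where this page's data (or window) began
        end_time_ts = oldest if oldest < current_start else current_start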

View File

@@ -4,7 +4,7 @@ from huge_volume_main import HugeVolumeMain
 from core.biz.market_monitor import create_metrics_report
 from core.db.db_market_monitor import DBMarketMonitor
 from core.wechat import Wechat
-from config import MONITOR_CONFIG, MYSQL_CONFIG
+from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG
 from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
 import core.logger as logging
 
@@ -21,9 +21,9 @@ class MarketMonitorMain:
         self.market_data_main = MarketDataMain()
         self.huge_volume_main = HugeVolumeMain()
         self.wechat = Wechat()
-        self.monitor_config = MONITOR_CONFIG
+        self.monitor_config = OKX_MONITOR_CONFIG
         self.window_size = 100
-        self.start_date = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        self.start_date = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "initial_date", "2025-05-01 00:00:00"
         )
         self.latest_record_file_path = "./output/record/latest_record.json"

orb_trade_main.py (new file, 28 lines)
View File

@ -0,0 +1,28 @@
from core.trade.orb_trade import ORBStrategy
def main():
# 初始化ORB策略
orb_strategy = ORBStrategy(
initial_capital=25000,
max_leverage=4,
risk_per_trade=0.01,
commission_per_share=0.0005,
)
# 1. 从数据库获取ETH-USDT的5分钟日内数据(2025-05-15至2025-08-20)
orb_strategy.fetch_intraday_data(
symbol="ETH-USDT", start_date="2025-05-15", end_date="2025-08-20", interval="5m"
)
# 2. 生成ORB策略信号
orb_strategy.generate_orb_signals()
# 3. 回测策略盈利目标10R
orb_strategy.backtest(profit_target_multiple=10)
# 4. 绘制净值曲线
orb_strategy.plot_equity_curve()
if __name__ == "__main__":
main()

View File

@@ -1,12 +1,19 @@
-select * from crypto_market_monitor
-order by date_time desc;
+select * from crypto_market_data
+where symbol = "TQQQ" and bar="5m"
+order by date_time_us DESC;
+
+select * from crypto_huge_volume
+where symbol = "QQQ" and bar="5m"
+order by date_time;
 
 select symbol, bar, date_time, close,
 pct_chg, ma_cross, ma5, ma10, ma20, ma30, dif, dea, macd, kdj_k, kdj_d, kdj_k, kdj_pattern,
 rsi_14, rsi_signal,
 boll_upper, boll_middle, boll_lower, boll_pattern, boll_signal
 from crypto_market_data
-WHERE close > boll_upper
+WHERE ma_cross in ("20下穿10", "10下穿5")
 order by timestamp desc;
 
 select symbol, bar, window_size, date_time, close,
@@ -14,8 +21,8 @@ volume, volume_ratio, huge_volume,
 close_20_low, low_20_low, close_10_low, low_10_low,
 close_80_high, close_90_high, high_80_high, high_90_high
 from crypto_huge_volume
-WHERE symbol='XCH-USDT' and bar='5m' and window_size=120# and low_10_low=1
-order by timestamp desc;
+WHERE symbol='BTC-USDT' and bar='5m' and window_size=120# and low_10_low=1
+order by timestamp;
 
 select * from crypto_huge_volume
 WHERE symbol='BTC-USDT' and bar='5m' #and date_time > '2025-08-04 15:00:00'

View File

@@ -7,6 +7,7 @@ CREATE TABLE IF NOT EXISTS crypto_market_data (
     bar VARCHAR(20) NOT NULL,
     timestamp BIGINT NOT NULL,
     date_time VARCHAR(50) NOT NULL,
+    date_time_us VARCHAR(50) NULL COMMENT '美国时间格式的日期时间',
     open DECIMAL(20,10) NOT NULL,
     high DECIMAL(20,10) NOT NULL,
     low DECIMAL(20,10) NOT NULL,
@@ -60,3 +61,6 @@ CREATE TABLE IF NOT EXISTS crypto_market_data (
 --ma_cross字段长度为150
 ALTER TABLE crypto_market_data MODIFY COLUMN ma_cross VARCHAR(150) DEFAULT NULL COMMENT '均线交叉信号';
+
+--date_time_us字段
+ALTER TABLE crypto_market_data ADD COLUMN date_time_us VARCHAR(50) NULL COMMENT '美国时间格式的日期时间' AFTER date_time;

View File

@@ -10,7 +10,7 @@ import matplotlib.pyplot as plt
 from core.db.db_market_data import DBMarketData
 from core.biz.metrics_calculation import MetricsCalculation
 import logging
-from config import MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
+from config import OKX_MONITOR_CONFIG, MYSQL_CONFIG, WINDOW_SIZE
 
 # plt支持中文
 plt.rcParams['font.family'] = ['SimHei']

View File

@@ -9,7 +9,7 @@ from config import (
     SECRET_KEY,
     PASSPHRASE,
     SANDBOX,
-    MONITOR_CONFIG,
+    OKX_MONITOR_CONFIG,
     MYSQL_CONFIG,
 )
@@ -48,7 +48,7 @@ class TradeDataMain:
         # 处理start参数
         if start_time is None:
             # 默认两个月前
-            start_time_str = MONITOR_CONFIG.get("volume_monitor", {}).get("initial_date", "2025-05-01 00:00:00")
+            start_time_str = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get("initial_date", "2025-05-01 00:00:00")
             start_time = transform_date_time_to_timestamp(start_time_str)
         else:
             start_time = transform_date_time_to_timestamp(start_time)

View File

@@ -17,7 +17,7 @@ from config import (
     SECRET_KEY,
     PASSPHRASE,
     SANDBOX,
-    MONITOR_CONFIG,
+    OKX_MONITOR_CONFIG,
     MYSQL_CONFIG,
     BAR_THRESHOLD,
 )

View File

@@ -12,7 +12,7 @@ from openpyxl.drawing.image import Image
 import openpyxl
 from openpyxl.styles import Font
 from PIL import Image as PILImage
-from config import MONITOR_CONFIG
+from config import OKX_MONITOR_CONFIG
 from core.trade.mean_reversion_sandbox import MeanReversionSandbox
 from core.utils import timestamp_to_datetime, transform_date_time_to_timestamp
 
@@ -24,14 +24,14 @@ logger = logging.logger
 class MeanReversionSandboxMain:
     def __init__(self, start_date: str, end_date: str, window_size: int, only_5m: bool = False, solution_list: list = None):
-        self.symbols = MONITOR_CONFIG.get("volume_monitor", {}).get(
+        self.symbols = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
             "symbols", ["XCH-USDT"]
         )
         self.only_5m = only_5m
         if only_5m:
             self.bars = ["5m"]
         else:
-            self.bars = MONITOR_CONFIG.get("volume_monitor", {}).get(
+            self.bars = OKX_MONITOR_CONFIG.get("volume_monitor", {}).get(
                 "bars", ["5m", "15m", "30m", "1H"]
             )
         if solution_list is None: