feat: rewrite strategy to 10m vol-lead with undying signal + watch alert
- core/strategy.py: full rewrite to Volume Lead strategy
  - 10m candle direct detection (no 40m resampling)
  - F&G 3-tier vol threshold: <=40 -> 6x, 41-50 -> 5x, >50 -> blocked
  - Undying signal: price drop does not cancel signal (sig_p fixed)
  - Vol refresh: stronger vol_r updates signal price and timer
  - Watch alert: 4x-6x approaching threshold notifies via Telegram
  - WATCH_VOL_THRESH=4.0, WATCH_COOLDOWN_MIN=30, WATCH_VOL_JUMP=0.5
- daemon/runner.py: remove FNG_MIN_ENTRY block and Bear regime block
  - Only FNG_MAX_ENTRY(>50) blocks scan (greed/extreme greed)
  - Fast-poll loop cleaned of regime check
- core/notify.py: add notify_watch() for near-signal Telegram alerts
  - Shows vol_r, distance to threshold, price, quiet pct
- tests/: add 1y data collection and simulation scripts
  - collect_1y_data.py, refresh_cache.py
  - sim_10m_vol.py, sim_current.py, sim_regime_1y.py
  - sim_regime_sweep.py, sim_vol_override.py

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
81
tests/refresh_cache.py
Normal file
81
tests/refresh_cache.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""10-minute candle cache refresh script — re-collects the latest 45 days of data from the Upbit API."""

import os, sys, pickle, time
from pathlib import Path
from datetime import datetime, timedelta

import pyupbit
from dotenv import load_dotenv

# Load API credentials from the project-root .env before touching any core modules.
load_dotenv(dotenv_path=Path(__file__).parent.parent / ".env")
# Make the project root importable (core.market is imported lazily inside main()).
sys.path.insert(0, str(Path(__file__).parent.parent))

# Pickled OHLCV cache consumed by the simulation scripts (dict with a "10m" key).
CACHE_FILE = Path(__file__).parent.parent / "data" / "sim10m_cache.pkl"
# Pickled list of top tickers, refreshed alongside the candle cache.
TOP30_FILE = Path(__file__).parent.parent / "data" / "top30_tickers.pkl"
SIM_DAYS = 45  # days of 10m history to fetch per ticker
TOP_N = 20     # number of tickers to refresh
|
||||
|
||||
|
||||
def fetch_10m(ticker: str, days: int) -> "pd.DataFrame | None":
    """Fetch roughly `days` days of 10-minute OHLCV candles for `ticker` from Upbit.

    Pages backwards 200 candles at a time (the API maximum per request) until
    the requested window is covered.  Transient API errors are retried a few
    times with a short back-off instead of aborting the whole download on the
    first hiccup (the original sleep-then-break never retried).

    Returns a time-sorted, de-duplicated DataFrame restricted to the last
    `days` days, or None when nothing could be fetched.
    """
    import pandas as pd  # local import: keep module import cheap for callers that don't fetch

    target_start = datetime.now() - timedelta(days=days)
    all_dfs = []
    to = None            # exclusive upper bound for the next (older) page
    prev_oldest = None   # guards against a cursor that stops advancing
    retries = 0
    while True:
        kwargs = dict(ticker=ticker, interval="minute10", count=200)
        if to:
            kwargs["to"] = to.strftime("%Y-%m-%d %H:%M:%S")
        try:
            df = pyupbit.get_ohlcv(**kwargs)
        except Exception:
            # Transient API/network error: back off briefly and retry up to
            # 3 times; give up (keeping whatever was already fetched) after that.
            retries += 1
            if retries > 3:
                break
            time.sleep(0.5)
            continue
        retries = 0
        if df is None or df.empty:
            break
        all_dfs.append(df)
        oldest = df.index[0]
        # If the API returned the same (or a newer) oldest timestamp again,
        # the cursor is stuck — stop rather than loop forever.
        if prev_oldest is not None and oldest >= prev_oldest:
            break
        prev_oldest = oldest
        if oldest <= target_start:
            break
        to = oldest
        time.sleep(0.12)  # stay under Upbit's rate limit between pages
    if not all_dfs:
        return None
    combined = pd.concat(all_dfs).sort_index()
    # Adjacent pages overlap at the boundary candle; keep the latest copy.
    combined = combined[~combined.index.duplicated(keep="last")]
    return combined[combined.index >= target_start]
|
||||
|
||||
|
||||
def main():
    """Refresh the 10m candle cache for the current top-N tickers and save it.

    Side effects: network calls to Upbit, and (re)writes CACHE_FILE and
    TOP30_FILE under the project's data/ directory.
    """
    # Fetch the current Top-20 tickers by traded volume.
    from core.market import get_top_tickers
    print("Top20 종목 조회...")
    tickers = get_top_tickers()[:TOP_N]
    print(f" {tickers}\n")

    data = {"10m": {}}
    for i, ticker in enumerate(tickers, 1):
        print(f"\r {i:>2}/{len(tickers)} {ticker} ", end="", flush=True)
        df = fetch_10m(ticker, SIM_DAYS)
        # Skip tickers with too little history to be useful in simulation.
        if df is not None and len(df) > 100:
            data["10m"][ticker] = df
        time.sleep(0.15)  # rate-limit between tickers

    print(f"\n\n종목: {len(data['10m'])}개")
    if data["10m"]:
        sample = next(iter(data["10m"].values()))
        print(f"기간: {sample.index[0].strftime('%Y-%m-%d')} ~ {sample.index[-1].strftime('%Y-%m-%d')}")
        print(f"레코드: {len(sample)}개")

    # Save the candle cache.  Ensure data/ exists and close the handle
    # deterministically (the original leaked the file object from open()).
    CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    with open(CACHE_FILE, "wb") as fh:
        pickle.dump(data, fh)
    print(f"\n캐시 저장: {CACHE_FILE}")

    # Refresh the stored ticker list alongside the cache.
    with open(TOP30_FILE, "wb") as fh:
        pickle.dump(tickers, fh)
    print(f"종목 저장: {TOP30_FILE}")
|
||||
|
||||
|
||||
# Allow running as a standalone script: python tests/refresh_cache.py
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user