Add optimizer batch workflow
This commit is contained in:
115
batch_optimizer.py
Normal file
115
batch_optimizer.py
Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import concurrent.futures
|
||||
from optimizer import run_optimized_backtest
|
||||
import optimizers
|
||||
|
||||
|
||||
def load_symbols_from_file(path):
    """Read ticker symbols from a text file, one symbol per line.

    Blank lines are skipped; each symbol is stripped of surrounding
    whitespace and upper-cased.
    """
    symbols = []
    with open(path, 'r', encoding='utf-8') as fh:
        for raw_line in fh:
            ticker = raw_line.strip()
            if ticker:
                symbols.append(ticker.upper())
    return symbols
|
||||
|
||||
|
||||
def _load_symbols_from_json(path):
    """Load tickers from a JSON file.

    Accepts either a JSON array of strings, or an object holding such an
    array under a 'tickers' or 'symbols' key. Prints a message and exits
    with status 1 when the file is missing or malformed.
    """
    if not os.path.exists(path):
        print(f"JSON file not found: {path}")
        raise SystemExit(1)
    with open(path, 'r', encoding='utf-8') as f:
        data = json.load(f)
    if isinstance(data, list):
        return [s.strip().upper() for s in data if isinstance(s, str) and s.strip()]
    if isinstance(data, dict):
        # support {'tickers': [...]} or {'symbols': [...]} keys
        arr = data.get('tickers') or data.get('symbols')
        if isinstance(arr, list):
            return [s.strip().upper() for s in arr if isinstance(s, str) and s.strip()]
        print(f"JSON file does not contain a list under 'tickers' or 'symbols': {path}")
        raise SystemExit(1)
    print(f"JSON root must be an array or object: {path}")
    raise SystemExit(1)


def _resolve_symbols(args, parser):
    """Return the upper-cased ticker list from whichever CLI source was used.

    Sources are checked in the same priority order as before:
    --tickers, --file, --json, --symbols. Prints help and exits with
    status 1 when no source was provided.
    """
    if args.tickers:
        return [s.strip().upper() for s in args.tickers.split(',') if s.strip()]
    if args.file:
        if not os.path.exists(args.file):
            print(f"Ticker file not found: {args.file}")
            raise SystemExit(1)
        return load_symbols_from_file(args.file)
    if args.json:
        return _load_symbols_from_json(args.json)
    if args.symbols:
        return [s.upper() for s in args.symbols]
    parser.print_help()
    raise SystemExit(1)


def _collect_result(res, opt_name, results):
    """Tag a dict result with its optimizer name and append it to *results*.

    Non-dict results are ignored, matching the original behavior.
    """
    if isinstance(res, dict):
        res['optimizer'] = opt_name
        results.append(res)


def _run_jobs(jobs, workers):
    """Run each (symbol, optimizer_name) job and return the result dicts.

    Uses a process pool when workers > 1, otherwise runs serially.
    Per-job failures are reported and skipped rather than aborting the run.
    """
    results = []
    if workers > 1:
        print(f"Running optimizations in parallel with {workers} workers...")
        with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor:
            # Use module-level runner to ensure picklable callable
            future_to_job = {
                executor.submit(optimizers.run_optimizer, sym, opt_name): (sym, opt_name)
                for (sym, opt_name) in jobs
            }
            for fut in concurrent.futures.as_completed(future_to_job):
                sym, opt_name = future_to_job[fut]
                try:
                    _collect_result(fut.result(), opt_name, results)
                except Exception as e:
                    print(f"Error optimizing {sym} with {opt_name}: {e}")
    else:
        for sym, opt_name in jobs:
            try:
                print(f"\n--- Optimizing {sym} using {opt_name} ---")
                _collect_result(optimizers.run_optimizer(sym, opt_name), opt_name, results)
            except Exception as e:
                print(f"Error optimizing {sym} with {opt_name}: {e}")
    return results


def _save_summary(results):
    """Write the collected results to a timestamped Excel file under output/.

    Best-effort: any failure (e.g. pandas/openpyxl unavailable) is reported
    rather than raised. Does nothing when *results* is empty.
    """
    if not results:
        return
    try:
        import pandas as pd
        from datetime import datetime

        ts = datetime.now().strftime('%Y%m%d_%H%M%S')
        out_name = os.path.join('output', f'optimized_summary_{ts}.xlsx')
        df = pd.DataFrame(results)
        os.makedirs('output', exist_ok=True)
        df.to_excel(out_name, index=False)
        print(f"\n✅ Optimized summary saved: {out_name}")
    except Exception as e:
        print(f"Failed to write Excel summary: {e}")


def main():
    """Parse CLI arguments, run optimizers for each ticker, save a summary.

    Tickers may come from --tickers (comma-separated), --file (one per
    line), --json (array or object with a 'tickers'/'symbols' list), or
    --symbols (space-separated). Jobs are the cross product of tickers and
    optimizer names and may run in parallel processes via --workers.
    """
    parser = argparse.ArgumentParser(description='Run optimizer for multiple tickers')
    parser.add_argument('--tickers', type=str, help='Comma-separated list of tickers, e.g. AAPL,MSFT')
    parser.add_argument('--file', type=str, help='Path to a file containing one ticker per line')
    parser.add_argument('--symbols', nargs='*', help='Space-separated tickers')
    parser.add_argument('--json', type=str, help='Path to a JSON file with either an array of tickers or an object with a "tickers" key')
    parser.add_argument('--workers', type=int, default=1, help='Number of parallel workers (processes) to use')
    parser.add_argument('--optimizers', type=str, help='Comma-separated optimizer names from optimizers.py, e.g. RsiOptimizer')
    args = parser.parse_args()

    symbols = _resolve_symbols(args, parser)

    # Resolve optimizer names; default to the RSI optimizer.
    if args.optimizers:
        optimizer_names = [s.strip() for s in args.optimizers.split(',') if s.strip()]
    else:
        optimizer_names = ['RsiOptimizer']

    # Build the (symbol, optimizer_name) job matrix.
    jobs = [(sym, opt_name) for sym in symbols for opt_name in optimizer_names]

    # argparse already yields an int with default 1; just clamp to >= 1.
    workers = max(1, args.workers)
    results = _run_jobs(jobs, workers)

    _save_summary(results)
||||
# Script entry point: run the batch workflow only when executed directly,
# not when imported as a module (e.g. by the process-pool workers).
if __name__ == '__main__':
    main()
Reference in New Issue
Block a user