Spaces (status: Runtime error)

Commit 5913c8f · 0 parent(s)
Initial commit with clean codebase
- .space +4 -0
- Docker +15 -0
- Dockerfile +13 -0
- __init__.py +1 -0
- api-2.py +66 -0
- api.py +5 -0
- app.py +100 -0
- auto_chart_generator.py +21 -0
- chart_maker.py +69 -0
- indicator_analyzer.py +66 -0
- indicator_chart_generator.py +22 -0
- main.py +85 -0
- pattern_analyzer.py +69 -0
- pattern_logic.py +103 -0
- requirements.txt +12 -0
- run.py +6 -0
- test_charts.py +34 -0
- test_data.py +23 -0
- test_imports.py +29 -0
- test_indicators.py +28 -0
- test_model.py +34 -0
- test_patterns.py +32 -0
- train.py +29 -0
- trigger_handler.py +21 -0
.space
ADDED
@@ -0,0 +1,4 @@
+title: Pattern Analysis API
+sdk: docker
+port: 7865
+entrypoint: python run.py
Docker
ADDED
@@ -0,0 +1,15 @@
+FROM python:3.9-slim
+
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY . /code
+
+EXPOSE 7865
+
+ENV PYTHONPATH=/code
+
+CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7865"]
Dockerfile
ADDED
@@ -0,0 +1,13 @@
+FROM python:3.9-slim
+
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY . /code
+
+EXPOSE 7865
+
+CMD ["python", "run.py"]
__init__.py
ADDED
@@ -0,0 +1 @@
+from app import app
api-2.py
ADDED
@@ -0,0 +1,66 @@
+from fastapi import FastAPI, HTTPException
+from fastapi.middleware.cors import CORSMiddleware
+import uvicorn
+import requests
+import pandas as pd
+import numpy as np
+from pattern_analyzer import PatternAnalyzer
+from indicator_analyzer import IndicatorAnalyzer
+from chart_maker import ChartMaker
+from test_data import test_data
+
+app = FastAPI()
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+@app.get("/analyze/{symbol}")
+async def analyze_patterns(symbol: str, timeframe: str = "1D"):
+    try:
+        # Fetch OHLCV data
+        candle_response = requests.get(f"http://localhost:5000/fetch_candles?symbol={symbol}&timeframe={timeframe}")
+        ohlcv_data = candle_response.json()
+
+        # Create DataFrame with correct column structure
+        df = pd.DataFrame(ohlcv_data)
+        df = df.rename(columns={'time': 'timestamp'})
+
+        # Initialize analyzers
+        pattern_analyzer = PatternAnalyzer()
+        indicator_analyzer = IndicatorAnalyzer()
+        chart_maker = ChartMaker()
+
+        # Get analysis results
+        patterns = pattern_analyzer.analyze_data(df)
+        indicators = indicator_analyzer.analyze_indicators(df)
+        pattern_charts = chart_maker.create_pattern_chart(df, patterns)
+
+        return {
+            "patterns": patterns,
+            "indicators": indicators,
+            "charts": pattern_charts,
+            "ohlcv_data": ohlcv_data
+        }
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+@app.get("/")
+async def root():
+    return {
+        "status": "online",
+        "service": "Pattern Analysis API",
+        "endpoints": {
+            "analyze": "/analyze/{symbol}",
+            "docs": "/docs",
+            "health": "/health"
+        }
+    }
+
+if __name__ == "__main__":
+    uvicorn.run(app, host="127.0.0.1", port=8000)
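api-2.py returns the whole analysis as a single blocking JSON response. A minimal client sketch, assuming the service is running locally on port 8000 as in its __main__ block (the symbol is made up):

import requests

# Hypothetical symbol; anything the candle service at port 5000 understands.
resp = requests.get("http://127.0.0.1:8000/analyze/BTCUSD", params={"timeframe": "1D"})
resp.raise_for_status()
body = resp.json()
print(sorted(body.keys()))  # ['charts', 'indicators', 'ohlcv_data', 'patterns']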
api.py
ADDED
@@ -0,0 +1,5 @@
+from app import app
+import uvicorn
+
+if __name__ == "__main__":
+    uvicorn.run(app, host="0.0.0.0", port=7865)
app.py
ADDED
@@ -0,0 +1,100 @@
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import StreamingResponse
+from fastapi.middleware.cors import CORSMiddleware
+import uvicorn
+import requests
+import aiohttp
+import pandas as pd
+import numpy as np
+import asyncio
+import json
+from datetime import datetime
+from typing import Dict, List, Optional
+import gc
+
+# Import project modules
+from pattern_analyzer import PatternAnalyzer
+from indicator_analyzer import IndicatorAnalyzer
+from chart_maker import ChartMaker
+
+app = FastAPI(title="Pattern Analysis API")
+
+# Add CORS middleware
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Global instances
+pattern_analyzer = PatternAnalyzer()
+indicator_analyzer = IndicatorAnalyzer()
+chart_maker = ChartMaker()
+
+async def analyze_stream(symbol: str, timeframe: str):
+    async def generate():
+        try:
+            yield json.dumps({"status": "fetching_data"}) + "\n"
+
+            response = requests.get(
+                "http://localhost:5000/fetch_candles",
+                params={"symbol": symbol, "timeframe": timeframe}
+            )
+            ohlcv_data = response.json()
+
+            yield json.dumps({"status": "processing_data"}) + "\n"
+
+            df = pd.DataFrame(ohlcv_data)
+            df.index = range(len(df))
+            df['time'] = pd.to_datetime(df['time'], unit='ms')
+
+            required_columns = ['time', 'open', 'high', 'low', 'close', 'volume']
+            for col in required_columns:
+                if col not in df.columns:
+                    df[col] = 0
+
+            yield json.dumps({"status": "analyzing_patterns"}) + "\n"
+            patterns = pattern_analyzer.analyze_data(df)
+
+            yield json.dumps({"status": "calculating_indicators"}) + "\n"
+            indicators = indicator_analyzer.analyze_indicators(df)
+
+            yield json.dumps({"status": "generating_charts"}) + "\n"
+            pattern_charts = chart_maker.create_pattern_chart(df, patterns)
+
+            final_response = {
+                "status": "complete",
+                "symbol": symbol,
+                "timeframe": timeframe,
+                "patterns": patterns,
+                "indicators": indicators,
+                "charts": pattern_charts,
+                "data": ohlcv_data
+            }
+
+            yield json.dumps(final_response) + "\n"
+
+        except Exception as e:
+            yield json.dumps({"status": "error", "detail": str(e)}) + "\n"
+
+    return StreamingResponse(generate(), media_type="application/x-ndjson")
+
+@app.get("/analyze/{symbol}")
+async def analyze_patterns(symbol: str, timeframe: str = "1D"):
+    return await analyze_stream(symbol, timeframe)
+
+@app.get("/")
+def read_root():
+    return {"message": "Pattern Analysis API is running"}
+
+@app.get("/health")
+def health_check():
+    return {"status": "healthy"}
+
+def start():
+    return app
+
+if __name__ == "__main__":
+    uvicorn.run("app:app", host="0.0.0.0", port=7865, reload=True)
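Because /analyze/{symbol} streams newline-delimited JSON, a client can surface the status events as they arrive. A minimal consumer sketch, assuming the Space is reachable on the EXPOSEd port 7865 and using a made-up symbol:

import json
import requests

def follow_analysis(symbol: str, timeframe: str = "1D"):
    url = f"http://localhost:7865/analyze/{symbol}"
    with requests.get(url, params={"timeframe": timeframe}, stream=True) as resp:
        resp.raise_for_status()
        for line in resp.iter_lines():
            if not line:
                continue
            event = json.loads(line)  # one JSON object per line (NDJSON)
            print(event.get("status"))
            if event.get("status") in ("complete", "error"):
                return event

if __name__ == "__main__":
    print(follow_analysis("BTCUSD"))  # hypothetical symbol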
auto_chart_generator.py
ADDED
@@ -0,0 +1,21 @@
+from pattern_analyzer import PatternAnalyzer
+from lightweight_charts import Chart
+
+class AutoChartGenerator:
+    def __init__(self):
+        self.analyzer = PatternAnalyzer()
+
+    def generate_pattern_charts(self, ohlcv_data):
+        patterns = self.analyzer.analyze_data(ohlcv_data)
+
+        charts = []
+        for pattern in patterns:
+            chart = Chart()
+            chart.candlestick(ohlcv_data)
+            chart.draw_pattern(
+                pattern_type=pattern['type'],
+                coordinates=pattern['coordinates']
+            )
+            charts.append(chart)
+
+        return charts
chart_maker.py
ADDED
@@ -0,0 +1,69 @@
+from lightweight_charts import Chart
+import pandas as pd
+import numpy as np
+
+class ChartMaker:
+    def __init__(self):
+        self.patterns = [
+            'channel', 'triangle', 'head_shoulders',
+            'double_top', 'double_bottom', 'wedge',
+            'flag', 'pennant'
+        ]
+
+        self.indicators = [
+            'ema', 'sma', 'rsi', 'macd',
+            'bollinger', 'ichimoku', 'pivot_points'
+        ]
+
+    def create_pattern_chart(self, ohlcv_data, patterns):
+        chart = Chart()
+
+        # Format OHLCV data for charting
+        chart_data = []
+        for index, row in ohlcv_data.iterrows():
+            data_point = {
+                'time': row['time'] if isinstance(row['time'], int) else int(row['time'].timestamp() * 1000),
+                'open': float(row['open']),
+                'high': float(row['high']),
+                'low': float(row['low']),
+                'close': float(row['close']),
+                'volume': float(row['volume'])
+            }
+            chart_data.append(data_point)
+
+        # Create main price series
+        chart.create_series('price', 'Candlestick', chart_data)
+
+        # Add volume series
+        volume_data = [{
+            'time': d['time'],
+            'value': d['volume']
+        } for d in chart_data]
+        chart.create_series('volume', 'Histogram', volume_data)
+
+        # Add patterns as overlays
+        for pattern in patterns:
+            if pattern['type'] in self.patterns and 'coordinates' in pattern:
+                chart.create_series(
+                    f"pattern_{pattern['type']}",
+                    'Line',
+                    pattern['coordinates'],
+                    {
+                        'color': 'rgba(76, 175, 80, 0.5)',
+                        'lineWidth': 2,
+                        'title': f"{pattern['type']} ({pattern.get('confidence', 0):.2f})"
+                    }
+                )
+
+        return chart
+
+    def generate_all_variations(self, ohlcv_data):
+        charts = []
+        for pattern in self.patterns:
+            pattern_chart = self.create_pattern_chart(ohlcv_data, [{'type': pattern}])
+            charts.append({
+                'type': pattern,
+                'chart': pattern_chart
+            })
+        return charts
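The 'time' field above is normalized to millisecond epochs, the same unit app.py assumes when it parses candles with unit='ms'. A quick round-trip check of that convention:

import pandas as pd

ts = pd.to_datetime(1700000000000, unit='ms')        # ms epoch -> Timestamp
assert int(ts.timestamp() * 1000) == 1700000000000   # Timestamp -> ms epoch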
indicator_analyzer.py
ADDED
@@ -0,0 +1,68 @@
+import ollama
+import numpy as np
+import pandas as pd
+from lightweight_charts import Chart
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+class IndicatorAnalyzer:
+    def __init__(self):
+        self.model = AutoModelForCausalLM.from_pretrained("tmm-dev/codellama-pattern-analysis")
+        self.tokenizer = AutoTokenizer.from_pretrained("tmm-dev/codellama-pattern-analysis")
+        # Fix: analyze_indicators() below calls self.client.chat(), but no client was created
+        self.client = ollama.Client()
+
+    def analyze_indicators(self, ohlcv_data):
+        indicator_prompt = f"""
+        Analyze this OHLCV data and calculate optimal indicators:
+        {ohlcv_data.to_json(orient='records')}
+        Calculate and return:
+        - Moving Averages (EMA, SMA with optimal periods)
+        - Oscillators (RSI, Stochastic, MACD)
+        - Volatility (Bollinger Bands, ATR)
+        - Volume indicators
+        - Custom combinations of indicators
+
+        Return the analysis in JSON format with exact values and coordinates.
+        """
+
+        response = self.client.chat(
+            model='codellama:latest',
+            messages=[
+                {
+                    'role': 'system',
+                    'content': 'You are a technical analysis indicator calculation model.'
+                },
+                {
+                    'role': 'user',
+                    'content': indicator_prompt
+                }
+            ]
+        )
+
+        return self.parse_indicator_analysis(response['message']['content'])
+
+    def parse_indicator_analysis(self, analysis):
+        try:
+            # Convert string response to structured data
+            if isinstance(analysis, str):
+                # Extract JSON if embedded in text
+                json_start = analysis.find('{')
+                json_end = analysis.rfind('}') + 1
+                if json_start >= 0 and json_end > 0:
+                    analysis = analysis[json_start:json_end]
+
+            indicators = {
+                'moving_averages': {},
+                'oscillators': {},
+                'volatility': {},
+                'volume': {},
+                'custom': {}
+            }
+
+            # Add any custom parsing logic here
+
+            return indicators
+
+        except Exception as e:
+            print(f"Error parsing indicator analysis: {str(e)}")
+            return {}
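Running analyze_indicators() end to end needs a live Ollama daemon serving codellama:latest; the parsing step can be exercised offline. A small sketch, using __new__ to bypass __init__ so no checkpoint is downloaded:

from indicator_analyzer import IndicatorAnalyzer

ia = IndicatorAnalyzer.__new__(IndicatorAnalyzer)  # skip __init__ (no model load)
skeleton = ia.parse_indicator_analysis('text around {"rsi": 55} more text')
print(skeleton)  # as written, always the empty category skeleton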
indicator_chart_generator.py
ADDED
@@ -0,0 +1,23 @@
+from indicator_analyzer import IndicatorAnalyzer
+from lightweight_charts import Chart
+
+class AutoIndicatorGenerator:
+    def __init__(self):
+        self.analyzer = IndicatorAnalyzer()
+
+    def generate_indicator_charts(self, ohlcv_data):
+        indicators = self.analyzer.analyze_indicators(ohlcv_data)
+
+        charts = []
+        # NOTE: assumes analyze_indicators() returns a list of {'type', 'values', 'parameters'} dicts
+        for indicator in indicators:
+            chart = Chart()
+            chart.candlestick(ohlcv_data)
+            chart.add_indicator(
+                type=indicator['type'],
+                values=indicator['values'],
+                parameters=indicator['parameters']
+            )
+            charts.append(chart)
+
+        return charts
main.py
ADDED
@@ -0,0 +1,85 @@
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import StreamingResponse
+from fastapi.middleware.cors import CORSMiddleware
+import uvicorn
+import requests
+import pandas as pd
+import numpy as np
+import asyncio
+import json
+from datetime import datetime
+from typing import Dict, List, Optional
+import gc
+
+from pattern_analyzer import PatternAnalyzer
+from indicator_analyzer import IndicatorAnalyzer
+from chart_maker import ChartMaker
+
+app = FastAPI()
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+pattern_analyzer = PatternAnalyzer()
+indicator_analyzer = IndicatorAnalyzer()
+chart_maker = ChartMaker()
+
+async def analyze_stream(symbol: str, timeframe: str):
+    async def generate():
+        try:
+            yield json.dumps({"status": "fetching_data"}) + "\n"
+
+            response = requests.get(
+                "http://localhost:5000/fetch_candles",
+                params={"symbol": symbol, "timeframe": timeframe}
+            )
+            ohlcv_data = response.json()
+
+            yield json.dumps({"status": "processing_data"}) + "\n"
+
+            df = pd.DataFrame(ohlcv_data)
+            df.index = range(len(df))
+            df['time'] = pd.to_datetime(df['time'], unit='ms')
+
+            required_columns = ['time', 'open', 'high', 'low', 'close', 'volume']
+            for col in required_columns:
+                if col not in df.columns:
+                    df[col] = 0
+
+            yield json.dumps({"status": "analyzing_patterns"}) + "\n"
+            patterns = pattern_analyzer.analyze_data(df)
+
+            yield json.dumps({"status": "calculating_indicators"}) + "\n"
+            indicators = indicator_analyzer.analyze_indicators(df)
+
+            yield json.dumps({"status": "generating_charts"}) + "\n"
+            pattern_charts = chart_maker.create_pattern_chart(df, patterns)
+
+            final_response = {
+                "status": "complete",
+                "symbol": symbol,
+                "timeframe": timeframe,
+                "patterns": patterns,
+                "indicators": indicators,
+                "charts": pattern_charts,
+                "data": ohlcv_data
+            }
+
+            yield json.dumps(final_response) + "\n"
+
+        except Exception as e:
+            yield json.dumps({"status": "error", "detail": str(e)}) + "\n"
+
+    return StreamingResponse(generate(), media_type="application/x-ndjson")
+
+@app.get("/analyze/{symbol}")
+async def analyze_patterns(symbol: str, timeframe: str = "1D"):
+    return await analyze_stream(symbol, timeframe)
+
+if __name__ == "__main__":
+    uvicorn.run(app, host="0.0.0.0", port=7860)
pattern_analyzer.py
ADDED
@@ -0,0 +1,69 @@
+from transformers import AutoModelForCausalLM, AutoTokenizer
+import numpy as np
+import pandas as pd
+import json
+from pattern_logic import PatternLogic
+
+class PatternAnalyzer:
+    def __init__(self):
+        self.model = AutoModelForCausalLM.from_pretrained("tmm-dev/codellama-pattern-analysis")
+        self.tokenizer = AutoTokenizer.from_pretrained("tmm-dev/codellama-pattern-analysis")
+        self.basic_patterns = {
+            'channel': {'min_points': 4, 'confidence_threshold': 0.7},
+            'triangle': {'min_points': 3, 'confidence_threshold': 0.75},
+            'support': {'min_touches': 2, 'confidence_threshold': 0.8},
+            'resistance': {'min_touches': 2, 'confidence_threshold': 0.8},
+            'double_top': {'max_deviation': 0.02, 'confidence_threshold': 0.85},
+            'double_bottom': {'max_deviation': 0.02, 'confidence_threshold': 0.85}
+        }
+        self.pattern_logic = PatternLogic()
+
+    def analyze_data(self, ohlcv_data):
+        data_prompt = f"""TASK: Identify high-confidence technical patterns only.
+Minimum confidence threshold: 0.8
+Required pattern criteria:
+1. Channel: Must have at least 3 touching points
+2. Triangle: Must have clear convergence point
+3. Support: Minimum 3 price bounces
+4. Resistance: Minimum 3 price rejections
+
+INPUT DATA:
+{ohlcv_data.to_json(orient='records')}
+
+Return ONLY high-confidence patterns (>0.8) in JSON format with exact price coordinates."""
+
+        inputs = self.tokenizer(data_prompt, return_tensors="pt")
+        outputs = self.model.generate(**inputs, max_length=1000)
+        analysis = self.tokenizer.decode(outputs[0])
+
+        return self.parse_analysis(analysis)
+
+    def parse_analysis(self, analysis_text):
+        try:
+            json_start = analysis_text.find('{')
+            json_end = analysis_text.rfind('}') + 1
+            json_str = analysis_text[json_start:json_end]
+
+            analysis_data = json.loads(json_str)
+            patterns = []
+
+            for pattern in analysis_data.get('patterns', []):
+                pattern_type = pattern.get('type')
+
+                if pattern_type in self.basic_patterns:
+                    threshold = self.basic_patterns[pattern_type]['confidence_threshold']
+                    if pattern.get('confidence', 0) >= threshold:
+                        patterns.append({
+                            'type': pattern_type,
+                            'coordinates': pattern.get('coordinates', []),
+                            'confidence': pattern.get('confidence'),
+                            'metadata': {
+                                'rules': self.basic_patterns[pattern_type],
+                                'timestamp': pd.Timestamp.now().isoformat()
+                            }
+                        })
+
+            return patterns
+
+        except json.JSONDecodeError:
+            return []
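parse_analysis() can be checked without loading the checkpoint; the canned model reply below is made up, and __new__ bypasses the model download in __init__:

from pattern_analyzer import PatternAnalyzer

analyzer = PatternAnalyzer.__new__(PatternAnalyzer)  # skip __init__
analyzer.basic_patterns = {'channel': {'min_points': 4, 'confidence_threshold': 0.7}}

sample = 'Model reply: {"patterns": [{"type": "channel", "confidence": 0.9, "coordinates": [[0, 100.0], [10, 105.0]]}]}'
print(analyzer.parse_analysis(sample))  # one channel pattern above threshold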
pattern_logic.py
ADDED
@@ -0,0 +1,105 @@
+import numpy as np
+import pandas as pd
+from test_data import test_data
+from typing import List, Dict, Optional, Union
+
+class PatternLogic:
+    def __init__(self):
+        self.patterns = {
+            'channel': {'min_points': 4, 'confidence_threshold': 0.7},
+            'triangle': {'min_points': 3, 'confidence_threshold': 0.75},
+            'support': {'min_touches': 2, 'confidence_threshold': 0.8},
+            'resistance': {'min_touches': 2, 'confidence_threshold': 0.8},
+            'double_top': {'max_deviation': 0.02, 'confidence_threshold': 0.85},
+            'double_bottom': {'max_deviation': 0.02, 'confidence_threshold': 0.85}
+        }
+        self.test_data = test_data
+
+    # NOTE: the detectors below synthesize random walks sized to `data`
+    # rather than reading its prices; they act as placeholders.
+    def detect_channels(self, data: pd.DataFrame) -> Dict[str, Union[str, List[List[float]], float]]:
+        days = len(data)
+        base_price = 100
+        price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+        base_prices = base_price * (1 + price_changes)
+
+        high_prices = base_prices * (1 + np.random.normal(0.01, 0.008, days))
+        low_prices = base_prices * (1 + np.random.normal(-0.01, 0.008, days))
+        timestamps = np.arange(days)
+
+        upper_channel: List[List[float]] = []
+        lower_channel: List[List[float]] = []
+
+        for i in range(days):
+            upper_channel.append([float(timestamps[i]), float(high_prices[i])])
+            lower_channel.append([float(timestamps[i]), float(low_prices[i])])
+
+        return {
+            'type': 'channel',
+            'upper': upper_channel,
+            'lower': lower_channel,
+            'confidence': 0.85
+        }
+
+    def find_support_resistance(self, data: pd.DataFrame) -> List[Dict[str, Union[str, List[List[float]], float]]]:
+        days = len(data)
+        base_price = 100
+        price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+        close_prices = base_price * (1 + price_changes) * (1 + np.random.normal(0, 0.005, days))
+        timestamps = np.arange(days)
+        levels: List[Dict] = []
+
+        for i in range(1, days-1):
+            current_price = float(close_prices[i])
+            prev_price = float(close_prices[i-1])
+            next_price = float(close_prices[i+1])
+
+            if current_price > prev_price and current_price > next_price:
+                levels.append({
+                    'type': 'resistance',
+                    'coordinates': [[float(timestamps[i]), current_price]],
+                    'confidence': 0.8
+                })
+            if current_price < prev_price and current_price < next_price:
+                levels.append({
+                    'type': 'support',
+                    'coordinates': [[float(timestamps[i]), current_price]],
+                    'confidence': 0.8
+                })
+
+        return levels
+
+    def detect_triangles(self, data: pd.DataFrame) -> Optional[Dict[str, Union[str, List[List[float]], float]]]:
+        days = len(data)
+        base_price = 100
+        price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+        base_prices = base_price * (1 + price_changes)
+
+        high_prices = base_prices * (1 + np.random.normal(0.01, 0.008, days))
+        low_prices = base_prices * (1 + np.random.normal(-0.01, 0.008, days))
+        timestamps = np.arange(days)
+
+        first_high = float(high_prices[0])
+        last_high = float(high_prices[-1])
+        first_low = float(low_prices[0])
+        last_low = float(low_prices[-1])
+
+        if last_high < first_high and last_low > first_low:
+            return {
+                'type': 'triangle',
+                'coordinates': [
+                    [float(timestamps[0]), first_high],
+                    [float(timestamps[-1]), last_high],
+                    [float(timestamps[0]), first_low],
+                    [float(timestamps[-1]), last_low]
+                ],
+                'confidence': 0.75
+            }
+        return None
+
+    def validate_patterns(self, patterns: List[Dict]) -> List[Dict]:
+        validated = []
+        for pattern in patterns:
+            if pattern.get('confidence', 0) >= 0.8:
+                validated.append(pattern)
+        return validated
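A smoke-test sketch for PatternLogic, reusing the DataFrame from test_data.py (which this module already imports):

from pattern_logic import PatternLogic
from test_data import test_data

logic = PatternLogic()
channel = logic.detect_channels(test_data)          # synthetic channel, confidence 0.85
levels = logic.find_support_resistance(test_data)   # local extrema as levels, confidence 0.8
triangle = logic.detect_triangles(test_data)        # None unless highs fall and lows rise

candidates = [channel] + levels + ([triangle] if triangle else [])
print(len(logic.validate_patterns(candidates)), "patterns at confidence >= 0.8")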
requirements.txt
ADDED
@@ -0,0 +1,12 @@
+fastapi==0.104.1
+uvicorn==0.24.0
+numpy==1.26.1
+pandas==2.1.1
+requests==2.31.0
+python-multipart==0.0.6
+python-dotenv==1.0.0
+transformers==4.35.0
+torch==2.1.0
+accelerate==0.24.0
+lightweight-charts==2.0.0
+gunicorn==20.1.0
run.py
ADDED
@@ -0,0 +1,6 @@
+from fastapi import FastAPI
+from app import app
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run("app:app", host="0.0.0.0", port=7865, reload=True)
test_charts.py
ADDED
@@ -0,0 +1,34 @@
+import numpy as np
+import pandas as pd
+from auto_chart_generator import AutoChartGenerator
+from indicator_chart_generator import AutoIndicatorGenerator
+
+# Generate 150 days of OHLCV data
+np.random.seed(42)
+days = 150
+base_price = 100
+
+price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+prices = base_price * (1 + price_changes)
+
+test_data = {
+    'open': prices * (1 + np.random.normal(0, 0.005, days)),
+    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
+    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
+    'close': prices * (1 + np.random.normal(0, 0.005, days)),
+    'volume': np.random.normal(1000000, 200000, days)
+}
+
+df = pd.DataFrame(test_data)
+df['high'] = df[['open', 'high', 'close']].max(axis=1)
+df['low'] = df[['open', 'low', 'close']].min(axis=1)
+
+# Test pattern charts
+pattern_gen = AutoChartGenerator()
+pattern_charts = pattern_gen.generate_pattern_charts(df)
+print("Generated Pattern Charts:", len(pattern_charts))
+
+# Test indicator charts
+indicator_gen = AutoIndicatorGenerator()
+indicator_charts = indicator_gen.generate_indicator_charts(df)
+print("Generated Indicator Charts:", len(indicator_charts))
test_data.py
ADDED
@@ -0,0 +1,23 @@
+import numpy as np
+import pandas as pd
+
+# Generate 150 days test data
+days = 150
+base_price = 100
+
+# Generate price changes with realistic volatility
+price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+base_prices = base_price * (1 + price_changes)
+
+# Create DataFrame with all OHLCV components
+test_data = pd.DataFrame({
+    'open': base_prices * (1 + np.random.normal(0, 0.005, days)),    # Opening prices
+    'high': base_prices * (1 + np.random.normal(0.01, 0.008, days)),  # Day's high prices
+    'low': base_prices * (1 + np.random.normal(-0.01, 0.008, days)),  # Day's low prices
+    'close': base_prices * (1 + np.random.normal(0, 0.005, days)),   # Closing prices
+    'volume': np.random.normal(1000000, 200000, days).astype(int)    # Daily volume
+})
+
+# Ensure high is always highest and low is always lowest
+test_data['high'] = test_data[['open', 'high', 'close']].max(axis=1)
+test_data['low'] = test_data[['open', 'low', 'close']].min(axis=1)
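A quick assertion of the OHLC invariant enforced by the last two lines:

from test_data import test_data

assert (test_data['high'] >= test_data[['open', 'close']].max(axis=1)).all()
assert (test_data['low'] <= test_data[['open', 'close']].min(axis=1)).all()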
test_imports.py
ADDED
@@ -0,0 +1,29 @@
+try:
+    import gradio as gr
+    print("✅ Gradio imported successfully")
+except ImportError:
+    print("❌ Gradio import failed")
+
+try:
+    from transformers import AutoModelForCausalLM, AutoTokenizer
+    print("✅ Transformers imported successfully")
+except ImportError:
+    print("❌ Transformers import failed")
+
+try:
+    from lightweight_charts import Chart
+    print("✅ Lightweight Charts imported successfully")
+except ImportError:
+    print("❌ Lightweight Charts import failed")
+
+try:
+    from fastapi import FastAPI
+    print("✅ FastAPI imported successfully")
+except ImportError:
+    print("❌ FastAPI import failed")
+
+try:
+    from datasets import Dataset, load_dataset
+    print("✅ Datasets imported successfully")
+except ImportError:
+    print("❌ Datasets import failed")
test_indicators.py
ADDED
@@ -0,0 +1,28 @@
+import numpy as np
+import pandas as pd
+from indicator_analyzer import IndicatorAnalyzer
+
+# Generate 150 days of OHLCV data
+np.random.seed(42)
+days = 150
+base_price = 100
+
+price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+prices = base_price * (1 + price_changes)
+
+test_data = {
+    'open': prices * (1 + np.random.normal(0, 0.005, days)),
+    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
+    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
+    'close': prices * (1 + np.random.normal(0, 0.005, days)),
+    'volume': np.random.normal(1000000, 200000, days)
+}
+
+df = pd.DataFrame(test_data)
+df['high'] = df[['open', 'high', 'close']].max(axis=1)
+df['low'] = df[['open', 'low', 'close']].min(axis=1)
+
+# Test indicator analysis
+analyzer = IndicatorAnalyzer()
+indicators = analyzer.analyze_indicators(df)
+print("Generated Indicators:", indicators)
test_model.py
ADDED
@@ -0,0 +1,34 @@
+from transformers import AutoModelForCausalLM, AutoTokenizer
+import numpy as np
+import pandas as pd
+
+# Load model
+model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
+tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
+
+# Create test data
+days = 150
+base_price = 100
+price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+prices = base_price * (1 + price_changes)
+
+test_data = pd.DataFrame({
+    'open': prices * (1 + np.random.normal(0, 0.005, days)),
+    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
+    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
+    'close': prices * (1 + np.random.normal(0, 0.005, days)),
+    'volume': np.random.normal(1000000, 200000, days)
+})
+
+# Test pattern detection
+prompt = f"""
+Analyze this OHLCV data and detect patterns:
+{test_data.head().to_string()}
+Return: Pattern type and coordinates
+"""
+
+inputs = tokenizer(prompt, return_tensors="pt")
+outputs = model.generate(**inputs, max_length=500)
+result = tokenizer.decode(outputs[0])
+
+print("Model Output:", result)
test_patterns.py
ADDED
@@ -0,0 +1,32 @@
+import numpy as np
+import pandas as pd
+from pattern_analyzer import PatternAnalyzer
+
+# Generate 150 days of realistic OHLCV data
+np.random.seed(42)  # For reproducibility
+days = 150
+base_price = 100
+
+# Create price movements with trends and volatility
+price_changes = np.random.normal(0.001, 0.02, days).cumsum()
+prices = base_price * (1 + price_changes)
+
+test_data = {
+    'open': prices * (1 + np.random.normal(0, 0.005, days)),
+    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
+    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
+    'close': prices * (1 + np.random.normal(0, 0.005, days)),
+    'volume': np.random.normal(1000000, 200000, days)
+}
+
+# Convert to pandas DataFrame for better handling
+df = pd.DataFrame(test_data)
+
+# Ensure high is highest and low is lowest for each day
+df['high'] = df[['open', 'high', 'close']].max(axis=1)
+df['low'] = df[['open', 'low', 'close']].min(axis=1)
+
+# Test pattern detection
+analyzer = PatternAnalyzer()
+patterns = analyzer.analyze_data(df)
+print("Detected Patterns:", patterns)
train.py
ADDED
@@ -0,0 +1,33 @@
+from transformers import Trainer, TrainingArguments, AutoModelForCausalLM
+from datasets import Dataset
+
+def prepare_training_data():
+    # Training data structure
+    return {
+        'pattern_type': ['channel', 'triangle'],
+        'chart_code': ['// Channel code', '// Triangle code']
+    }
+
+def train_model():
+    # Create dataset
+    data = prepare_training_data()
+    dataset = Dataset.from_dict(data)
+
+    # Fix: `model` was undefined in the original; the base checkpoint is
+    # assumed to be the one used in test_model.py.
+    model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
+
+    training_args = TrainingArguments(
+        output_dir="./results",
+        num_train_epochs=3,
+        per_device_train_batch_size=8,
+        save_steps=500,
+    )
+
+    trainer = Trainer(
+        model=model,
+        args=training_args,
+        train_dataset=dataset,
+    )
+
+    trainer.train()
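For the Trainer call to do useful work, the string columns would still need tokenizing; a sketch of the usual datasets.map step, assuming the same CodeLlama tokenizer:

from transformers import AutoTokenizer
from datasets import Dataset

tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
tokenizer.pad_token = tokenizer.eos_token  # Llama tokenizers ship without a pad token

def tokenize(batch):
    text = [f"{p}\n{c}" for p, c in zip(batch['pattern_type'], batch['chart_code'])]
    out = tokenizer(text, truncation=True, padding='max_length', max_length=64)
    out['labels'] = out['input_ids'].copy()  # causal LM: labels mirror inputs
    return out

dataset = Dataset.from_dict({
    'pattern_type': ['channel', 'triangle'],
    'chart_code': ['// Channel code', '// Triangle code'],
}).map(tokenize, batched=True)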
trigger_handler.py
ADDED
@@ -0,0 +1,21 @@
+from chart_maker import ChartMaker
+from fastapi import FastAPI
+
+
+class TriggerHandler:
+    def __init__(self):
+        self.chart_maker = ChartMaker()
+
+    async def handle_chart_trigger(self, trigger_data):
+        # Extract chart image and OHLCV data
+        chart_image = trigger_data['image']
+        ohlcv_data = trigger_data['ohlcv']
+
+        # Generate pattern charts
+        pattern_charts = self.chart_maker.generate_all_variations(ohlcv_data)
+
+        # Return generated charts through API
+        return {
+            'pattern_charts': pattern_charts,
+            'timestamp': trigger_data['timestamp']
+        }
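A usage sketch: the payload keys match what handle_chart_trigger reads, the values are made up, and it assumes lightweight_charts exposes the Chart API that ChartMaker calls.

import asyncio
import pandas as pd
from test_data import test_data
from trigger_handler import TriggerHandler

ohlcv = test_data.copy()
ohlcv['time'] = pd.date_range('2023-01-01', periods=len(ohlcv), freq='D')  # ChartMaker needs a time column

payload = {'image': None, 'ohlcv': ohlcv, 'timestamp': '2023-11-14T00:00:00Z'}
result = asyncio.run(TriggerHandler().handle_chart_trigger(payload))
print(len(result['pattern_charts']), 'chart variations at', result['timestamp'])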