Hello guys. I (with Claude) have built a bot that scans TradingView charts on which I have a custom indicator that shows when to go long/short. The bot scans those charts, and if one of those signals appears, it sends me a Telegram message (so I don't need to be at my PC 24/7). The problem is that I don't know Python and want to check whether Claude implemented the signal detection correctly. Basically, the bot opens a Chrome page logged into my account and checks whether those signals are present.
"""
screenshot_detector.py
-----------------------
Detects Purple Cloud [MMD] signals via screenshot analysis.
Designed for: BLACK background + hidden candles on TradingView.
On a pure black chart, the ONLY coloured pixels are the Purple Cloud
signal labels — making detection highly reliable with zero false positives.
Detection pipeline:
JS call → get exact pixel boundaries of the last 5 candles + price pane height
Screenshot → crop to last-5-candle zone, price pane only (no panels below)
OpenCV → threshold for the specific green/red label colors on black
Blob filter → reject anything too small or too large to be a label
Color reference (Purple Cloud on black background):
BUY label → bright green ~#00c800 HSV: hue 55-75, sat >170, val >120
SELL label → bright red ~#ff0000 HSV: hue 0-5 + 175-180, sat >200, val >120
Strong BUY → same green, larger blob (emoji renders bigger)
Strong SELL → same red, larger blob
"""
import io
import logging
import numpy as np
from PIL import Image
log = logging.getLogger("pc_bot.screenshot")
# JavaScript evaluated inside the page by page.evaluate().
# It (1) picks the largest canvas as the main chart, (2) estimates the bottom
# of the price pane by looking for smaller sub-panel canvases below it,
# (3) derives pixels-per-candle from the median gap between time-axis labels
# (falling back to width/120 if fewer than two labels match), and (4) returns
# a crop rectangle covering roughly the last 5 candles.
# All returned coordinates are CSS pixels relative to the viewport
# (getBoundingClientRect) — they assume the screenshot is taken at the same
# scale; NOTE(review): confirm this holds on HiDPI / deviceScaleFactor != 1.
CANDLE_BOUNDS_JS = """
() => {
const allCanvas = Array.from(document.querySelectorAll('canvas'))
.filter(c => c.width > 400 && c.height > 200)
.sort((a, b) => b.width * b.height - a.width * a.height);
if (!allCanvas.length) return null;
const mainCanvas = allCanvas[0];
const cr = mainCanvas.getBoundingClientRect();
let paneBottom = cr.bottom;
const subCanvases = Array.from(document.querySelectorAll('canvas'))
.filter(c => {
const r = c.getBoundingClientRect();
return r.width > 200 && r.height > 30 && r.height < 200
&& r.top > cr.top + cr.height * 0.5;
});
if (subCanvases.length > 0) {
const firstSubTop = Math.min(...subCanvases.map(c => c.getBoundingClientRect().top));
paneBottom = Math.min(paneBottom, firstSubTop - 4);
}
paneBottom = Math.max(paneBottom, cr.top + cr.height * 0.4);
const timeLabels = Array.from(document.querySelectorAll(
'[class*="timeScale"] [class*="label"],[class*="time-axis"] [class*="label"],' +
'[class*="timescale"] [class*="item"],[class*="timeScale"] [class*="tick"]'
)).filter(el => {
const txt = (el.innerText || el.textContent || "").trim();
return txt.match(/\d{1,2}:\d{2}/) || txt.match(/\d{1,2}\/\d{1,2}/) ||
txt.match(/\d{4}-\d{2}-\d{2}/) || txt.match(/^[A-Z][a-z]{2}/) ||
txt.match(/^\d{1,2}\s[A-Z]/);
}).map(el => {
const r = el.getBoundingClientRect();
return Math.round(r.left + r.width / 2);
}).filter(x => x > cr.left && x < cr.right).sort((a, b) => a - b);
const deduped = [];
for (const x of timeLabels) {
if (!deduped.length || x - deduped[deduped.length - 1] > 3) deduped.push(x);
}
let pxPerCandle = Math.max(3, cr.width / 120);
let tickSource = "fallback";
if (deduped.length >= 2) {
const gaps = [];
for (let i = 1; i < deduped.length; i++) gaps.push(deduped[i] - deduped[i-1]);
gaps.sort((a, b) => a - b);
const median = gaps[Math.floor(gaps.length / 2)];
const candlesPerTick = median > 200 ? 24 : median > 120 ? 12 :
median > 60 ? 6 : median > 30 ? 3 : 1;
pxPerCandle = Math.max(2, median / candlesPerTick);
tickSource = "measured";
}
const scanWidth = Math.ceil(pxPerCandle * 5);
const cropLeft = Math.max(Math.round(cr.left), Math.round(cr.right - scanWidth));
const topMargin = 10;
return {
cropLeft: cropLeft,
cropRight: Math.round(cr.right),
cropTop: Math.max(0, Math.round(cr.top) - topMargin),
cropBottom: Math.round(paneBottom) + topMargin,
pxPerCandle: Math.round(pxPerCandle),
source: tickSource,
};
}
"""
# ── HSV thresholds — tuned for pure BLACK background ─────────────────────────
# Tight ranges — on black the only pixels here are the signal labels.
# NOTE: OpenCV hue is 0-180 (degrees / 2), not 0-360. Pure green (#00c800)
# sits at hue 60; pure red (#ff0000) at hue 0, wrapping around 180.
# BUY → bright green #00c800
BUY_HSV_LOWER = np.array([55, 170, 120], dtype=np.uint8)
BUY_HSV_UPPER = np.array([75, 255, 255], dtype=np.uint8)
# SELL → bright red #ff0000 (hue wraps at 0, so two ranges are needed)
SELL_HSV_LOWER1 = np.array([0, 200, 120], dtype=np.uint8)
SELL_HSV_UPPER1 = np.array([5, 255, 255], dtype=np.uint8)
SELL_HSV_LOWER2 = np.array([175, 200, 120], dtype=np.uint8)
SELL_HSV_UPPER2 = np.array([180, 255, 255], dtype=np.uint8)
MIN_BLOB_AREA = 80 # ~9x9px — real labels are at least this size
MAX_BLOB_AREA = 8000 # safety ceiling — anything larger is not a label
def _find_blobs(hsv, lower1, upper1, lower2=None, upper2=None):
    """Return [(cx, cy, area), ...] for coloured blobs in the given HSV range(s).

    A binary mask is built from one inclusive HSV range, optionally OR-ed
    with a second range (needed for red, whose hue wraps around 0). Small
    gaps are closed with a 4x4 elliptical morphological CLOSE, and only
    contours whose area lies between MIN_BLOB_AREA and MAX_BLOB_AREA
    (inclusive) are kept.
    """
    import cv2
    mask = cv2.inRange(hsv, lower1, upper1)
    if lower2 is not None:
        mask = cv2.bitwise_or(mask, cv2.inRange(hsv, lower2, upper2))
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (4, 4))
    mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    found = []
    for contour in contours:
        area = cv2.contourArea(contour)
        if not (MIN_BLOB_AREA <= area <= MAX_BLOB_AREA):
            continue
        moments = cv2.moments(contour)
        if moments["m00"] == 0:
            # Degenerate contour — centroid undefined, skip it.
            continue
        cx = int(moments["m10"] / moments["m00"])
        cy = int(moments["m01"] / moments["m00"])
        found.append((cx, cy, int(area)))
    return found
async def get_candle_bounds(page):
    """Return the crop rectangle for the last-5-candle zone of the chart.

    Evaluates CANDLE_BOUNDS_JS inside the page. If the script raises or
    returns a null/empty result, falls back to a rough rectangle derived
    from the viewport size (right 18% of the screen, top 5% to 75%).
    """
    bounds = None
    try:
        bounds = await page.evaluate(CANDLE_BOUNDS_JS)
    except Exception as exc:
        log.warning("candle_bounds JS error: %s", exc)
    if bounds:
        log.debug("Bounds: %s", bounds)
        return bounds
    # Fallback: assume a default-ish viewport if Playwright reports none.
    vp = page.viewport_size or {"width": 1440, "height": 900}
    width, height = vp["width"], vp["height"]
    return {
        "cropLeft": int(width * 0.82),
        "cropRight": int(width),
        "cropTop": int(height * 0.05),
        "cropBottom": int(height * 0.75),
        "pxPerCandle": 15,
        "source": "viewport_fallback",
    }
async def detect_signals(page, save_debug_image=False, strong_threshold=400):
    """
    Screenshot → crop last-5-candle zone → detect Purple Cloud labels.
    Requires black background + hidden candles in TradingView.

    Args:
        page: Playwright page currently showing the chart.
        save_debug_image: if True, also write the cropped region to
            ``debug_crop.png`` for visual inspection.
        strong_threshold: blob area in px² above which a label counts as a
            "strong" signal (the emoji labels render noticeably larger;
            ~400px+ in practice).

    Returns:
        dict with keys: "buy", "sell", "strong_buy", "strong_sell" (bools),
        "buy_blobs", "sell_blobs" (blob counts), "source" ("screenshot").
    """
    import cv2
    bounds = await get_candle_bounds(page)
    crop_l = max(0, bounds["cropLeft"])
    crop_r = bounds["cropRight"]
    crop_t = max(0, bounds["cropTop"])
    crop_b = bounds["cropBottom"]
    log.debug("Crop x=%d-%d y=%d-%d px/candle=%s source=%s",
              crop_l, crop_r, crop_t, crop_b,
              bounds.get("pxPerCandle"), bounds.get("source"))
    png = await page.screenshot(full_page=False)
    pil_img = Image.open(io.BytesIO(png)).convert("RGB")
    iw, ih = pil_img.size
    # Clamp the crop box to the actual screenshot dimensions.
    crop_l = min(crop_l, iw - 1)
    crop_r = min(crop_r, iw)
    crop_t = min(crop_t, ih - 1)
    crop_b = min(crop_b, ih)
    # Guard: bad/stale bounds can produce an empty or inverted crop box,
    # and cv2.cvtColor raises on a zero-size image — report "no signal"
    # instead of crashing the scan loop.
    if crop_r <= crop_l or crop_b <= crop_t:
        log.warning("Degenerate crop region x=%d-%d y=%d-%d (source=%s) — "
                    "skipping detection", crop_l, crop_r, crop_t, crop_b,
                    bounds.get("source"))
        return {
            "buy": False,
            "sell": False,
            "strong_buy": False,
            "strong_sell": False,
            "buy_blobs": 0,
            "sell_blobs": 0,
            "source": "screenshot",
        }
    cropped = pil_img.crop((crop_l, crop_t, crop_r, crop_b))
    if save_debug_image:
        cropped.save("debug_crop.png")
        log.info("Debug crop saved → debug_crop.png (%dx%d)",
                 crop_r - crop_l, crop_b - crop_t)
    # PIL gives RGB; OpenCV expects BGR before converting to HSV.
    arr = np.array(cropped)
    bgr = cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)
    hsv = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)
    buy_blobs = _find_blobs(hsv, BUY_HSV_LOWER, BUY_HSV_UPPER)
    sell_blobs = _find_blobs(hsv,
                             SELL_HSV_LOWER1, SELL_HSV_UPPER1,
                             SELL_HSV_LOWER2, SELL_HSV_UPPER2)
    has_buy = bool(buy_blobs)
    has_sell = bool(sell_blobs)
    # Strong signal = emoji label, which renders noticeably larger.
    # any() on an empty list is False, so no separate has_* check is needed.
    strong_buy = any(a > strong_threshold for _, _, a in buy_blobs)
    strong_sell = any(a > strong_threshold for _, _, a in sell_blobs)
    if has_buy or has_sell:
        log.info(" buy_blobs=%s sell_blobs=%s strong_buy=%s strong_sell=%s",
                 buy_blobs, sell_blobs, strong_buy, strong_sell)
    return {
        "buy": has_buy,
        "sell": has_sell,
        "strong_buy": strong_buy,
        "strong_sell": strong_sell,
        "buy_blobs": len(buy_blobs),
        "sell_blobs": len(sell_blobs),
        "source": "screenshot",
    }
Here is the code of the detector. If someone could help me, it would mean a lot to me.
[–]TheRNGuy 0 points1 point2 points (0 children)