This is a complete walkthrough of the seven snippets currently published in Code Vault — a personal repository of crypto trading radars, autonomous trading systems, and security utilities, all in pure Python with zero or near-zero API costs. Every snippet below includes the full source code so you can read, fork, and run them locally. Use the table of contents on the right to jump to a specific tool.
⚠️ Risk warning — These tools touch live markets and on-chain data. Several of them push real-time alerts to Telegram and one of them (the AI autonomous trader) opens virtual positions on Binance Futures. Read the per-tool notes carefully. Use at your own risk; the author makes no warranty for trading outcomes.
Trading Radar
Vitalik Sell Radar
Date: 2026.05.02 Tags: Python · WebSocket · Ethereum · Telegram · Event-Driven
GitHub: vitalik-sell-radar {rel="nofollow"}
WebSocket event-driven Vitalik wallet sell detection with Telegram alerts
Monitors Vitalik Buterin’s wallet (vitalik.eth) for ERC-20 token sells via WebSocket event subscription — zero polling, sub-second latency. Automatically classifies recipients as DEX Router (Uniswap, 1inch, SushiSwap), CEX hot wallet (Binance, Coinbase, Kraken), or LP Pool. Fetches real-time token prices from DexScreener. Multi-RPC failover with auto-reconnect. Pure Python, zero cost — uses free public RPC nodes.
Full source code
1#!/usr/bin/env python3
2"""
3Vitalik Sell Radar — Event-Driven Edition
4WebSocket subscription to ERC-20 Transfer events, real-time detection of
5Vitalik's sell activity, instant push to Telegram.
6
7Architecture:
81. WebSocket subscribes to Transfer(from=vitalik) events → sub-second detection
92. Classifies sell behavior (transfers to DEX Router / CEX / LP Pool)
103. Queries token info + price via DexScreener
114. Pushes alert to Telegram
125. Auto-reconnect + multi-RPC failover
13"""
14
15import asyncio
16import json
17import logging
18import os
19import signal
20import sys
21import time
22from datetime import datetime, timezone
23from pathlib import Path
24
25import aiohttp
26import websockets
27
28# Load .env file
# Load key=value pairs from a sibling .env file into the process environment.
def load_env():
    """Populate os.environ from the .env file next to this script, if present.

    Existing environment variables win (setdefault); blank lines and
    '#' comments are skipped.
    """
    env_file = Path(__file__).parent / ".env"
    if not env_file.exists():
        return
    for raw in env_file.read_text().splitlines():
        entry = raw.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        key, _, value = entry.partition("=")
        os.environ.setdefault(key.strip(), value.strip())

load_env()
39
# ============================================================
# Configuration
# ============================================================

# Vitalik's main wallet (vitalik.eth)
VITALIK_ADDRESS = "0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045"
# Address left-padded to 32 bytes, as it appears in an indexed log topic
VITALIK_PADDED = "0x" + VITALIK_ADDRESS[2:].lower().zfill(64)

# ERC-20 Transfer event signature topic (topic0 of every Transfer log)
TRANSFER_TOPIC = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"

# WebSocket RPC endpoints (free, support eth_subscribe); main() rotates
# through these on connection failure
WS_ENDPOINTS = [
    "wss://ethereum-rpc.publicnode.com",
    "wss://eth.drpc.org",
    "wss://ethereum.publicnode.com",
]

# HTTP RPC for querying token info (eth_call)
HTTP_RPC = os.environ.get("HTTP_RPC", "https://eth.drpc.org")

# Telegram credentials; empty values disable notifications
TG_BOT_TOKEN = os.environ.get("TG_BOT_TOKEN", "")
TG_CHAT_ID = os.environ.get("TG_CHAT_ID", "")

# Known DEX Router addresses (sell destinations), keyed by lowercase address
KNOWN_DEX_ROUTERS = {
    # Uniswap
    "0x7a250d5630b4cf539739df2c5dacb4c659f2488d": "Uniswap V2 Router",
    "0xe592427a0aece92de3edee1f18e0157c05861564": "Uniswap V3 Router",
    "0x68b3465833fb72a70ecdf485e0e4c7bd8665fc45": "Uniswap V3 Router2",
    "0x3fc91a3afd70395cd496c647d5a6cc9d4b2b7fad": "Uniswap Universal Router",
    "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b": "Uniswap Universal Router (old)",
    # 1inch
    "0x1111111254eeb25477b68fb85ed929f73a960582": "1inch V5",
    "0x111111125421ca6dc452d289314280a0f8842a65": "1inch V6",
    # SushiSwap
    "0xd9e1ce17f2641f24ae83637ab66a2cca9c378b9f": "SushiSwap Router",
    # CoW Protocol
    "0x9008d19f58aabd9ed0d60971565aa8510560ab41": "CoW Settlement",
    # 0x
    "0xdef1c0ded9bec7f1a1670819833240f027b25eff": "0x Exchange Proxy",
    # Curve
    "0x99a58482bd75cbab83b27ec03ca68ff489b5788f": "Curve Router",
}

# Known CEX hot wallets (partial list), keyed by lowercase address
KNOWN_CEX = {
    "0x28c6c06298d514db089934071355e5743bf21d60": "Binance Hot Wallet",
    "0x21a31ee1afc51d94c2efccaa2092ad1028285549": "Binance Hot Wallet 2",
    "0xdfd5293d8e347dfe59e90efd55b2956a1343963d": "Binance Hot Wallet 3",
    "0x56eddb7aa87536c09ccc2793473599fd21a8b17f": "Binance Hot Wallet 4",
    "0x71660c4005ba85c37ccec55d0c4493e66fe775d3": "Coinbase",
    "0xa9d1e08c7793af67e9d92fe308d5697fb81d3e43": "Coinbase 10",
    "0x503828976d22510aad0201ac7ec88293211d23da": "Coinbase 2",
    "0x2faf487a4414fe77e2327f0bf4ae2a264a776ad2": "FTX (defunct)",
    "0x267be1c1d684f78cb4f6a176c4911b741e4ffdc0": "Kraken",
    "0xae2d4617c862309a3d75a0ffb358c7a5009c673f": "Kraken 10",
}

# Minimum notification amount (USD), 0 = notify all
# NOTE(review): int() raises if MIN_NOTIFY_USD is set to a non-integer string.
MIN_NOTIFY_USD = int(os.environ.get("MIN_NOTIFY_USD", "0"))

# Reconnect backoff (seconds): start at RECONNECT_DELAY, grow x1.5 up to the cap
RECONNECT_DELAY = 5
MAX_RECONNECT_DELAY = 60
106
# ============================================================
# Logging — timestamped lines to stdout at INFO level
# ============================================================

logging.basicConfig(
    format="%(asctime)s [%(levelname)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=logging.INFO,
)
log = logging.getLogger("vitalik-radar")
117
# ============================================================
# Global state
# ============================================================

# Dedup set of processed "txhash:token" keys (bounded to the last 1000
# by handle_transfer_event)
seen_txs: set = set()
# Insertion-ordered companion list used to evict the oldest dedup keys
seen_txs_list: list = []

# Shared aiohttp session, created lazily by get_http_session()
http_session: aiohttp.ClientSession | None = None

# Token info cache: {lowercase address: {symbol, name, decimals, address}}
token_cache: dict = {}

# Main-loop flag; flipped to False by the signal handler installed in main()
running = True
134
135# ============================================================
136# Utility functions
137# ============================================================
138
def shorten_addr(addr: str) -> str:
    """Return a compact display form like "0xd8dA...6045"."""
    head, tail = addr[:6], addr[-4:]
    return f"{head}...{tail}"
142
def decode_transfer_value(data_hex: str, decimals: int) -> float:
    """Decode a Transfer event's raw hex value into a token amount.

    Returns 0.0 on malformed input instead of raising.
    Fix: catch only the exceptions int()/arithmetic can raise here,
    instead of the original bare ``except`` that swallowed everything.
    """
    try:
        raw = int(data_hex, 16)
        return raw / (10 ** decimals)
    except (TypeError, ValueError):
        return 0.0
150
def topic_to_address(topic: str) -> str:
    """Extract the 20-byte address (last 40 hex chars) from a 32-byte log topic."""
    tail = topic[-40:]
    return "0x" + tail
154
def classify_recipient(to_addr: str) -> tuple[str, str]:
    """Classify a recipient address against the known DEX/CEX tables.

    Returns (type, name) where type is "dex", "cex" or "unknown".
    ("pool" is assigned later by the caller via check_if_pool.)
    """
    key = to_addr.lower()

    dex_name = KNOWN_DEX_ROUTERS.get(key)
    if dex_name is not None:
        return "dex", dex_name

    cex_name = KNOWN_CEX.get(key)
    if cex_name is not None:
        return "cex", cex_name

    return "unknown", ""
170
async def get_http_session() -> aiohttp.ClientSession:
    """Return the shared aiohttp session, lazily (re)creating it when closed."""
    global http_session
    needs_new = http_session is None or http_session.closed
    if needs_new:
        http_session = aiohttp.ClientSession()
    return http_session
176
async def rpc_call(method: str, params: list) -> dict | None:
    """POST a JSON-RPC 2.0 request to HTTP_RPC.

    Returns the "result" field, or None on any transport/parse failure
    (the error is logged, never raised).
    """
    payload = {"jsonrpc": "2.0", "id": 1, "method": method, "params": params}
    session = await get_http_session()
    try:
        async with session.post(
            HTTP_RPC,
            json=payload,
            timeout=aiohttp.ClientTimeout(total=10),
        ) as resp:
            reply = await resp.json()
            return reply.get("result")
    except Exception as e:
        log.error(f"RPC call {method} failed: {e}")
        return None
191
def _decode_abi_string(result: str) -> str | None:
    """Decode an eth_call hex return value as a Solidity string.

    Handles both the dynamic ABI encoding (offset + length + data, >= 128
    hex chars) and the legacy bytes32 form (exactly 64 hex chars).
    Returns None when the payload cannot be decoded.
    """
    hex_str = result[2:]
    try:
        if len(hex_str) >= 128:
            # Dynamic string: word 0 is the byte offset, next word the length.
            offset = int(hex_str[:64], 16) * 2
            length = int(hex_str[offset:offset + 64], 16)
            payload = hex_str[offset + 64:offset + 64 + length * 2]
            return bytes.fromhex(payload).decode("utf-8", errors="replace").strip('\x00')
        if len(hex_str) == 64:
            # bytes32-style string (older tokens such as MKR use this).
            return bytes.fromhex(hex_str).decode("utf-8", errors="replace").strip('\x00')
    except (ValueError, IndexError):
        return None
    return None

async def get_token_info(token_addr: str) -> dict:
    """Query ERC-20 token metadata (symbol, name, decimals) via eth_call.

    Results are cached in token_cache keyed by lowercase address; failed
    lookups fall back to {"symbol": "???", "name": "Unknown", "decimals": 18}.
    Fix vs. original: the duplicated symbol()/name() ABI-decoding logic is
    factored into _decode_abi_string, and bare excepts are narrowed.
    """
    addr_lower = token_addr.lower()
    if addr_lower in token_cache:
        return token_cache[addr_lower]

    info = {"symbol": "???", "name": "Unknown", "decimals": 18, "address": token_addr}

    # symbol() — selector 0x95d89b41
    result = await rpc_call("eth_call", [
        {"to": token_addr, "data": "0x95d89b41"}, "latest"
    ])
    if result and len(result) > 2:
        decoded = _decode_abi_string(result)
        if decoded is not None:
            info["symbol"] = decoded

    # decimals() — selector 0x313ce567
    result = await rpc_call("eth_call", [
        {"to": token_addr, "data": "0x313ce567"}, "latest"
    ])
    if result and len(result) > 2:
        try:
            info["decimals"] = int(result, 16)
        except (TypeError, ValueError):
            pass

    # name() — selector 0x06fdde03
    result = await rpc_call("eth_call", [
        {"to": token_addr, "data": "0x06fdde03"}, "latest"
    ])
    if result and len(result) > 2:
        decoded = _decode_abi_string(result)
        if decoded is not None:
            info["name"] = decoded

    token_cache[addr_lower] = info
    return info
246
async def check_if_pool(addr: str) -> bool:
    """Heuristic LP-pool test: does the contract answer token0()?

    Uniswap V2/V3 pools expose token0() (selector 0x0dfe1681); a valid reply
    is exactly one 32-byte word, i.e. "0x" plus 64 hex chars.
    """
    result = await rpc_call("eth_call", [
        {"to": addr, "data": "0x0dfe1681"}, "latest"  # token0()
    ])
    return bool(result) and len(result) == 66
255
async def get_eth_price() -> float:
    """Fetch the current ETH/USD price from CoinGecko (free endpoint).

    Returns a hard-coded fallback of 2300 on any failure — this keeps the
    radar running through CoinGecko outages at the cost of a stale price.
    Fix: the original bare ``except`` is narrowed to ``except Exception`` so
    that KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    session = await get_http_session()
    try:
        async with session.get(
            "https://api.coingecko.com/api/v3/simple/price?ids=ethereum&vs_currencies=usd",
            timeout=aiohttp.ClientTimeout(total=5),
        ) as resp:
            data = await resp.json()
            return data.get("ethereum", {}).get("usd", 0)
    except Exception:
        return 2300  # fallback
268
async def get_token_price_usd(token_addr: str) -> float | None:
    """Fetch a token's USD price from DexScreener (free, no API key).

    Picks the pair with the highest USD liquidity. Returns None when the
    token has no pairs, the price is non-positive, or the request fails.
    Fix: the original bare ``except`` is narrowed to ``except Exception``.
    """
    session = await get_http_session()
    try:
        async with session.get(
            f"https://api.dexscreener.com/latest/dex/tokens/{token_addr}",
            timeout=aiohttp.ClientTimeout(total=5),
        ) as resp:
            data = await resp.json()
            pairs = data.get("pairs", [])
            if pairs:
                # Pick pair with highest liquidity (None/missing counts as 0)
                pairs.sort(key=lambda p: float(p.get("liquidity", {}).get("usd", 0) or 0), reverse=True)
                price = float(pairs[0].get("priceUsd", 0) or 0)
                return price if price > 0 else None
    except Exception:
        pass
    return None
287
288# ============================================================
289# Telegram notifications
290# ============================================================
291
async def send_telegram(text: str):
    """Send a Telegram message via the Bot API (HTML parse mode).

    On an HTML parse failure the message is retried once as plain text.
    Fixes vs. original: also skips when TG_CHAT_ID is missing (the request
    could never succeed without it), and the plain-text retry now carries
    the same 10 s timeout as the first attempt instead of none.
    """
    if not TG_BOT_TOKEN or not TG_CHAT_ID:
        log.warning("[TG] Bot token/chat id not configured, skipping notification")
        return

    session = await get_http_session()
    url = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendMessage"
    payload = {
        "chat_id": TG_CHAT_ID,
        "text": text,
        "parse_mode": "HTML",
        "disable_web_page_preview": True,
    }

    try:
        async with session.post(url, json=payload, timeout=aiohttp.ClientTimeout(total=10)) as resp:
            result = await resp.json()
        if result.get("ok"):
            log.info("[TG] Message sent")
            return
        log.error(f"[TG] Send failed: {result.get('description', '')}")
        # Retry with plain text if HTML parse fails
        if "parse" in result.get("description", "").lower():
            payload["parse_mode"] = None
            async with session.post(url, json=payload, timeout=aiohttp.ClientTimeout(total=10)) as resp2:
                await resp2.read()
    except Exception as e:
        log.error(f"[TG] Error: {e}")
321
322# ============================================================
323# Event handling
324# ============================================================
325
async def handle_transfer_event(log_entry: dict):
    """Process one ERC-20 Transfer log originating from Vitalik's wallet.

    Pipeline: dedup → parse topics → confirm sender → resolve token info →
    classify recipient (DEX/CEX/pool/unknown) → price via DexScreener →
    threshold filter → Telegram alert. Regular transfers (unknown
    recipients) are logged but never pushed.
    """
    tx_hash = log_entry.get("transactionHash", "")
    token_addr = log_entry.get("address", "")
    topics = log_entry.get("topics", [])
    data = log_entry.get("data", "0x0")

    # Dedup on (tx, token) — bounded set so memory stays constant
    dedup_key = f"{tx_hash}:{token_addr}"
    if dedup_key in seen_txs:
        return
    seen_txs.add(dedup_key)
    seen_txs_list.append(dedup_key)
    # Cleanup (keep last 1000); list is small so pop(0) is acceptable here
    while len(seen_txs_list) > 1000:
        old = seen_txs_list.pop(0)
        seen_txs.discard(old)

    # Parse topics: [Transfer signature, from, to] — anything shorter is not
    # a standard ERC-20 Transfer (e.g. ERC-721 uses a 4th indexed topic layout)
    if len(topics) < 3:
        return

    from_addr = topic_to_address(topics[1])
    to_addr = topic_to_address(topics[2])

    # Confirm it's from Vitalik (defensive — the subscription already filters)
    if from_addr.lower() != VITALIK_ADDRESS.lower():
        return

    # Get token info (cached eth_call lookups)
    token_info = await get_token_info(token_addr)
    symbol = token_info["symbol"]
    decimals = token_info["decimals"]
    amount = decode_transfer_value(data, decimals)

    # Zero amount also covers undecodable data (decode returns 0.0 on error)
    if amount == 0:
        return

    # Classify recipient against known DEX router / CEX wallet tables
    recipient_type, recipient_name = classify_recipient(to_addr)

    # If unknown, probe on-chain whether it's an LP pool (has token0())
    is_pool = False
    if recipient_type == "unknown":
        is_pool = await check_if_pool(to_addr)
        if is_pool:
            recipient_type = "pool"
            recipient_name = "LP Pool"

    # Determine if this is a "sell" action
    is_sell = recipient_type in ("dex", "cex", "pool")

    # If not a sell, just a regular transfer — log silently
    if not is_sell:
        log.info(f"[Transfer] {symbol} {amount:,.2f} → {shorten_addr(to_addr)} (regular transfer, not notifying)")
        return

    # Get price; usd_value stays None when no price is available
    token_price = await get_token_price_usd(token_addr)
    usd_value = amount * token_price if token_price else None

    # Below minimum notification amount — skip.
    # NOTE: when the price is unknown (usd_value is None) the threshold is
    # bypassed and the alert is sent anyway.
    if usd_value is not None and usd_value < MIN_NOTIFY_USD:
        log.info(f"[Sell] {symbol} ${usd_value:.0f} < ${MIN_NOTIFY_USD} minimum, skipping")
        return

    # Build notification message (HTML parse mode — see send_telegram)
    timestamp = datetime.now(timezone.utc).strftime("%H:%M:%S UTC")

    sell_type_label = {
        "dex": "🔄 DEX Sell",
        "cex": "🏦 CEX Transfer",
        "pool": "🌊 Pool Sell",
    }

    msg_lines = [
        f"<b>🚨 Vitalik Sell Signal</b>",
        f"",
        f"Token: <b>{symbol}</b>",
        f"Amount: {amount:,.4f}",
    ]

    if usd_value is not None:
        msg_lines.append(f"Value: <b>${usd_value:,.0f}</b>")

    if token_price is not None:
        msg_lines.append(f"Price: ${token_price:,.8f}")

    msg_lines.extend([
        f"",
        f"Type: {sell_type_label.get(recipient_type, 'Unknown')}",
        f"Destination: {recipient_name or shorten_addr(to_addr)}",
        f"Time: {timestamp}",
        f"",
        f"TX: https://etherscan.io/tx/{tx_hash}",
        f"Token: https://dexscreener.com/ethereum/{token_addr}",
    ])

    msg = "\n".join(msg_lines)
    log.info(f"[SELL DETECTED] {symbol} {amount:,.4f} → {recipient_name or to_addr} | ${usd_value or '?'}")
    await send_telegram(msg)
427
428# ============================================================
429# WebSocket listener main loop
430# ============================================================
431
async def subscribe_and_listen(ws_url: str):
    """Connect to one WebSocket RPC endpoint and stream Vitalik Transfer events.

    Creates two eth_subscribe log subscriptions (FROM and TO Vitalik) and
    dispatches incoming notifications until the connection drops or the
    global ``running`` flag is cleared. Connection and subscribe errors
    propagate to main(), which handles reconnect/endpoint rotation.
    """
    log.info(f"Connecting to {ws_url}...")

    async with websockets.connect(
        ws_url,
        ping_interval=20,
        ping_timeout=30,
        close_timeout=10,
        max_size=2**20,  # 1MB cap per frame
    ) as ws:
        # Subscribe to Transfer FROM Vitalik (indexed topic[1] = padded address)
        sub_from = {
            "jsonrpc": "2.0", "id": 1,
            "method": "eth_subscribe",
            "params": ["logs", {
                "topics": [TRANSFER_TOPIC, VITALIK_PADDED]
            }]
        }
        await ws.send(json.dumps(sub_from))
        resp = await asyncio.wait_for(ws.recv(), timeout=10)
        data = json.loads(resp)
        if "error" in data:
            raise Exception(f"Subscribe failed: {data['error']}")
        sub_id_from = data.get("result", "")
        log.info(f"✅ Subscribed to Transfer FROM Vitalik (id={sub_id_from[:10]}...)")

        # Also subscribe to Transfer TO Vitalik (monitor buys/receives);
        # topic[1] is None (any sender), topic[2] = Vitalik
        sub_to = {
            "jsonrpc": "2.0", "id": 2,
            "method": "eth_subscribe",
            "params": ["logs", {
                "topics": [TRANSFER_TOPIC, None, VITALIK_PADDED]
            }]
        }
        await ws.send(json.dumps(sub_to))
        resp2 = await asyncio.wait_for(ws.recv(), timeout=10)
        data2 = json.loads(resp2)
        sub_id_to = data2.get("result", "")
        # NOTE(review): unlike the first subscription, an "error" reply here
        # is not raised — a failed TO-subscription just leaves sub_id_to empty
        # and receive events are silently absent.
        if sub_id_to:
            log.info(f"✅ Subscribed to Transfer TO Vitalik (id={sub_id_to[:10]}...)")

        log.info("🔍 Monitoring Vitalik wallet... waiting for events")

        # Listen for events; per-event errors are logged, not fatal
        async for message in ws:
            if not running:
                break

            try:
                evt = json.loads(message)

                # Subscription notifications carry "params"; anything else
                # (acks, unrelated replies) is skipped
                if "params" not in evt:
                    continue

                sub_id = evt["params"].get("subscription", "")
                log_entry = evt["params"].get("result", {})

                if sub_id == sub_id_from:
                    # Vitalik sent tokens → check if it's a sell
                    await handle_transfer_event(log_entry)
                elif sub_id == sub_id_to:
                    # Vitalik received tokens — log silently at debug level
                    token_addr = log_entry.get("address", "")
                    token_info = await get_token_info(token_addr)
                    data_hex = log_entry.get("data", "0x0")
                    amount = decode_transfer_value(data_hex, token_info["decimals"])
                    log.debug(f"[Receive] {token_info['symbol']} +{amount:,.4f}")

            except json.JSONDecodeError:
                continue
            except Exception as e:
                log.error(f"Event handling error: {e}", exc_info=True)
505
async def main():
    """Entry point: run the WebSocket listener with auto-reconnect.

    Rotates through WS_ENDPOINTS on failure with a multiplicative backoff
    (x1.5, capped at MAX_RECONNECT_DELAY) and shuts down cleanly on
    SIGINT/SIGTERM via the module-level ``running`` flag.
    """
    global running

    # Validate required config — missing values only disable Telegram,
    # monitoring itself still runs
    if not TG_BOT_TOKEN:
        log.warning("TG_BOT_TOKEN not set — Telegram notifications disabled")
    if not TG_CHAT_ID:
        log.warning("TG_CHAT_ID not set — Telegram notifications disabled")

    # Signal handling: flip the flag; loops exit at their next check
    def shutdown(sig, frame):
        global running
        log.info(f"Received signal {sig}, shutting down...")
        running = False

    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGTERM, shutdown)

    # Startup notice
    log.info("=" * 50)
    log.info("Vitalik Sell Radar — Started")
    log.info(f"Monitoring: {VITALIK_ADDRESS}")
    log.info(f"Min notify: ${MIN_NOTIFY_USD}")
    log.info(f"Telegram: {'✅ Configured' if TG_BOT_TOKEN else '❌ Not configured'}")
    log.info("=" * 50)

    if TG_BOT_TOKEN and TG_CHAT_ID:
        await send_telegram(
            "🟢 Vitalik Sell Radar — Started\n\n"
            f"Monitoring: {shorten_addr(VITALIK_ADDRESS)}\n"
            f"Min notify: ${MIN_NOTIFY_USD}\n"
            "Mode: WebSocket event-driven (sub-second latency)"
        )

    endpoint_idx = 0
    reconnect_delay = RECONNECT_DELAY

    while running:
        ws_url = WS_ENDPOINTS[endpoint_idx % len(WS_ENDPOINTS)]
        try:
            await subscribe_and_listen(ws_url)
            reconnect_delay = RECONNECT_DELAY  # reset on success
        except websockets.exceptions.ConnectionClosed as e:
            log.warning(f"WebSocket closed: {e}")
        except asyncio.TimeoutError:
            log.warning("WebSocket timeout")
        except Exception as e:
            log.error(f"WebSocket error: {e}")

        if not running:
            break

        # Switch endpoint and retry with growing delay
        endpoint_idx += 1
        log.info(f"Reconnecting in {reconnect_delay}s... (next: {WS_ENDPOINTS[endpoint_idx % len(WS_ENDPOINTS)]})")
        await asyncio.sleep(reconnect_delay)
        reconnect_delay = min(reconnect_delay * 1.5, MAX_RECONNECT_DELAY)

    # Cleanup: close the shared aiohttp session
    if http_session and not http_session.closed:
        await http_session.close()

    log.info("Vitalik Sell Radar — Stopped")


if __name__ == "__main__":
    asyncio.run(main())
On-Chain Narrative Radar
Date: 2026.04.28 Tags: Python · GMGN · DEXScreener · Telegram
Momentum-driven on-chain token discovery across ETH/SOL/BSC/Base
Momentum is the only push engine — narrative is just a classification label. Scans every 30 seconds across 4 chains. Tokens must show 3 consecutive rounds of market cap increase with ≥5% total gain to trigger an alert. Narratives (Musk/Trump, Binance/CZ, celebrity) are classified as ★★★/★★/★ labels but never trigger pushes on their own. Safety checks via RugCheck (SOL) and GoPlus (EVM). Pure Python, zero AI cost.
Full source code
#!/usr/bin/env python3
"""
On-Chain Narrative Radar — v1

Momentum-driven token discovery. Pure Python, zero AI cost
(keyword matching + heuristics; the original docstring was partially
garbled in the published source).

Narrative categories are classification labels only (e.g. Musk/Trump,
Binance/CZ on BSC, celebrity/viral); momentum is the push trigger.

Data sources: GMGN + DEXScreener search.
"""
10
11import requests
12import json
13import time
14import os
15import re
16import sqlite3
17import hashlib
18from datetime import datetime, timedelta
19from pathlib import Path
20from difflib import SequenceMatcher
21
# === Configuration ===
DATA_DIR = os.path.expanduser("~/crypto-trading")
DB_FILE = os.path.join(DATA_DIR, "narrative_history.db")
LOG_FILE = os.path.join(DATA_DIR, "narrative_radar.log")
SEEN_FILE = os.path.join(DATA_DIR, "narrative_seen.json")
FLAP_SEEN_FILE = os.path.join(DATA_DIR, "flap_seen.json")

# Scan period in seconds (original note about GMGN refresh cadence was
# garbled in the published source)
SCAN_INTERVAL = 30

# Per-token momentum history:
# {address: [{'ts': timestamp, 'mc': market_cap, 'vol': volume, 'price': price}, ...]}
MOMENTUM_TRACKER = {}
# Push bookkeeping: {address: {'count': N, 'last_ts': ts, 'last_mc': mc}}
MOMENTUM_PUSHED = {}
# Consecutive rising market-cap rounds required before a push
MOMENTUM_CONSECUTIVE_UP = 3
35
# Read Telegram configuration from ~/.env
def load_env():
    """Parse KEY=VALUE lines from ~/.env into a dict.

    Lines starting with '#' or lacking '=' are ignored; values keep any
    surrounding whitespace after the first '='.
    """
    values = {}
    path = os.path.expanduser("~/.env")
    if not os.path.exists(path):
        return values
    with open(path) as fh:
        for raw in fh:
            entry = raw.strip()
            if '=' not in entry or entry.startswith('#'):
                continue
            key, val = entry.split('=', 1)
            values[key] = val
    return values
48
ENV = load_env()
TG_TOKEN = ENV.get('TELEGRAM_BOT_TOKEN', '')
# NOTE(review): the token is read from ~/.env but the chat id from the
# process environment — confirm this asymmetry is intentional. int() will
# raise at import time if TG_CHAT_ID is set but non-numeric.
TG_CHAT_ID = int(os.environ.get('TG_CHAT_ID', '0'))

# Browser-like headers sent with GMGN requests
GMGN_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36',
    'Accept': 'application/json',
    'Referer': 'https://gmgn.ai/',
}
58
# ============================================================
# Narrative keywords — Musk / Trump
# Plain substrings first; regex word-boundary patterns are the fallback
# (see classify_narrative: patterns run only if no keyword matched).
# ============================================================
MUSK_TRUMP_KEYWORDS = {
    'musk', 'elon', 'elonmusk',
    # SpaceX/Tesla/X
    'spacex', 'starship', 'tesla', 'cybertruck', 'roadster',
    'neuralink', 'boring', 'hyperloop', 'xai', 'grok',
    'floki', 'shiba',  # (original annotation lost in the published source)
    'doge father', 'dogefather', 'technoking',
    'mars colony', 'mars',
    'trump', 'donald', 'maga', 'potus', 'trump47',
    'melania', 'barron', 'ivanka',
    'dark maga', 'darkmaga', 'ultra maga', 'save america',
    'truth social', 'covfefe',
    'doge department', 'd.o.g.e', 'government efficiency',
}

MUSK_TRUMP_PATTERNS = [
    r'\belon\b', r'\bmusk\b', r'\btrump\b', r'\bmaga\b',
    r'\bspacex\b', r'\bstarship\b', r'\btesla\b', r'\bgrok\b',
    r'\bmelania\b', r'\bbarron\b', r'\bdoge\s*department\b',
    r'\bd\.?o\.?g\.?e\b',  # D.O.G.E
    r'\bx\s*ai\b', r'\bneuralink\b',
]
83
# ============================================================
# Narrative keywords — Binance / CZ
# Fix: the published source contained empty-string keywords ('') and
# r'\b\b' patterns — artifacts of stripped non-ASCII (CJK) entries.
# '' is a substring of every string and r'\b\b' matches any word boundary,
# so EVERY token was classified as binance_cz. Both are removed; the
# duplicate 'Binance' entry (matching is lowercased) is dropped too.
# ============================================================
BINANCE_CZ_KEYWORDS = {
    # CZ core
    'cz', 'changpeng', 'zhao', 'czb', 'czbinance',
    'heyi', 'yi he', 'he yi', 'yihe',
    'sister yi', 'yi jie',
    'binance', 'bnb', 'pancake', 'pancakeswap',
    'giggle academy', 'binance life', 'bnb chain',
    'principles', 'cz book',
    'yzi', 'yzi labs',
    'fourmeme', 'four meme', '4meme',
    'czs dog', 'cz dog', 'bnb dog',
    'build on bnb', 'bnb ecosystem',
}

BINANCE_CZ_PATTERNS = [
    r'\bcz\b', r'\bbinance\b', r'\bbnb\b',
    r'\bheyi\b', r'\byi\s*he\b', r'\bhe\s*yi\b',
    r'\bpancake\b', r'\bgiggle\b', r'\byzi\b',
    r'\bfourmeme\b', r'\b4meme\b',
]
108
# ============================================================
# Narrative keywords — celebrities / viral themes
# Fix: empty-string entries ('') — artifacts of stripped non-ASCII
# keywords in the published source — matched every token name and are
# removed (''.lower() in text is always True).
# ============================================================
CELEBRITY_VIRAL_KEYWORDS = {
    'vitalik', 'buterin', 'sam altman', 'satoshi',
    'michael saylor', 'saylor', 'cathie wood',
    'jack dorsey', 'zuckerberg', 'bezos',
    'jensen huang', 'nvidia', 'tim cook',
    'justin sun', 'sun yuchen', 'tron',
    'arthur hayes', 'su zhu', '3ac',
    'brian armstrong', 'coinbase',
    'larry fink', 'blackrock',
    'gary gensler', 'sec',
    'michael novogratz', 'galaxy',
    'biden', 'obama', 'putin', 'xi jinping',
    'kanye', 'drake', 'snoop dogg', 'paris hilton',
    'mark cuban', 'mr beast', 'mrbeast',
    'lobster', 'lobsta',
    'hawk tuah', 'griddy', 'skibidi',
    'rizz', 'sigma', 'gyatt',
    'etf', 'halving',
    'world war', 'wwiii',
    'fed', 'rate cut',
    'tiktok ban', 'tiktok',
}

CELEBRITY_VIRAL_PATTERNS = [
    r'\bvitalik\b', r'\bsaylor\b', r'\bblackrock\b',
    r'\bcoinbase\b', r'\bjustin\s*sun\b', r'\blobster\b',
    r'\betf\b', r'\bhalving\b', r'\bmrbeast\b',
    r'\bsnoop\b', r'\bkanye\b', r'\bdrake\b',
]
140
# ============================================================
# Filters — spam regexes (hard reject) and noise words (ignored in themes)
# ============================================================
SPAM_PATTERNS = [
    r'airdrop', r'presale', r'pre\s*sale',
    r'1000x', r'100x guaranteed',
    r'safe\s*moon', r'baby\s*\w+',  # "baby<anything>" clones, e.g. babydoge
    r'pornhub', r'porn', r'xxx', r'nsfw',
    r'nigga', r'nigger', r'faggot',
    r'scam', r'rugpull', r'rug\s*pull',
    r'official\s*token', r'official\s*coin',
]

# Generic words that carry no narrative signal on their own
COMMON_NOISE_WORDS = {
    'nice', 'good', 'bad', 'cool', 'hot', 'big', 'small',
    'life', 'love', 'hate', 'happy', 'sad', 'fun', 'lol',
    'cat', 'dog', 'moon', 'sun', 'star', 'king', 'queen',
    'gold', 'rich', 'cash', 'money', 'pay', 'buy', 'sell',
    'pump', 'dump', 'bull', 'bear', 'green', 'red',
    'hello', 'world', 'yes', 'no', 'wow', 'omg', 'lmao',
    'simp', 'chad', 'based', 'cope', 'seethe',
    'test', 'new', 'old', 'real', 'fake',
    'shit', 'shitcoin', 'fuck', 'fart', 'poop', 'pee',
    'cum', 'dick', 'ass', 'boob', 'tit',
    'nigga', 'retard', 'slop',
    'the', 'and', 'for', 'from', 'with', 'this', 'that',
    'coin', 'token', 'meme', 'pepe', 'wojak',
    'peg', 'usd', 'usdt', 'usdc', 'dai',
}
169
# ============================================================
# Utility functions
# ============================================================
def log(msg):
    """Print *msg* with a timestamp and append the same line to LOG_FILE."""
    stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    entry = f"[{stamp}] {msg}"
    print(entry)
    os.makedirs(DATA_DIR, exist_ok=True)
    with open(LOG_FILE, 'a') as fh:
        fh.write(entry + '\n')
180
def load_flap_seen():
    """Load the flap-dedup map ({address: unix_ts}) from FLAP_SEEN_FILE.

    Returns {} when the file is missing, unreadable, or not valid JSON.
    Fix: the original bare ``except`` is narrowed to the errors open()/
    json.load() actually raise (json.JSONDecodeError is a ValueError).
    """
    if os.path.exists(FLAP_SEEN_FILE):
        try:
            with open(FLAP_SEEN_FILE) as f:
                return json.load(f)
        except (OSError, ValueError):
            pass
    return {}
189
def save_flap_seen(data):
    """Persist the flap-dedup map, dropping entries older than 7 days."""
    cutoff = int(time.time()) - 86400 * 7
    fresh = {addr: ts for addr, ts in data.items() if ts > cutoff}
    with open(FLAP_SEEN_FILE, 'w') as f:
        json.dump(fresh, f)
195
def tg_send(text, parse_mode='Markdown'):
    """Send a Telegram message; return True on apparent success.

    On a Markdown parse error ("can't parse") the message is resent once as
    plain text. NOTE: the plain-text retry's own result is not checked — the
    function returns True even if that retry fails.
    """
    if not TG_TOKEN:
        log(f"[TG] No token, skip: {text[:80]}")
        return False
    try:
        resp = requests.post(
            f'https://api.telegram.org/bot{TG_TOKEN}/sendMessage',
            json={'chat_id': TG_CHAT_ID, 'text': text, 'parse_mode': parse_mode},
            timeout=10
        )
        result = resp.json()
        if not result.get('ok'):
            if 'can\'t parse' in str(result.get('description', '')).lower():
                # Markdown failed — retry without parse_mode (plain text)
                resp = requests.post(
                    f'https://api.telegram.org/bot{TG_TOKEN}/sendMessage',
                    json={'chat_id': TG_CHAT_ID, 'text': text},
                    timeout=10
                )
            else:
                log(f"[TG] Error: {result.get('description', '')}")
                return False
        return True
    except Exception as e:
        log(f"[TG] Send error: {e}")
        return False
221
# ============================================================
# Persistence (SQLite)
# ============================================================
def init_db():
    """Create the SQLite schema if needed and return the open connection.

    Tables:
      narratives  — one row per normalized theme: first token that carried
                    it, how many tokens matched it, first/last sighting.
      tokens_seen — one row per token address: metadata, category label,
                    market cap, whether it was pushed, how often seen.

    NOTE: the inline "--" SQL comments were garbled in the published source
    and are kept verbatim (changing them would alter the SQL strings).
    """
    conn = sqlite3.connect(DB_FILE)
    c = conn.cursor()

    c.execute('''CREATE TABLE IF NOT EXISTS narratives (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        theme TEXT NOT NULL, -- ()
        first_token_name TEXT, -- token
        first_token_address TEXT, -- address
        first_chain TEXT, --
        first_seen_at INTEGER, --
        token_count INTEGER DEFAULT 1, --
        last_seen_at INTEGER --
    )''')

    c.execute('''CREATE TABLE IF NOT EXISTS tokens_seen (
        address TEXT PRIMARY KEY,
        chain TEXT,
        name TEXT,
        symbol TEXT,
        narrative_theme TEXT,
        category TEXT, -- 'musk_trump' / 'binance_cz' / 'novel' / 'common'
        first_seen_at INTEGER,
        market_cap REAL,
        pushed INTEGER DEFAULT 0, --
        seen_count INTEGER DEFAULT 1 --
    )''')

    # Lookup indexes for theme and address queries
    c.execute('CREATE INDEX IF NOT EXISTS idx_theme ON narratives(theme)')
    c.execute('CREATE INDEX IF NOT EXISTS idx_addr ON tokens_seen(address)')

    conn.commit()
    return conn
259
def normalize_theme(name, symbol):
    """Normalize a token name+symbol into a canonical theme string.

    Examples:
        'TRUMP2028'        -> 'trump'
        'PancakeBunny'     -> 'bunny pancake'    (words de-duped and sorted)
        'Elon Mars Colony' -> 'colony elon mars'

    Fix vs. original: camelCase is split BEFORE lowercasing. The original
    lowercased first, so the ([a-z])([A-Z]) split could never match and
    'PancakeBunny' stayed one word, contradicting its own docstring.
    Falls back to the lowercased raw name when nothing survives filtering.
    """
    noise = ['token', 'coin', 'inu', 'swap', 'finance', 'protocol',
             'dao', 'defi', 'nft', 'meta', 'verse', 'fi', 'ai',
             'pepe', 'wojak', 'chad', 'based']

    text = f"{name} {symbol}"
    # Split camelCase while case information is still present
    text = re.sub(r'([a-z])([A-Z])', r'\1 \2', text)
    text = text.lower().strip()
    text = re.sub(r'\d+x?', '', text)      # drop digits and '100x'-style suffixes
    text = re.sub(r'[^a-z\s]', ' ', text)  # strip punctuation / non-latin chars
    words = [w for w in text.split() if len(w) > 1 and w not in noise]

    if not words:
        return name.lower().strip()

    return ' '.join(sorted(set(words)))
282
def is_similar_theme(theme1, theme2, threshold=0.7):
    """Return True when two normalized themes look like the same narrative.

    Matches on: exact equality, substring containment, >=60% word overlap
    (relative to the smaller word set), or SequenceMatcher ratio >= threshold.
    """
    if theme1 == theme2 or theme1 in theme2 or theme2 in theme1:
        return True
    w1, w2 = set(theme1.split()), set(theme2.split())
    if w1 and w2:
        overlap_ratio = len(w1 & w2) / min(len(w1), len(w2))
        if overlap_ratio >= 0.6:
            return True
    return SequenceMatcher(None, theme1, theme2).ratio() >= threshold
295
def check_narrative_novelty(conn, theme, name, symbol, address, chain):
    """Decide whether this token's theme is new, heating up, or known.

    Returns one of:
      ('novel', None)   — first sighting; a narratives row is inserted
      ('heating', row)  — the same/similar theme reached HEAT_THRESHOLD
                          tokens within HEAT_WINDOW seconds
      ('existing', row) — theme known but not currently heating

    Every call that matches an existing theme bumps its token_count and
    last_seen_at before the heating decision is made.
    """
    c = conn.cursor()
    now = int(time.time())
    HEAT_WINDOW = 1800  # 30 minutes
    HEAT_THRESHOLD = 2  # >= 2 tokens on one theme counts as heating

    # 1) Exact theme match
    c.execute('SELECT id, theme, first_token_name, first_token_address, first_chain, first_seen_at, token_count, last_seen_at FROM narratives WHERE theme = ?', (theme,))
    exact = c.fetchone()
    if exact:
        row_id, _, _, _, _, first_seen, count, last_seen = exact
        new_count = count + 1
        c.execute('UPDATE narratives SET token_count = ?, last_seen_at = ? WHERE theme = ?',
                  (new_count, now, theme))
        conn.commit()

        if now - first_seen < HEAT_WINDOW and new_count >= HEAT_THRESHOLD:
            return ('heating', exact)
        # NOTE(review): second check differs only in using last_seen —
        # either recency window qualifying marks the theme as heating.
        if now - last_seen < HEAT_WINDOW and new_count >= HEAT_THRESHOLD:
            return ('heating', exact)

        return ('existing', exact)

    # 2) Fuzzy match against the 1000 most recently active themes
    c.execute('SELECT id, theme, first_token_name, first_token_address, first_chain, first_seen_at, token_count, last_seen_at FROM narratives ORDER BY last_seen_at DESC LIMIT 1000')
    for row in c.fetchall():
        if is_similar_theme(theme, row[1]):
            row_id, _, _, _, _, first_seen, count, last_seen = row
            new_count = count + 1
            c.execute('UPDATE narratives SET token_count = ?, last_seen_at = ? WHERE id = ?',
                      (new_count, now, row[0]))
            conn.commit()

            if now - last_seen < HEAT_WINDOW and new_count >= HEAT_THRESHOLD:
                return ('heating', row)

            return ('existing', row)

    # 3) Genuinely new theme — record it
    c.execute('''INSERT INTO narratives (theme, first_token_name, first_token_address, first_chain, first_seen_at, last_seen_at)
        VALUES (?, ?, ?, ?, ?, ?)''',
        (theme, name, address, chain, now, now))
    conn.commit()
    return ('novel', None)
344
def get_token_seen_count(conn, address):
    """Return how many times *address* has been recorded (0 if never seen)."""
    cur = conn.cursor()
    cur.execute('SELECT seen_count FROM tokens_seen WHERE address = ?', (address,))
    hit = cur.fetchone()
    return hit[0] if hit else 0
350
def is_token_seen(conn, address):
    """Return True when *address* already has a row in tokens_seen."""
    cur = conn.cursor()
    cur.execute('SELECT address FROM tokens_seen WHERE address = ?', (address,))
    found = cur.fetchone()
    return found is not None
355
def record_token(conn, address, chain, name, symbol, theme, category, mc, pushed=False):
    """Insert a newly seen token, or bump seen_count/market_cap/category for a known one.

    The `pushed` flag is only persisted on first insert; updates leave it untouched.
    """
    cur = conn.cursor()
    cur.execute('SELECT seen_count FROM tokens_seen WHERE address = ?', (address,))
    prior = cur.fetchone()
    if prior is None:
        cur.execute(
            '''INSERT INTO tokens_seen
                         (address, chain, name, symbol, narrative_theme, category, first_seen_at, market_cap, pushed, seen_count)
                         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1)''',
            (address, chain, name, symbol, theme, category, int(time.time()), mc, 1 if pushed else 0))
    else:
        cur.execute(
            '''UPDATE tokens_seen SET seen_count = ?, market_cap = ?, category = ?
                         WHERE address = ?''',
            (prior[0] + 1, mc, category, address))
    conn.commit()
370
371# ============================================================
372# ============================================================
def classify_narrative(name, symbol, chain):
    """
    Classify a token by its name/symbol text.

    Returns a (category, matched_keywords) pair:
    ('spam', None) / ('musk_trump', [...]) / ('binance_cz', [...]) /
    ('binance_cz_wrong_chain', [...]) / ('celebrity_viral', [...]) /
    ('check_novelty', None) when no keyword family matches.
    """
    text = f"{name} {symbol}".lower()

    # Spam patterns short-circuit everything else.
    if any(re.search(pat, text, re.IGNORECASE) for pat in SPAM_PATTERNS):
        return ('spam', None)

    def _hits(keywords, patterns):
        # Keyword substring matches take priority; regex patterns are the
        # fallback (every matching pattern contributes its first match).
        found = [kw for kw in keywords if kw.lower() in text]
        if not found:
            for pat in patterns:
                m = re.search(pat, text, re.IGNORECASE)
                if m:
                    found.append(m.group())
        return found

    matched_mt = _hits(MUSK_TRUMP_KEYWORDS, MUSK_TRUMP_PATTERNS)
    if matched_mt and chain.lower() in ('eth', 'ethereum', 'sol', 'solana', 'bsc', 'base'):
        # NOTE: on any other chain a Musk/Trump match silently falls through
        # to the remaining classifiers (original behavior, preserved).
        return ('musk_trump', matched_mt)

    matched_bc = _hits(BINANCE_CZ_KEYWORDS, BINANCE_CZ_PATTERNS)
    if matched_bc:
        # Binance/CZ narratives are only treated as credible on BSC.
        if chain.lower() == 'bsc':
            return ('binance_cz', matched_bc)
        return ('binance_cz_wrong_chain', matched_bc)

    matched_cv = _hits(CELEBRITY_VIRAL_KEYWORDS, CELEBRITY_VIRAL_PATTERNS)
    if matched_cv:
        return ('celebrity_viral', matched_cv)

    return ('check_novelty', None)
430
431# ============================================================
432# ============================================================
def check_token_safety(chain, address):
    """
    Best-effort contract-safety probe.

    Solana tokens are checked via rugcheck.xyz; EVM chains via GoPlus.
    Always returns a dict — on any API failure it returns
    {'safe': False, 'error': True} so callers can do safety.get('safe')
    without crashing. (The original implicitly returned None on failure,
    which made the caller's .get() raise AttributeError.)
    """
    if chain in ('sol', 'solana'):
        try:
            r = requests.get(f'https://api.rugcheck.xyz/v1/tokens/{address}/report', timeout=10)
            if r.status_code == 200:
                data = r.json()
                score = data.get('score', 999)
                mint = data.get('mintAuthority')
                freeze = data.get('freezeAuthority')
                # Safe only when both mint and freeze authorities are renounced.
                return {
                    'safe': not mint and not freeze,
                    'score': score, 'mint': mint is not None,
                    'freeze': freeze is not None
                }
        except Exception:
            pass
    else:
        chain_map = {'ethereum': '1', 'eth': '1', 'bsc': '56', 'base': '8453'}
        cid = chain_map.get(chain, '1')  # default to Ethereum mainnet id
        try:
            r = requests.get(f'https://api.gopluslabs.io/api/v1/token_security/{cid}?contract_addresses={address}', timeout=10)
            if r.status_code == 200:
                result = r.json().get('result', {})
                data = result.get(address.lower(), {})
                if data:
                    honeypot = data.get('is_honeypot', '0') == '1'
                    mintable = data.get('is_mintable', '0') == '1'
                    sell_tax = float(data.get('sell_tax', '0') or '0')
                    buy_tax = float(data.get('buy_tax', '0') or '0')
                    return {
                        'safe': not honeypot and not mintable,  # taxes reported but not gating
                        'honeypot': honeypot, 'mintable': mintable,
                        'sell_tax': sell_tax, 'buy_tax': buy_tax
                    }
        except Exception:
            pass
    # Fail closed: unknown safety is treated as unsafe.
    return {'safe': False, 'error': True}
469
470# ============================================================
471# ============================================================
def gmgn_get(url):
    """GET a GMGN endpoint and return its 'data' payload; {} on any failure."""
    try:
        resp = requests.get(url, headers=GMGN_HEADERS, timeout=15)
        if resp.status_code == 200:
            return resp.json().get('data', {})
    except Exception:
        pass
    return {}
480
def fetch_token_description(chain, address):
    """
    Fetch a token's description and social links (core radar input).

    Solana tokens are looked up on pump.fun first; anything else — including
    a failed pump.fun lookup — falls back to DexScreener (which carries
    socials/websites but no long-form description). Always returns a dict
    with 'description', 'twitter', 'telegram' and 'website' keys.

    Cleanup vs original: removed the unused `chain_dex` local and the
    always-true `if not desc:` guard (desc can only be '' at that point,
    since the pump.fun success path returns early).
    """
    if chain in ('sol', 'solana'):
        try:
            r = requests.get(f'https://frontend-api-v3.pump.fun/coins/{address}', timeout=8)
            if r.status_code == 200:
                data = r.json()
                return {
                    'description': (data.get('description', '') or '').strip(),
                    'twitter': data.get('twitter', '') or '',
                    'telegram': data.get('telegram', '') or '',
                    'website': data.get('website', '') or '',
                }
        except Exception:
            pass

    try:
        r = requests.get(f'https://api.dexscreener.com/latest/dex/tokens/{address}', timeout=8)
        if r.status_code == 200:
            pairs = r.json().get('pairs', [])
            if pairs:
                info = pairs[0].get('info', {})
                twitter = ''
                telegram = ''
                website = ''
                for s in info.get('socials', []):
                    if s.get('type') == 'twitter':
                        twitter = s.get('url', '')
                    elif s.get('type') == 'telegram':
                        telegram = s.get('url', '')
                for w in info.get('websites', []):
                    if w.get('label', '').lower() == 'website':
                        website = w.get('url', '')
                return {
                    'description': '',
                    'twitter': twitter,
                    'telegram': telegram,
                    'website': website,
                }
    except Exception:
        pass

    return {'description': '', 'twitter': '', 'telegram': '', 'website': ''}
535
def fetch_new_tokens():
    """Pull candidate tokens from GMGN rank endpoints (multi-chain, two sort orders)."""
    collected = []
    seen = set()

    for chain in ['eth', 'bsc', 'base']:
        endpoints = [
            f'https://gmgn.ai/defi/quotation/v1/rank/{chain}/swaps/1h?orderby=open_timestamp&direction=desc&limit=100',
            f'https://gmgn.ai/defi/quotation/v1/rank/{chain}/swaps/1h?orderby=swaps&direction=desc&limit=50',
        ]

        for url in endpoints:
            for t in gmgn_get(url).get('rank', []):
                addr = t.get('address', '')
                if not addr or addr in seen:
                    continue

                mc = t.get('market_cap', 0) or t.get('fdv', 0) or 0
                liq = t.get('liquidity', 0) or 0

                # Discard dust and anything already above a $10M cap.
                if mc < 1000 or liq < 500 or mc > 10000000:
                    continue

                age_ts = t.get('open_timestamp', 0)
                age_h = (time.time() - age_ts) / 3600 if age_ts > 0 else 999

                seen.add(addr)
                collected.append({
                    'address': addr,
                    'chain': chain,
                    'name': t.get('name', '?'),
                    'symbol': t.get('symbol', '?'),
                    'mc': mc,
                    'liq': liq,
                    'volume': t.get('volume', 0) or 0,
                    'holders': t.get('holder_count', 0) or 0,
                    'sm': t.get('smart_degen_count', 0) or 0,
                    'chg_1h': t.get('price_change_percent1h', 0) or 0,
                    'chg_24h': t.get('price_change_percent', 0) or 0,
                    'age_h': age_h,
                    'price': t.get('price', 0),
                    'buys_1h': t.get('buys', 0) or 0,
                    'sells_1h': t.get('sells', 0) or 0,
                })

            time.sleep(0.3)  # stay polite to the API between endpoint calls

    return collected
588
def fetch_flap_tokens():
    """
    Scan the FLAP launchpad board on BSC for community-supported tokens.

    A token qualifies when one of three "support" heuristics fires (holding up
    after a 24h drop, steady buying pressure, or a sharp bounce) AND buys are
    at least equal to sells. Returns candidates sorted by market cap, desc.
    """
    board = gmgn_get(
        'https://gmgn.ai/defi/quotation/v1/rank/bsc/swaps/24h?launchpad=flap&orderby=volume&direction=desc&limit=30'
    )

    picks = []
    for t in board.get('rank', []):
        addr = t.get('address', '')
        if not addr:
            continue

        mc = t.get('market_cap', 0) or 0
        liq = t.get('liquidity', 0) or 0
        vol = t.get('volume', 0) or 0
        holders = t.get('holder_count', 0) or 0
        buys = t.get('buys', 0) or 0
        sells = t.get('sells', 0) or 0
        chg_1h = t.get('price_change_percent1h', 0) or 0
        chg_24h = t.get('price_change_percent', 0) or 0
        age_ts = t.get('open_timestamp', 0)
        age_h = (time.time() - age_ts) / 3600 if age_ts > 0 else 0

        # Minimum-viability gates.
        if mc < 1000 or liq < 500 or holders < 5:
            continue

        buy_ratio = buys / max(sells, 1)

        # Three support heuristics; a later match overwrites the reason.
        is_support = False
        reason = ''
        if chg_24h < -10 and chg_1h > chg_24h * 0.3:
            is_support = True
            reason = f'24h{chg_24h:.0f}%but 1hstabilize{chg_1h:+.0f}%'
        if -10 <= chg_24h <= 30 and chg_1h > -5 and buy_ratio > 1.1:
            is_support = True
            reason = f' {buy_ratio:.2f}'
        if chg_24h < -30 and chg_1h > 10:
            is_support = True
            reason = f'{chg_24h:.0f}%bounce{chg_1h:+.0f}%'

        if is_support and buy_ratio >= 1.0:
            picks.append({
                'address': addr,
                'chain': 'bsc',
                'name': t.get('name', '?'),
                'symbol': t.get('symbol', '?'),
                'mc': mc,
                'liq': liq,
                'volume': vol,
                'holders': holders,
                'sm': 0,
                'chg_1h': chg_1h,
                'chg_24h': chg_24h,
                'age_h': age_h,
                'price': t.get('price', 0),
                'buys': buys,
                'sells': sells,
                'buy_ratio': buy_ratio,
                'support_reason': reason,
                'launchpad': 'flap',
            })

    picks.sort(key=lambda x: x['mc'], reverse=True)
    return picks
662
def format_flap_alert(token, desc_info=None):
    """Render the Telegram message for a FLAP community-support signal."""
    info = desc_info or {}
    parts = [
        f"radar — FLAP\n",
        f": BSC | platform: FLAP\n\n",
        f"{token['name']} ({token['symbol']})\n",
        f"`{token['address']}`\n\n",
    ]

    desc = info.get('description', '')
    if desc:
        if len(desc) > 200:
            desc = desc[:200] + '...'  # keep messages compact
        parts.append(f": {desc}\n\n")

    parts.append(f": {token['support_reason']}\n\n")
    parts.append(
        "```\n"
        f" ${token['mc']:>12,.0f}\n"
        f"liquidity ${token['liq']:>12,.0f}\n"
        f"24h ${token['volume']:>12,.0f}\n"
        f" {token['holders']:>12,d}\n"
        f"/ {token['buys']:>6,d}/{token['sells']:>6,d}\n"
        f" {token['buy_ratio']:>12.2f}\n"
        f"1h {token['chg_1h']:>+11.1f}%\n"
        f"24h {token['chg_24h']:>+11.1f}%\n"
        "```\n"
    )
    parts.append("\nFLAPcommunity — ")

    links = []
    if info.get('twitter'):
        links.append(f"\nTwitter: {info['twitter']}")
    if info.get('telegram'):
        links.append(f"TG: {info['telegram']}")
    if info.get('website'):
        links.append(f"Web: {info['website']}")
    if links:
        parts.append('\n'.join(links))

    return ''.join(parts)
699
700# ============================================================
701# ============================================================
def format_musk_trump_alert(token, matched_kw, desc_info=None):
    """
    Render the Telegram message for a Musk/Trump narrative match.

    Bug fix: the original body started with `msg += ...` without ever
    initializing `msg`, so every call raised NameError. We now seed it
    with a header line in the style of the sibling format_* functions
    (the original header text appears to have been lost; adjust wording
    as desired).
    """
    chain_map = {'sol': 'SOL', 'eth': 'ETH', 'bsc': 'BSC', 'base': 'BASE'}
    ch = chain_map.get(token['chain'], token['chain'].upper())

    msg = f"radar — Musk/Trump\n"
    msg += f": {ch}\n\n"
    msg += f"{token['name']} ({token['symbol']})\n"
    msg += f"`{token['address']}`\n\n"

    desc = (desc_info or {}).get('description', '')
    if desc:
        if len(desc) > 200:
            desc = desc[:200] + '...'
        msg += f": {desc}\n\n"

    msg += f"key : {', '.join(matched_kw[:5])}\n\n"
    msg += f"```\n"
    msg += f" ${token['mc']:>12,.0f}\n"
    msg += f"liquidity ${token['liq']:>12,.0f}\n"
    msg += f"1h {token['chg_1h']:>+11.1f}%\n"
    if token.get('sm', 0) > 0:
        msg += f" {token['sm']:>12d}\n"
    msg += f" {token['age_h']:>10.1f}h\n"
    msg += f"```\n"

    links = []
    if (desc_info or {}).get('twitter'):
        links.append(f"Twitter: {desc_info['twitter']}")
    if (desc_info or {}).get('telegram'):
        links.append(f"TG: {desc_info['telegram']}")
    if (desc_info or {}).get('website'):
        links.append(f"Web: {desc_info['website']}")
    if links:
        msg += '\n' + '\n'.join(links)

    return msg
737
def format_binance_cz_alert(token, matched_kw, desc_info=None):
    """Render the Telegram message for a Binance/CZ narrative match (BSC only)."""
    info = desc_info or {}
    out = [
        f"radar — Binance/CZ\n",
        f": BSC\n\n",
        f"{token['name']} ({token['symbol']})\n",
        f"`{token['address']}`\n\n",
    ]

    desc = info.get('description', '')
    if desc:
        if len(desc) > 200:
            desc = desc[:200] + '...'
        out.append(f": {desc}\n\n")

    out.append(f"key : {', '.join(matched_kw[:5])}\n\n")
    out.append(
        "```\n"
        f" ${token['mc']:>12,.0f}\n"
        f"liquidity ${token['liq']:>12,.0f}\n"
        f"1h {token['chg_1h']:>+11.1f}%\n"
        f" {token['age_h']:>10.1f}h\n"
        "```\n"
    )

    links = []
    if info.get('twitter'):
        links.append(f"Twitter: {info['twitter']}")
    if info.get('telegram'):
        links.append(f"TG: {info['telegram']}")
    if info.get('website'):
        links.append(f"Web: {info['website']}")
    if links:
        out.append('\n' + '\n'.join(links))

    return ''.join(out)
770
def format_novel_narrative_alert(token, theme, desc_info=None):
    """A novel narrative renders exactly like a heating one with a count of 1."""
    return format_heating_narrative_alert(token, theme, 1, desc_info)
773
def format_heating_narrative_alert(token, theme, count, desc_info=None):
    """Render the Telegram message for a heating narrative.

    Note: `count` is part of the call signature but is not rendered anywhere.
    """
    info = desc_info or {}
    ch = {'sol': 'SOL', 'eth': 'ETH', 'bsc': 'BSC', 'base': 'BASE'}.get(
        token['chain'], token['chain'].upper())

    pieces = [
        f"radar — \n",
        f": {ch}\n\n",
        f"{token['name']} ({token['symbol']})\n",
        f"`{token['address']}`\n\n",
    ]

    desc = info.get('description', '')
    if desc:
        if len(desc) > 300:
            desc = desc[:300] + '...'
        pieces.append(f": {desc}\n\n")
    else:
        # Fall back to the normalized theme when no description exists.
        pieces.append(f": {theme}\n\n")

    pieces.append(f"```\n")
    pieces.append(f" ${token['mc']:>12,.0f}\n")
    pieces.append(f"liquidity ${token['liq']:>12,.0f}\n")
    pieces.append(f"1h {token['chg_1h']:>+11.1f}%\n")
    if token.get('sm', 0) > 0:
        pieces.append(f" {token['sm']:>12d}\n")
    pieces.append(f" {token['holders']:>12d}\n")
    pieces.append(f" {token['age_h']:>10.1f}h\n")
    pieces.append(f"```")

    links = []
    if info.get('twitter'):
        links.append(f"\nTwitter: {info['twitter']}")
    if info.get('telegram'):
        links.append(f"TG: {info['telegram']}")
    if info.get('website'):
        links.append(f"Web: {info['website']}")
    if links:
        pieces.append('\n'.join(links))

    return ''.join(pieces)
812
813# ============================================================
814# ============================================================
def track_momentum(tokens):
    """
    Track market-cap snapshots per token and emit alerts for tokens whose cap
    rose across MOMENTUM_CONSECUTIVE_UP consecutive scans with >=5% total gain.

    Side effects: mutates the module-level MOMENTUM_TRACKER (snapshot history)
    and MOMENTUM_PUSHED (per-address push bookkeeping).
    Returns a list of {'msg': ..., 'token': ...} alert payloads.

    Bug fix: the original loop unconditionally set consecutive_up = False and
    broke on the first comparison (the `total_gain += gain` line was
    unreachable), so no momentum alert could ever fire. The intended check —
    require each step's market cap to rise — is restored below.
    """
    global MOMENTUM_TRACKER, MOMENTUM_PUSHED
    now = time.time()
    alerts = []

    current_addrs = set()

    for token in tokens:
        addr = token['address']
        mc = token['mc']
        vol = token.get('volume', 0) or 0
        price = token.get('price', 0) or 0
        buys = token.get('buys_1h', 0) or token.get('buys', 0) or 0

        current_addrs.add(addr)

        # Same bounds as fetch_new_tokens: skip dust and >$10M caps.
        if mc < 1000 or token.get('liq', 0) < 500 or mc > 10000000:
            continue

        if addr not in MOMENTUM_TRACKER:
            MOMENTUM_TRACKER[addr] = []

        snapshots = MOMENTUM_TRACKER[addr]

        # Skip exact duplicates (stale API data between scans).
        if snapshots and snapshots[-1]['mc'] == mc and snapshots[-1]['vol'] == vol:
            continue

        snapshots.append({
            'ts': now,
            'mc': mc,
            'vol': vol,
            'price': price,
            'buys': buys,
        })

        # Keep only the 20 most recent snapshots.
        if len(snapshots) > 20:
            snapshots[:] = snapshots[-20:]

        if len(snapshots) < MOMENTUM_CONSECUTIVE_UP:
            continue

        recent = snapshots[-MOMENTUM_CONSECUTIVE_UP:]

        # Every step must strictly increase the market cap.
        consecutive_up = True
        for i in range(1, len(recent)):
            prev_mc = recent[i - 1]['mc']
            curr_mc = recent[i]['mc']
            if prev_mc <= 0 or curr_mc <= prev_mc:
                consecutive_up = False
                break

        if not consecutive_up:
            continue

        # Buy count should not collapse between snapshots (20% tolerance).
        vol_increasing = True
        for i in range(1, len(recent)):
            if recent[i]['buys'] < recent[i - 1]['buys'] * 0.8:
                vol_increasing = False
                break

        first_mc = recent[0]['mc']
        last_mc = recent[-1]['mc']
        pct_gain = ((last_mc - first_mc) / first_mc * 100) if first_mc > 0 else 0

        if pct_gain < 5:
            continue

        push_info = MOMENTUM_PUSHED.get(addr, {'count': 0, 'last_ts': 0, 'last_mc': 0})

        # Re-push only when the cap made a new high since the last push.
        if push_info['count'] > 0 and last_mc <= push_info['last_mc']:
            continue

        push_info['count'] += 1
        push_info['last_ts'] = now
        push_info['last_mc'] = last_mc
        signal_count = push_info['count']

        # Defensive: check_token_safety may return None on API failure.
        safety = check_token_safety(token['chain'], addr) or {}
        if not safety.get('safe'):
            continue

        category, matched_kw = classify_narrative(token['name'], token['symbol'], token['chain'])
        is_flap = token.get('launchpad') == 'flap'

        if category == 'musk_trump':
            stars = 3
            narrative_tag = f"/ ({', '.join(matched_kw[:3])})"
        elif category == 'binance_cz':
            stars = 3
            narrative_tag = f"Binance/CZ ({', '.join(matched_kw[:3])})"
        elif category == 'celebrity_viral':
            stars = 2
            narrative_tag = f"/ ({', '.join(matched_kw[:3])})"
        elif is_flap:
            stars = 2
            narrative_tag = "FLAPcommunity"
        else:
            theme = normalize_theme(token['name'], token['symbol'])
            theme_words = [w for w in theme.split() if w not in COMMON_NOISE_WORDS and len(w) > 2]
            if len(theme_words) >= 2:
                stars = 2
                narrative_tag = f": {theme}"
            else:
                stars = 1
                narrative_tag = ""

        desc_info = fetch_token_description(token['chain'], addr)

        if is_flap:
            # FLAP tokens earn a star bonus when they link out to a community.
            community_tags = []
            if desc_info.get('twitter'):
                community_tags.append("")
            if desc_info.get('telegram'):
                community_tags.append("TG")
            if desc_info.get('website'):
                community_tags.append("")
            if community_tags:
                narrative_tag += f" | {' '.join(community_tags)}"
                stars = min(3, stars + 1)
            else:
                narrative_tag += " | communityLinks"

        msg = format_momentum_alert(token, pct_gain, len(recent), vol_increasing, stars, narrative_tag, desc_info, signal_count)
        alerts.append({'msg': msg, 'token': token})
        MOMENTUM_PUSHED[addr] = push_info

        log(f"[{signal_count}] {token['name']} ({token['symbol']}) on {token['chain']} — {len(recent)} +{pct_gain:.1f}%")

    # Evict trackers for tokens absent from this scan for >10 minutes.
    stale = [a for a in MOMENTUM_TRACKER if a not in current_addrs]
    for a in stale:
        if now - MOMENTUM_TRACKER[a][-1]['ts'] > 600:
            del MOMENTUM_TRACKER[a]

    # Drop push records older than an hour.
    MOMENTUM_PUSHED = {k: v for k, v in MOMENTUM_PUSHED.items() if now - v.get('last_ts', 0) < 3600}

    return alerts
961
def format_momentum_alert(token, pct_gain, rounds, vol_up, stars, narrative_tag, desc_info=None, seen_count=0):
    """Render the Telegram message for a consecutive-rise momentum signal."""
    info = desc_info or {}
    ch = {'sol': 'SOL', 'eth': 'ETH', 'bsc': 'BSC', 'base': 'BASE'}.get(
        token['chain'], token['chain'].upper())

    # Both volume labels were stripped from the source text; kept verbatim.
    vol_tag = "" if vol_up else ""
    star_str = "★" * stars + "☆" * (3 - stars)

    out = [
        f"radar\n",
        f": {ch}\n\n",
        f"{token['name']} ({token['symbol']})\n",
        f"`{token['address']}`\n\n",
    ]

    desc = info.get('description', '')
    if desc:
        if len(desc) > 200:
            desc = desc[:200] + '...'
        out.append(f": {desc}\n\n")

    out.append(f": {narrative_tag}\n")
    out.append(f"{rounds} +{pct_gain:.1f}% {vol_tag}\n\n")
    out.append(f"```\n")
    out.append(f" ${token['mc']:>12,.0f}\n")
    out.append(f"liquidity ${token['liq']:>12,.0f}\n")
    out.append(f"1h {token['chg_1h']:>+11.1f}%\n")
    if token.get('sm', 0) > 0:
        out.append(f" {token['sm']:>12d}\n")
    out.append(f" {token['age_h']:>10.1f}h\n")
    out.append(f"```\n")
    out.append(f": {star_str} : {seen_count}")

    links = []
    if info.get('twitter'):
        links.append(f"\nTwitter: {info['twitter']}")
    if info.get('telegram'):
        links.append(f"TG: {info['telegram']}")
    if info.get('website'):
        links.append(f"Web: {info['website']}")
    if links:
        out.append('\n'.join(links))

    return ''.join(out)
1003
def format_celebrity_alert(token, matched_kw, desc_info=None):
    """Render the Telegram message for a celebrity/viral keyword match (2-star tier)."""
    info = desc_info or {}
    ch = {'sol': 'SOL', 'eth': 'ETH', 'bsc': 'BSC', 'base': 'BASE'}.get(
        token['chain'], token['chain'].upper())

    out = [
        f"radar — / ★★\n",
        f": {ch}\n\n",
        f"{token['name']} ({token['symbol']})\n",
        f"`{token['address']}`\n\n",
    ]

    desc = info.get('description', '')
    if desc:
        if len(desc) > 200:
            desc = desc[:200] + '...'
        out.append(f": {desc}\n\n")

    out.append(f"key : {', '.join(matched_kw[:5])}\n\n")
    out.append(f"```\n")
    out.append(f" ${token['mc']:>12,.0f}\n")
    out.append(f"liquidity ${token['liq']:>12,.0f}\n")
    out.append(f"1h {token['chg_1h']:>+11.1f}%\n")
    if token.get('sm', 0) > 0:
        out.append(f" {token['sm']:>12d}\n")
    out.append(f" {token['age_h']:>10.1f}h\n")
    out.append(f"```")

    # Note: unlike the sibling formatters, only Twitter/TG links are rendered.
    links = []
    if info.get('twitter'):
        links.append(f"\nTwitter: {info['twitter']}")
    if info.get('telegram'):
        links.append(f"TG: {info['telegram']}")
    if links:
        out.append('\n'.join(links))

    return ''.join(out)
1038
1039# ============================================================
1040# ============================================================
def scan_narratives():
    """One full scan cycle: fetch, momentum-track, classify, record, push alerts.

    Returns (pushed_count, total_momentum_alerts).
    """
    conn = init_db()
    tokens = fetch_new_tokens()

    log(f"scan {len(tokens)} ...")

    try:
        flap_tokens = fetch_flap_tokens()
    except Exception:
        flap_tokens = []  # FLAP board is best-effort

    momentum_alerts = track_momentum(tokens + flap_tokens)

    for token in tokens:
        addr = token['address']
        chain = token['chain']
        name = token['name']
        symbol = token['symbol']

        if is_token_seen(conn, addr):
            # Known token: bump its counters and its narrative theme's counters.
            cur = conn.cursor()
            cur.execute('UPDATE tokens_seen SET seen_count = seen_count + 1, market_cap = ? WHERE address = ?', (token['mc'], addr))
            theme_tmp = normalize_theme(name, symbol)
            if theme_tmp:
                cur.execute('UPDATE narratives SET token_count = token_count + 1, last_seen_at = ? WHERE theme = ?', (int(time.time()), theme_tmp))
            conn.commit()
            continue

        category, matched_kw = classify_narrative(name, symbol, chain)

        if category == 'spam':
            record_token(conn, addr, chain, name, symbol, '', 'spam', token['mc'])
            continue

        # Record but never alert on dust ($1k cap / $500 liquidity floor).
        if token['mc'] < 1000 or token['liq'] < 500:
            record_token(conn, addr, chain, name, symbol, '', 'too_small', token['mc'])
            continue

        theme = normalize_theme(name, symbol)
        record_token(conn, addr, chain, name, symbol, theme, category, token['mc'])
        check_narrative_novelty(conn, theme, name, symbol, addr, chain)

    conn.close()

    pushed = 0
    for alert in momentum_alerts[:8]:  # cap pushes per cycle
        if tg_send(alert['msg']):
            pushed += 1
        time.sleep(1)  # respect Telegram rate limits

    return pushed, len(momentum_alerts)
1098
1099# ============================================================
1100# ============================================================
def main():
    """Start the narrative radar: announce on Telegram, then scan forever."""
    banner = "=" * 50
    log(banner)
    log("radar v1 ")
    log(f"scan: {SCAN_INTERVAL}s")
    log(banner)

    init_db()  # ensure the SQLite schema exists before the first scan

    tg_send(
        "radar v1 \n\n"
        "classificationlabels:\n"
        "★★★ / | Binance/CZ | FLAPcommunity\n"
        "★★ | FLAPcommunity | \n"
        f"scan: {SCAN_INTERVAL}"
    )

    scan_count = 0
    total_pushed = 0

    while True:
        scan_count += 1
        try:
            pushed, found = scan_narratives()
            total_pushed += pushed

            if pushed > 0:
                log(f"{scan_count}: {found}, {pushed} ({total_pushed})")
            elif scan_count % 20 == 0:
                # Heartbeat line every 20 quiet scans.
                log(f"{scan_count}: ({total_pushed})")
        except Exception as e:
            log(f"scan: {e}")

        time.sleep(SCAN_INTERVAL)
1137
# Script entry point: run the radar loop until interrupted.
if __name__ == '__main__':
    main()
OI + Funding Rate Scanner
Date: 2026.04.25 Tags: Python · Binance Futures · Telegram
Funding rate flip detection + OI surge
Snapshot-based scanner: detects funding rate flipping from positive to negative while OI is rising. Runs every 5 minutes.
Full source code
1#!/usr/bin/env python3
2"""
3"""
4
5import requests
6import json
7import os
8import time
9import sys
10from datetime import datetime, timedelta
11from pathlib import Path
12
# ============ configuration ============
SCRIPT_DIR = Path(__file__).parent
ENV_FILE = SCRIPT_DIR / ".env.oi"  # Telegram credentials (TG_BOT_TOKEN / TG_CHAT_ID)
ALERT_HISTORY_FILE = SCRIPT_DIR / "oi_funding_alerts.json"  # per-symbol alert timestamps, for dedup
FR_SNAPSHOT_FILE = SCRIPT_DIR / "fr_snapshot.json"  # previous funding-rate snapshot, for flip detection

MIN_OI_CHANGE_PCT = 8  # minimum OI change, percent (original comment garbled: "OI8%")
MIN_VOLUME_USDT = 0  # 24h quote-volume floor; 0 disables the filter
MIN_FR_PERIODS_POSITIVE = 2  # NOTE(review): not referenced in visible code — presumably "funding positive for 2 periods"; confirm
DEDUP_HOURS = 24  # suppress repeat alerts for the same symbol within this window
23
def load_env():
    """Parse KEY=VALUE pairs from ENV_FILE into a dict ('#' lines skipped)."""
    settings = {}
    if not ENV_FILE.exists():
        return settings
    for raw in ENV_FILE.read_text().strip().split('\n'):
        if raw.startswith('#') or '=' not in raw:
            continue
        key, _, value = raw.partition('=')
        settings[key.strip()] = value.strip()
    return settings
32
# Resolved once at import time; when either value is empty, send_tg falls
# back to printing the message instead of calling Telegram.
env = load_env()
TG_BOT_TOKEN = env.get('TG_BOT_TOKEN', '')
TG_CHAT_ID = env.get('TG_CHAT_ID', '')
36
def send_tg(text):
    """Send text to Telegram in <=4000-char chunks.

    Without credentials the message is printed to stdout instead. A chunk
    that fails as Markdown (non-200) is retried once as plain text.
    """
    if not (TG_BOT_TOKEN and TG_CHAT_ID):
        print("[TG] configuration, :")
        print(text)
        return

    url = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendMessage"
    for start in range(0, len(text), 4000):
        chunk = text[start:start + 4000]
        try:
            resp = requests.post(url, json={
                'chat_id': TG_CHAT_ID,
                'text': chunk,
                'parse_mode': 'Markdown'
            }, timeout=10)
            if resp.status_code != 200:
                # Markdown parsing can fail on unescaped characters.
                requests.post(url, json={
                    'chat_id': TG_CHAT_ID,
                    'text': chunk
                }, timeout=10)
        except Exception as e:
            print(f"[TG] send: {e}")
58
def load_alert_history():
    """Load the alert-dedup map from disk; empty dict when missing or corrupt."""
    if not ALERT_HISTORY_FILE.exists():
        return {}
    try:
        return json.loads(ALERT_HISTORY_FILE.read_text())
    except Exception:
        return {}
66
def save_alert_history(history):
    """Persist the alert-dedup map as JSON."""
    ALERT_HISTORY_FILE.write_text(json.dumps(history))
69
def is_duplicate(symbol, history):
    """True when this symbol was alerted within the last DEDUP_HOURS hours."""
    last = history.get(symbol)
    if last is None:
        return False
    age = (datetime.now() - datetime.fromisoformat(last)).total_seconds()
    return age < DEDUP_HOURS * 3600
75
def mark_alerted(symbol, history):
    """Record an alert for symbol now; prune entries older than 2x the dedup window."""
    history[symbol] = datetime.now().isoformat()
    horizon = datetime.now() - timedelta(hours=DEDUP_HOURS * 2)
    return {sym: ts for sym, ts in history.items()
            if datetime.fromisoformat(ts) > horizon}
82
def load_fr_snapshot():
    """Load the previous funding-rate snapshot; {} when absent or unreadable."""
    try:
        if FR_SNAPSHOT_FILE.exists():
            return json.loads(FR_SNAPSHOT_FILE.read_text())
    except Exception:
        pass
    return {}
90
def save_fr_snapshot(snapshot):
    """Persist the current funding-rate snapshot as JSON."""
    FR_SNAPSHOT_FILE.write_text(json.dumps(snapshot))
93
94# ============ core scan ============
def scan():
    """
    One scan pass over all Binance USDT perpetuals.

    Detects symbols whose funding rate flipped from >=0 to <0 since the
    previous run (snapshot persisted on disk), then enriches each with 48h
    open-interest history. Returns a list of signal dicts (possibly empty).

    Bug fix: the snapshot is now only persisted when the premiumIndex request
    succeeded — the original saved an empty dict on API failure, destroying
    the previous snapshot and silently resetting flip detection.
    """
    ts_start = time.time()

    try:
        info = requests.get('https://fapi.binance.com/fapi/v1/exchangeInfo', timeout=10).json()
        symbols = [s['symbol'] for s in info['symbols']
                   if s['contractType'] == 'PERPETUAL' and s['quoteAsset'] == 'USDT' and s['status'] == 'TRADING']
    except Exception as e:
        print(f"[ERROR] exchangeInfo: {e}")
        return []

    try:
        tickers = requests.get('https://fapi.binance.com/fapi/v1/ticker/24hr', timeout=10).json()
        ticker_map = {t['symbol']: t for t in tickers}
    except Exception as e:
        print(f"[ERROR] ticker: {e}")
        return []

    active = [s for s in symbols if float(ticker_map.get(s, {}).get('quoteVolume', 0)) > MIN_VOLUME_USDT]

    try:
        fr_all = requests.get('https://fapi.binance.com/fapi/v1/premiumIndex', timeout=10).json()
        fr_current = {item['symbol']: float(item['lastFundingRate']) for item in fr_all}
    except Exception:
        fr_current = {}

    prev_snapshot = load_fr_snapshot()

    # Only overwrite the on-disk snapshot with real data (see docstring).
    if fr_current:
        save_fr_snapshot(fr_current)

    if not prev_snapshot:
        print(f"[{datetime.now().strftime('%H:%M:%S')}] run,,")
        return []

    # Funding flipped positive -> negative since the previous run.
    just_turned_negative = []
    for sym in active:
        prev_fr = prev_snapshot.get(sym)
        curr_fr = fr_current.get(sym)
        if prev_fr is None or curr_fr is None:
            continue
        if prev_fr >= 0 and curr_fr < 0:
            just_turned_negative.append(sym)

    if not just_turned_negative:
        elapsed = time.time() - ts_start
        print(f"[{datetime.now().strftime('%H:%M:%S')}] scan: {len(active)}/{elapsed:.1f}s, ")
        return []

    print(f"[{datetime.now().strftime('%H:%M:%S')}] {len(just_turned_negative)} : {just_turned_negative}")

    signals = []
    for sym in just_turned_negative:
        try:
            oi_hist = requests.get('https://fapi.binance.com/futures/data/openInterestHist',
                                   params={'symbol': sym, 'period': '1h', 'limit': 48}, timeout=10).json()

            oi_chg = 0
            segs = []
            oi_rising = False
            if oi_hist and len(oi_hist) >= 12:
                oi_values = [float(x['sumOpenInterestValue']) for x in oi_hist]
                seg_len = len(oi_values) // 4
                if seg_len >= 3:
                    # Average OI over four consecutive quarters of the window;
                    # rising = last-quarter average above first-quarter average.
                    segs = [
                        sum(oi_values[:seg_len]) / seg_len,
                        sum(oi_values[seg_len:seg_len*2]) / seg_len,
                        sum(oi_values[seg_len*2:seg_len*3]) / seg_len,
                        sum(oi_values[seg_len*3:]) / max(1, len(oi_values[seg_len*3:]))
                    ]
                    oi_chg = (segs[3] - segs[0]) / segs[0] * 100 if segs[0] > 0 else 0
                    oi_rising = oi_chg > 0

            t = ticker_map.get(sym, {})
            signals.append({
                'symbol': sym,
                'price': float(t.get('lastPrice', 0)),
                'price_chg_24h': float(t.get('priceChangePercent', 0)),
                'volume': float(t.get('quoteVolume', 0)),
                'oi_change': oi_chg,
                'oi_segments': segs,
                'oi_rising': oi_rising,
                'current_fr': fr_current.get(sym, 0),
                'prev_fr': prev_snapshot.get(sym, 0),
            })
        except Exception:
            continue

    elapsed = time.time() - ts_start
    print(f"[{datetime.now().strftime('%H:%M:%S')}] scan: {len(active)}/{elapsed:.1f}s, : {len(signals)}")

    return signals
186
def get_square_discussion(coin):
    """Return (post_count, view_count) for the coin's Binance Square hashtag; (0, 0) on failure."""
    params = {"hashtag": f"#{coin.lower()}", "pageIndex": 1, "pageSize": 1, "orderBy": "HOT"}
    headers = {"User-Agent": "Mozilla/5.0", "Referer": "https://www.binance.com/en/square"}
    try:
        r = requests.get(
            "https://www.binance.com/bapi/composite/v4/friendly/pgc/content/queryByHashtag",
            params=params,
            headers=headers,
            timeout=8
        )
        if r.status_code == 200:
            ht = r.json().get("data", {}).get("hashtag", {})
            return ht.get("contentCount", 0), ht.get("viewCount", 0)
    except Exception:
        pass
    return 0, 0
201
def get_market_caps():
    """Map coin name -> market cap (USD) from Binance's marketing list; {} on failure."""
    caps = {}
    try:
        r = requests.get(
            "https://www.binance.com/bapi/composite/v1/public/marketing/symbol/list",
            timeout=10
        )
        if r.status_code == 200:
            for item in r.json().get("data", []):
                name = item.get("name", "")
                mc = item.get("marketCap", 0)
                if name and mc:
                    caps[name] = float(mc)
    except Exception:
        pass
    return caps
218
def get_spot_symbols():
    """Set of base assets with an active USDT spot market; empty set on failure."""
    try:
        info = requests.get("https://api.binance.com/api/v3/exchangeInfo", timeout=10).json()
        return {s["baseAsset"] for s in info["symbols"]
                if s["quoteAsset"] == "USDT" and s["status"] == "TRADING"}
    except Exception:
        return set()
226
def fmt_mcap(v):
    """Format a USD market cap with B/M/K suffixes ($1.23B / $4.5M / $678K / $90)."""
    if v >= 1e9:
        return f"${v / 1e9:.2f}B"
    if v >= 1e6:
        return f"${v / 1e6:.1f}M"
    if v >= 1e3:
        return f"${v / 1e3:.0f}K"
    return f"${v:.0f}"
232
def fmt_views(v):
    """Format a view count with M/K suffixes; small values pass through as str(v)."""
    if v >= 1e6:
        return f"{v / 1e6:.1f}M"
    if v >= 1e3:
        return f"{v / 1e3:.0f}K"
    return str(v)
237
def format_alert(signals):
    """Build the Markdown Telegram message for a batch of FR-flip signals.

    Returns None for an empty batch. Note: sorts `signals` in place —
    rising-OI entries first, then most-negative funding rate.
    """
    if not signals:
        return None

    signals.sort(key=lambda x: (-int(x.get('oi_rising', False)), x['current_fr']))

    mcap_map = get_market_caps()
    spot_set = get_spot_symbols()

    def _section(s):
        coin = s['symbol'].replace('USDT', '')
        fr_change = f"{s['prev_fr']:+.4%} -> {s['current_fr']:+.4%}"
        mcap = mcap_map.get(coin, 0)
        has_spot = coin in spot_set
        sq_posts, sq_views = get_square_discussion(coin)

        rows = [f"```", f"{coin}"]
        rows.append(f" : {s['price']:.4f} 24h: {s['price_chg_24h']:+.1f}%")
        rows.append(f" : {fr_change}")
        if s['oi_segments']:
            oi_segs = ' > '.join([f"{v/1e6:.1f}M" for v in s['oi_segments']])
            rows.append(f" OI: +{s['oi_change']:.1f}% ({oi_segs})")
        rows.append(f" : ${s['volume']/1e6:.1f}M")
        rows.append(f" : {fmt_mcap(mcap) if mcap > 0 else ''} : {'' if has_spot else 'futures'}")
        if sq_posts > 0:
            rows.append(f" : {sq_posts} / {fmt_views(sq_views)}")
        else:
            rows.append(f" : ")
        rows.append(f"```")
        return rows

    now = datetime.now().strftime('%m-%d %H:%M')
    lines = [f"*[ +OI ]* {now}\n"]
    for s in signals:
        lines.extend(_section(s))

    return '\n'.join(lines)
275
def main():
    """Entry point: scan, keep signals with negative funding AND rising OI, push to Telegram."""
    signals = scan()
    if not signals:
        print(f" ")
        return

    strong = [s for s in signals if s['current_fr'] < 0 and s.get('oi_rising')]
    if not strong:
        print(f" {len(signals)} but OI, ")
        return

    msg = format_alert(strong)
    if msg:
        send_tg(msg)
    print(f" {len(strong)} ({len(signals)}, {len(strong)}OI)")
290
# Script entry point: run a single scan-and-alert cycle.
if __name__ == '__main__':
    main()
Accumulation Radar
Date: 2026.04.25 Tags: Python · Binance · CoinGlass · Telegram
Momentum + OI anomaly + smart alerts
Hourly scan: top gainers momentum tracking, OI anomaly detection, and Telegram push. Pure Python, zero AI cost.
Full source code
1#!/usr/bin/env python3
"""
longradar v2 — momentum + OI-anomaly scanner with Telegram alerts.

Data sources: Binance futures API + CoinGecko Trending (free, no API key).
"""
7
8import json
9import os
10import sys
11import time
12import requests
13from datetime import datetime, timezone, timedelta
14from pathlib import Path
15from square_heat import get_square_heat
16
# Load KEY=VALUE pairs from a sibling ".env.oi" file into the process environment.
# setdefault means real environment variables always win over file values.
env_file = Path(__file__).parent / ".env.oi"
if env_file.exists():
    with open(env_file) as f:
        for line in f:
            line = line.strip()
            # Skip blanks and "#" comments; only lines containing "=" are parsed.
            if line and not line.startswith("#") and "=" in line:
                k, v = line.split("=", 1)
                os.environ.setdefault(k.strip(), v.strip())
25
# === configuration ===
TG_BOT_TOKEN = os.getenv("TG_BOT_TOKEN", "")
TG_CHAT_ID = os.getenv("TG_CHAT_ID", "YOUR_CHAT_ID")
FAPI = "https://fapi.binance.com"  # Binance USDT-M futures REST base URL

# Persisted "first seen" record of hot coins (pruned to the last 7 days in main()).
HEAT_HISTORY_FILE = Path(__file__).parent / "heat_history.json"

VOL_SURGE_MULT = 2.5             # 24h volume >= 2.5x its recent daily average counts as a surge
MIN_VOL_USD = 20_000_000         # only run surge detection on symbols with > $20M 24h quote volume

MIN_OI_DELTA_PCT = 3.0           # OI change threshold (%) — NOTE(review): not referenced in main()
MIN_OI_USD = 2_000_000           # minimum OI ($2M) — NOTE(review): not referenced in main()
38
def api_get(endpoint, params=None):
    """GET a Binance futures API endpoint with basic retry handling.

    Up to 3 attempts: sleeps 2s on HTTP 429 (rate limit) and 1s on a
    network-level error before retrying; any other non-200 status aborts
    immediately. Returns the decoded JSON on success, otherwise None.
    """
    url = f"{FAPI}{endpoint}"
    for _ in range(3):  # attempt counter was unused; retries are identical
        try:
            resp = requests.get(url, params=params, timeout=10)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code == 429:
                time.sleep(2)  # rate-limited: back off, then retry
            else:
                return None  # hard failure (4xx/5xx) — retrying won't help
        except requests.RequestException:
            # Narrowed from a bare `except:`: only network/HTTP-layer errors
            # are retried; KeyboardInterrupt etc. propagate.
            time.sleep(1)
    return None
54
def format_usd(v):
    """Render a dollar amount compactly: $1.2B / $3.4M / $56K / $7."""
    scale = (
        (1e9, "B", 1) if v >= 1e9
        else (1e6, "M", 1) if v >= 1e6
        else (1e3, "K", 0) if v >= 1e3
        else None
    )
    if scale is None:
        return f"${v:.0f}"
    divisor, suffix, digits = scale
    return f"${v / divisor:.{digits}f}{suffix}"
60
def mcap_str(v):
    """Market-cap string: $1.2B / $34M / $567K, falling back to whole dollars."""
    units = ((1e9, "B", "%.1f"), (1e6, "M", "%.0f"), (1e3, "K", "%.0f"))
    for limit, tag, fmt in units:
        if v >= limit:
            return "$" + fmt % (v / limit) + tag
    return "$%.0f" % v
66
def send_telegram(text):
    """Send `text` to Telegram, split into <=3800-char chunks on line boundaries.

    Falls back to stdout when TG_BOT_TOKEN is unset. Each chunk is first sent
    with Markdown parse mode; on any non-200 it is retried once as plain text
    with '*' and '_' stripped (Markdown parse errors are the common failure).
    """
    if not TG_BOT_TOKEN:
        print("\n[TG] No token, stdout:\n")
        print(text)
        return

    url = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendMessage"

    # Split on newlines so no chunk exceeds Telegram's ~4096-char message limit.
    chunks = []
    current = ""
    for line in text.split("\n"):
        if len(current) + len(line) + 1 > 3800:
            # Bug fix: previously an empty `current` was appended when the very
            # first line was oversized, producing an empty Telegram message.
            if current:
                chunks.append(current)
            current = line
        else:
            current += "\n" + line if current else line
    if current:
        chunks.append(current)

    for chunk in chunks:
        try:
            resp = requests.post(url, json={
                "chat_id": TG_CHAT_ID,
                "text": chunk,
                "parse_mode": "Markdown"
            }, timeout=10)
            if resp.status_code == 200:
                print(f"[TG] Sent ✓ ({len(chunk)} chars)")
            else:
                # Retry without formatting characters.
                resp2 = requests.post(url, json={
                    "chat_id": TG_CHAT_ID,
                    "text": chunk.replace("*", "").replace("_", ""),
                }, timeout=10)
                print(f"[TG] Sent plain ({'✓' if resp2.status_code == 200 else '✗'})")
        except Exception as e:
            print(f"[TG] Error: {e}")
        time.sleep(0.5)  # pace successive chunk sends
105
def main():
    """One full longradar cycle: gather market data, score coin 'heat',
    detect funding/OI anomalies, and push a formatted report to Telegram.

    NOTE(review): many string literals below contain blank label slots —
    they appear to be stripped non-ASCII (CJK) captions from the published
    listing; the surrounding logic is intact.
    """
    print(f"🔥 longradar v2 — {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")

    # --- 1. Bulk snapshots: 24h tickers + funding premium index ---
    tickers_raw = api_get("/fapi/v1/ticker/24hr")
    premiums_raw = api_get("/fapi/v1/premiumIndex")

    if not tickers_raw or not premiums_raw:
        print("❌ API")
        return

    # Keep only USDT-quoted perpetual symbols.
    ticker_map = {}
    for t in tickers_raw:
        if t["symbol"].endswith("USDT"):
            ticker_map[t["symbol"]] = {
                "px_chg": float(t["priceChangePercent"]),
                "vol": float(t["quoteVolume"]),
                "price": float(t["lastPrice"]),
            }

    funding_map = {}
    for p in premiums_raw:
        if p["symbol"].endswith("USDT"):
            funding_map[p["symbol"]] = float(p["lastFundingRate"])

    # --- 2. Market caps from Binance's public marketing endpoint (best-effort) ---
    mcap_map = {}
    try:
        r = requests.get(
            "https://www.binance.com/bapi/composite/v1/public/marketing/symbol/list",
            timeout=10
        )
        if r.status_code == 200:
            for item in r.json().get("data", []):
                name = item.get("name", "")
                mc = item.get("marketCap", 0)
                if name and mc:
                    mcap_map[name] = float(mc)
        print(f"✅ : {len(mcap_map)}")
    except Exception as e:
        print(f"⚠️ API: {e}")

    # --- 3. Heat scoring from three sources ---
    heat_map = {}          # coin -> accumulated heat score
    cg_trending = set()    # coins on CoinGecko Trending
    square_trending = set()  # coins hot on Binance Square

    # 3a. Binance Square heat: rank-weighted score, bonus for rapid risers.
    sq_coins = get_square_heat()
    if sq_coins:
        for i, c in enumerate(sq_coins):
            coin = c["coin"]
            square_trending.add(coin)
            rank_score = max(50 - i * 4, 10)  # rank 0 -> 50 points, floor at 10
            if c.get("rapidRiser"):
                rank_score += 15
            heat_map[coin] = heat_map.get(coin, 0) + rank_score
        print(f"🏦 : {len(square_trending)} {[c['coin'] for c in sq_coins[:5]]}")

    # 3b. CoinGecko Trending
    try:
        r = requests.get("https://api.coingecko.com/api/v3/search/trending", timeout=10)
        if r.status_code == 200:
            for item in r.json().get("coins", []):
                sym = item["item"]["symbol"].upper()
                rank = item["item"].get("score", 99)
                cg_trending.add(sym)
                heat_map[sym] = heat_map.get(sym, 0) + max(50 - rank * 3, 10)
        print(f"🌐 CG Trending: {len(cg_trending)}")
    except Exception as e:
        print(f"⚠️ CG Trending: {e}")

    # 3c. Volume surge: 24h volume vs. its ~7-day daily average (kline field 7 = quote volume).
    vol_surge_coins = set()
    for sym, tk in ticker_map.items():
        coin = sym.replace("USDT", "")
        vol_24h = tk["vol"]
        if vol_24h > MIN_VOL_USD:
            kl = api_get("/fapi/v1/klines", {"symbol": sym, "interval": "1d", "limit": 8})
            if kl and len(kl) >= 5:
                avg_prev = sum(float(k[7]) for k in kl[:-1]) / (len(kl) - 1)
                if avg_prev > 0:
                    ratio = vol_24h / avg_prev
                    if ratio >= VOL_SURGE_MULT:
                        vol_surge_coins.add(coin)
                        heat_map[coin] = heat_map.get(coin, 0) + min(ratio * 10, 50)
            time.sleep(0.05)  # gentle pacing per kline request

    print(f"📈 (≥{VOL_SURGE_MULT}x): {len(vol_surge_coins)}")

    # --- 4. Cross-source agreement bonuses ---
    dual_heat = cg_trending & vol_surge_coins
    square_vol = square_trending & vol_surge_coins
    triple_heat = cg_trending & vol_surge_coins & square_trending

    all_multi_heat = dual_heat | square_vol
    if all_multi_heat:
        for coin in all_multi_heat:
            heat_map[coin] = heat_map.get(coin, 0) + 20
        if triple_heat:
            for coin in triple_heat:
                heat_map[coin] = heat_map.get(coin, 0) + 30  # extra bonus when all three sources agree
            print(f"🔥🔥🔥 : {triple_heat}")
        else:
            print(f"🔥🔥 : {all_multi_heat}")

    # --- 5. Build the OI scan universe: every heat coin + top 100 by volume ---
    scan_syms = set()
    for coin in heat_map:
        sym = coin + "USDT"
        if sym in ticker_map:
            scan_syms.add(sym)
    top_by_vol = sorted(ticker_map.items(), key=lambda x: x[1]["vol"], reverse=True)[:100]
    for sym, _ in top_by_vol:
        scan_syms.add(sym)

    # --- 6. Open-interest history: 1h and 6h percentage deltas ---
    oi_map = {}
    for i, sym in enumerate(scan_syms):
        oi_hist = api_get("/futures/data/openInterestHist", {
            "symbol": sym, "period": "1h", "limit": 6
        })
        if oi_hist and len(oi_hist) >= 2:
            curr = float(oi_hist[-1]["sumOpenInterestValue"])
            prev_1h = float(oi_hist[-2]["sumOpenInterestValue"])
            prev_6h = float(oi_hist[0]["sumOpenInterestValue"])
            d1h = ((curr - prev_1h) / prev_1h * 100) if prev_1h > 0 else 0
            d6h = ((curr - prev_6h) / prev_6h * 100) if prev_6h > 0 else 0
            oi_map[sym] = {"oi_usd": curr, "d1h": d1h, "d6h": d6h}
        if (i + 1) % 10 == 0:
            time.sleep(0.5)  # rate limiting: pause every 10 requests

    print(f"📊 OIscan: {len(oi_map)}")

    # --- 7. Merge all signals into one record per symbol ---
    all_syms = set(list(ticker_map.keys()))
    coin_data = {}
    for sym in all_syms:
        tk = ticker_map.get(sym, {})
        if not tk:
            continue
        oi = oi_map.get(sym, {})
        fr = funding_map.get(sym, 0)
        coin = sym.replace("USDT", "")

        d6h = oi.get("d6h", 0)
        fr_pct = fr * 100
        oi_usd = oi.get("oi_usd", 0)

        # Rough market-cap estimate when Binance doesn't report one:
        # max(30% of 24h volume, 2x open interest).
        if coin in mcap_map:
            est_mcap = mcap_map[coin]
        else:
            est_mcap = max(tk["vol"] * 0.3, oi_usd * 2) if oi_usd > 0 else tk["vol"] * 0.3

        heat = heat_map.get(coin, 0)

        coin_data[sym] = {
            "coin": coin, "sym": sym,
            "px_chg": tk["px_chg"], "vol": tk["vol"],
            "fr_pct": fr_pct, "d6h": d6h,
            "oi_usd": oi_usd, "est_mcap": est_mcap,
            "heat": heat,
            "in_cg": coin in cg_trending,
            "in_sq": coin in square_trending,
            "vol_surge": coin in vol_surge_coins,
        }

    # ═══════════════════════════════════════
    # 8. Hot list + 7-day "first seen" history
    # ═══════════════════════════════════════
    hot_coins = sorted(
        [d for d in coin_data.values() if d["heat"] > 0],
        key=lambda x: x["heat"], reverse=True
    )

    heat_history = {}
    if HEAT_HISTORY_FILE.exists():
        try:
            heat_history = json.loads(HEAT_HISTORY_FILE.read_text())
        except:  # corrupt/unreadable history file: start fresh
            pass

    now_ts = datetime.now(timezone(timedelta(hours=8))).strftime("%Y-%m-%d %H:%M")  # UTC+8 timestamps
    new_entries = []  # coins entering the heat list for the first time this week
    for s in hot_coins:
        coin = s["coin"]
        if coin not in heat_history:
            heat_history[coin] = {"first_seen": now_ts, "price": s.get("px_chg", 0)}
            sources = []
            if s["in_sq"]: sources.append("")
            if s["in_cg"]: sources.append("CG")
            if s["vol_surge"]: sources.append("")
            new_entries.append({"coin": coin, "sources": sources, "data": s})

    # Prune entries older than 7 days (lexicographic compare works on ISO dates).
    cutoff = (datetime.now(timezone(timedelta(hours=8))) - timedelta(days=7)).strftime("%Y-%m-%d")
    heat_history = {k: v for k, v in heat_history.items()
                    if v.get("first_seen", "9999") >= cutoff}

    HEAT_HISTORY_FILE.write_text(json.dumps(heat_history, indent=2, ensure_ascii=False))

    # ═══════════════════════════════════════
    # 9. "Chase" candidates: pumping price with negative funding
    # ═══════════════════════════════════════
    chase = []
    for sym, d in coin_data.items():
        if d["px_chg"] > 3 and d["fr_pct"] < -0.005 and d["vol"] > 1_000_000:
            fr_hist = api_get("/fapi/v1/fundingRate", {"symbol": sym, "limit": 5})
            fr_rates = [float(f["fundingRate"]) * 100 for f in fr_hist] if fr_hist else [d["fr_pct"]]
            fr_prev = fr_rates[-2] if len(fr_rates) >= 2 else d["fr_pct"]
            fr_delta = d["fr_pct"] - fr_prev

            # Trend labels look blank — presumably stripped CJK arrows/words.
            trend = "" if fr_delta < -0.05 else "" if fr_delta < -0.01 else "" if abs(fr_delta) < 0.01 else ""

            chase.append({**d, "fr_delta": fr_delta, "trend": trend,
                          "rates": " → ".join([f"{x:.3f}" for x in fr_rates[-3:]])})
            time.sleep(0.2)  # pacing per funding-rate history request

    chase.sort(key=lambda x: x["fr_pct"])  # most negative funding first

    # ═══════════════════════════════════════
    # 10. Assemble the Telegram report
    # ═══════════════════════════════════════
    now = datetime.now(timezone(timedelta(hours=8)))
    lines = [
        f"**longradar**",
        f"{now.strftime('%Y-%m-%d %H:%M')} CST",
    ]

    # 10a. Newly seen hot coins this cycle.
    if new_entries:
        tbl = ["```"]
        tbl.append(f"{'':<10} {'':>8} {'':>7} {''}")
        tbl.append(f"{'-'*10} {'-'*8} {'-'*7} {'-'*20}")
        for e in new_entries:
            s = e["data"]
            src_str = "/".join(e["sources"])
            tbl.append(f"{s['coin']:<10} {mcap_str(s['est_mcap']):>8} {s['px_chg']:>+6.0f}% {src_str}")
        tbl.append("```")
        lines.append("\n".join(tbl))

    # 10b. Top-10 hot coins with source tags and OI/funding extras.
    if hot_coins:
        lines.append(f"\n**[ ]**")
        tbl = ["```"]
        tbl.append(f"{'':<10} {'':>8} {'':>7} {''}")
        tbl.append(f"{'-'*10} {'-'*8} {'-'*7} {'-'*20}")
        for s in hot_coins[:10]:
            sources = []
            if s["in_sq"]: sources.append("")
            if s["in_cg"]: sources.append("CG")
            if s["vol_surge"]: sources.append("")
            extra = []
            if abs(s["d6h"]) >= 3: extra.append(f"OI{s['d6h']:+.0f}%")
            if s["fr_pct"] < -0.03: extra.append(f"{s['fr_pct']:.2f}%")
            src_str = "/".join(sources)
            if extra:
                src_str += " " + " ".join(extra)
            coin_name = s['coin']
            tbl.append(f"{coin_name:<10} {mcap_str(s['est_mcap']):>8} {s['px_chg']:>+6.0f}% {src_str}")
        tbl.append("```")
        lines.append("\n".join(tbl))
    else:
        lines.append("\n**[ ]** ")

    # 10c. Chase table.
    lines.append(f"\n**[ ]** +=")
    if chase:
        tbl = ["```"]
        tbl.append(f"{'':<10} {'':>10} {'':>8} {'':>7} {'':>8}")
        tbl.append(f"{'-'*10} {'-'*10} {'-'*8} {'-'*7} {'-'*8}")
        for s in chase[:8]:
            tbl.append(
                f"{s['coin']:<10} {s['fr_pct']:>+9.3f}% {s['trend']:>8} {s['px_chg']:>+6.0f}% {mcap_str(s['est_mcap']):>7}"
            )
        tbl.append("```")
        lines.append("\n".join(tbl))
    else:
        lines.append(" condition")

    # 10d. OI anomalies (|6h delta| >= 8%) among coins with zero heat score.
    oi_alerts = []
    for sym, oi in oi_map.items():
        if abs(oi["d6h"]) >= 8:
            d = coin_data.get(sym)
            if d and d["heat"] == 0:
                oi_alerts.append(d)
    oi_alerts.sort(key=lambda x: abs(x["d6h"]), reverse=True)

    if oi_alerts:
        lines.append(f"\n**[ OI ]** 6positions>=8%")
        tbl = ["```"]
        tbl.append(f"{'':<10} {'':>4} {'OI':>8} {'':>7} {'':>8}")
        tbl.append(f"{'-'*10} {'-'*4} {'-'*8} {'-'*7} {'-'*8}")
        for s in oi_alerts[:6]:
            direction = "" if s["d6h"] > 0 else ""  # direction labels also blank (stripped)
            tbl.append(
                f"{s['coin']:<10} {direction:>4} {s['d6h']:>+7.1f}% {s['px_chg']:>+6.0f}% {mcap_str(s['est_mcap']):>7}"
            )
        tbl.append("```")
        lines.append("\n".join(tbl))

    # 10e. Narrative highlights: hot + OI rising, hot + very negative funding,
    # and potential short squeezes from the chase list.
    highlights = []

    hot_oi = [d for d in coin_data.values() if d["heat"] > 0 and d["d6h"] > 5]
    for s in sorted(hot_oi, key=lambda x: x["d6h"], reverse=True)[:3]:
        highlights.append(f"{s['coin']} — +OI{s['d6h']:+.0f}%,")

    hot_fuel = [d for d in coin_data.values() if d["heat"] > 0 and d["fr_pct"] < -0.03]
    for s in sorted(hot_fuel, key=lambda x: x["fr_pct"])[:2]:
        if s["coin"] not in " ".join(highlights):  # crude dedup via substring check
            highlights.append(f"{s['coin']} — +{s['fr_pct']:.2f}%,")

    # NOTE(review): '"" in s.get("trend", "")' is always True — the marker
    # character was evidently stripped from the listing; verify upstream.
    chase_fire = [s for s in chase[:5] if "" in s.get("trend", "")]
    for s in chase_fire[:2]:
        if s["coin"] not in " ".join(highlights):
            highlights.append(f"{s['coin']} — {s['fr_pct']:.3f}%,short squeeze")

    if highlights:
        lines.append(f"\n**[ ]**")
        for h in highlights[:5]:
            lines.append(f" {h}")

    lines.append(f"\n=Binancesearch / CG=CoinGecko")

    report = "\n".join(lines)
    send_telegram(report)
    print("\n✅ ")
417
# Script entry point: run one full scan/report cycle.
if __name__ == "__main__":
    main()
Binance Alpha Monitor
Date: 2026.04.23 Tags: Python · Binance · Claude AI · Telegram
WebSocket + AI analysis + Telegram alerts
Auto-detects new Binance Alpha listings, analyzes token quality with Claude AI, and sends alerts via Telegram.
Full source code
1#!/usr/bin/env python3
"""
Binance Alpha Monitor v2 — new-listing radar.

REST polling + intelligent keyword filtering + project rating + Telegram alerts.
No exchange API key required; near-zero AI cost (a rule engine takes over when
no Anthropic key is configured).

Run: python3 alpha_monitor.py
"""
9
10import asyncio
11import hashlib
12import json
13import logging
14import os
15import re
16import sqlite3
17import time
18from dataclasses import dataclass, field
19from datetime import datetime, timedelta, timezone
20from pathlib import Path
21from typing import Optional
22
23import httpx
24
# ============================================================
# configuration
# ============================================================

BASE_DIR = Path(__file__).parent
DB_PATH = str(BASE_DIR / "data" / "alpha.db")  # SQLite state store (schema created by init_db)

# Telegram bot credentials — send_tg logs an error and skips sending when unset.
TG_BOT_TOKEN = os.environ.get("TG_BOT_TOKEN", "")
TG_CHAT_ID = os.environ.get("TG_CHAT_ID", "")
35
36def _load_anthropic_key():
37 """fromauth.jsonvariableAPI key"""
38 # 1. hermes auth.json credential pool
39 try:
40 auth_file = os.path.expanduser("~/.hermes/auth.json")
41 if os.path.exists(auth_file):
42 with open(auth_file) as f:
43 auth = json.load(f)
44 pool = auth.get("credential_pool", {}).get("anthropic", [])
45 if pool:
46 key = pool[0].get("access_token", "")
47 if key and key != "***":
48 return key
49 except Exception:
50 pass
51 return os.environ.get("ANTHROPIC_API_KEY", "")
52
# Anthropic (Claude) settings; the key may be empty, in which case the
# rule-engine fallback is used instead of LLM extraction.
ANTHROPIC_API_KEY = _load_anthropic_key()
ANTHROPIC_BASE_URL = os.environ.get("ANTHROPIC_BASE_URL", "https://api.anthropic.com")
ANTHROPIC_MODEL = os.environ.get("ANTHROPIC_MODEL", "claude-sonnet-4-6")

# Poll cadences (seconds)
ANNOUNCEMENT_POLL_INTERVAL = 30   # announcement feed: every 30s
AGGREGATION_POLL_INTERVAL = 15    # aggregation pass: every 15s
MONITOR_POLL_INTERVAL = 120       # price monitoring: every 2 minutes

# HTTP: browser-like headers so Binance's public endpoints respond normally.
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    "Accept": "application/json",
    "Accept-Language": "en-US,en;q=0.9",
}

BINANCE_ANNOUNCEMENT_API = "https://www.binance.com/bapi/composite/v1/public/cms/article/list/query"

# ============================================================
# logging
# ============================================================

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger("alpha")
80
# ============================================================
# announcement keyword filters
# ============================================================

# Titles containing any of these are treated as candidate Alpha/listing events.
# NOTE(review): the empty-string entries here and below look like stripped
# non-ASCII keywords from the published listing; an empty string is a
# substring of every title, which would make the checks match unconditionally
# — verify against the original source.
TRIGGER_KEYWORDS = [
    "alpha", "", "airdrop", "tge", "token generation",
    "", "will list", "will launch",
    "", "exclusive", "binance wallet", "hodler",
]

# Titles containing any of these are ignored outright.
EXCLUDE_KEYWORDS = [
    "delisting", "delist", "", "deprecate", "",
    "maintenance", "",
    "launchpool", "megadrop",
    "buyback", "",
    "perpetual contract",     # futures-only listing
    "futures will launch",    # futures launch notice
    "usdⓢ-margined",          # USD(S)-margined futures
    "coin-margined",          # coin-margined futures
    "margin will add",        # margin pair addition
    "trading bots services",  # trading-bot service notice
    "trading pairs",          # new trading-pair notice
]

# Alpha Box / mystery-box promos are rejected separately in is_trigger().
ALPHA_BOX_KEYWORDS = ["alpha box", "", "mystery box"]
105
# ============================================================
# project-rating reference data
# ============================================================

# Top-tier crypto VCs: backing from any of these is a strong quality signal
# (matched case-insensitively as substrings in rate_project/count_vc_tier).
TIER1_VCS = [
    "binance labs", "yzi labs",
    "coinbase ventures", "a16z", "andreessen horowitz", "paradigm",
    "polychain", "polychain capital", "sequoia", "sequoia china", "sequoia capital",
    "multicoin", "multicoin capital", "pantera", "pantera capital",
    "dragonfly", "dragonfly capital", "founders fund",
]

# Second-tier funds — counted by rate_project but not used by any rule yet.
TIER2_VCS = [
    "abcde", "iosg", "hashkey", "okx ventures",
    "sevenx", "folius", "foresight", "hashed",
    "bitkraft", "framework", "framework ventures",
    "delphi", "delphi digital", "electric capital",
    "variant", "1kx", "placeholder",
    "animoca", "animoca brands", "jump", "jump crypto",
    "hack vc", "bain capital",
]

# Narratives treated as hot (upgrades tier) vs. weak (adds a warning).
HOT_NARRATIVES = ["defi_perp", "ai_agent", "ai_defi", "defai", "zk_proof"]
WEAK_NARRATIVES = ["gamefi", "meme", "social"]

# Any mention of these marks a project as a "Binance darling" (auto S-tier).
BINANCE_DARLING_KEYWORDS = ["yzi labs", "binance labs"]

# ============================================================
# tier display strings for Telegram messages
# ============================================================

TIER_ICONS = {"S": "🟢🟢🟢", "A": "🟡🟡", "B": "🟠", "C": "⚪"}
TIER_LABELS = {"S": "S ()", "A": "A ()", "B": "B ()", "C": "C ()"}
137
def count_vc_tier(vcs: list, vc_list: list) -> int:
    """Count how many entries of `vc_list` appear as a substring of any investor in `vcs`.

    Matching is case-insensitive on the `vcs` side; `vc_list` entries are
    expected to be lowercase already.
    """
    lowered = [investor.lower() for investor in vcs]
    return sum(
        1 for tier_name in vc_list
        if any(tier_name in investor for investor in lowered)
    )
145
def rate_project(circ_mcap: float, fdv: float, vcs: list,
                 narrative: str, is_darling: bool) -> dict:
    """Assign an S/A/B/C quality tier from valuation, backers and narrative.

    Rules are evaluated strongest-first: Binance-affiliated projects are
    always S; then combinations of hot narrative, Tier-1 VC count and low
    circulating mcap / FDV; valuation alone can still earn a B.
    Returns {"tier", "reason", "warnings"}.
    """
    t1 = count_vc_tier(vcs, TIER1_VCS)
    t2 = count_vc_tier(vcs, TIER2_VCS)  # NOTE(review): computed but unused by every rule below
    is_hot = narrative in HOT_NARRATIVES
    is_weak = narrative in WEAK_NARRATIVES
    circ_mcap = circ_mcap or 0  # treat None as 0
    fdv = fdv or 0

    warnings = []
    if is_weak:
        warnings.append(f"⚠️ {narrative} ")

    def verdict(tier, reason):
        return {"tier": tier, "reason": reason, "warnings": warnings}

    if is_darling:
        return verdict("S", "Binance(YZi/Binance Labs/CZ)")
    if is_hot and t1 >= 1 and fdv < 500_000_000:
        return verdict("S", f"({narrative})+ Tier1 VC")
    if t1 >= 2 and circ_mcap < 50_000_000 and fdv < 300_000_000:
        return verdict("S", "≥2 Tier1 ")
    if t1 >= 1 and circ_mcap < 10_000_000 and fdv < 100_000_000:
        return verdict("S", "Tier1 ")
    if is_hot and circ_mcap < 10_000_000 and fdv < 50_000_000:
        return verdict("S", f"({narrative})")
    if t1 >= 1 and circ_mcap < 20_000_000 and fdv < 200_000_000:
        return verdict("A", "Tier1 ")
    if circ_mcap < 50_000_000 and fdv < 500_000_000:
        return verdict("B", "")
    return verdict("C", "/")
178
179# ============================================================
180# database
181# ============================================================
182
def init_db():
    """Create the SQLite schema (projects / pushes / snapshots) if missing.

    Ensures the parent data directory exists first. The connection is closed
    in a finally block so a failed CREATE no longer leaks the handle.
    """
    Path(DB_PATH).parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(DB_PATH)
    try:
        c = conn.cursor()
        # One row per discovered project; tier starts as PENDING until rated.
        c.execute("""
        CREATE TABLE IF NOT EXISTS projects (
            id TEXT PRIMARY KEY,
            symbol TEXT NOT NULL,
            name TEXT,
            launch_time TEXT,
            source TEXT,
            raw_text TEXT,
            tier TEXT DEFAULT 'PENDING',
            tier_reason TEXT,
            narrative TEXT,
            narrative_desc TEXT,
            vcs_json TEXT DEFAULT '[]',
            is_darling INTEGER DEFAULT 0,
            open_price REAL,
            total_supply REAL,
            circulating_supply REAL,
            fdv REAL,
            circulating_mcap REAL,
            excluded INTEGER DEFAULT 0,
            exclude_reason TEXT,
            discovered_at TEXT,
            updated_at TEXT
        )""")
        # Log of Telegram pushes, used by has_pushed() for dedup.
        c.execute("""
        CREATE TABLE IF NOT EXISTS pushes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project_id TEXT NOT NULL,
            push_type TEXT,
            sent_at TEXT,
            content TEXT
        )""")
        # Periodic price/mcap/fdv observations per project.
        c.execute("""
        CREATE TABLE IF NOT EXISTS snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project_id TEXT NOT NULL,
            timestamp TEXT NOT NULL,
            price REAL,
            circulating_mcap REAL,
            fdv REAL
        )""")
        conn.commit()
    finally:
        conn.close()  # fix: previously leaked if any execute raised
230
def project_id(symbol: str, date_str: str) -> str:
    """Stable 16-hex-char project id derived from the uppercased symbol and a date string."""
    digest = hashlib.md5(f"{symbol.upper()}_{date_str}".encode())
    return digest.hexdigest()[:16]
233
def project_exists(pid: str) -> bool:
    """Return True if a project row with this id is already stored."""
    conn = sqlite3.connect(DB_PATH)
    row = conn.execute("SELECT 1 FROM projects WHERE id=?", (pid,)).fetchone()
    conn.close()
    return row is not None
239
def save_project(project: dict):
    """Insert a newly discovered project row; idempotent via INSERT OR IGNORE.

    Stamps both discovered_at and updated_at with the current UTC time.
    The VALUES tuple must stay aligned 1:1 with the 21-column list — edit
    both together.
    """
    conn = sqlite3.connect(DB_PATH)
    now = datetime.utcnow().isoformat()
    conn.execute("""
        INSERT OR IGNORE INTO projects
        (id, symbol, name, launch_time, source, raw_text, tier, tier_reason,
         narrative, narrative_desc, vcs_json, is_darling,
         open_price, total_supply, circulating_supply, fdv, circulating_mcap,
         excluded, exclude_reason, discovered_at, updated_at)
        VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
    """, (
        project["id"], project["symbol"], project.get("name"),
        project.get("launch_time"), project.get("source"), project.get("raw_text"),
        project.get("tier", "PENDING"), project.get("tier_reason"),
        project.get("narrative"), project.get("narrative_desc"),
        # vcs list is stored as JSON text; booleans as 0/1 integers.
        json.dumps(project.get("vcs", [])), int(project.get("is_darling", False)),
        project.get("open_price"), project.get("total_supply"),
        project.get("circulating_supply"), project.get("fdv"),
        project.get("circulating_mcap"),
        int(project.get("excluded", 0)), project.get("exclude_reason"),
        now, now,
    ))
    conn.commit()
    conn.close()
264
def update_project(pid: str, fields: dict):
    """Apply `fields` as column updates to one project row, stamping updated_at.

    No-op for an empty dict. Note: mutates the caller's dict by inserting
    the "updated_at" key.
    """
    if not fields:
        return
    fields["updated_at"] = datetime.utcnow().isoformat()
    assignments = ",".join(f"{col}=?" for col in fields)
    params = [*fields.values(), pid]
    conn = sqlite3.connect(DB_PATH)
    conn.execute(f"UPDATE projects SET {assignments} WHERE id=?", params)
    conn.commit()
    conn.close()
275
def get_project(pid: str) -> Optional[dict]:
    """Fetch one project row as a plain dict, or None when the id is unknown."""
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row  # so the row converts cleanly to a dict
    record = conn.execute("SELECT * FROM projects WHERE id=?", (pid,)).fetchone()
    conn.close()
    return None if not record else dict(record)
282
def list_pending() -> list:
    """All non-excluded projects still awaiting a rating, oldest discovery first."""
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    pending = conn.execute(
        "SELECT * FROM projects WHERE excluded=0 AND tier='PENDING' ORDER BY discovered_at"
    ).fetchall()
    conn.close()
    return [dict(record) for record in pending]
291
def list_active() -> list:
    """Rated, non-excluded projects that have a launch_time (the monitoring set).

    Excludes rows still in PENDING and those marked EXCLUDED or ERROR.
    """
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    records = conn.execute("""
        SELECT * FROM projects
        WHERE excluded=0 AND launch_time IS NOT NULL AND launch_time != ''
        AND tier NOT IN ('PENDING', 'EXCLUDED', 'ERROR')
    """).fetchall()
    conn.close()
    return [dict(record) for record in records]
303
def has_pushed(pid: str, push_type: str) -> bool:
    """True when a push of this type was already logged for the project (dedup check)."""
    conn = sqlite3.connect(DB_PATH)
    hit = conn.execute(
        "SELECT 1 FROM pushes WHERE project_id=? AND push_type=?", (pid, push_type)
    ).fetchone()
    conn.close()
    return hit is not None
311
def log_push(pid: str, push_type: str, content: str):
    """Record a sent notification so the same push is never repeated."""
    sent_at = datetime.utcnow().isoformat()
    conn = sqlite3.connect(DB_PATH)
    conn.execute(
        "INSERT INTO pushes (project_id, push_type, sent_at, content) VALUES (?,?,?,?)",
        (pid, push_type, sent_at, content)
    )
    conn.commit()
    conn.close()
320
def save_snapshot(pid: str, price: float, mcap: float, fdv: float):
    """Append one timestamped price / circulating-mcap / FDV observation for a project."""
    observed_at = datetime.utcnow().isoformat()
    conn = sqlite3.connect(DB_PATH)
    conn.execute(
        "INSERT INTO snapshots (project_id, timestamp, price, circulating_mcap, fdv) VALUES (?,?,?,?,?)",
        (pid, observed_at, price, mcap, fdv)
    )
    conn.commit()
    conn.close()
329
330# ============================================================
331# ============================================================
332
async def send_tg(text: str, silent: bool = False) -> bool:
    """Push a message to the configured Telegram chat (HTML parse mode).

    Returns True on HTTP 200. Logs and returns False when the bot token or
    chat id is missing, on a non-200 response, or on any transport error.
    `silent=True` sends without a notification sound.
    """
    if not TG_BOT_TOKEN or not TG_CHAT_ID:
        logger.error("TG_BOT_TOKEN TG_CHAT_ID configuration")
        return False
    endpoint = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendMessage"
    body = {
        "chat_id": TG_CHAT_ID,
        "text": text,
        "parse_mode": "HTML",
        "disable_notification": silent,
    }
    try:
        async with httpx.AsyncClient(timeout=15, headers=HEADERS) as client:
            resp = await client.post(endpoint, json=body)
    except Exception as e:
        logger.error(f"TGsend: {e}")
        return False
    if resp.status_code != 200:
        logger.error(f"TGsend {resp.status_code}: {resp.text[:200]}")
        return False
    return True
354
355# ============================================================
356# ============================================================
357
def is_trigger(title: str) -> tuple[bool, Optional[str]]:
    """Classify an announcement title against the keyword lists.

    Returns (True, None) when a trigger keyword hits, (False, reason) when an
    exclusion or Alpha-Box keyword hits, and (False, None) when nothing matched.

    Fix: empty keywords are skipped. The published keyword lists contain ""
    entries (stripped non-ASCII terms), and `"" in s` is True for every
    string — without the guard every title would be excluded unconditionally.
    """
    t = title.lower()
    for kw in EXCLUDE_KEYWORDS:
        if kw and kw.lower() in t:
            return False, f": {kw}"
    for kw in ALPHA_BOX_KEYWORDS:
        if kw and kw.lower() in t:
            return False, "Alpha Box "
    for kw in TRIGGER_KEYWORDS:
        if kw and kw.lower() in t:
            return True, None
    return False, None
370
def extract_symbol(title: str) -> Optional[str]:
    """Pull a 2-10 character uppercase/digit ticker out of an announcement title.

    Prefers a parenthesised token like "(ABC)"; otherwise falls back to the
    first bare uppercase/digit run. Returns None when neither form is found.
    """
    for pattern in (r"\(([A-Z0-9]{2,10})\)", r"(([A-Z0-9]{2,10}))"):
        hit = re.search(pattern, title)
        if hit:
            return hit.group(1)
    return None
379
def extract_name(title: str) -> Optional[str]:
    """Best-effort extraction of the project name preceding a parenthesised ticker.

    NOTE(review): the empty first alternative in the pattern makes the
    List/Launch prefix effectively optional — it looks like a stripped
    non-ASCII keyword from the published listing.
    """
    for pattern in (
        r"(?:|List|list|Launch|launch|featured)\s+([A-Za-z0-9 ]+?)\s*[\((]",
    ):
        hit = re.search(pattern, title, re.IGNORECASE)
        if hit:
            return hit.group(1).strip()
    return None
389
390# ============================================================
391# ============================================================
392
async def fetch_announcements(limit: int = 20) -> list:
    """Fetch recent Binance announcements across three CMS catalogs, deduped by code.

    Catalogs: 48 = New Cryptocurrency Listing, 161 = Latest Activities,
    93 = Latest News. Each article is tagged with "_catalog_id"; duplicates
    (same article code seen in multiple catalogs) keep their first occurrence.
    """
    collected = []
    for catalog_id in [48, 161, 93]:
        query = {"type": 1, "catalogId": catalog_id, "pageNo": 1, "pageSize": limit}
        try:
            async with httpx.AsyncClient(timeout=15, headers=HEADERS) as client:
                resp = await client.get(BINANCE_ANNOUNCEMENT_API, params=query)
                resp.raise_for_status()
                payload = resp.json()
            for catalog in payload.get("data", {}).get("catalogs", []):
                for article in catalog.get("articles", []):
                    article["_catalog_id"] = catalog_id
                    collected.append(article)
        except Exception as e:
            logger.warning(f"classification {catalog_id} : {e}")

    seen_codes = set()
    deduped = []
    for article in collected:
        code = article.get("code")
        if code and code not in seen_codes:
            seen_codes.add(code)
            deduped.append(article)
    return deduped
418
419# ============================================================
420# CoinGecko data
421# ============================================================
422
async def fetch_coingecko(symbol: str, project_name: str = "") -> dict:
    """Look up token market data on CoinGecko, with a Binance price fallback.

    Matching strategy: search by ticker symbol and (if different) project
    name; an exact name match wins outright, otherwise the best
    market-cap-ranked symbol or partial-name match is used. Returns a dict
    with found/price/fdv/mcap/total_supply/circ_supply/chain/contract keys
    (plus categories/description when a coin is found); `found` stays False
    when nothing matched or the API failed.
    """
    result = {"found": False, "price": None, "fdv": None, "mcap": None,
              "total_supply": None, "circ_supply": None, "chain": None, "contract": None}
    try:
        async with httpx.AsyncClient(timeout=15, headers=HEADERS) as client:
            queries = [symbol]
            if project_name and project_name.upper() != symbol.upper():
                queries.append(project_name)

            coin_id = None
            best_rank = 999999  # lower market_cap_rank = better candidate
            name_exact_match = None  # id of an exact project-name match, if any

            for query in queries:
                resp = await client.get("https://api.coingecko.com/api/v3/search",
                                        params={"query": query})
                if resp.status_code != 200:
                    continue
                coins = resp.json().get("coins", [])

                for c in coins:
                    c_sym = c.get("symbol", "").upper()
                    c_name = c.get("name", "").lower()
                    c_rank = c.get("market_cap_rank") or 999999

                    if project_name and c_name == project_name.lower():
                        name_exact_match = c["id"]

                    # Same ticker: keep the candidate with the best rank.
                    if c_sym == symbol.upper():
                        if c_rank < best_rank:
                            coin_id = c["id"]
                            best_rank = c_rank

                    # A partial name match can also win on rank.
                    if project_name and project_name.lower() in c_name:
                        if c_rank < best_rank:
                            coin_id = c["id"]
                            best_rank = c_rank

            # An exact name match overrides the rank-based selection.
            if name_exact_match:
                coin_id = name_exact_match

            if not coin_id:
                logger.info(f"CoinGecko {symbol}/{project_name}")
                return result

            logger.info(f"CoinGeckomatching: {symbol} -> {coin_id} (rank={best_rank})")

            # Fetch the coin detail (market data only; other sections disabled).
            resp2 = await client.get(
                f"https://api.coingecko.com/api/v3/coins/{coin_id}",
                params={"localization": "false", "tickers": "false",
                        "market_data": "true", "community_data": "false",
                        "developer_data": "false"}
            )
            if resp2.status_code == 429:
                # Rate-limited: wait once, then retry the identical request.
                await asyncio.sleep(5)
                resp2 = await client.get(
                    f"https://api.coingecko.com/api/v3/coins/{coin_id}",
                    params={"localization": "false", "tickers": "false",
                            "market_data": "true", "community_data": "false",
                            "developer_data": "false"}
                )
            if resp2.status_code != 200:
                return result
            d = resp2.json()
            md = d.get("market_data", {})
            result.update({
                "found": True,
                "price": (md.get("current_price") or {}).get("usd"),
                "fdv": (md.get("fully_diluted_valuation") or {}).get("usd"),
                "mcap": (md.get("market_cap") or {}).get("usd"),
                "total_supply": md.get("total_supply"),
                "circ_supply": md.get("circulating_supply"),
            })
            result["categories"] = d.get("categories", [])
            result["description"] = (d.get("description") or {}).get("en", "")[:500]
            # First platform entry with an address is taken as the home chain.
            platforms = d.get("platforms", {})
            for chain, addr in platforms.items():
                if addr:
                    result["chain"] = chain
                    result["contract"] = addr
                    break

            # CoinGecko has no price yet: try Binance spot/futures and derive
            # FDV/mcap from the known supplies.
            if not result["price"]:
                binance_price = await _fetch_binance_price(symbol, client)
                if binance_price:
                    result["price"] = binance_price
                    ts = result.get("total_supply") or 0
                    cs = result.get("circ_supply") or 0
                    if ts > 0:
                        result["fdv"] = binance_price * ts
                    if cs > 0:
                        result["mcap"] = binance_price * cs
                    logger.info(f"Binance {symbol}: ${binance_price}, FDV=${result.get('fdv',0):,.0f}")

    except Exception as e:
        logger.warning(f"CoinGeckoquery {symbol}: {e}")
    return result
521
async def _fetch_binance_price(symbol: str, client: httpx.AsyncClient) -> float:
    """Last price of SYMBOLUSDT from Binance spot, falling back to futures; 0.0 if neither works."""
    pair = f"{symbol.upper()}USDT"
    endpoints = (
        f"https://api.binance.com/api/v3/ticker/price",    # 1. spot
        f"https://fapi.binance.com/fapi/v1/ticker/price",  # 2. futures fallback
    )
    for endpoint in endpoints:
        try:
            resp = await client.get(endpoint, params={"symbol": pair})
            if resp.status_code == 200:
                return float(resp.json()["price"])
        except Exception:
            pass  # unreachable endpoint or unlisted pair: try the next source
    return 0.0
541
542# ============================================================
543# ============================================================
544
async def llm_extract(raw_text: str, symbol: str, name: str = "", cg_data: dict = None) -> dict:
    """Classify a new listing: narrative, backing VCs, "Binance darling" flag.

    Uses the Anthropic messages API when ANTHROPIC_API_KEY is set; otherwise
    falls back to keyword/category heuristics.  Always returns a dict shaped
    like ``fallback`` below — every failure path degrades to it.

    NOTE(review): the prompt/system strings below look garbled (non-ASCII
    characters stripped during extraction) — restore the original wording
    before trusting the LLM output.
    """
    # Safe default returned on any failure path.
    fallback = {
        "narrative": "unknown", "narrative_desc": "",
        "vcs": [], "is_darling": False, "exclude_reason": None,
    }

    cg_data = cg_data or {}
    categories = cg_data.get("categories", [])
    description = cg_data.get("description", "")

    # CoinGecko category names alone can reveal incubator backing.
    darling_cats = [c for c in categories if any(kw in c.lower() for kw in ["yzi labs", "binance labs"])]
    if darling_cats:
        fallback["is_darling"] = True

    if not ANTHROPIC_API_KEY:
        # Heuristic fallback: keyword scan + coarse category-to-narrative map.
        t = raw_text.lower()
        for kw in BINANCE_DARLING_KEYWORDS:
            if kw in t:
                fallback["is_darling"] = True
        cat_str = " ".join(categories).lower()
        if "defi" in cat_str: fallback["narrative"] = "defi"
        elif "ai" in cat_str: fallback["narrative"] = "ai_agent"
        elif "gaming" in cat_str or "gamefi" in cat_str: fallback["narrative"] = "gamefi"
        elif "meme" in cat_str: fallback["narrative"] = "meme"
        elif "rwa" in cat_str or "real world" in cat_str: fallback["narrative"] = "rwa"
        return fallback

    # Enrich the prompt with whatever CoinGecko data is available.
    extra_context = ""
    if categories:
        extra_context += f"\nCoinGeckoclassification: {', '.join(categories)}"
    if description:
        extra_context += f"\nproject: {description[:300]}"
    if cg_data.get("found"):
        extra_context += f"\ndata: FDV=${cg_data.get('fdv',0):,.0f}, MCap=${cg_data.get('mcap',0):,.0f}, =${cg_data.get('price',0)}"
        if cg_data.get("chain"):
            extra_context += f", ={cg_data['chain']}"

    system = "cryptocurrencyresearcher,fromBinanceprojectdatakey 。returnJSON,。"
    user = f"""Binanceproject:
token: {symbol}, project: {name or ""}
: {raw_text}
{extra_context}

returnJSON:
{{
  "narrative": "defi_perp|ai_agent|ai_defi|defai|zk_proof|infra|defi|rwa|gamefi|meme|social|stablecoin|unknown",
  "narrative_desc": "projectwhat、whatfeatures",
  "vcs": ["fromCoinGeckoclassification"],
  "is_darling": true/false,
  "exclude_reason": null|"already_tge"|"meme_only"
}}

judge:
- narrative: main class
- vcs: CoinGeckoclassification "XXX Portfolio" XXXas
- is_darling: YZi Labs/Binance Labs CZ/ true
- exclude_reason: projectmain CEX(Coinbase/OKX/Bybit)3"already_tge"。DEXBinance,already_tge。CoinGeckodataalready_tge。meme"meme_only"
"""

    try:
        async with httpx.AsyncClient(timeout=30, headers=HEADERS) as client:
            resp = await client.post(
                f"{ANTHROPIC_BASE_URL.rstrip('/')}/v1/messages",
                headers={
                    "x-api-key": ANTHROPIC_API_KEY,
                    "anthropic-version": "2023-06-01",
                    "content-type": "application/json",
                },
                json={
                    "model": ANTHROPIC_MODEL,
                    "max_tokens": 800,
                    "temperature": 0,
                    "system": system,
                    "messages": [{"role": "user", "content": user}],
                }
            )
            if resp.status_code != 200:
                logger.warning(f"LLMcall {resp.status_code}")
                return fallback
            data = resp.json()
            # Take the first text block from the response content.
            text = ""
            for block in data.get("content", []):
                if block.get("type") == "text":
                    text = block.get("text", "")
                    break
            text = text.strip()
            # Strip a markdown code fence if the model wrapped the JSON.
            if text.startswith("```"):
                lines = text.split("\n")
                text = "\n".join(lines[1:-1])
            return json.loads(text)
    except Exception as e:
        logger.warning(f"LLM: {e}")
        return fallback
639
640# ============================================================
641# ============================================================
642
643def _fmt_mcap(v):
644 if not v:
645 return "N/A"
646 if v >= 1e9:
647 return f"${v/1e9:.1f}B"
648 if v >= 1e6:
649 return f"${v/1e6:.1f}M"
650 if v >= 1e3:
651 return f"${v/1e3:.0f}K"
652 return f"${v:.0f}"
653
654def _fmt_price(v):
655 if not v:
656 return "N/A"
657 if v >= 1:
658 return f"${v:.2f}"
659 if v >= 0.01:
660 return f"${v:.4f}"
661 return f"${v:.6f}"
662
def fmt_discovery(p: dict) -> str:
    """Build the HTML discovery alert for a newly rated project."""
    tier = p.get("tier", "C")
    icon = TIER_ICONS.get(tier, "⚪")
    label = TIER_LABELS.get(tier, "")
    symbol = p["symbol"]
    name = p.get("name") or ""

    raw_vcs = p.get("vcs_json")
    if isinstance(raw_vcs, str):
        vcs = json.loads(raw_vcs)
    else:
        vcs = p.get("vcs", [])

    out = [
        f"{icon} <b>Alpha · ${symbol}</b> {icon}",
        f"📋 {label}",
        "",
        f"<b>{name}</b>" if name else "",
    ]

    # Narrative section
    if p.get("narrative_desc"):
        out.append(f"💡 {p['narrative_desc']}")
    if p.get("narrative") and p["narrative"] != "unknown":
        out.append(f"🏷 : {p['narrative']}")
    out.append("")

    # Market metrics — each line only when the value is present
    if p.get("fdv"):
        out.append(f"📊 FDV: {_fmt_mcap(p['fdv'])}")
    if p.get("circulating_mcap"):
        out.append(f"📊 : {_fmt_mcap(p['circulating_mcap'])}")
    if p.get("open_price"):
        out.append(f"💰 : {_fmt_price(p['open_price'])}")
    if p.get("total_supply") and p.get("circulating_supply"):
        circ_ratio = p["circulating_supply"] / p["total_supply"] * 100
        out.append(f"📦 initial: {circ_ratio:.1f}%")

    # Backers: top 5, star for tier-1 VCs
    if vcs:
        out.append("")
        out.append("🏛 <b></b>")
        for backer in vcs[:5]:
            starred = any(t in backer.lower() for t in TIER1_VCS)
            out.append(f" {'⭐' if starred else '·'} {backer}")

    if p.get("is_darling"):
        out.append("")
        out.append("🔥 <b>Binance</b>")

    if p.get("tier_reason"):
        out.append("")
        out.append(f"🎯 {p['tier_reason']}")

    out.append("")
    out.append(f"<i>📌 : {p.get('source', 'binance')}</i>")
    if p.get("raw_text"):
        out.append(f"<i>{p['raw_text'][:120]}</i>")

    return "\n".join(line for line in out if line is not None)
715
def fmt_countdown(p: dict, minutes: int) -> str:
    """Build the launch-countdown message; *minutes* is time to launch."""
    icon = TIER_ICONS.get(p.get("tier", "C"), "⚪")
    if minutes >= 60:
        clock = f"{minutes//60}h{minutes%60}m"
    else:
        clock = f"{minutes}m"
    parts = [
        f"{icon} <b></b>",
        f"<b>${p['symbol']}</b> · {p.get('name', '')}",
        f"⏰ <b>{clock}</b>",
    ]
    if p.get("fdv"):
        parts.append(f"FDV: {_fmt_mcap(p['fdv'])}")
    if minutes <= 30:
        parts.append("🔔 <b></b>")
    return "\n".join(parts)
729
def fmt_launch(p: dict, price: float, mcap: float, fdv: float) -> str:
    """Build the at-launch snapshot message (price / mcap / FDV)."""
    return "\n".join([
        f"🚀 <b>${p['symbol']} </b>",
        f": <b>{_fmt_price(price)}</b>",
        f": <b>{_fmt_mcap(mcap)}</b>",
        f"FDV: <b>{_fmt_mcap(fdv)}</b>",
    ])
738
def fmt_periodic(p: dict, idx: int, price: float, mcap: float, change_pct: float) -> str:
    """Build the periodic post-launch update (idx-th 30-minute checkpoint)."""
    trend = "📈" if change_pct > 0 else "📉"
    elapsed = 30 * idx
    body = [
        f"⏱ <b>${p['symbol']} · +{elapsed}min</b>",
        f": {_fmt_mcap(mcap)} ({trend} {change_pct:+.1f}%)",
        f": {_fmt_price(price)}",
    ]
    if change_pct >= 100:
        body.append("💡 <b>,</b>")
    elif change_pct <= -30:
        body.append("⚠️ ,evaluation")
    return "\n".join(body)
752
def fmt_anomaly(p: dict, atype: str, price: float, change_pct: float) -> str:
    """Build an anomaly alert ("double" = pump, "halve" = dump)."""
    emoji_by_type = {"double": "🚀", "halve": "🔻"}
    desc_by_type = {"double": "", "halve": ""}
    emoji = emoji_by_type.get(atype, "⚡")
    desc = desc_by_type.get(atype, "")
    return f"{emoji} <b>${p['symbol']} {desc}</b>\n: {change_pct:+.1f}%\n: {_fmt_price(price)}"
757
758# ============================================================
759# ============================================================
760
async def announcement_listener():
    """Poll Binance announcements and persist new Alpha projects.

    Runs forever.  Each cycle: fetch announcement titles, keep the ones
    is_trigger()/extract_symbol() accept, and save unseen projects with
    tier "PENDING" for the aggregation worker to enrich.
    """
    logger.info(f"📡 · {ANNOUNCEMENT_POLL_INTERVAL}s")
    while True:
        try:
            articles = await fetch_announcements()
            new_count = 0
            for art in articles:
                title = art.get("title", "")
                # `reason` is currently unused here.
                triggered, reason = is_trigger(title)
                if not triggered:
                    continue

                symbol = extract_symbol(title)
                if not symbol:
                    continue

                # releaseDate is epoch milliseconds.
                # NOTE(review): fromtimestamp() converts to the *local*
                # timezone while the fallback uses utcnow() — confirm the mix
                # is intended.
                release_ts = art.get("releaseDate")
                release_iso = datetime.fromtimestamp(release_ts / 1000).isoformat() if release_ts else ""
                launch_date = release_iso[:10] if release_iso else datetime.utcnow().date().isoformat()

                # Dedupe on (symbol, launch date).
                pid = project_id(symbol, launch_date)
                if project_exists(pid):
                    continue

                # New project starts as PENDING; enrichment happens later.
                project = {
                    "id": pid,
                    "symbol": symbol,
                    "name": extract_name(title),
                    "launch_time": release_iso,
                    "source": "binance_announcement",
                    "raw_text": title,
                    "tier": "PENDING",
                    "vcs": [],
                    "is_darling": False,
                    "excluded": 0,
                }
                save_project(project)
                new_count += 1
                logger.info(f"🆕 ${symbol}: {title[:80]}")

            if new_count:
                logger.info(f" {new_count} project")
        except Exception as e:
            logger.error(f": {e}", exc_info=True)

        await asyncio.sleep(ANNOUNCEMENT_POLL_INTERVAL)
808
809# ============================================================
810# ============================================================
811
async def aggregation_worker():
    """Enrich PENDING projects: CoinGecko data + LLM classification + rating.

    For each pending project: fetch CoinGecko market data, classify via
    llm_extract(), drop LLM-excluded projects, sanity-check suspicious
    market numbers, rate the project into a tier, persist everything, and
    push a one-time discovery alert.
    """
    logger.info(f"🧠 · {AGGREGATION_POLL_INTERVAL}s")
    while True:
        try:
            pending = list_pending()
            for p in pending:
                symbol = p["symbol"]
                try:
                    logger.info(f"📦 ${symbol}")

                    # 1. CoinGecko market data
                    cg = await fetch_coingecko(symbol, project_name=p.get('name', ''))
                    await asyncio.sleep(1)  # rate limiting

                    # 2. LLM (or heuristic) classification
                    llm = await llm_extract(p.get("raw_text", ""), symbol, p.get("name"), cg_data=cg)
                    await asyncio.sleep(1)

                    # Projects the LLM says to skip are marked and never rated.
                    if llm.get("exclude_reason") in ("already_tge", "meme_only"):
                        update_project(p["id"], {
                            "excluded": 1,
                            "exclude_reason": llm["exclude_reason"],
                            "tier": "EXCLUDED",
                        })
                        logger.info(f"⏭ ${symbol} : {llm['exclude_reason']}")
                        continue

                    # 3. Sanity-check CoinGecko numbers: an absurdly small FDV
                    # usually means the symbol matched the wrong coin.
                    cg_fdv = cg.get("fdv", 0) or 0
                    cg_mcap = cg.get("mcap", 0) or 0
                    data_suspect = False
                    data_warnings = []  # collected but currently not persisted

                    if cg.get("found") and cg_fdv > 0 and cg_fdv < 100_000:
                        data_suspect = True
                        data_warnings.append(f"FDV=${cg_fdv:,.0f},CoinGeckomatchingerror")
                        logger.warning(f"⚠️ {symbol} FDV=${cg_fdv:,.0f} ,data")

                    # Fully-circulating supply plus a tiny FDV is another
                    # wrong-match fingerprint for a brand-new listing.
                    if (cg.get("circ_supply") and cg.get("total_supply")
                            and cg["circ_supply"] == cg["total_supply"]
                            and cg_fdv < 1_000_000):
                        data_suspect = True

                    if data_suspect:
                        # Null out the numbers rather than rate on bad data.
                        cg_fdv = 0
                        cg_mcap = 0
                        logger.warning(f"⚠️ {symbol} CoinGeckodata,usageLLMjudge")

                    # 4. Rate into a tier from market data + LLM signals.
                    is_darling = llm.get("is_darling", False)
                    vcs = llm.get("vcs", [])
                    narrative = llm.get("narrative", "unknown")
                    rating = rate_project(
                        cg_mcap, cg_fdv,
                        vcs, narrative, is_darling
                    )

                    # Persist enrichment; suspect numeric fields become None.
                    update_project(p["id"], {
                        "tier": rating["tier"],
                        "tier_reason": rating["reason"],
                        "narrative": narrative,
                        "narrative_desc": llm.get("narrative_desc", ""),
                        "vcs_json": json.dumps(vcs),
                        "is_darling": int(is_darling),
                        "open_price": cg.get("price") if not data_suspect else None,
                        "total_supply": cg.get("total_supply") if not data_suspect else None,
                        "circulating_supply": cg.get("circ_supply") if not data_suspect else None,
                        "fdv": cg_fdv if cg_fdv else None,
                        "circulating_mcap": cg_mcap if cg_mcap else None,
                    })

                    # 5. One-time discovery push (silent for B/C tiers).
                    full = get_project(p["id"])
                    if full and not has_pushed(p["id"], "discovery"):
                        text = fmt_discovery(full)
                        silent = rating["tier"] in ("B", "C")
                        ok = await send_tg(text, silent=silent)
                        if ok:
                            log_push(p["id"], "discovery", text)
                            logger.info(f"✅ ${symbol} [{rating['tier']}]")

                except Exception as e:
                    # One bad project must not stop the batch.
                    logger.error(f" {symbol} : {e}", exc_info=True)
                    update_project(p["id"], {"tier": "ERROR", "tier_reason": str(e)[:100]})

        except Exception as e:
            logger.error(f"loop: {e}", exc_info=True)

        await asyncio.sleep(AGGREGATION_POLL_INTERVAL)
898
899# ============================================================
900# ============================================================
901
async def post_launch_monitor():
    """Countdown / launch / post-launch tracking loop for active projects."""
    logger.info(f"📊 monitoring · {MONITOR_POLL_INTERVAL}s")
    while True:
        try:
            for p in list_active():
                try:
                    await _monitor_project(p)
                except Exception as e:
                    # Isolate per-project failures from the rest of the batch.
                    logger.error(f"monitoring {p['symbol']} : {e}")
        except Exception as e:
            logger.error(f"monitoringloop: {e}", exc_info=True)

        await asyncio.sleep(MONITOR_POLL_INTERVAL)
917
async def _monitor_project(p: dict):
    """Push time-based alerts for one project around its launch time.

    Windows (relative to launch, naive UTC):
      T-3h ±5m      -> countdown (silent for B/C tiers)
      T-30m ±2.5m   -> countdown (audible)
      T-0 .. T+5m   -> launch snapshot (price/mcap/FDV)
      T+30/60/90/120m (±5m) -> periodic update + pump/dump anomaly alerts

    Each alert type is pushed at most once per project (has_pushed guard).
    """
    pid = p["id"]
    symbol = p["symbol"]
    launch_str = p.get("launch_time", "")
    if not launch_str:
        return

    try:
        # Strip "Z"/offset so the naive result compares against utcnow().
        launch = datetime.fromisoformat(launch_str.replace("Z", "").split("+")[0])
    except Exception:
        # Fixed: was a bare `except:`, which also swallowed CancelledError
        # and KeyboardInterrupt.  Exception keeps the skip-unparsable intent.
        return

    now = datetime.utcnow()
    delta_sec = (launch - now).total_seconds()

    # T-3h countdown
    if 3*3600 - 300 <= delta_sec <= 3*3600 + 300:
        if not has_pushed(pid, "t_minus_3h"):
            text = fmt_countdown(p, int(delta_sec / 60))
            ok = await send_tg(text, silent=p.get("tier") in ("B", "C"))
            if ok:
                log_push(pid, "t_minus_3h", text)

    # T-30m countdown (always audible)
    elif 30*60 - 150 <= delta_sec <= 30*60 + 150:
        if not has_pushed(pid, "t_minus_30m"):
            text = fmt_countdown(p, int(delta_sec / 60))
            ok = await send_tg(text, silent=False)
            if ok:
                log_push(pid, "t_minus_30m", text)

    # Launch window: snapshot the opening price/mcap/FDV
    elif -300 <= delta_sec <= 0:
        if not has_pushed(pid, "at_launch"):
            cg = await fetch_coingecko(symbol, project_name=p.get('name', ''))
            if cg.get("price"):
                text = fmt_launch(p, cg["price"], cg.get("mcap", 0), cg.get("fdv", 0))
                ok = await send_tg(text, silent=False)
                if ok:
                    log_push(pid, "at_launch", text)
                save_snapshot(pid, cg["price"], cg.get("mcap", 0), cg.get("fdv", 0))

    # First 2.5h after launch: 4 periodic checkpoints + anomaly alerts
    elif 0 < -delta_sec <= 2.5 * 3600:
        minutes_after = int(-delta_sec / 60)
        for idx, target in enumerate([30, 60, 90, 120], 1):
            if abs(minutes_after - target) <= 5:
                ptype = f"post_30m_{idx}"
                if not has_pushed(pid, ptype):
                    cg = await fetch_coingecko(symbol, project_name=p.get('name', ''))
                    if cg.get("price"):
                        # Change vs. the recorded open price (fallback: current).
                        open_price = p.get("open_price") or cg["price"]
                        change = ((cg["price"] - open_price) / open_price * 100) if open_price else 0
                        text = fmt_periodic(p, idx, cg["price"], cg.get("mcap", 0), change)
                        ok = await send_tg(text, silent=p.get("tier") in ("B", "C") and idx > 1)
                        if ok:
                            log_push(pid, ptype, text)
                        save_snapshot(pid, cg["price"], cg.get("mcap", 0), cg.get("fdv", 0))

                        # One-shot anomaly alerts: +100% (double) / -50% (halve)
                        if change >= 100 and not has_pushed(pid, "anomaly_double"):
                            t = fmt_anomaly(p, "double", cg["price"], change)
                            if await send_tg(t):
                                log_push(pid, "anomaly_double", t)
                        elif change <= -50 and not has_pushed(pid, "anomaly_halve"):
                            t = fmt_anomaly(p, "halve", cg["price"], change)
                            if await send_tg(t):
                                log_push(pid, "anomaly_halve", t)
                break
984
985# ============================================================
986# ============================================================
987
async def main():
    """Entry point: init the DB, verify Telegram, then run all three loops."""
    init_db()
    logger.info(f"📂 database: {DB_PATH}")

    # Startup ping so a broken Telegram config is visible immediately.
    sent = await send_tg("🎉 <b>Alpha Monitor v2 </b>\n\n📡 Binance...\n🔔 Alpha")
    if sent:
        logger.info("✅ TG")
    else:
        logger.warning("⚠️ TG,configuration")

    workers = {
        "announcements": announcement_listener,
        "aggregator": aggregation_worker,
        "monitor": post_launch_monitor,
    }
    tasks = [asyncio.create_task(fn(), name=label) for label, fn in workers.items()]

    banner = "=" * 50
    logger.info(banner)
    logger.info("🚀 Alpha Monitor v2 ")
    logger.info(f" 📡 : {ANNOUNCEMENT_POLL_INTERVAL}s")
    logger.info(f" 🧠 LLM: {'Sonnet' if ANTHROPIC_API_KEY else 'degradation()'}")
    logger.info(f" 🔔 TG: {'✅' if TG_BOT_TOKEN else '❌'}")
    logger.info(banner)

    try:
        await asyncio.gather(*tasks)
    except KeyboardInterrupt:
        for task in tasks:
            task.cancel()
1017
if __name__ == "__main__":
    # Run the async entry point; swallow Ctrl-C for a clean exit.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        pass
AI Autonomous Trading
Futures + Alpha Autonomous Trading v1
Date: 2026.04.29 Tags: Python · Binance Futures · Autonomous · Telegram
AI scans market → analyzes → virtual trades → monitors → reviews — fully autonomous
⚠️ RISK WARNING: This code has only been tested on virtual/paper trading. It has NO real trading experience and should NOT be used as a basis for live trading. Not validated with real funds — use at your own risk.
An AI that autonomously scans the entire Binance futures market every 30 seconds, detects anomalies, and makes virtual trades. 4 signal detection strategies (extreme negative funding rate → long squeeze, extreme positive funding → short, post-pump short, crash bounce). Before opening any position, runs a multi-dimensional environment check (BTC trend + Fear&Greed sentiment + OI attention + volume activity) — needs score ≥3/7 to proceed. Auto stop-loss/take-profit monitoring every 30 seconds.
Current results (honest): 4 closed trades, 75% win rate, +13.94U. But profit concentrated in one trade (IR short +45%), rest basically break-even. Position diversification insufficient — tends to stack same-direction same-logic trades. Still rule-based scoring, not true independent thinking.
Training path: scan → trade → close → review → find problems → improve → repeat. Goal: evolve from rule executor to independent-thinking trader.
Full source code
1#!/usr/bin/env python3
2"""
3"""
4
5import json
6import os
7import sys
8import time
9import requests
10from datetime import datetime, timezone, timedelta
11
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_FILE = os.path.join(SCRIPT_DIR, "trades.json")  # virtual trade ledger
SCANNER_STATE = os.path.join(SCRIPT_DIR, "scanner_state.json")  # cooldown/dedupe state
SCANNER_LOG = os.path.join(SCRIPT_DIR, "scanner.log")
INITIAL_BALANCE = 100.0  # starting virtual balance (USDT)
TZ_UTC8 = timezone(timedelta(hours=8))  # all timestamps recorded in UTC+8

# === configuration ===
MAX_OPEN_POSITIONS = 3  # max simultaneous open positions
POSITION_PCT = 30  # % of current balance committed per position
LEVERAGE = 3  # leverage multiplier applied to virtual PnL
COOLDOWN_HOURS = 4  # per-symbol re-entry cooldown after an open
MIN_VOLUME_M = 10  # minimum 24h quote volume (millions of USDT)
25
def load_tg_config():
    """Load Telegram config from the first .env found, overridden by OS env."""
    cfg = {}
    candidates = (
        os.path.join(SCRIPT_DIR, ".env"),
        os.path.join(os.getcwd(), ".env"),
    )
    for path in candidates:
        if not os.path.exists(path):
            continue
        with open(path) as fh:
            for raw in fh:
                raw = raw.strip()
                if raw.startswith('#') or '=' not in raw:
                    continue
                key, _, value = raw.partition('=')
                cfg[key] = value.strip().strip('"').strip("'")
        break  # only the first existing file is read
    # OS environment variables take precedence over file values.
    for key in ('TG_BOT_TOKEN', 'TELEGRAM_BOT_TOKEN', 'TG_CHAT_ID'):
        val = os.environ.get(key)
        if val:
            cfg[key] = val
    return cfg
48
def send_tg(text):
    """Best-effort push of *text* to Telegram (Markdown parse mode).

    Silently does nothing when bot token or chat id is missing.
    Never raises.  (Fixed: bare `except:` narrowed to Exception so
    SystemExit/KeyboardInterrupt propagate.)
    """
    try:
        env = load_tg_config()
        token = env.get('TG_BOT_TOKEN', env.get('TELEGRAM_BOT_TOKEN', ''))
        chat_id = env.get('TG_CHAT_ID', '')
        if not token or not chat_id:
            return  # not configured — skip quietly
        url = f"https://api.telegram.org/bot{token}/sendMessage"
        requests.post(url, json={
            "chat_id": chat_id,
            "text": text,
            "parse_mode": "Markdown",
        }, timeout=10)
    except Exception:
        pass  # notification failures must never break the scanner
66
def load_trades():
    """Read the trade ledger from DATA_FILE, or return a fresh empty one."""
    if not os.path.exists(DATA_FILE):
        return {"initial_balance": INITIAL_BALANCE, "trades": []}
    with open(DATA_FILE, "r", encoding="utf-8") as fh:
        return json.load(fh)
72
def save_trades(data):
    """Persist the trade ledger to DATA_FILE (UTF-8, pretty-printed)."""
    with open(DATA_FILE, "w", encoding="utf-8") as fh:
        json.dump(data, fh, ensure_ascii=False, indent=2)
76
def load_state():
    """Read scanner state (cooldowns, seen signals); defaults when absent."""
    if not os.path.exists(SCANNER_STATE):
        return {"last_opens": {}, "signals_seen": {}}
    with open(SCANNER_STATE, "r") as fh:
        return json.load(fh)
82
def save_state(state):
    """Persist scanner state to SCANNER_STATE as pretty-printed JSON."""
    with open(SCANNER_STATE, "w") as fh:
        json.dump(state, fh, ensure_ascii=False, indent=2)
86
def get_balance(data):
    """Current virtual balance: initial balance + realized PnL of closed trades."""
    realized = sum(
        t["pnl_usd"]
        for t in data["trades"]
        if t["status"] == "closed" and t["pnl_usd"] is not None
    )
    return data.get("initial_balance", INITIAL_BALANCE) + realized
93
def next_id(data):
    """Next zero-padded 3-digit trade id ("001" for an empty ledger)."""
    trades = data["trades"]
    if not trades:
        return "001"
    highest = max(int(t["id"]) for t in trades)
    return f"{highest + 1:03d}"
99
def now_str():
    """Current UTC+8 wall-clock time formatted as %Y-%m-%dT%H:%M:%S."""
    stamp = datetime.now(TZ_UTC8)
    return stamp.strftime("%Y-%m-%dT%H:%M:%S")
102
def log(msg):
    """Print *msg* with a UTC+8 timestamp and append it to SCANNER_LOG."""
    stamp = datetime.now(TZ_UTC8).strftime("%m-%d %H:%M:%S")
    entry = f"[{stamp}] {msg}"
    print(entry)
    with open(SCANNER_LOG, "a") as fh:
        fh.write(entry + "\n")
109
110# === BinanceAPI ===
def get_all_tickers():
    """All Binance USDT-M futures 24h tickers (raw JSON list)."""
    resp = requests.get("https://fapi.binance.com/fapi/v1/ticker/24hr", timeout=10)
    return resp.json()
115
def get_funding_rates():
    """Map of symbol -> last funding rate, expressed in percent."""
    resp = requests.get("https://fapi.binance.com/fapi/v1/premiumIndex", timeout=10)
    rates = {}
    for item in resp.json():
        rates[item['symbol']] = float(item['lastFundingRate']) * 100
    return rates
121
def get_funding_history(symbol, limit=8):
    """Last *limit* funding rates for *symbol*, as percentages."""
    resp = requests.get("https://fapi.binance.com/fapi/v1/fundingRate",
                        params={"symbol": symbol, "limit": limit}, timeout=10)
    return [float(item['fundingRate']) * 100 for item in resp.json()]
126
def get_open_interest(symbol):
    """Current open interest for *symbol* as reported by Binance futures."""
    resp = requests.get("https://fapi.binance.com/fapi/v1/openInterest",
                        params={"symbol": symbol}, timeout=10)
    return float(resp.json()['openInterest'])
132
def get_klines(symbol, interval="4h", limit=6):
    """Raw kline rows for *symbol* from the Binance futures klines endpoint."""
    resp = requests.get("https://fapi.binance.com/fapi/v1/klines",
                        params={"symbol": symbol, "interval": interval, "limit": limit},
                        timeout=10)
    return resp.json()
137
def detect_extreme_negative_funding(symbol, funding_rate, funding_rates_map):
    """
    Strategy 1: deeply negative funding rate -> long (short-squeeze setup).

    Trigger: current rate < -0.08% AND at least 4 of the last 8 funding
    periods below -0.03%.  Returns a signal dict or None.
    (Fixed: bare `except:` narrowed to Exception.)
    """
    if funding_rate >= -0.08:
        return None

    try:
        history = get_funding_history(symbol, 8)
        neg_count = sum(1 for r in history if r < -0.03)
        if neg_count < 4:
            return None

        avg_rate = sum(history) / len(history)

        # The more extreme the average rate, the stronger the signal.
        strength = "S" if avg_rate < -0.15 else "A" if avg_rate < -0.10 else "B"

        return {
            "type": "extreme_neg_funding",
            "direction": "long",
            "strength": strength,
            "reason": f"extreme funding ratedeep negative avg:{avg_rate:.4f}% consecutive{neg_count}/8 short squeezeprobability",
            "sl_pct": 0.08,  # 8% stop-loss
            "tp_pct": 0.12,  # 12% take-profit
        }
    except Exception:
        return None  # history fetch failed — no signal, best-effort
167
def detect_extreme_positive_funding(symbol, funding_rate, funding_rates_map):
    """
    Strategy 2: extremely positive funding rate -> short (crowded longs).

    Trigger: current rate > 0.10% AND at least 4 of the last 8 funding
    periods above 0.05%.  Returns a signal dict or None.
    (Fixed: bare `except:` narrowed to Exception.)
    """
    if funding_rate <= 0.10:
        return None

    try:
        history = get_funding_history(symbol, 8)
        pos_count = sum(1 for r in history if r > 0.05)
        if pos_count < 4:
            return None

        avg_rate = sum(history) / len(history)
        # Higher average rate -> stronger signal.
        strength = "S" if avg_rate > 0.20 else "A" if avg_rate > 0.12 else "B"

        return {
            "type": "extreme_pos_funding",
            "direction": "short",
            "strength": strength,
            "reason": f"extreme funding rate avg:{avg_rate:.4f}% consecutive{pos_count}/8high positive ",
            "sl_pct": 0.10,
            "tp_pct": 0.15,
        }
    except Exception:
        return None  # history fetch failed — no signal, best-effort
195
def detect_crash_bounce(ticker):
    """
    Strategy 3: crash then stabilization -> long (oversold bounce).

    Trigger: 24h change below -25% AND the latest 1h close >= the previous
    close.  Returns a signal dict or None.
    (Fixed: bare `except:` narrowed to Exception.)
    """
    change_pct = float(ticker['priceChangePercent'])
    if change_pct >= -25:
        return None

    symbol = ticker['symbol']
    try:
        klines = get_klines(symbol, "1h", 6)
        recent_closes = [float(k[4]) for k in klines[-3:]]
        if len(recent_closes) >= 2 and recent_closes[-1] >= recent_closes[-2]:
            return {
                "type": "crash_bounce",
                "direction": "long",
                "strength": "B",  # bounce plays are always B-strength
                "reason": f"24hcrash{change_pct:.1f}%stabilize oversold bounce",
                "sl_pct": 0.10,
                "tp_pct": 0.15,
            }
    except Exception:
        pass  # kline fetch failed — best-effort, no signal
    return None
221
def detect_pump_short(ticker):
    """
    Strategy 4: short a big pump that has started pulling back.

    Trigger: 24h change above +40% AND price already >= 10% below the
    high of the last six 1h candles.  Returns a signal dict or None.
    (Fixed: bare `except:` narrowed to Exception.)
    """
    change_pct = float(ticker['priceChangePercent'])
    if change_pct <= 40:
        return None

    symbol = ticker['symbol']
    try:
        klines = get_klines(symbol, "1h", 6)
        highs = [float(k[2]) for k in klines]
        closes = [float(k[4]) for k in klines]
        current = closes[-1]
        peak = max(highs)

        pullback = (peak - current) / peak * 100
        if pullback < 10:
            return None

        strength = "A" if change_pct > 80 else "B"

        return {
            "type": "pump_short",
            "direction": "short",
            "strength": strength,
            "reason": f"24hpump{change_pct:.1f}%pullback{pullback:.1f}% callbackprobability>85%",
            "sl_pct": 0.15,  # wider stop — pumped coins stay volatile
            "tp_pct": 0.20,
        }
    except Exception:
        pass  # kline fetch failed — best-effort, no signal
    return None
256
257# === environment check ===
def check_environment(symbol, signal):
    """
    Pre-open gate: score the market environment along several dimensions.

    Factors: BTC 24h trend (+/-1), Fear&Greed index (+/-1), open interest
    (+1), 24h volume (+/-1), plus a signal-strength bonus (S:+2, A:+1).
    A total score >= 3 is required to pass.

    Returns (passed, analysis_dict, strength); strength is the signal's
    own strength, passed through unchanged.
    (Fixed: three inner bare `except:` clauses narrowed to Exception.)
    """
    analysis = {
        "btc_env": "",
        "sentiment": "",
        "oi_check": "",
        "volume_check": "",
        "verdict": ""
    }
    score = 0  # total score; >= 3 required to open

    try:
        # --- BTC trend: don't fight the overall market direction ---
        btc_url = "https://fapi.binance.com/fapi/v1/ticker/24hr?symbol=BTCUSDT"
        btc = requests.get(btc_url, timeout=5).json()
        btc_chg = float(btc['priceChangePercent'])

        if signal["direction"] == "long":
            if btc_chg > -2:
                score += 1
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% +1"
            elif btc_chg < -5:
                score -= 1
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% crashlong -1"
            else:
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% 0"
        else:  # short
            if btc_chg < 2:
                score += 1
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% +1"
            elif btc_chg > 5:
                score -= 1
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% pumpshort -1"
            else:
                analysis["btc_env"] = f"BTC {btc_chg:+.1f}% 0"

        # --- Sentiment: Fear & Greed index (contrarian for longs) ---
        try:
            fng = requests.get("https://api.alternative.me/fng/", timeout=5).json()
            fng_val = int(fng['data'][0]['value'])
            if signal["direction"] == "long":
                if fng_val <= 25:
                    score += 1
                    analysis["sentiment"] = f"FGI={fng_val} long +1"
                elif fng_val >= 75:
                    score -= 1
                    analysis["sentiment"] = f"FGI={fng_val} long -1"
                else:
                    analysis["sentiment"] = f"FGI={fng_val} 0"
            else:
                if fng_val >= 75:
                    score += 1
                    analysis["sentiment"] = f"FGI={fng_val} short +1"
                elif fng_val <= 25:
                    score -= 1
                    analysis["sentiment"] = f"FGI={fng_val} short -1"
                else:
                    analysis["sentiment"] = f"FGI={fng_val} 0"
        except Exception:
            analysis["sentiment"] = "FGI 0"  # FGI unavailable — neutral

        # --- Open interest: reward liquid markets (> $5M notional) ---
        try:
            oi = get_open_interest(symbol)
            ticker = requests.get(f"https://fapi.binance.com/fapi/v1/ticker/24hr?symbol={symbol}", timeout=5).json()
            price = float(ticker['lastPrice'])
            oi_usd = oi * price

            if oi_usd > 5_000_000:  # OI > 5M USD
                score += 1
                analysis["oi_check"] = f"OI={oi_usd/1e6:.1f}M +1"
            else:
                analysis["oi_check"] = f"OI={oi_usd/1e6:.1f}M 0"
        except Exception:
            analysis["oi_check"] = "OI 0"  # OI unavailable — neutral

        # --- 24h volume ---
        # NOTE(review): `ticker` comes from the OI block above; if that block
        # failed, the NameError raised here is swallowed and the check scores
        # a neutral 0.  Fragile coupling — confirm and untangle.
        try:
            vol = float(ticker.get('quoteVolume', 0))
            if vol > 50_000_000:
                score += 1
                analysis["volume_check"] = f"24h={vol/1e6:.0f}M +1"
            elif vol > 20_000_000:
                analysis["volume_check"] = f"24h={vol/1e6:.0f}M 0"
            else:
                score -= 1
                analysis["volume_check"] = f"24h={vol/1e6:.0f}M -1"
        except Exception:
            analysis["volume_check"] = " 0"

        # --- Signal-strength bonus ---
        if signal["strength"] == "S":
            score += 2
        elif signal["strength"] == "A":
            score += 1

        analysis["verdict"] = f"score:{score}/7"

        if score >= 3:
            return True, analysis, signal["strength"]
        else:
            return False, analysis, signal["strength"]

    except Exception as e:
        # Any failure in the mandatory BTC check fails the whole gate.
        analysis["verdict"] = f":{e} "
        return False, analysis, signal["strength"]
362
def execute_open(data, state, symbol, price, signal):
    """Open a virtual position for *signal* after the environment check.

    On pass: sizes the position as POSITION_PCT% of the current virtual
    balance, computes SL/TP from the signal's percentages, appends the
    trade to the ledger, records the per-symbol cooldown timestamp, and
    pushes a Telegram notification.  On fail: logs and returns.
    """

    # Environment gate: multi-factor score must pass before any open.
    passed, env_analysis, strength = check_environment(symbol, signal)
    env_summary = " | ".join(v for v in env_analysis.values() if v)

    if not passed:
        log(f"via {symbol}: {env_summary}")
        return

    log(f"via {symbol}: {env_summary}")

    # Position sizing: fixed percentage of current virtual balance.
    balance = get_balance(data)
    position_usd = balance * POSITION_PCT / 100

    # Stop-loss / take-profit prices derived from the signal's percentages.
    if signal["direction"] == "long":
        sl = round(price * (1 - signal["sl_pct"]), 6)
        tp = round(price * (1 + signal["tp_pct"]), 6)
    else:
        sl = round(price * (1 + signal["sl_pct"]), 6)
        tp = round(price * (1 - signal["tp_pct"]), 6)

    # Full trade record, including the pre-open analysis for later review.
    trade = {
        "id": next_id(data),
        "symbol": symbol,
        "direction": signal["direction"],
        "leverage": LEVERAGE,
        "position_pct": POSITION_PCT,
        "position_usd": round(position_usd, 4),
        "notional_usd": round(position_usd * LEVERAGE, 4),
        "entry_price": price,
        "stop_loss": sl,
        "take_profit": tp,
        "entry_time": now_str(),
        "exit_price": None,
        "exit_time": None,
        "exit_reason": None,
        "pnl_pct": None,
        "pnl_usd": None,
        "status": "open",
        "pre_analysis": {
            "btc_env": env_analysis.get("btc_env", ""),
            "sentiment": env_analysis.get("sentiment", ""),
            "oi": env_analysis.get("oi_check", ""),
            "volume": env_analysis.get("volume_check", ""),
            "key_reason": f"[{signal['strength']}] {signal['reason']}",
            "risk": f"score:{env_analysis.get('verdict','')} policy:{signal['type']}"
        },
        "post_review": None
    }

    data["trades"].append(trade)
    save_trades(data)

    # Start the per-symbol cooldown clock.
    state["last_opens"][symbol] = now_str()
    save_state(state)

    direction_cn = "long" if signal["direction"] == "long" else "short"

    msg = f"""```
[scan] #{trade['id']}
: {symbol}
: {direction_cn} {LEVERAGE}x
: {price}
: {sl}
: {tp}
: {position_usd:.2f}U
: [{signal['strength']}] {signal['reason']}
: {trade['entry_time']}
```"""

    log(f" #{trade['id']} {symbol} {direction_cn} @ {price} | {signal['reason']}")
    send_tg(msg)
    print(msg)
438
def swap_weakest(data, state, open_positions, new_signal, tickers):
    """Close the worst-performing losing position to make room for a signal.

    Picks the open position with the lowest unleveraged PnL%.  Only swaps
    when that position is at a loss; otherwise keeps the book and drops
    the new signal.  On swap, marks the trade closed with leveraged PnL
    and immediately opens the new signal via execute_open().
    """
    ticker_map = {t['symbol']: float(t['lastPrice']) for t in tickers}

    worst_trade = None
    worst_pnl = float('inf')

    # Find the open position with the lowest unleveraged PnL%.
    for t in open_positions:
        price = ticker_map.get(t["symbol"])
        if price is None:
            continue
        if t["direction"] == "long":
            pnl_pct = (price - t["entry_price"]) / t["entry_price"] * 100
        else:
            pnl_pct = (t["entry_price"] - price) / t["entry_price"] * 100

        if pnl_pct < worst_pnl:
            worst_pnl = pnl_pct
            worst_trade = t
            # worst_price is only bound together with worst_trade.
            worst_price = price

    if worst_trade is None:
        return

    # Never close a winner just to take a new signal.
    if worst_pnl > 0:
        log(f"but positions, | : {new_signal['symbol']}")
        return

    # Leveraged PnL% for the closing record.
    if worst_trade["direction"] == "long":
        pnl_pct_lev = (worst_price - worst_trade["entry_price"]) / worst_trade["entry_price"] * 100 * worst_trade["leverage"]
    else:
        pnl_pct_lev = (worst_trade["entry_price"] - worst_price) / worst_trade["entry_price"] * 100 * worst_trade["leverage"]

    # Older records may lack position_usd; fall back to position_pct, then 30.
    pos_usd = worst_trade.get("position_usd", worst_trade.get("position_pct", 30))
    pnl_usd = round(pnl_pct_lev / 100 * pos_usd, 4)

    worst_trade["exit_price"] = worst_price
    worst_trade["exit_time"] = now_str()
    worst_trade["exit_reason"] = f"→{new_signal['symbol']}"
    worst_trade["pnl_pct"] = round(pnl_pct_lev, 2)
    worst_trade["pnl_usd"] = pnl_usd
    worst_trade["status"] = "closed"
    save_trades(data)

    direction_cn = "" if worst_trade["direction"] == "long" else ""
    msg = f"""```
[close] #{worst_trade['id']}
: {worst_trade['symbol']} {direction_cn}
: {worst_trade['entry_price']}
: {worst_price}
: {pnl_pct_lev:+.2f}% ({pnl_usd:+.2f}U)
: S{new_signal['symbol']}
```"""
    log(f" #{worst_trade['id']} {worst_trade['symbol']} {pnl_usd:+.2f}U → {new_signal['symbol']}")
    send_tg(msg)

    # Re-use the freed slot for the displacing signal.
    execute_open(data, state, new_signal["symbol"], new_signal["price"], new_signal)
495
def scan():
    """One market scan: gather signals, pick the strongest, maybe open.

    Flow: load ledger/state -> pull tickers + funding rates -> filter
    candidates (USDT pairs, volume floor, exclusion list, per-symbol
    cooldown) -> run the four detectors -> act on the single strongest
    signal: S/A opens a position; when the book is full an S signal may
    displace the weakest losing position via swap_weakest().

    Fixes: the original returned early when the book was full, which made
    the swap_weakest() branch unreachable; bare `except:` clauses narrowed
    to Exception.
    """
    data = load_trades()
    state = load_state()
    now = datetime.now(TZ_UTC8)

    open_positions = [t for t in data["trades"] if t["status"] == "open"]
    open_symbols = set(t["symbol"] for t in open_positions)

    try:
        tickers = get_all_tickers()
        funding_rates = get_funding_rates()
    except Exception as e:
        log(f"APIerror: {e}")
        return

    # Majors/stables excluded; liquidity floor applied.
    exclude = {"BTCUSDT", "ETHUSDT", "USDCUSDT", "FDUSDUSDT", "BTCDOMUSDT", "BTCSTUSDT"}
    candidates = [t for t in tickers
                  if t['symbol'].endswith('USDT')
                  and t['symbol'] not in exclude
                  and float(t['quoteVolume']) > MIN_VOLUME_M * 1e6]

    signals = []

    for ticker in candidates:
        symbol = ticker['symbol']

        if symbol in open_symbols:
            continue  # never stack a second position on the same symbol

        # Per-symbol cooldown after a previous open.
        last_open = state.get("last_opens", {}).get(symbol)
        if last_open:
            try:
                last_dt = datetime.fromisoformat(last_open)
                if last_dt.tzinfo is None:
                    last_dt = last_dt.replace(tzinfo=TZ_UTC8)
                if (now - last_dt).total_seconds() < COOLDOWN_HOURS * 3600:
                    continue
            except Exception:
                pass  # unparsable timestamp: skip the cooldown check

        fr = funding_rates.get(symbol, 0)

        # Run all four detectors; any of them may yield a signal.
        for run_detector in (
            lambda: detect_extreme_negative_funding(symbol, fr, funding_rates),
            lambda: detect_extreme_positive_funding(symbol, fr, funding_rates),
            lambda: detect_crash_bounce(ticker),
            lambda: detect_pump_short(ticker),
        ):
            try:
                signal = run_detector()
                if signal:
                    signal["symbol"] = symbol
                    signal["price"] = float(ticker['lastPrice'])
                    signal["volume_m"] = float(ticker['quoteVolume']) / 1e6
                    signals.append(signal)
            except Exception:
                continue  # one broken detector must not kill the scan

    if not signals:
        return

    # Strongest first: S before A before B.
    strength_order = {"S": 0, "A": 1, "B": 2}
    signals.sort(key=lambda x: strength_order.get(x["strength"], 3))

    best = signals[0]

    if best["strength"] == "B":
        log(f"B: {best['symbol']} {best['reason']}")
        return

    slots = MAX_OPEN_POSITIONS - len(open_positions)
    if slots > 0:
        execute_open(data, state, best["symbol"], best["price"], best)
    elif best["strength"] == "S":
        # Book is full: an S signal may displace the weakest loser.
        swap_weakest(data, state, open_positions, best, tickers)
574
575if __name__ == "__main__":
576 scan()
Utility Tools
VoiceKey — Speaker Verification
Date: 2026.04.27 Tags: Python · Security · Telegram · Speaker Verification
Protect your AI agent with voiceprint authentication
Why this tool? More and more people use Telegram to control AI agents (servers, trading bots, smart homes). If your Telegram account is compromised, an attacker can do anything your AI can. Passwords can be stolen or socially engineered — but your voice is unique and non-transferable. VoiceKey requires a voice message to verify the speaker's identity before unlocking any commands. Zero AI cost — it runs entirely on a local CPU.
Full source code
1#!/usr/bin/env python3
2"""
3VoiceKey — Voiceprint Authentication
4Speaker verification for Telegram bot security.
5
6Uses resemblyzer (GE2E model) for speaker embedding extraction
7and cosine similarity for verification.
8
9Zero AI cost — runs entirely on local CPU.
10
11Usage:
12 # Register voiceprint from audio files
13 python voicekey.py register --audio voice1.ogg voice2.ogg --owner "YourName"
14
15 # Verify a voice against stored voiceprint
16 python voicekey.py verify --audio test.ogg
17
18 # As a Python module
19 from voicekey import VoiceKey
20 vk = VoiceKey()
21 vk.register(["voice1.ogg", "voice2.ogg"], owner="YourName")
22 is_owner, score = vk.verify("test.ogg")
23"""
24
25import os
26import json
27import tempfile
28import argparse
29import numpy as np
30from pathlib import Path
31from datetime import datetime
32
# Lazy imports to speed up module load when not needed
_encoder = None


def _get_encoder():
    """Return the shared VoiceEncoder, creating it on first use (~1 s)."""
    global _encoder
    if _encoder is not None:
        return _encoder
    from resemblyzer import VoiceEncoder
    _encoder = VoiceEncoder()
    return _encoder
43
44def _audio_to_wav(audio_path: str) -> str:
45 """Convert any audio format to 16kHz mono WAV for processing."""
46 from pydub import AudioSegment
47
48 ext = Path(audio_path).suffix.lower()
49 if ext in ('.ogg', '.oga'):
50 audio = AudioSegment.from_ogg(audio_path)
51 elif ext == '.mp3':
52 audio = AudioSegment.from_mp3(audio_path)
53 elif ext == '.wav':
54 return audio_path # already wav
55 elif ext in ('.m4a', '.aac'):
56 audio = AudioSegment.from_file(audio_path, format=ext.lstrip('.'))
57 else:
58 audio = AudioSegment.from_file(audio_path)
59
60 # Convert to 16kHz mono
61 audio = audio.set_frame_rate(16000).set_channels(1)
62
63 tmp = tempfile.NamedTemporaryFile(suffix='.wav', delete=False)
64 audio.export(tmp.name, format='wav')
65 return tmp.name
66
def _extract_embedding(audio_path: str) -> np.ndarray:
    """Extract the speaker embedding for one audio file.

    Converts the audio to 16 kHz mono WAV, runs resemblyzer preprocessing,
    and returns the GE2E utterance embedding.

    Raises:
        ValueError: if the preprocessed audio is shorter than 0.1 s.
    """
    from resemblyzer import preprocess_wav

    encoder = _get_encoder()

    wav_path = _audio_to_wav(audio_path)
    try:
        wav = preprocess_wav(wav_path)
    finally:
        # Always delete the temp WAV, even if preprocessing raises.
        # _audio_to_wav returns the original path unchanged for .wav inputs,
        # which must not be deleted.
        if wav_path != audio_path:
            os.unlink(wav_path)

    if len(wav) < 1600:  # less than 0.1 s at 16 kHz
        raise ValueError(f"Audio too short: {len(wav)/16000:.1f}s (need >0.1s)")

    return encoder.embed_utterance(wav)
84
class VoiceKey:
    """
    Speaker verification backed by voice embeddings.

    A single owner voiceprint — the unit-normalized mean of one or more
    GE2E sample embeddings — is persisted under ``data_dir`` and compared
    against incoming audio via cosine similarity.

    Attributes:
        data_dir: Directory holding the persisted voiceprint files.
        threshold: Cosine similarity required to pass verification (default 0.75).
        voiceprint: Registered owner embedding (256-dim vector), or None.
    """

    def __init__(self, data_dir: str = None, threshold: float = 0.75):
        base = data_dir if data_dir is not None else os.path.expanduser("~/.hermes/voiceprint")
        self.data_dir = Path(base)
        self.data_dir.mkdir(parents=True, exist_ok=True)
        self.threshold = threshold
        self.voiceprint = None
        self.metadata = {}
        self._load()

    def _load(self):
        """Restore a previously registered voiceprint and its metadata, if present."""
        vp_file = self.data_dir / "voiceprint.npy"
        if not vp_file.exists():
            return
        self.voiceprint = np.load(str(vp_file))
        meta_file = self.data_dir / "voiceprint_meta.json"
        if meta_file.exists():
            with open(meta_file) as fh:
                self.metadata = json.load(fh)
            # A threshold stored at registration time wins over the ctor argument.
            self.threshold = self.metadata.get("threshold", self.threshold)

    @property
    def is_registered(self) -> bool:
        """Whether an owner voiceprint has been registered."""
        return self.voiceprint is not None

    def register(self, audio_paths: list, owner: str = "owner") -> dict:
        """
        Build and persist the owner voiceprint from several audio samples.

        Args:
            audio_paths: Audio file paths in any supported format (ogg, mp3, wav, ...).
            owner: Display name of the voiceprint owner.

        Returns:
            dict summarizing the registration (self-test scores, per-file status).

        Raises:
            ValueError: if none of the samples yields a usable embedding.
        """
        embeddings = []
        results = []

        for path in audio_paths:
            fname = os.path.basename(path)
            try:
                embeddings.append(_extract_embedding(path))
            except Exception as e:
                results.append({"file": fname, "status": "error", "error": str(e)})
            else:
                results.append({"file": fname, "status": "ok"})

        if not embeddings:
            raise ValueError("No valid audio samples provided")

        # The voiceprint is the mean embedding, re-normalized to unit length.
        voiceprint = np.mean(embeddings, axis=0)
        voiceprint = voiceprint / np.linalg.norm(voiceprint)

        np.save(str(self.data_dir / "voiceprint.npy"), voiceprint)

        self.metadata = {
            "owner": owner,
            "samples_used": len(embeddings),
            "embedding_dim": int(voiceprint.shape[0]),
            "threshold": self.threshold,
            "created": datetime.now().isoformat(),
        }
        with open(self.data_dir / "voiceprint_meta.json", "w") as fh:
            json.dump(self.metadata, fh, indent=2)

        self.voiceprint = voiceprint

        # Self-test: cosine similarity of each sample against the stored print.
        similarities = [
            float(np.dot(voiceprint, e / np.linalg.norm(e))) for e in embeddings
        ]

        return {
            "owner": owner,
            "samples": len(embeddings),
            "self_test_scores": similarities,
            "min_score": min(similarities),
            "details": results,
        }

    def verify(self, audio_path: str) -> tuple:
        """
        Check an audio sample against the registered voiceprint.

        Args:
            audio_path: Path of the audio file to test.

        Returns:
            tuple: (is_verified: bool, similarity_score: float)

        Raises:
            RuntimeError: if no voiceprint has been registered yet.
        """
        if not self.is_registered:
            raise RuntimeError("No voiceprint registered. Call register() first.")

        sample = _extract_embedding(audio_path)
        sample = sample / np.linalg.norm(sample)

        score = float(np.dot(self.voiceprint, sample))
        return score >= self.threshold, score

    def get_info(self) -> dict:
        """Return registration status plus any stored metadata."""
        if not self.is_registered:
            return {"registered": False}
        info = {"registered": True}
        info.update(self.metadata)
        return info
209
def main():
    """Command-line interface: register, verify, or inspect a voiceprint."""
    parser = argparse.ArgumentParser(
        description="VoiceKey — Speaker verification for security"
    )
    sub = parser.add_subparsers(dest="command")

    # register: build a voiceprint from one or more audio samples
    reg = sub.add_parser("register", help="Register voiceprint from audio files")
    reg.add_argument("--audio", nargs="+", required=True, help="Audio files (ogg/mp3/wav)")
    reg.add_argument("--owner", default="owner", help="Owner name")
    reg.add_argument("--data-dir", default=None, help="Data directory")
    reg.add_argument("--threshold", type=float, default=0.75, help="Verification threshold")

    # verify: score one audio file against the stored voiceprint
    ver = sub.add_parser("verify", help="Verify audio against voiceprint")
    ver.add_argument("--audio", required=True, help="Audio file to verify")
    ver.add_argument("--data-dir", default=None, help="Data directory")

    # info: dump stored registration metadata
    sub.add_parser("info", help="Show voiceprint info")

    args = parser.parse_args()
    cmd = args.command

    if cmd == "register":
        vk = VoiceKey(data_dir=args.data_dir, threshold=args.threshold)
        result = vk.register(args.audio, owner=args.owner)
        print(f"Registered voiceprint for: {result['owner']}")
        print(f"Samples used: {result['samples']}")
        print(f"Self-test scores: {[f'{s:.4f}' for s in result['self_test_scores']]}")
        print(f"Min score: {result['min_score']:.4f} (threshold: {args.threshold})")
        return

    if cmd == "verify":
        vk = VoiceKey(data_dir=args.data_dir)
        is_ok, score = vk.verify(args.audio)
        status = "PASS" if is_ok else "FAIL"
        print(f"[{status}] Similarity: {score:.4f} (threshold: {vk.threshold})")
        return

    if cmd == "info":
        for k, v in VoiceKey().get_info().items():
            print(f" {k}: {v}")
        return

    parser.print_help()


if __name__ == "__main__":
    main()
Closing notes
All seven snippets above are pulled from connectfarm1.com
on 2026-05-04. They share three design philosophies: (1) zero or near-zero AI cost — most use rule engines and free public APIs instead of LLMs; (2) pure Python, easy to run on a cheap VPS with crontab or a simple `while True` loop; (3) Telegram as the universal output — no dashboards, no front-end, just push messages where you actually read them. Combine the radars to triangulate signals, and treat the autonomous trader as a research sandbox, not a money printer.
Related Articles
- Free Claude Code: Use Claude Code CLI for Free with Any AI Provider — AI-powered coding assistant
- 28 Tools Behind a $1M Polymarket Trading Bot: Full Stack Breakdown — Prediction market trading automation
- Accept Payments in All Currencies with NowPayments — Crypto payment integration
Have questions or ideas? Feel free to leave a comment below. Sign in with GitHub to join the discussion.