This commit is contained in:
27942
2025-12-30 13:06:58 +08:00
parent 203102d1cb
commit 70903f2472
3409 changed files with 1162 additions and 14 deletions

195
1.json Normal file
View File

@@ -0,0 +1,195 @@
{
"question": "XRP Up or Down - December 30, 12:00AM-12:15AM ET",
"slug": "xrp-updown-15m-1767070800",
"market_id": "1054552",
"up_pct": 71.0,
"down_pct": 29.0,
"up": {
"token_id": "88333223784817577260404788890543585618923977728870152454078834106487198357156",
"ask": 0.72,
"bid": 0.7,
"book": {
"bids": [
{
"price": "0.7",
"size": "22.66"
},
{
"price": "0.69",
"size": "25"
},
{
"price": "0.68",
"size": "424"
},
{
"price": "0.67",
"size": "60"
},
{
"price": "0.66",
"size": "50"
},
{
"price": "0.65",
"size": "50"
},
{
"price": "0.64",
"size": "50"
},
{
"price": "0.63",
"size": "50"
},
{
"price": "0.62",
"size": "90"
},
{
"price": "0.61",
"size": "90"
}
],
"asks": [
{
"price": "0.72",
"size": "25"
},
{
"price": "0.73",
"size": "10"
},
{
"price": "0.74",
"size": "65"
},
{
"price": "0.75",
"size": "60"
},
{
"price": "0.76",
"size": "424"
},
{
"price": "0.77",
"size": "50"
},
{
"price": "0.78",
"size": "50"
},
{
"price": "0.79",
"size": "50"
},
{
"price": "0.8",
"size": "50"
},
{
"price": "0.81",
"size": "79.79"
}
]
},
"mid": 0.71,
"spread": 0.020000000000000018
},
"down": {
"token_id": "63095634778460194511452398951690172781969352726093505852484059082082045238495",
"ask": 0.3,
"bid": 0.28,
"book": {
"bids": [
{
"price": "0.28",
"size": "25"
},
{
"price": "0.27",
"size": "10"
},
{
"price": "0.26",
"size": "65"
},
{
"price": "0.25",
"size": "60"
},
{
"price": "0.24",
"size": "424"
},
{
"price": "0.23",
"size": "50"
},
{
"price": "0.22",
"size": "50"
},
{
"price": "0.21",
"size": "50"
},
{
"price": "0.2",
"size": "50"
},
{
"price": "0.19",
"size": "79.79"
}
],
"asks": [
{
"price": "0.3",
"size": "22.66"
},
{
"price": "0.31",
"size": "25"
},
{
"price": "0.32",
"size": "424"
},
{
"price": "0.33",
"size": "60"
},
{
"price": "0.34",
"size": "50"
},
{
"price": "0.35",
"size": "50"
},
{
"price": "0.36",
"size": "50"
},
{
"price": "0.37",
"size": "50"
},
{
"price": "0.38",
"size": "90"
},
{
"price": "0.39",
"size": "90"
}
]
},
"mid": 0.29000000000000004,
"spread": 0.019999999999999962
},
"bucket_ts": 1767070800,
"fetched_at": 1767071095
}

246
btc抓取.py Normal file
View File

@@ -0,0 +1,246 @@
import json
import time
import sqlite3
import requests
from typing import Any, Dict, Optional, Tuple
from concurrent.futures import ThreadPoolExecutor, as_completed
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
# Polymarket service endpoints.
GAMMA = "https://gamma-api.polymarket.com"  # Gamma metadata API (market lookup by slug)
CLOB = "https://clob.polymarket.com"  # CLOB trading API (price and order-book endpoints)
INTERVAL_SEC = 15 * 60  # 900 — length of one market bucket (15 minutes), used to build slugs
def make_session() -> requests.Session:
    """Build a requests session with automatic GET retries.

    Retries up to 5 times with exponential backoff on transient HTTP
    statuses (429/5xx) and sends a browser-like User-Agent.
    """
    retry_policy = Retry(
        total=5,
        backoff_factor=0.4,
        status_forcelist=(429, 500, 502, 503, 504),
        allowed_methods=("GET",),
        raise_on_status=False,
    )
    adapter = HTTPAdapter(max_retries=retry_policy, pool_connections=20, pool_maxsize=20)
    session = requests.Session()
    session.headers.update({"User-Agent": "Mozilla/5.0"})
    for scheme in ("https://", "http://"):
        session.mount(scheme, adapter)
    return session
def parse_jsonish_list(v):
    """Normalize *v* into a list.

    Accepts a real list (returned as-is), a JSON-array string (parsed),
    or a comma-separated string (split and stripped). Anything else,
    including None, yields an empty list.
    """
    if v is None:
        return []
    if isinstance(v, list):
        return v
    if not isinstance(v, str):
        return []
    text = v.strip()
    if text.startswith("[") and text.endswith("]"):
        return json.loads(text)
    return [part.strip() for part in text.split(",") if part.strip()]
def get_market_by_slug(session: requests.Session, slug: str) -> Dict[str, Any]:
    """Fetch Gamma market metadata for *slug*.

    Raises FileNotFoundError on a 404 (unknown slug) so callers can
    probe candidate slugs; other HTTP errors raise via raise_for_status.
    """
    response = session.get(f"{GAMMA}/markets/slug/{slug}", timeout=20)
    if response.status_code == 404:
        raise FileNotFoundError(slug)
    response.raise_for_status()
    return response.json()
def get_price(session: requests.Session, token_id: str, side: str) -> Optional[float]:
    """Return the CLOB price for *token_id* on *side* ("buy"/"sell"), or None if absent."""
    response = session.get(
        f"{CLOB}/price", params={"token_id": token_id, "side": side}, timeout=20
    )
    response.raise_for_status()
    raw = response.json().get("price")
    if raw is None:
        return None
    return float(raw)
def get_book(session: requests.Session, token_id: str) -> Dict[str, Any]:
    """Return the raw CLOB order book for *token_id* as parsed JSON."""
    response = session.get(f"{CLOB}/book", params={"token_id": token_id}, timeout=20)
    response.raise_for_status()
    return response.json()
def compute_mid(bid: Optional[float], ask: Optional[float]) -> Tuple[Optional[float], Optional[float]]:
    """Return (mid, spread) from best bid/ask.

    With both sides present: midpoint and ask-bid spread. With one side
    missing: that single price and a None spread. With neither: (None, None).
    """
    if bid is not None and ask is not None:
        return (bid + ask) / 2.0, ask - bid
    if bid is not None:
        return bid, None
    if ask is not None:
        return ask, None
    return None, None
def trim_book(book: Dict[str, Any], top_n: int = 10) -> Dict[str, Any]:
    """Keep only the best *top_n* levels per side of an order book.

    Bids are sorted best-first (descending price), asks ascending;
    missing/None sides become empty lists.
    """
    def _price(level):
        return float(level["price"])

    best_bids = sorted(book.get("bids") or [], key=_price, reverse=True)
    best_asks = sorted(book.get("asks") or [], key=_price)
    return {"bids": best_bids[:top_n], "asks": best_asks[:top_n]}
def fetch_token_bundle(session: requests.Session, token_id: str, top_n: int = 10) -> Dict[str, Any]:
    """Fetch bid, ask and order book for one token concurrently.

    Returns {"token_id", "bid", "ask", "book", "mid", "spread"} with the
    book trimmed to *top_n* levels per side.
    """
    bundle: Dict[str, Any] = {"token_id": token_id}
    with ThreadPoolExecutor(max_workers=3) as pool:
        # NOTE: "buy" side is stored as bid, "sell" side as ask (per the
        # original mapping) — confirm against the CLOB /price semantics.
        pending = {
            pool.submit(get_price, session, token_id, "buy"): "bid",
            pool.submit(get_price, session, token_id, "sell"): "ask",
            pool.submit(get_book, session, token_id): "book",
        }
        for done in as_completed(pending):
            bundle[pending[done]] = done.result()
    bundle["mid"], bundle["spread"] = compute_mid(bundle.get("bid"), bundle.get("ask"))
    bundle["book"] = trim_book(bundle.get("book") or {}, top_n=top_n)
    return bundle
def current_bucket_ts(interval_sec: int = INTERVAL_SEC, mode: str = "floor") -> int:
    """Current unix time snapped to an *interval_sec* boundary.

    mode="floor" (default) rounds down to the bucket start; mode="ceil"
    rounds up to the next boundary (exact boundaries stay unchanged).
    """
    now = int(time.time())
    remainder = now % interval_sec
    floored = now - remainder
    if mode == "ceil" and remainder:
        return floored + interval_sec
    return floored
def build_slug(prefix: str, bucket_ts: int) -> str:
    """Join a market-slug prefix and bucket timestamp, e.g. "btc-updown-15m-1767070800"."""
    return "-".join((prefix, str(bucket_ts)))
def fetch_updown_by_slug(session: requests.Session, slug: str, decimals: int = 0, top_n: int = 10) -> Dict[str, Any]:
    """Fetch both outcome tokens of an up/down market and derive percentages.

    Parameters:
        session: prepared HTTP session (see make_session).
        slug: Gamma market slug, e.g. "btc-updown-15m-1767070800".
        decimals: rounding precision for up/down percentages.
        top_n: order-book depth kept per side.

    Returns a snapshot dict; when either mid price is missing the dict
    carries an "error" key instead of percentages.

    Raises RuntimeError when fewer than two token ids are present and
    propagates FileNotFoundError from get_market_by_slug for unknown slugs.
    """
    m = get_market_by_slug(session, slug)
    outcomes = [str(x) for x in parse_jsonish_list(m.get("outcomes"))]
    token_ids = [str(x) for x in parse_jsonish_list(m.get("clobTokenIds"))]
    if len(token_ids) < 2:
        raise RuntimeError(f"clobTokenIds missing/too short: {token_ids}")
    token_map = dict(zip(outcomes, token_ids))
    # Fall back to positional ids when outcome labels are neither Up/Down nor Yes/No.
    up_id = token_map.get("Up") or token_map.get("Yes") or token_ids[0]
    dn_id = token_map.get("Down") or token_map.get("No") or token_ids[1]
    # BUG FIX: the original did `ex.submit(...).result()` twice in a row,
    # blocking on the first fetch before submitting the second — the two
    # token fetches ran serially. Submit both futures first, then wait,
    # so they genuinely overlap.
    with ThreadPoolExecutor(max_workers=2) as ex:
        up_future = ex.submit(fetch_token_bundle, session, up_id, top_n)
        dn_future = ex.submit(fetch_token_bundle, session, dn_id, top_n)
        up = up_future.result()
        dn = dn_future.result()
    if up["mid"] is None or dn["mid"] is None:
        return {
            "error": "missing mid price",
            "slug": slug,
            "market_id": m.get("id"),
            "question": m.get("question"),
            "up": up,
            "down": dn,
        }
    # Normalize the two mids so the percentages sum to ~100.
    total = float(up["mid"]) + float(dn["mid"])
    up_pct = round(float(up["mid"]) / total * 100, decimals)
    dn_pct = round(float(dn["mid"]) / total * 100, decimals)
    return {
        "question": m.get("question"),
        "slug": m.get("slug"),
        "market_id": m.get("id"),
        "up_pct": up_pct,
        "down_pct": dn_pct,
        "up": up,
        "down": dn,
    }
def fetch_current_market(
    slug_prefix: str = "eth-updown-15m",
    decimals: int = 0,
    top_n: int = 10,
    ts_mode: str = "floor",
    probe_offsets: Tuple[int, ...] = (0, -INTERVAL_SEC, INTERVAL_SEC),
) -> Dict[str, Any]:
    """Snapshot the up/down market for the current 15-minute bucket.

    Probes `base_ts + offset` for each offset in *probe_offsets* and
    returns the first market whose slug resolves, annotated with
    bucket_ts and fetched_at. Raises RuntimeError when no probed slug
    exists (the last FileNotFoundError is included in the message).
    """
    session = make_session()
    base_ts = current_bucket_ts(INTERVAL_SEC, mode=ts_mode)
    fetched_at = int(time.time())  # wall-clock time of this capture
    last_err: Optional[Exception] = None
    for offset in probe_offsets:
        bucket_ts = base_ts + offset
        slug = build_slug(slug_prefix, bucket_ts)
        try:
            snapshot = fetch_updown_by_slug(session, slug, decimals=decimals, top_n=top_n)
        except FileNotFoundError as err:
            last_err = err
            continue
        snapshot["bucket_ts"] = bucket_ts
        snapshot["fetched_at"] = fetched_at
        return snapshot
    raise RuntimeError(f"Could not find market for current bucket. last_err={last_err}")
# ----------------------------
# SQLite (3 columns)
# ----------------------------
# Schema for snapshot storage: one row per (bucket_ts, fetched_at) pair.
# Re-formatted from the original one-token-per-line dump (and its stray
# trailing line-continuation backslash) into readable SQL; the schema
# itself is unchanged.
DDL = """
CREATE TABLE IF NOT EXISTS pm_snapshots (
    bucket_ts  INTEGER NOT NULL,  -- bucket start time (unix seconds)
    fetched_at INTEGER NOT NULL,  -- snapshot capture time (unix seconds)
    raw_json   TEXT    NOT NULL,  -- full snapshot payload, JSON-encoded
    PRIMARY KEY (bucket_ts, fetched_at)
);
CREATE INDEX IF NOT EXISTS idx_pm_snapshots_fetched_at ON pm_snapshots(fetched_at);
"""
def init_db(db_path: str = "polymarket.db") -> None:
    """Ensure the pm_snapshots table and its index exist in *db_path*.

    Opens a short-lived connection, enables WAL journal mode, runs the
    idempotent DDL script, and always closes the connection.
    """
    connection = sqlite3.connect(db_path)
    try:
        connection.execute("PRAGMA journal_mode=WAL;")
        connection.executescript(DDL)
        connection.commit()
    finally:
        connection.close()
def save_snapshot(bucket_ts: int, fetched_at: int, raw: Dict[str, Any], db_path: str = "polymarket.db") -> None:
    """Persist one snapshot row into pm_snapshots.

    Duplicate (bucket_ts, fetched_at) pairs are silently skipped via
    INSERT OR IGNORE; the payload is stored as JSON text.
    """
    payload = json.dumps(raw, ensure_ascii=False)
    connection = sqlite3.connect(db_path)
    try:
        connection.execute("PRAGMA journal_mode=WAL;")
        connection.execute(
            "INSERT OR IGNORE INTO pm_snapshots(bucket_ts, fetched_at, raw_json) "
            "VALUES (?, ?, ?)",
            (int(bucket_ts), int(fetched_at), payload),
        )
        connection.commit()
    finally:
        connection.close()
if __name__ == "__main__":
    # Entry point: initialize the SQLite store, then poll the current BTC
    # up/down 15-minute market in an endless loop, persisting one snapshot
    # per iteration.
    # NOTE(review): there is no time.sleep() in this loop, so it re-polls
    # the API back-to-back as fast as requests complete — confirm this is
    # intended and not an accidental hammer on the endpoints.
    init_db("polymarket.db")
    while True:
        data = fetch_current_market(
            slug_prefix="btc-updown-15m",
            decimals=0,
            top_n=10,
            ts_mode="floor",  # switch to 'ceil' if the next bucket should be targeted
            probe_offsets=(0, -900, 900),  # try the current bucket first, then previous/next
        )
        save_snapshot(data["bucket_ts"], data["fetched_at"], data, "polymarket.db")
        print("saved:", data["bucket_ts"], data["fetched_at"])

View File

@@ -141,4 +141,4 @@ def polymarket_updown(slug: str, decimals: int = 0, top_n: int = 10) -> Dict[str
}
if __name__ == "__main__":
print(polymarket_updown("eth-updown-15m-1766912400", decimals=0, top_n=10))
print(polymarket_updown("eth-updown-15m-1767069900", decimals=0, top_n=10))

View File

@@ -75,4 +75,4 @@ def web_like_up_down(slug: str, decimals=0):
}
if __name__ == "__main__":
print(web_like_up_down("eth-updown-15m-1766912400", decimals=0))
print(web_like_up_down("eth-updown-15m-1767069900", decimals=0))

BIN
polymarket.db Normal file

Binary file not shown.

BIN
polymarket.db-shm Normal file

Binary file not shown.

BIN
polymarket.db-wal Normal file

Binary file not shown.

246
sol抓取.py Normal file
View File

@@ -0,0 +1,246 @@
import json
import time
import sqlite3
import requests
from typing import Any, Dict, Optional, Tuple
from concurrent.futures import ThreadPoolExecutor, as_completed
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
# Polymarket service endpoints.
GAMMA = "https://gamma-api.polymarket.com"  # Gamma metadata API (market lookup by slug)
CLOB = "https://clob.polymarket.com"  # CLOB trading API (price and order-book endpoints)
INTERVAL_SEC = 15 * 60  # 900 — length of one market bucket (15 minutes), used to build slugs
def make_session() -> requests.Session:
    """Build a requests session with automatic GET retries.

    Retries up to 5 times with exponential backoff on transient HTTP
    statuses (429/5xx) and sends a browser-like User-Agent.
    """
    retry_policy = Retry(
        total=5,
        backoff_factor=0.4,
        status_forcelist=(429, 500, 502, 503, 504),
        allowed_methods=("GET",),
        raise_on_status=False,
    )
    adapter = HTTPAdapter(max_retries=retry_policy, pool_connections=20, pool_maxsize=20)
    session = requests.Session()
    session.headers.update({"User-Agent": "Mozilla/5.0"})
    for scheme in ("https://", "http://"):
        session.mount(scheme, adapter)
    return session
def parse_jsonish_list(v):
    """Normalize *v* into a list.

    Accepts a real list (returned as-is), a JSON-array string (parsed),
    or a comma-separated string (split and stripped). Anything else,
    including None, yields an empty list.
    """
    if v is None:
        return []
    if isinstance(v, list):
        return v
    if not isinstance(v, str):
        return []
    text = v.strip()
    if text.startswith("[") and text.endswith("]"):
        return json.loads(text)
    return [part.strip() for part in text.split(",") if part.strip()]
def get_market_by_slug(session: requests.Session, slug: str) -> Dict[str, Any]:
    """Fetch Gamma market metadata for *slug*.

    Raises FileNotFoundError on a 404 (unknown slug) so callers can
    probe candidate slugs; other HTTP errors raise via raise_for_status.
    """
    response = session.get(f"{GAMMA}/markets/slug/{slug}", timeout=20)
    if response.status_code == 404:
        raise FileNotFoundError(slug)
    response.raise_for_status()
    return response.json()
def get_price(session: requests.Session, token_id: str, side: str) -> Optional[float]:
    """Return the CLOB price for *token_id* on *side* ("buy"/"sell"), or None if absent."""
    response = session.get(
        f"{CLOB}/price", params={"token_id": token_id, "side": side}, timeout=20
    )
    response.raise_for_status()
    raw = response.json().get("price")
    if raw is None:
        return None
    return float(raw)
def get_book(session: requests.Session, token_id: str) -> Dict[str, Any]:
    """Return the raw CLOB order book for *token_id* as parsed JSON."""
    response = session.get(f"{CLOB}/book", params={"token_id": token_id}, timeout=20)
    response.raise_for_status()
    return response.json()
def compute_mid(bid: Optional[float], ask: Optional[float]) -> Tuple[Optional[float], Optional[float]]:
    """Return (mid, spread) from best bid/ask.

    With both sides present: midpoint and ask-bid spread. With one side
    missing: that single price and a None spread. With neither: (None, None).
    """
    if bid is not None and ask is not None:
        return (bid + ask) / 2.0, ask - bid
    if bid is not None:
        return bid, None
    if ask is not None:
        return ask, None
    return None, None
def trim_book(book: Dict[str, Any], top_n: int = 10) -> Dict[str, Any]:
    """Keep only the best *top_n* levels per side of an order book.

    Bids are sorted best-first (descending price), asks ascending;
    missing/None sides become empty lists.
    """
    def _price(level):
        return float(level["price"])

    best_bids = sorted(book.get("bids") or [], key=_price, reverse=True)
    best_asks = sorted(book.get("asks") or [], key=_price)
    return {"bids": best_bids[:top_n], "asks": best_asks[:top_n]}
def fetch_token_bundle(session: requests.Session, token_id: str, top_n: int = 10) -> Dict[str, Any]:
    """Fetch bid, ask and order book for one token concurrently.

    Returns {"token_id", "bid", "ask", "book", "mid", "spread"} with the
    book trimmed to *top_n* levels per side.
    """
    bundle: Dict[str, Any] = {"token_id": token_id}
    with ThreadPoolExecutor(max_workers=3) as pool:
        # NOTE: "buy" side is stored as bid, "sell" side as ask (per the
        # original mapping) — confirm against the CLOB /price semantics.
        pending = {
            pool.submit(get_price, session, token_id, "buy"): "bid",
            pool.submit(get_price, session, token_id, "sell"): "ask",
            pool.submit(get_book, session, token_id): "book",
        }
        for done in as_completed(pending):
            bundle[pending[done]] = done.result()
    bundle["mid"], bundle["spread"] = compute_mid(bundle.get("bid"), bundle.get("ask"))
    bundle["book"] = trim_book(bundle.get("book") or {}, top_n=top_n)
    return bundle
def current_bucket_ts(interval_sec: int = INTERVAL_SEC, mode: str = "floor") -> int:
    """Current unix time snapped to an *interval_sec* boundary.

    mode="floor" (default) rounds down to the bucket start; mode="ceil"
    rounds up to the next boundary (exact boundaries stay unchanged).
    """
    now = int(time.time())
    remainder = now % interval_sec
    floored = now - remainder
    if mode == "ceil" and remainder:
        return floored + interval_sec
    return floored
def build_slug(prefix: str, bucket_ts: int) -> str:
    """Join a market-slug prefix and bucket timestamp, e.g. "sol-updown-15m-1767070800"."""
    return "-".join((prefix, str(bucket_ts)))
def fetch_updown_by_slug(session: requests.Session, slug: str, decimals: int = 0, top_n: int = 10) -> Dict[str, Any]:
    """Fetch both outcome tokens of an up/down market and derive percentages.

    Parameters:
        session: prepared HTTP session (see make_session).
        slug: Gamma market slug, e.g. "sol-updown-15m-1767070800".
        decimals: rounding precision for up/down percentages.
        top_n: order-book depth kept per side.

    Returns a snapshot dict; when either mid price is missing the dict
    carries an "error" key instead of percentages.

    Raises RuntimeError when fewer than two token ids are present and
    propagates FileNotFoundError from get_market_by_slug for unknown slugs.
    """
    m = get_market_by_slug(session, slug)
    outcomes = [str(x) for x in parse_jsonish_list(m.get("outcomes"))]
    token_ids = [str(x) for x in parse_jsonish_list(m.get("clobTokenIds"))]
    if len(token_ids) < 2:
        raise RuntimeError(f"clobTokenIds missing/too short: {token_ids}")
    token_map = dict(zip(outcomes, token_ids))
    # Fall back to positional ids when outcome labels are neither Up/Down nor Yes/No.
    up_id = token_map.get("Up") or token_map.get("Yes") or token_ids[0]
    dn_id = token_map.get("Down") or token_map.get("No") or token_ids[1]
    # BUG FIX: the original did `ex.submit(...).result()` twice in a row,
    # blocking on the first fetch before submitting the second — the two
    # token fetches ran serially. Submit both futures first, then wait,
    # so they genuinely overlap.
    with ThreadPoolExecutor(max_workers=2) as ex:
        up_future = ex.submit(fetch_token_bundle, session, up_id, top_n)
        dn_future = ex.submit(fetch_token_bundle, session, dn_id, top_n)
        up = up_future.result()
        dn = dn_future.result()
    if up["mid"] is None or dn["mid"] is None:
        return {
            "error": "missing mid price",
            "slug": slug,
            "market_id": m.get("id"),
            "question": m.get("question"),
            "up": up,
            "down": dn,
        }
    # Normalize the two mids so the percentages sum to ~100.
    total = float(up["mid"]) + float(dn["mid"])
    up_pct = round(float(up["mid"]) / total * 100, decimals)
    dn_pct = round(float(dn["mid"]) / total * 100, decimals)
    return {
        "question": m.get("question"),
        "slug": m.get("slug"),
        "market_id": m.get("id"),
        "up_pct": up_pct,
        "down_pct": dn_pct,
        "up": up,
        "down": dn,
    }
def fetch_current_market(
    slug_prefix: str = "eth-updown-15m",
    decimals: int = 0,
    top_n: int = 10,
    ts_mode: str = "floor",
    probe_offsets: Tuple[int, ...] = (0, -INTERVAL_SEC, INTERVAL_SEC),
) -> Dict[str, Any]:
    """Snapshot the up/down market for the current 15-minute bucket.

    Probes `base_ts + offset` for each offset in *probe_offsets* and
    returns the first market whose slug resolves, annotated with
    bucket_ts and fetched_at. Raises RuntimeError when no probed slug
    exists (the last FileNotFoundError is included in the message).
    """
    session = make_session()
    base_ts = current_bucket_ts(INTERVAL_SEC, mode=ts_mode)
    fetched_at = int(time.time())  # wall-clock time of this capture
    last_err: Optional[Exception] = None
    for offset in probe_offsets:
        bucket_ts = base_ts + offset
        slug = build_slug(slug_prefix, bucket_ts)
        try:
            snapshot = fetch_updown_by_slug(session, slug, decimals=decimals, top_n=top_n)
        except FileNotFoundError as err:
            last_err = err
            continue
        snapshot["bucket_ts"] = bucket_ts
        snapshot["fetched_at"] = fetched_at
        return snapshot
    raise RuntimeError(f"Could not find market for current bucket. last_err={last_err}")
# ----------------------------
# SQLite (3 columns)
# ----------------------------
# Schema for snapshot storage: one row per (bucket_ts, fetched_at) pair.
# Re-formatted from the original one-token-per-line dump (and its stray
# trailing line-continuation backslash) into readable SQL; the schema
# itself is unchanged.
DDL = """
CREATE TABLE IF NOT EXISTS pm_snapshots (
    bucket_ts  INTEGER NOT NULL,  -- bucket start time (unix seconds)
    fetched_at INTEGER NOT NULL,  -- snapshot capture time (unix seconds)
    raw_json   TEXT    NOT NULL,  -- full snapshot payload, JSON-encoded
    PRIMARY KEY (bucket_ts, fetched_at)
);
CREATE INDEX IF NOT EXISTS idx_pm_snapshots_fetched_at ON pm_snapshots(fetched_at);
"""
def init_db(db_path: str = "polymarket.db") -> None:
    """Ensure the pm_snapshots table and its index exist in *db_path*.

    Opens a short-lived connection, enables WAL journal mode, runs the
    idempotent DDL script, and always closes the connection.
    """
    connection = sqlite3.connect(db_path)
    try:
        connection.execute("PRAGMA journal_mode=WAL;")
        connection.executescript(DDL)
        connection.commit()
    finally:
        connection.close()
def save_snapshot(bucket_ts: int, fetched_at: int, raw: Dict[str, Any], db_path: str = "polymarket.db") -> None:
    """Persist one snapshot row into pm_snapshots.

    Duplicate (bucket_ts, fetched_at) pairs are silently skipped via
    INSERT OR IGNORE; the payload is stored as JSON text.
    """
    payload = json.dumps(raw, ensure_ascii=False)
    connection = sqlite3.connect(db_path)
    try:
        connection.execute("PRAGMA journal_mode=WAL;")
        connection.execute(
            "INSERT OR IGNORE INTO pm_snapshots(bucket_ts, fetched_at, raw_json) "
            "VALUES (?, ?, ?)",
            (int(bucket_ts), int(fetched_at), payload),
        )
        connection.commit()
    finally:
        connection.close()
if __name__ == "__main__":
    # Entry point: initialize the SQLite store, then poll the current SOL
    # up/down 15-minute market in an endless loop, persisting one snapshot
    # per iteration.
    # NOTE(review): there is no time.sleep() in this loop, so it re-polls
    # the API back-to-back as fast as requests complete — confirm this is
    # intended and not an accidental hammer on the endpoints.
    init_db("polymarket.db")
    while True:
        data = fetch_current_market(
            slug_prefix="sol-updown-15m",
            decimals=0,
            top_n=10,
            ts_mode="floor",  # switch to 'ceil' if the next bucket should be targeted
            probe_offsets=(0, -900, 900),  # try the current bucket first, then previous/next
        )
        save_snapshot(data["bucket_ts"], data["fetched_at"], data, "polymarket.db")
        print("saved:", data["bucket_ts"], data["fetched_at"])

238
test1.py
View File

@@ -1,32 +1,246 @@
import requests
import json
import time
import sqlite3
import requests
from typing import Any, Dict, Optional, Tuple
from concurrent.futures import ThreadPoolExecutor, as_completed
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
GAMMA = "https://gamma-api.polymarket.com"
CLOB = "https://clob.polymarket.com"
INTERVAL_SEC = 15 * 60 # 900
def make_session() -> requests.Session:
s = requests.Session()
s.headers.update({"User-Agent": "Mozilla/5.0"})
retry = Retry(
total=5,
backoff_factor=0.4,
status_forcelist=(429, 500, 502, 503, 504),
allowed_methods=("GET",),
raise_on_status=False,
)
adapter = HTTPAdapter(max_retries=retry, pool_connections=20, pool_maxsize=20)
s.mount("https://", adapter)
s.mount("http://", adapter)
return s
def parse_jsonish_list(v):
if v is None:
return []
if isinstance(v, list):
return v
if isinstance(v, str):
return json.loads(v)
s = v.strip()
if s.startswith("[") and s.endswith("]"):
return json.loads(s)
return [x.strip() for x in s.split(",") if x.strip()]
return []
def get_tokens_from_slug(slug: str):
r = requests.get(f"{GAMMA}/markets/slug/{slug}", timeout=20)
def get_market_by_slug(session: requests.Session, slug: str) -> Dict[str, Any]:
r = session.get(f"{GAMMA}/markets/slug/{slug}", timeout=20)
if r.status_code == 404:
raise FileNotFoundError(slug)
r.raise_for_status()
m = r.json()
return r.json()
outcomes = parse_jsonish_list(m.get("outcomes"))
token_ids = parse_jsonish_list(m.get("clobTokenIds"))
mapping = dict(zip(outcomes, token_ids))
def get_price(session: requests.Session, token_id: str, side: str) -> Optional[float]:
r = session.get(f"{CLOB}/price", params={"token_id": token_id, "side": side}, timeout=20)
r.raise_for_status()
p = r.json().get("price")
return float(p) if p is not None else None
def get_book(session: requests.Session, token_id: str) -> Dict[str, Any]:
r = session.get(f"{CLOB}/book", params={"token_id": token_id}, timeout=20)
r.raise_for_status()
return r.json()
def compute_mid(bid: Optional[float], ask: Optional[float]) -> Tuple[Optional[float], Optional[float]]:
if bid is None and ask is None:
return None, None
if bid is None:
return ask, None
if ask is None:
return bid, None
return (bid + ask) / 2.0, (ask - bid)
def trim_book(book: Dict[str, Any], top_n: int = 10) -> Dict[str, Any]:
bids = book.get("bids") or []
asks = book.get("asks") or []
bids = sorted(bids, key=lambda x: float(x["price"]), reverse=True)[:top_n]
asks = sorted(asks, key=lambda x: float(x["price"]))[:top_n]
return {"bids": bids, "asks": asks}
def fetch_token_bundle(session: requests.Session, token_id: str, top_n: int = 10) -> Dict[str, Any]:
with ThreadPoolExecutor(max_workers=3) as ex:
futs = {
ex.submit(get_price, session, token_id, "buy"): "bid",
ex.submit(get_price, session, token_id, "sell"): "ask",
ex.submit(get_book, session, token_id): "book",
}
out: Dict[str, Any] = {"token_id": token_id}
for fut in as_completed(futs):
out[futs[fut]] = fut.result()
mid, spread = compute_mid(out.get("bid"), out.get("ask"))
out["mid"] = mid
out["spread"] = spread
out["book"] = trim_book(out.get("book") or {}, top_n=top_n)
return out
def current_bucket_ts(interval_sec: int = INTERVAL_SEC, mode: str = "floor") -> int:
now = int(time.time())
if mode == "ceil":
return ((now + interval_sec - 1) // interval_sec) * interval_sec
return (now // interval_sec) * interval_sec
def build_slug(prefix: str, bucket_ts: int) -> str:
return f"{prefix}-{bucket_ts}"
def fetch_updown_by_slug(session: requests.Session, slug: str, decimals: int = 0, top_n: int = 10) -> Dict[str, Any]:
m = get_market_by_slug(session, slug)
outcomes = [str(x) for x in parse_jsonish_list(m.get("outcomes"))]
token_ids = [str(x) for x in parse_jsonish_list(m.get("clobTokenIds"))]
if len(token_ids) < 2:
raise RuntimeError(f"clobTokenIds missing/too short: {token_ids}")
token_map = dict(zip(outcomes, token_ids))
up_id = token_map.get("Up") or token_map.get("Yes") or token_ids[0]
dn_id = token_map.get("Down") or token_map.get("No") or token_ids[1]
with ThreadPoolExecutor(max_workers=2) as ex:
up = ex.submit(fetch_token_bundle, session, up_id, top_n).result()
dn = ex.submit(fetch_token_bundle, session, dn_id, top_n).result()
if up["mid"] is None or dn["mid"] is None:
return {
"error": "missing mid price",
"slug": slug,
"market_id": m.get("id"),
"question": m.get("question"),
"up": up,
"down": dn,
}
s = float(up["mid"]) + float(dn["mid"])
up_pct = round(float(up["mid"]) / s * 100, decimals)
dn_pct = round(float(dn["mid"]) / s * 100, decimals)
return {
"question": m.get("question"),
"slug": m.get("slug"),
"market_id": m.get("id"),
"outcomes": outcomes,
"token_map": mapping, # {"Up": "...", "Down": "..."}
"up_pct": up_pct,
"down_pct": dn_pct,
"up": up,
"down": dn,
}
def fetch_current_market(
slug_prefix: str = "eth-updown-15m",
decimals: int = 0,
top_n: int = 10,
ts_mode: str = "floor",
probe_offsets: Tuple[int, ...] = (0, -INTERVAL_SEC, INTERVAL_SEC),
) -> Dict[str, Any]:
session = make_session()
base_ts = current_bucket_ts(INTERVAL_SEC, mode=ts_mode)
fetched_at = int(time.time()) # 当前抓取时间戳
last_err: Optional[Exception] = None
for off in probe_offsets:
bucket_ts = base_ts + off
slug = build_slug(slug_prefix, bucket_ts)
try:
data = fetch_updown_by_slug(session, slug, decimals=decimals, top_n=top_n)
data["bucket_ts"] = bucket_ts
data["fetched_at"] = fetched_at
return data
except FileNotFoundError as e:
last_err = e
continue
raise RuntimeError(f"Could not find market for current bucket. last_err={last_err}")
# ----------------------------
# SQLite (3 columns)
# ----------------------------
DDL = """
CREATE TABLE IF NOT EXISTS pm_snapshots
(
bucket_ts
INTEGER
NOT
NULL,
fetched_at
INTEGER
NOT
NULL,
raw_json
TEXT
NOT
NULL,
PRIMARY
KEY
(
bucket_ts,
fetched_at
)
);
CREATE INDEX IF NOT EXISTS idx_pm_snapshots_fetched_at ON pm_snapshots(fetched_at); \
"""
def init_db(db_path: str = "polymarket.db") -> None:
conn = sqlite3.connect(db_path)
try:
conn.execute("PRAGMA journal_mode=WAL;")
conn.executescript(DDL)
conn.commit()
finally:
conn.close()
def save_snapshot(bucket_ts: int, fetched_at: int, raw: Dict[str, Any], db_path: str = "polymarket.db") -> None:
conn = sqlite3.connect(db_path)
try:
conn.execute("PRAGMA journal_mode=WAL;")
conn.execute(
"INSERT OR IGNORE INTO pm_snapshots(bucket_ts, fetched_at, raw_json) VALUES (?, ?, ?)",
(int(bucket_ts), int(fetched_at), json.dumps(raw, ensure_ascii=False)),
)
conn.commit()
finally:
conn.close()
if __name__ == "__main__":
info = get_tokens_from_slug("eth-updown-15m-1766912400")
print(info)
init_db("polymarket.db")
while True:
data = fetch_current_market(
slug_prefix="eth-updown-15m",
decimals=0,
top_n=10,
ts_mode="floor", # 如果你发现应该取下一档,改成 'ceil'
probe_offsets=(0, -900, 900), # 先试当前,再试前一档/后一档
)
save_snapshot(data["bucket_ts"], data["fetched_at"], data, "polymarket.db")
print("saved:", data["bucket_ts"], data["fetched_at"])

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More