-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path
_combined_status.py
More file actions
133 lines (115 loc) · 4.2 KB
/
_combined_status.py
File metadata and controls
133 lines (115 loc) · 4.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
"""Combined status checker for all Ramanujan search processes."""
import json
from pathlib import Path
from collections import Counter
SEP = '=' * 70   # heavy horizontal rule used for the report header/footer
DASH = '-' * 50  # lighter rule separating the report's sections
def check_log(logfile, label):
    """Summarize one JSONL discovery log and return its parsed entries.

    Prints a human-readable report for *logfile* under the heading *label*:
    entry/unique-CF counts, a type histogram, the most frequent target
    constants among "interesting" matches, and the five newest entries.

    Returns the list of parsed entry dicts, or [] when the file does not
    exist yet.
    """
    lf = Path(logfile)
    if not lf.exists():
        print(" %s: no log file yet" % label)
        return []
    # Parse line-by-line, skipping blanks; tolerate a corrupt/truncated
    # line (e.g. a write in progress) instead of aborting the whole report.
    data = []
    for line in lf.read_text().splitlines():
        line = line.strip()
        if not line:
            continue
        try:
            data.append(json.loads(line))
        except json.JSONDecodeError:
            continue
    # Deduplicate by the (a, b) coefficient pair; the last entry wins.
    seen = {}
    for d in data:
        if 'a' not in d or 'b' not in d:
            continue
        key = (tuple(d['a']), tuple(d['b']))
        seen[key] = d
    types = Counter(d.get('type', '?') for d in data)
    print(" %s: %d entries, %d unique CFs" % (label, len(data), len(seen)))
    print(" Types: %s" % dict(types))
    # Keep only non-trivial matches: drop bare integers and expressions
    # complex enough (many '*' / '**(' factors) to usually be noise.
    interesting = []
    for d in seen.values():
        m = d.get('match', '')  # guard: not every entry carries a match
        try:
            int(m)
            continue  # a plain integer match is trivial
        except (ValueError, TypeError):
            pass
        if '**(' in m and m.count('/') > 3:
            continue
        if m.count('*') > 4:
            continue
        interesting.append(d)
    # Group interesting matches by target constant: the text after the
    # first '*', or the whole expression when there is no coefficient.
    targets = Counter()
    for d in interesting:
        m = d['match']
        if '*' in m:
            targets[m.split('*', 1)[-1]] += 1
        else:
            targets[m] += 1
    if targets:
        print(" Targets: %s" % dict(targets.most_common(10)))
    # Show the 5 newest *complete* entries only — partial entries without
    # a/b/match would otherwise raise KeyError below.
    complete = [d for d in data if 'a' in d and 'b' in d and 'match' in d]
    recent = sorted(complete, key=lambda x: x.get('timestamp', ''))[-5:]
    if recent:
        print(" Latest:")
        for d in recent:
            vd = d.get('verified_digits', '?')
            print(" a=%-22s b=%-12s -> %-25s (vd=%s)" % (
                str(d['a']), str(d['b']), d['match'][:25], vd))
    return data
def main():
    """Print the combined status report.

    Sections: the main search state (ramanujan_state.json), a per-log
    summary for each discovery JSONL file, and a cross-log summary of the
    best-verified and potentially novel continued fractions.
    """
    print(SEP)
    print(" COMBINED STATUS REPORT")
    print(SEP)
    # --- Main search state -------------------------------------------------
    sf = Path('ramanujan_state.json')
    if sf.exists():
        s = json.loads(sf.read_text())
        print("\n Main Search State:")
        print(" Cycle: %d | T: %.3f | Discoveries: %d" % (
            s['cycle'], s['temperature'], s['discoveries']))
        print(" Timestamp: %s" % s['timestamp'])
        scores = s.get('best_scores', [])
        if scores:
            print(" Recent top scores: %s" % [round(x, 1) for x in scores[-5:]])
        ldc = s.get('last_discovery_cycle', 0)
        stale = s['cycle'] - ldc
        print(" Stale cycles: %d (last discovery at cycle %d)" % (stale, ldc))
    else:
        print("\n No main search state file.")
    # --- Per-log summaries -------------------------------------------------
    print("\n" + DASH)
    all_data = []
    for logfile, label in [
        ('ramanujan_discoveries.jsonl', 'Main (evolve)'),
        ('zeta3_discoveries.jsonl', 'Zeta(3) CMF'),
        ('catalan_discoveries.jsonl', 'Catalan CMF'),
    ]:
        all_data.extend(check_log(logfile, label))
    # --- Cross-log summary -------------------------------------------------
    print("\n" + DASH)
    if all_data:
        def _vd(entry):
            # verified_digits may be missing or a placeholder such as '?';
            # coerce anything non-numeric to 0 so '>' never raises TypeError.
            v = entry.get('verified_digits', 0)
            return v if isinstance(v, (int, float)) else 0
        # Deduplicate across logs by (a, b), keeping the best-verified entry.
        all_unique = {}
        for d in all_data:
            if 'a' not in d or 'b' not in d:
                continue
            key = (tuple(d['a']), tuple(d['b']))
            if key not in all_unique or _vd(d) > _vd(all_unique[key]):
                all_unique[key] = d
        # Best CFs by verified digits (>50 digits only).
        verified = [(k, v) for k, v in all_unique.items() if _vd(v) > 50]
        verified.sort(key=lambda kv: _vd(kv[1]), reverse=True)
        print(" Top verified CFs (>50 digits):")
        for (a, b), d in verified[:10]:
            print(" a=%-22s b=%-12s -> %-25s (%dd verified)" % (
                str(list(a)), str(list(b)), d.get('match', '?')[:25], int(_vd(d))))
        # Highlight matches mentioning neither known pi/phi forms nor any
        # small digit (a crude "looks novel" heuristic).
        novel = [d for d in all_unique.values()
                 if not any(x in d.get('match', '')
                            for x in ['4/pi', 'S^', 'phi', '1', '2', '3', '4', '5'])]
        if novel:
            print("\n NOVEL (non-pi/phi) discoveries:")
            for d in novel:
                print(" a=%-22s b=%-12s -> %s" % (
                    str(d['a']), str(d['b']), d.get('match', '')))
    else:
        print(" No discoveries across any log.")
    print("\n" + SEP)
# Script entry point: emit the combined report when run directly.
if __name__ == '__main__':
    main()