-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path13_llm.py
More file actions
63 lines (50 loc) · 1.91 KB
/
13_llm.py
File metadata and controls
63 lines (50 loc) · 1.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
#!/usr/bin/env python3
"""
Step 13: LLM Processing (Thin Orchestrator)
This step orchestrates LLM processing for GNN models.
Architectural Role:
This is a "thin orchestrator" - a minimal script that delegates core functionality
to the corresponding module (src/llm/). It handles argument parsing, logging
setup, and calls the actual processing functions from the llm module.
Pipeline Flow:
main.py → 13_llm.py (this script) → llm/ (modular implementation)
How to run:
python src/13_llm.py --target-dir input/gnn_files --output-dir output --verbose
python src/main.py # (runs as part of the pipeline)
Expected outputs:
- LLM processing results in the specified output directory
- Comprehensive LLM reports and summaries
- Actionable error messages if dependencies or paths are missing
- Clear logging of all resolved arguments and paths
If you encounter errors:
- Check that LLM dependencies are installed
- Check that src/llm/ contains LLM modules
- Check that the output directory is writable
- Verify LLM configuration and requirements
"""
import sys
from pathlib import Path
# Add src to path for imports
sys.path.insert(0, str(Path(__file__).parent))
from utils.pipeline_template import create_standardized_pipeline_script
# Import module function
# Prefer the real implementation from src/llm/; if that package cannot be
# imported, install a no-op recovery stub so the pipeline step still runs.
try:
    from llm import process_llm
except ImportError:
    def process_llm(target_dir, output_dir, logger=None, **kwargs) -> bool:
        """Recovery LLM processing when module unavailable.

        Logs a warning and reports success so the surrounding pipeline can
        continue without the optional LLM dependency.
        """
        import logging

        log = logging.getLogger(__name__) if logger is None else logger
        log.warning("LLM module not available - using recovery")
        return True
# Build the standardized entry point for this step: the factory wires
# argument parsing, logging setup, and path resolution around process_llm
# (or its recovery fallback) under the step name "13_llm.py".
run_script = create_standardized_pipeline_script(
    "13_llm.py",
    process_llm,
    "LLM processing for GNN analysis",
)
def main() -> int:
    """Execute the standardized 13_llm step and return its exit code."""
    exit_code = run_script()
    return exit_code
if __name__ == "__main__":
raise SystemExit(main())