@@ -149,17 +149,38 @@ class OllamaDocGenerator:
" content " : prompt
}
] ,
" tools " : [
{
' type ' : ' function ' ,
' function ' : {
' name ' : ' analyze_file ' ,
' description ' : ' This tool allows you to examine other Python files in the project and it returns the same structured information you received for the current file (imports, classes, functions, constants, etc.). ' ,
' parameters ' : {
' type ' : ' object ' ,
' properties ' : {
' path ' : {
' type ' : ' string ' ,
' description ' : ' Relative (from the root of the current project) path to the file ' ,
} ,
} ,
' required ' : [ ' path ' ] ,
} ,
} ,
} ,
] ,
" stream " : False ,
" options " : {
" temperature " : 0.1 ,
" top_p " : 0.9 ,
}
} ,
timeout = 600 # 10 minute timeout for thinking models
timeout = 60 * 60 * 24
)
if response . status_code == 200 :
result = response . json ( )
tool_calls = result . get ( ' tool_calls ' , [ ] )
print ( result , tool_calls )
message = result . get ( ' message ' , { } )
content = message . get ( ' content ' , ' ' )
# Parse and display thinking process
@@ -171,11 +192,10 @@ class OllamaDocGenerator:
return final_answer if final_answer else content
else :
print ( f " Error generating documentation: { response . status_code } " )
print ( f " Error generating documentation: { response . status_code } { response . text } ")
return None
else :
print ( " None thinking model chosen " )
# Standard generation for regular models
response = self . session . post (
f " { self . ollama_url } /api/generate " ,
json = {
@@ -188,7 +208,7 @@ class OllamaDocGenerator:
" top_p " : 0.9 ,
}
} ,
timeout = 300 # 5 minute timeout
timeout = 60 * 60 * 12
)
if response . status_code == 200 :
@@ -205,7 +225,6 @@ class OllamaDocGenerator:
""" Parse thinking model response to extract thinking process and final answer. """
import re
# Try different thinking tag patterns
thinking_patterns = [
r ' <thinking>(.*?)</thinking> ' ,
r ' <think>(.*?)</think> ' ,
@@ -220,13 +239,10 @@ class OllamaDocGenerator:
match = re . search ( pattern , content , re . DOTALL )
if match :
thinking_content = match . group ( 1 ) . strip ( )
# Remove thinking section from final answer
final_answer = re . sub ( pattern , ' ' , content , flags = re . DOTALL ) . strip ( )
break
# If no thinking tags found, check for other patterns like "I need to think about..."
if not thinking_content :
# Look for thinking indicators at the start
thinking_indicators = [
r ' ^(Let me think about.*?(?= \ n \ n| \ n#| \ nI \' ll)) ' ,
r ' ^(I need to analyze.*?(?= \ n \ n| \ n#| \ nI \' ll)) ' ,
@@ -243,15 +259,10 @@ class OllamaDocGenerator:
return thinking_content , final_answer
def create_documentation_prompt ( self , file_analysis : Dict , project_context : Dict ) - > str :
""" Create a comprehensive prompt for documentation generation. """
def get_code_documentation ( self , file_analysis : Dict , project_context : Dict ) :
file_path = file_analysis [ ' file_path ' ]
relative_path = os . path . relpath ( file_path , project_context [ ' root_path ' ] )
prompt = f """ You are a technical documentation expert. Generate comprehensive markdown documentation for the Python file: ` { relative_path } `
## PROJECT CONTEXT:
return f """ ## PROJECT CONTEXT:
- * * Project Root * * : { project_context [ ' root_path ' ] }
- * * Total Python Files * * : { len ( project_context [ ' all_files ' ] ) }
- * * External Dependencies * * : { ' , ' . join ( project_context [ ' external_dependencies ' ] ) if project_context [ ' external_dependencies ' ] else ' None detected ' }
@@ -283,10 +294,27 @@ class OllamaDocGenerator:
{ file_analysis [ ' content ' ] }
` ` `
## DOCUMENTATION REQUIREMENTS:
"""
def create_documentation_prompt ( self , file_analysis : Dict , project_context : Dict ) - > str :
file_path = file_analysis [ ' file_path ' ]
relative_path = os . path . relpath ( file_path , project_context [ ' root_path ' ] )
prompt = f """ You are a technical documentation expert. Generate comprehensive markdown documentation for the Python file: ` { relative_path } `
## AVAILABLE TOOLS
Generate a complete markdown documentation file that includes :
You have access to a ` analyze_file ` tool that allows you to examine other Python files in the project . This tool returns the same structured information you received for the current file ( imports , classes , functions , constants , etc . ) .
* * When to use this tool : * *
- When you need to understand how other files interact with the current file
- To verify import relationships and dependencies
- To provide more accurate cross - references in your documentation
- To understand the broader context of classes or functions used in the current file
- TRY TO USE IT NOW !
{ self . get_code_documentation ( file_analysis , project_context ) }
## DOCUMENTATION REQUIREMENTS:
* * Generate a complete markdown documentation file that includes : * *
1. * * File Header * * : Title ( ' Documentation ' + file ) , purpose , and brief description
2. * * Overview * * : What this module / file does and its role in the project
3. * * Dependencies * * : External and internal dependencies with explanations
@@ -322,7 +350,7 @@ Generate the complete markdown documentation now:"""
for file in files :
if file . endswith ( ' .py ' ) :
lines . append ( f " { subindent } - { file } " )
return ' \n ' . join ( lines [ : 20 ] ) # Limit to first 20 lines
return ' \n ' . join ( lines [ : 20 ] )
def format_classes ( self , classes : List [ Dict ] ) - > str :
""" Format class information for the prompt. """
@@ -356,7 +384,6 @@ Generate the complete markdown documentation now:"""
return ' \n ' . join ( lines )
def format_constants ( self , constants : List [ Dict ] ) - > str :
""" Format constant information for the prompt. """
if not constants :
return " None "
@@ -366,7 +393,6 @@ Generate the complete markdown documentation now:"""
return ' \n ' . join ( lines )
def format_related_files ( self , file_analysis : Dict , project_context : Dict ) - > str :
""" Format related files information. """
current_imports = set ( file_analysis [ ' imports ' ] )
related_files = [ ]
@@ -374,8 +400,6 @@ Generate the complete markdown documentation now:"""
if other_file != file_analysis [ ' file_path ' ] :
rel_path = os . path . relpath ( other_file , project_context [ ' root_path ' ] )
module_name = rel_path . replace ( ' / ' , ' . ' ) . replace ( ' \\ ' , ' . ' ) . replace ( ' .py ' , ' ' )
# Check if this file imports the other or vice versa
if any ( imp . startswith ( module_name ) for imp in current_imports ) :
related_files . append ( f " - ` { rel_path } ` (imported by this file) " )
@@ -390,52 +414,52 @@ class ProjectAnalyzer:
self . external_dependencies = set ( )
def scan_project ( self , exclude_dirs : List [ str ] = None ) - > Dict :
""" Scan the project and collect all Python files. """
if exclude_dirs is None : exclude_dirs = [ ' .git ' , ' __pycache__ ' , ' .pytest_cache ' , ' venv ' , ' env ' , ' .venv ' , ' node_modules ' ]
else : exclude_dirs = exclude_dirs + [ ' .git ' , ' __pycache__ ' , ' .pytest_cache ' , ' venv ' , ' env ' , ' .venv ' , ' node_modules ' ]
self . python_files = [ ]
file_structure = [ ]
for root , dirs , files in os . walk ( self . root_path ) :
# Remove excluded directories
dirs [ : ] = [ d for d in dirs if d not in exclude_dirs ]
files [ : ] = [ f for f in files if f not in exclude_dirs ]
file_structure . append ( ( root , dirs , files ) )
if ( os . path . isdir ( self . root_path ) ) :
if exclude_dirs is None : exclude_dirs = [ ' .git ' , ' __pycache__ ' , ' .pytest_cache ' , ' venv ' , ' env ' , ' .venv ' , ' node_modules ' ]
else : exclude_dirs = exclude_dirs + [ ' .git ' , ' __pycache__ ' , ' .pytest_cache ' , ' venv ' , ' env ' , ' .venv ' , ' node_modules ' ]
for file in files :
if file . endswith ( ' .py ' ) :
self . python_files . append ( os . path . join ( root , file ) )
# Analyze dependencies
self . analyze_dependencies ( )
return {
' root_path ' : str ( self . root_path ) ,
' all_files ' : self . python_files ,
' file_structure ' : file_structure ,
' external_dependencies ' : list ( self . external_dependencies )
}
self . python_files = [ ]
file_structure = [ ]
for root , dirs , files in os . walk ( self . root_path ) :
dirs [ : ] = [ d for d in dirs if d not in exclude_dirs ]
files [ : ] = [ f for f in files if f not in exclude_dirs ]
file_structure . append ( ( root , dirs , files ) )
for file in files :
if file . endswith ( ' .py ' ) :
self . python_files . append ( os . path . join ( root , file ) )
self . analyze_dependencies ( )
return {
' root_path ' : str ( self . root_path ) ,
' all_files ' : self . python_files ,
' file_structure ' : file_structure ,
' external_dependencies ' : list ( self . external_dependencies )
}
else :
self . python_files = [ os . path . basename ( self . root_path ) ]
self . root_path = os . path . dirname ( self . root_path )
self . analyze_dependencies ( )
return {
' root_path ' : str ( self . root_path ) ,
' all_files ' : self . python_files ,
' file_structure ' : [ ] ,
' external_dependencies ' : list ( self . external_dependencies )
}
def analyze_dependencies ( self ) :
""" Analyze external dependencies across all Python files. """
analyzer = PythonAnalyzer ( )
for file_path in self . python_files :
analysis = analyzer . analyze_file ( file_path )
if analysis :
for imp in analysis [ ' imports ' ] :
# Check if it's an external dependency (not local)
if not self . is_local_import ( imp ) :
self . external_dependencies . add ( imp . split ( ' . ' ) [ 0 ] )
def is_local_import ( self , import_name : str ) - > bool :
""" Check if an import is local to the project. """
# Simple heuristic: if the import starts with a relative path or matches a local file
if import_name . startswith ( ' . ' ) :
return True
# Check if it matches any of our Python files
if import_name . startswith ( ' . ' ) : return True
for py_file in self . python_files :
rel_path = os . path . relpath ( py_file , self . root_path )
module_path = rel_path . replace ( ' / ' , ' . ' ) . replace ( ' \\ ' , ' . ' ) . replace ( ' .py ' , ' ' )
@@ -445,15 +469,12 @@ class ProjectAnalyzer:
return False
class DocumentationManager :
""" Manages the documentation generation process. """
def __init__ ( self , output_dir : str = " ./pydocs " ) :
self . output_dir = Path ( output_dir )
self . output_dir . mkdir ( exist_ok = True )
os . makedirs ( self . output_dir , exist_ok = True )
def generate_index ( self , project_context : Dict , generated_docs : List [ str ] ) :
""" Generate an index.md file linking to all documentation. """
index_content = f """ # Project Documentation
Auto - generated documentation for Python project : ` { os . path . basename ( project_context [ ' root_path ' ] ) } `
@@ -475,7 +496,7 @@ Auto-generated documentation for Python project: `{os.path.basename(project_cont
for doc_file in sorted ( generated_docs ) :
rel_path = os . path . relpath ( doc_file . replace ( ' .md ' , ' .py ' ) , ' . ' )
doc_name = os . path . basename ( doc_file )
index_content + = f " - [` { rel_path } `](./ { doc_name } ) \n "
index_content + = f " - [` { rel_path } `](./ { rel_path } ) \n "
index_content + = f """
## Project Structure
@@ -492,7 +513,7 @@ Auto-generated documentation for Python project: `{os.path.basename(project_cont
with open ( self . output_dir / " index.md " , ' w ' , encoding = ' utf-8 ' ) as f :
f . write ( index_content )
def generate_tree_structure ( self , project_context : Dict , max_depth : int = 3 ) - > str :
def generate_tree_structure ( self , project_context : Dict , max_depth : int = 5 ) - > str :
""" Generate a tree-like structure of the project. """
lines = [ ]
root_path = project_context [ ' root_path ' ]
@@ -501,9 +522,9 @@ Auto-generated documentation for Python project: `{os.path.basename(project_cont
rel_path = os . path . relpath ( py_file , root_path )
depth = rel_path . count ( os . sep )
if depth < = max_depth :
indent = " " * depth
indent = ( " " * depth ) + " └──── "
filename = os . path . basename ( rel_path )
lines . append ( f " { indent } { filename } " )
lines . append ( f " { indent } [` { filename } `](./ { rel_path } ) " )
return ' \n ' . join ( lines [ : 50 ] ) # Limit output