use daggr latest version
Changed files:
- backend_deploy.py +14 -3
- backend_docs_manager.py +1 -1
- backend_parsers.py +25 -3
- backend_prompts.py +5 -1
backend_deploy.py (CHANGED)

@@ -23,7 +23,8 @@ from backend_parsers import (
     strip_tool_call_markers,
     remove_code_block,
     extract_import_statements,
-    generate_requirements_txt_with_llm
+    generate_requirements_txt_with_llm,
+    enforce_critical_versions
 )
 
 
@@ -823,6 +824,8 @@ def deploy_to_huggingface_space(
             file_path = temp_path / filename
             print(f"[Deploy] Writing {filename} ({len(content)} chars) to {file_path}")
             # Use text mode - Python handles encoding automatically
+            if filename == "requirements.txt":
+                content = enforce_critical_versions(content)
             file_path.write_text(content, encoding='utf-8')
             # Verify the write was successful
             written_size = file_path.stat().st_size

@@ -846,7 +849,7 @@ def deploy_to_huggingface_space(
         html_code = prettify_comfyui_json_for_html(code)
         (temp_path / "index.html").write_text(html_code, encoding='utf-8')
 
-    elif language in ["gradio", "streamlit"]:
+    elif language in ["gradio", "streamlit", "daggr"]:
         files = parse_multi_file_python_output(code)
 
         # Fallback: if no files parsed (missing === markers), treat entire code as app.py

@@ -862,6 +865,8 @@ def deploy_to_huggingface_space(
         for filename, content in files.items():
             file_path = temp_path / filename
             file_path.parent.mkdir(parents=True, exist_ok=True)
+            if filename == "requirements.txt":
+                content = enforce_critical_versions(content)
             file_path.write_text(content, encoding='utf-8')
 
         # Ensure requirements.txt exists - generate from imports if missing

@@ -927,6 +932,8 @@ def deploy_to_huggingface_space(
         for filename, content in files.items():
             file_path = temp_path / filename
             file_path.parent.mkdir(parents=True, exist_ok=True)
+            if filename == "requirements.txt":
+                content = enforce_critical_versions(content)
             file_path.write_text(content, encoding='utf-8')
 
         # Generate requirements.txt from imports if missing

@@ -1532,6 +1539,8 @@ Generated by [AnyCoder](https://huggingface.co/spaces/akhaliq/anycoder)"""
         # Write transformers.js files
         for filename, content in files.items():
             file_path = temp_path / filename
+            if filename == "requirements.txt":
+                content = enforce_critical_versions(content)
             file_path.write_text(content, encoding='utf-8')
 
     except Exception as e:

@@ -1587,11 +1596,13 @@ Generated by [AnyCoder](https://huggingface.co/spaces/akhaliq/anycoder)"""
         for filename, content in files.items():
             file_path = temp_path / filename
             file_path.parent.mkdir(parents=True, exist_ok=True)
+            if filename == "requirements.txt":
+                content = enforce_critical_versions(content)
             file_path.write_text(content, encoding='utf-8')
 
         # Skip requirements.txt generation for Gradio PRs (preserve existing)
         # For Streamlit, generate requirements.txt if missing
-        if language
+        if language in ["streamlit", "daggr"] and "requirements.txt" not in files:
             main_app = files.get('streamlit_app.py') or files.get('app.py', '')
             if main_app:
                 print(f"[PR] Generating requirements.txt from imports")
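Every path in deploy_to_huggingface_space that writes generated files now runs requirements.txt through enforce_critical_versions() before the write. A minimal sketch of that repeated pattern (write_space_files is a hypothetical helper name; files and temp_path mirror the variables used in the diff):

```python
from pathlib import Path

from backend_parsers import enforce_critical_versions


def write_space_files(files: dict[str, str], temp_path: Path) -> None:
    """Sketch of the per-file write loop used throughout deploy_to_huggingface_space."""
    for filename, content in files.items():
        file_path = temp_path / filename
        file_path.parent.mkdir(parents=True, exist_ok=True)
        # Pin minimum versions for critical packages before the file hits disk
        if filename == "requirements.txt":
            content = enforce_critical_versions(content)
        file_path.write_text(content, encoding='utf-8')
```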
backend_docs_manager.py (CHANGED)

@@ -444,7 +444,7 @@ When creating Gradio applications, organize your code into multiple files for pr
 
 **File Organization:**
 - `app.py` - Main application entry point (REQUIRED)
-- `requirements.txt` - Python dependencies (REQUIRED, auto-generated from imports)
+- `requirements.txt` - Python dependencies (REQUIRED, auto-generated from imports). ALWAYS use `daggr>=0.5.4` and `gradio>=6.0.2` if applicable.
 - `utils.py` - Utility functions and helpers (optional)
 - `models.py` - Model loading and inference functions (optional)
 - `config.py` - Configuration and constants (optional)
backend_parsers.py (CHANGED)

@@ -192,8 +192,8 @@ def parse_multi_file_python_output(code: str) -> Dict[str, str]:
     """Parse multi-file Python output (e.g., Gradio, Streamlit)"""
     files = {}
 
-    # Pattern to match file sections
-    pattern = r'===\s*(\S
+    # Pattern to match file sections like === filename.ext ===
+    pattern = r'===\s*(\S+\.\w+)\s*===\s*(.*?)(?=\n\s*===\s*\S+\.\w+\s*===|$)'
     matches = re.finditer(pattern, code, re.DOTALL | re.IGNORECASE)
 
     for match in matches:

@@ -204,6 +204,8 @@ def parse_multi_file_python_output(code: str) -> Dict[str, str]:
         content = re.sub(r'^```\w*\s*', '', content, flags=re.MULTILINE)
         content = re.sub(r'```\s*$', '', content, flags=re.MULTILINE)
 
+        if filename == "requirements.txt":
+            content = enforce_critical_versions(content)
         files[filename] = content
 
     return files

@@ -364,6 +366,22 @@ def parse_react_output(text: str) -> Dict[str, str]:
     return files if isinstance(files, dict) and files else {}
 
 
+def enforce_critical_versions(requirements_content: str) -> str:
+    """Ensure critical packages like daggr and gradio have minimum required versions"""
+    if 'daggr' in requirements_content:
+        # Check if version is already specified
+        if 'daggr>=' not in requirements_content and 'daggr==' not in requirements_content:
+            # Replace plain 'daggr' with pinned version, preserving comments
+            requirements_content = re.sub(r'^daggr\s*(?=[#\n]|$)', 'daggr>=0.5.4', requirements_content, flags=re.MULTILINE)
+
+    if 'gradio' in requirements_content:
+        if 'gradio>=' not in requirements_content and 'gradio==' not in requirements_content:
+            # Replace plain 'gradio' with pinned version, preserving comments
+            requirements_content = re.sub(r'^gradio\s*(?=[#\n]|$)', 'gradio>=6.0.2', requirements_content, flags=re.MULTILINE)
+
+    return requirements_content
+
+
 def generate_requirements_txt_with_llm(import_statements):
     """Generate requirements.txt content using LLM based on import statements."""
     if not import_statements:

@@ -467,6 +485,8 @@ Generate a comprehensive requirements.txt that ensures the application will work
 
     requirements_content = '\n'.join(clean_lines).strip()
 
+    requirements_content = enforce_critical_versions(requirements_content)
+
     # Ensure it ends with a newline
     if requirements_content and not requirements_content.endswith('\n'):
         requirements_content += '\n'

@@ -481,7 +501,9 @@ Generate a comprehensive requirements.txt that ensures the application will work
         'PIL': 'Pillow',
         'sklearn': 'scikit-learn',
         'skimage': 'scikit-image',
-        'bs4': 'beautifulsoup4'
+        'bs4': 'beautifulsoup4',
+        'daggr': 'daggr>=0.5.4',
+        'gradio': 'gradio>=6.0.2'
     }
 
     for stmt in import_statements:
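A quick illustration of what the new helper does to an unpinned dependency list (hypothetical input, using only the function shown in the diff above):

```python
from backend_parsers import enforce_critical_versions

# Hypothetical requirements.txt content with unpinned critical packages
sample = "gradio\ndaggr\nnumpy\n"
print(enforce_critical_versions(sample))
# gradio>=6.0.2
# daggr>=0.5.4
# numpy
```

Entries that already carry `>=` or `==` pins are left untouched, so user-specified versions survive.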
backend_prompts.py (CHANGED)

@@ -162,7 +162,7 @@ You MUST use this exact format and ALWAYS include Dockerfile, streamlit_app.py,
 [main application code]
 
 === requirements.txt ===
-[dependencies]
+[dependencies]. ALWAYS use `daggr>=0.5.4` and `gradio>=6.0.2` if applicable.
 
 === utils.py ===
 [utility functions - optional]

@@ -622,6 +622,10 @@ from daggr import GradioNode, FnNode, InferenceNode, Graph
 graph = Graph(name="My Workflow", nodes=[node1, node2])
 graph.launch()
 
+=== requirements.txt ===
+daggr>=0.5.4
+gradio>=6.0.2
+
 **🚨 CRITICAL: DO NOT Generate README.md Files**
 - NEVER generate README.md files under any circumstances
 - A template README.md is automatically provided and will be overridden by the deployment system
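To see how this prompt format flows through the updated parser, here is an illustrative round trip (hypothetical model output; assumes the updated parse_multi_file_python_output from backend_parsers, which now applies enforce_critical_versions to requirements.txt sections):

```python
from backend_parsers import parse_multi_file_python_output

# Hypothetical model response in the === filename === format required by the prompt
llm_output = """=== app.py ===
import gradio as gr

=== requirements.txt ===
gradio
daggr
"""

files = parse_multi_file_python_output(llm_output)
print(files["requirements.txt"])
# Expected (given the enforcement above): gradio>=6.0.2 and daggr>=0.5.4
```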