Commit 7725750
Parent(s): 92ea683

feat: support spaces

Remove .opencode/ from staging
test

Changed files:
- .gitignore +2 -1
- OpenCode.md +21 -0
- app.py +240 -167
- pyproject.toml +1 -0
- test_app.py +49 -0
- uv.lock +77 -0
.gitignore CHANGED
@@ -8,4 +8,5 @@ wheels/
 
 # Virtual environments
 .venv
-.ruff_cache
+.ruff_cache
+.opencode/
OpenCode.md ADDED
@@ -0,0 +1,21 @@
+# OpenCode Guide for or-tools repo
+
+## Commands
+- **Install deps:** `python -m venv .venv && . .venv/bin/activate && pip install -r requirements.txt`
+- **Lint:** `.venv/bin/ruff check app.py`
+- **Format:** `.venv/bin/ruff format app.py`
+- **Run Gradio app:** `python app.py`
+- **No tests currently found.**
+
+## Code Style & Conventions
+- **Ruff** is used for linting & formatting; comply with its default (PEP8-like) style.
+- **Imports:** Built-ins, then 3rd party, then local; one per line; no unused imports.
+- **Types:** Type annotations are encouraged for function signatures and public APIs.
+- **Names:** Use `snake_case` for functions/vars, `CamelCase` for classes, `ALL_CAPS` for constants.
+- **Formatting:** 4-space indent, 120-char lines, no trailing whitespace.
+- **Error handling:** Prefer informative exceptions & user messages. Wrap untrusted input in try/except when user-facing.
+- **Docstrings:** Use triple-double-quote for all public functions/classes. Prefer Google or NumPy style.
+- **No tests yet:** Place tests in files named `test_*.py` when adding.
+- **Dependencies:** Only add to `pyproject.toml`/`requirements.txt`.
+
+*See https://docs.astral.sh/ruff/ for further lint rules. Contribute new code in line with this guide.*
app.py CHANGED
@@ -5,78 +5,83 @@ from collections import defaultdict
-    return task.replace(
-    # Match pattern like "TaskA requires TaskB"
-    match = re.match(r
-        position[task] = model.NewIntVar(0, n_tasks - 1, f
-        include[task] = model.NewBoolVar(f
-        position[task] = model.NewIntVar(0, n_tasks - 1, f
@@ -84,59 +89,61 @@ def solve_maximum_subset(dependencies, all_tasks):
-    for line in tasks_text.strip().split(
-        for task_orig in line.split(
@@ -144,240 +151,274 @@ def generate_mermaid_gantt(task_order, original_names):
-        clean_name = re.sub(r
-        node_id = task.replace(
-            task_id = task.replace(
-                req_id = required_task.replace(
-    task_display_choices = [""] + [
-        gr.CheckboxGroup(choices=req_display_choices, value=[])
-                normalize_task_name(dep.split(
-                normalize_task_name(dep.split(
-        parts = dep.split(
-            display_tasks = [
-        dependencies, all_tasks, dep_original_names = parse_requirements(
-                deps_display = [
-        task_displays = [
-            flowchart = generate_mermaid_flowchart(
-                output +=
-                    output +=
-                    task_display = all_original_names.get(
-                flowchart = generate_mermaid_flowchart(
-            flowchart = generate_mermaid_flowchart(
@@ -401,10 +442,10 @@ with gr.Blocks(title="Task Dependency Solver", theme=gr.themes.Soft()) as demo:
@@ -413,58 +454,56 @@ with gr.Blocks(title="Task Dependency Solver", theme=gr.themes.Soft()) as demo:
-                info="Case-insensitive: 'Sleep' and 'sleep' are treated the same"
-                info="Choose the task that has requirements"
-                info="Choose what the task requires"
-                label="Select dependencies to remove:",
-                choices=[],
-                value=[]
@@ -473,20 +512,20 @@ with gr.Blocks(title="Task Dependency Solver", theme=gr.themes.Soft()) as demo:
@@ -494,126 +533,160 @@ with gr.Blocks(title="Task Dependency Solver", theme=gr.themes.Soft()) as demo:
-        task_display_choices = [""] + [
-            gr.CheckboxGroup(
-            display,
-            status,
-            "",
-            []
-            display,
-            status,
-            gr.CheckboxGroup(choices=checkbox_choices, value=[]) # remove_deps
-            gr.CheckboxGroup(choices=[], value=[]) # remove_deps
-        return
-        return
-        return
-        outputs=[task_dropdown, requirements_checkbox, remove_deps]
-        outputs=[
-        outputs=[dependencies_state, dependencies_display, remove_status, remove_deps]
-        outputs=[dependencies_state, dependencies_display, add_status, remove_deps]
-        outputs=[
-        outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps]
-        outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps]
-        outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps]
-        outputs=[task_dropdown, requirements_checkbox, remove_deps]
 from ortools.sat.python import cp_model
 from datetime import datetime, timedelta
 
+
 def normalize_task_name(task):
     """Normalize task name to lowercase for consistent comparison."""
     return task.strip().lower()
 
+
 def display_task_name(task):
     """Format task name for display (title case)."""
+    return task.replace("_", " ").title()
+
 
 def parse_requirements(requirements):
     """Parse requirements list into dependency graph."""
     dependencies = defaultdict(list)
     all_tasks = set()
     original_names = {}  # Store original casing
+
     for req in requirements:
         if not req.strip():
             continue
+        # Match pattern like "TaskA requires TaskB" (now supports spaces)
+        match = re.match(r"(.+?)\s+requires\s+(.+)", req.strip(), re.IGNORECASE)
         if match:
             task_orig, required_orig = match.groups()
             task = normalize_task_name(task_orig)
             required = normalize_task_name(required_orig)
+
             all_tasks.add(task)
             all_tasks.add(required)
             dependencies[task].append(required)
+
             # Store original names for display
             original_names[task] = task_orig
             original_names[required] = required_orig
+
     return dependencies, list(all_tasks), original_names
 
+
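The widened `(.+?)\s+requires\s+(.+)` pattern is what allows multi-word task names. A minimal usage sketch, mirroring the expectations encoded in the added test_app.py (assumes app.py is importable from the working directory):

```python
# Sketch: "X requires Y" lines with multi-word task names now parse correctly.
from app import parse_requirements

deps, all_tasks, original_names = parse_requirements(
    ["Make sandwich requires bread", "Dinner requires Shopping"]
)
print(deps["make sandwich"])     # ['bread']  -- keys are normalized to lowercase
print(original_names["dinner"])  # 'Dinner'   -- original casing kept for display
print(sorted(all_tasks))         # ['bread', 'dinner', 'make sandwich', 'shopping']
```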
 def solve_all_tasks(dependencies, all_tasks):
     """Try to schedule all tasks (works if DAG)."""
     model = cp_model.CpModel()
     n_tasks = len(all_tasks)
+
     position = {}
     for task in all_tasks:
+        position[task] = model.NewIntVar(0, n_tasks - 1, f"pos_{task}")
+
     model.AddAllDifferent(list(position.values()))
+
     for task, required_list in dependencies.items():
         for required_task in required_list:
             if required_task in position:
                 model.Add(position[required_task] < position[task])
+
     solver = cp_model.CpSolver()
     status = solver.Solve(model)
+
     if status in [cp_model.OPTIMAL, cp_model.FEASIBLE]:
         task_order = [(solver.Value(position[task]), task) for task in all_tasks]
         task_order.sort()
         return [task for _, task in task_order]
+
     return None
 
+
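solve_all_tasks encodes the ordering problem in CP-SAT: one integer position per task, all positions distinct, and each prerequisite forced to an earlier position than its dependent. When the dependency graph is a DAG this yields a valid order; a sketch mirroring test_solve_all_tasks_simple:

```python
# Sketch: order a small acyclic dependency set via the CP-SAT position model.
from app import parse_requirements, solve_all_tasks

deps, all_tasks, _ = parse_requirements(["a requires b", "b requires c"])
print(solve_all_tasks(deps, all_tasks))  # ['c', 'b', 'a']; returns None when no order exists
```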
 def solve_maximum_subset(dependencies, all_tasks):
     """Find maximum subset of tasks that can be executed (handles cycles)."""
     model = cp_model.CpModel()
     n_tasks = len(all_tasks)
+
     include = {}
     position = {}
+
     for task in all_tasks:
+        include[task] = model.NewBoolVar(f"include_{task}")
+        position[task] = model.NewIntVar(0, n_tasks - 1, f"pos_{task}")
+
     for task, required_list in dependencies.items():
         for required_task in required_list:
             if required_task in include:
@@ -84,59 +89,61 @@ def solve_maximum_subset(dependencies, all_tasks):
                 model.Add(position[required_task] < position[task]).OnlyEnforceIf(
                     [include[task], include[required_task]]
                 )
+
     for i, task1 in enumerate(all_tasks):
         for j, task2 in enumerate(all_tasks):
             if i < j:
                 model.Add(position[task1] != position[task2]).OnlyEnforceIf(
                     [include[task1], include[task2]]
                 )
+
     for task in all_tasks:
         model.Add(position[task] == n_tasks).OnlyEnforceIf(include[task].Not())
+
     model.Maximize(sum(include.values()))
+
     solver = cp_model.CpSolver()
     status = solver.Solve(model)
+
     if status in [cp_model.OPTIMAL, cp_model.FEASIBLE]:
         included_tasks = []
         for task in all_tasks:
             if solver.Value(include[task]) == 1:
                 pos = solver.Value(position[task])
                 included_tasks.append((pos, task))
+
         included_tasks.sort()
         return [task for _, task in included_tasks]
+
     return []
 
+
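When a cycle makes a full ordering impossible, the app falls back to this model, which maximizes how many tasks receive a valid position (a Boolean include flag per task, with ordering constraints enforced only between included tasks). A sketch using the same cycle as the added test:

```python
# Sketch: a, b and c form a cycle; the added test expects only "d" to remain schedulable.
from app import parse_requirements, solve_maximum_subset

deps, all_tasks, _ = parse_requirements(
    ["a requires b", "b requires c", "c requires a", "d requires c"]
)
print(solve_maximum_subset(deps, all_tasks))  # test_app.py asserts this equals ['d']
```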
 def parse_tasks(tasks_text):
     """Parse tasks from text input."""
     if not tasks_text.strip():
         return [], {}
+
     # Split by lines or commas and clean up
     tasks = []
     original_names = {}
+
+    for line in tasks_text.strip().split("\n"):
+        for task_orig in line.split(","):
             task_orig = task_orig.strip()
             if task_orig:
                 task = normalize_task_name(task_orig)
                 if task not in [normalize_task_name(t) for t in tasks]:
                     tasks.append(task)
                     original_names[task] = task_orig
+
     return sorted(tasks), original_names
 
+
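Task input may mix newlines and commas; duplicates are dropped case-insensitively, the result is sorted, and the first-seen original casing is kept for display. A short sketch with the same input as test_parse_tasks_and_original_names:

```python
# Sketch: parse a mixed newline/comma task list with trailing whitespace.
from app import parse_tasks

tasks, originals = parse_tasks("Wash dishes\nTake out trash, Make sandwich ")
print(tasks)                     # ['make sandwich', 'take out trash', 'wash dishes']
print(originals["wash dishes"])  # 'Wash dishes'
```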
 def generate_mermaid_gantt(task_order, original_names):
     """Generate Mermaid Gantt chart syntax."""
     if not task_order:
         return "gantt\n title Task Execution Timeline\n dateFormat YYYY-MM-DD\n section No Tasks\n No tasks to display : 2024-01-01, 1d"
+
     # Start from today
     start_date = datetime.now()
 
@@ -144,240 +151,274 @@ def generate_mermaid_gantt(task_order, original_names):
     gantt += " title Task Execution Timeline\n"
     gantt += " dateFormat YYYY-MM-DD\n"
     gantt += " section Tasks\n"
+
     current_date = start_date
     for i, task in enumerate(task_order):
         display_name = original_names.get(task, display_task_name(task))
         # Clean task name for Mermaid (remove special chars)
+        clean_name = re.sub(r"[^a-zA-Z0-9\s]", "", display_name)
+
         task_start = current_date.strftime("%Y-%m-%d")
         current_date += timedelta(days=1)
+
         gantt += f" {clean_name} : {task_start}, 1d\n"
+
     gantt += "```"
     return gantt
 
+
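The chart places one task per day starting from today, so it visualizes ordering rather than real durations. A sketch of how the solver output feeds into it (the dates in the output depend on the current day):

```python
# Sketch: turn a solved task order into the Mermaid Gantt markdown the app displays.
from app import generate_mermaid_gantt

order = ["cook", "eat dinner", "do homework"]
names = {"do homework": "Do homework"}  # tasks missing here fall back to title case
print(generate_mermaid_gantt(order, names))
# Emits a mermaid "gantt" block with one "<task> : <YYYY-MM-DD>, 1d" row per task.
```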
 def generate_mermaid_flowchart(dependencies, all_tasks, original_names):
     """Generate Mermaid flowchart syntax."""
     if not all_tasks:
         return "flowchart TD\n A[No tasks to display]"
+
     flowchart = "```mermaid\nflowchart TD\n"
+
     # Create nodes for all tasks
     for task in all_tasks:
         display_name = original_names.get(task, display_task_name(task))
         # Create a clean ID for the node
+        node_id = task.replace(" ", "_").replace("-", "_")
         flowchart += f" {node_id}[{display_name}]\n"
+
     # Add dependencies as edges
     if dependencies:
         for task, required_list in dependencies.items():
+            task_id = task.replace(" ", "_").replace("-", "_")
             for required_task in required_list:
+                req_id = required_task.replace(" ", "_").replace("-", "_")
                 flowchart += f" {req_id} --> {task_id}\n"
     else:
         # If no dependencies, just show all tasks
         flowchart += " style A fill:#e1f5fe\n"
+
     flowchart += "```"
     return flowchart
 
+
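Node IDs are derived from the normalized task name with spaces and hyphens replaced by underscores, so multi-word tasks yield valid Mermaid identifiers while the node label keeps the original casing. A sketch:

```python
# Sketch: flowchart edges run from prerequisite to dependent task.
from app import generate_mermaid_flowchart, parse_requirements

deps, all_tasks, names = parse_requirements(["Make sandwich requires bread"])
print(generate_mermaid_flowchart(deps, all_tasks, names))
# Declares nodes make_sandwich[Make sandwich] and bread[bread], plus "bread --> make_sandwich".
```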
| 199 |
def update_dropdowns(tasks_text):
|
| 200 |
"""Update dropdown choices when tasks change."""
|
| 201 |
tasks, original_names = parse_tasks(tasks_text)
|
| 202 |
+
|
| 203 |
# Create display choices (original casing)
|
| 204 |
+
task_display_choices = [""] + [
|
| 205 |
+
original_names.get(task, display_task_name(task)) for task in tasks
|
| 206 |
+
]
|
| 207 |
+
req_display_choices = [
|
| 208 |
+
original_names.get(task, display_task_name(task)) for task in tasks
|
| 209 |
+
]
|
| 210 |
+
|
| 211 |
return (
|
| 212 |
gr.Dropdown(choices=task_display_choices, value=""),
|
| 213 |
+
gr.CheckboxGroup(choices=req_display_choices, value=[]),
|
| 214 |
)
|
| 215 |
|
| 216 |
+
|
| 217 |
def add_dependencies(task_display, required_display_list, current_deps):
|
| 218 |
"""Add multiple dependencies for a single task."""
|
| 219 |
if not task_display:
|
| 220 |
return current_deps, "⚠️ Please select a task"
|
| 221 |
+
|
| 222 |
if not required_display_list:
|
| 223 |
return current_deps, "⚠️ Please select at least one requirement"
|
| 224 |
+
|
| 225 |
# Normalize for comparison
|
| 226 |
task = normalize_task_name(task_display)
|
| 227 |
required_tasks = [normalize_task_name(req) for req in required_display_list]
|
| 228 |
+
|
| 229 |
# Remove the selected task from requirements if somehow selected
|
| 230 |
required_tasks = [req for req in required_tasks if req != task]
|
| 231 |
+
|
| 232 |
if not required_tasks:
|
| 233 |
return current_deps, "⚠️ A task cannot require itself"
|
| 234 |
+
|
| 235 |
# Create new dependencies (using original display names)
|
| 236 |
new_deps = []
|
| 237 |
existing_deps = []
|
| 238 |
+
|
| 239 |
for req_display in required_display_list:
|
| 240 |
if normalize_task_name(req_display) in required_tasks:
|
| 241 |
new_dep = f"{task_display} requires {req_display}"
|
| 242 |
+
|
| 243 |
# Check if this dependency already exists (case-insensitive)
|
| 244 |
exists = any(
|
| 245 |
+
normalize_task_name(dep.split(" requires ")[0]) == task
|
| 246 |
+
and normalize_task_name(dep.split(" requires ")[1])
|
| 247 |
+
== normalize_task_name(req_display)
|
| 248 |
+
for dep in current_deps
|
| 249 |
+
if " requires " in dep
|
| 250 |
)
|
| 251 |
+
|
| 252 |
if exists:
|
| 253 |
existing_deps.append(req_display)
|
| 254 |
else:
|
| 255 |
new_deps.append(new_dep)
|
| 256 |
+
|
| 257 |
if not new_deps and existing_deps:
|
| 258 |
return current_deps, f"⚠️ All selected dependencies already exist"
|
| 259 |
+
|
| 260 |
updated_deps = current_deps + new_deps
|
| 261 |
+
|
| 262 |
# Create status message
|
| 263 |
status_parts = []
|
| 264 |
if new_deps:
|
| 265 |
status_parts.append(f"✅ Added {len(new_deps)} dependencies")
|
| 266 |
if existing_deps:
|
| 267 |
status_parts.append(f"⚠️ {len(existing_deps)} already existed")
|
| 268 |
+
|
| 269 |
status = " | ".join(status_parts)
|
| 270 |
+
|
| 271 |
return updated_deps, status
|
| 272 |
|
| 273 |
+
|
| 274 |
def remove_dependency(deps_to_remove, current_deps):
|
| 275 |
"""Remove selected dependencies."""
|
| 276 |
if not deps_to_remove:
|
| 277 |
return current_deps, "⚠️ Please select dependencies to remove"
|
| 278 |
+
|
| 279 |
updated_deps = [dep for dep in current_deps if dep not in deps_to_remove]
|
| 280 |
removed_count = len(current_deps) - len(updated_deps)
|
| 281 |
return updated_deps, f"✅ Removed {removed_count} dependencies"
|
| 282 |
|
| 283 |
+
|
| 284 |
def format_dependencies_display(dependencies_list):
|
| 285 |
"""Format dependencies for display."""
|
| 286 |
if not dependencies_list:
|
| 287 |
return "No dependencies added yet."
|
| 288 |
+
|
| 289 |
# Group by task for better readability
|
| 290 |
task_deps = defaultdict(list)
|
| 291 |
for dep in dependencies_list:
|
| 292 |
+
parts = dep.split(" requires ")
|
| 293 |
if len(parts) == 2:
|
| 294 |
task, required = parts
|
| 295 |
task_deps[task].append(required)
|
| 296 |
+
|
| 297 |
display = "**Current Dependencies:**\n"
|
| 298 |
for task in sorted(task_deps.keys(), key=str.lower):
|
| 299 |
requirements = ", ".join(sorted(task_deps[task], key=str.lower))
|
| 300 |
display += f"• **{task}** requires: {requirements}\n"
|
| 301 |
+
|
| 302 |
return display
|
| 303 |
|
| 304 |
+
|
| 305 |
def solve_dependencies(tasks_text, dependencies_list):
|
| 306 |
"""Solve the task ordering problem."""
|
| 307 |
tasks, task_original_names = parse_tasks(tasks_text)
|
| 308 |
+
|
| 309 |
if not tasks:
|
| 310 |
return "❌ Please enter some tasks!", "", "", "", ""
|
| 311 |
+
|
| 312 |
if not dependencies_list:
|
| 313 |
# No dependencies, just return tasks in alphabetical order
|
| 314 |
output = "✅ **No dependencies - alphabetical order:**\n\n"
|
| 315 |
+
display_tasks = [
|
| 316 |
+
task_original_names.get(task, display_task_name(task)) for task in tasks
|
| 317 |
+
]
|
| 318 |
for i, task_display in enumerate(display_tasks, 1):
|
| 319 |
output += f"{i}. {task_display}\n"
|
| 320 |
+
|
| 321 |
json_output = json.dumps(display_tasks, indent=2)
|
| 322 |
gantt = generate_mermaid_gantt(tasks, task_original_names)
|
| 323 |
flowchart = generate_mermaid_flowchart({}, tasks, task_original_names)
|
| 324 |
return output, "", json_output, gantt, flowchart
|
| 325 |
+
|
| 326 |
try:
|
| 327 |
+
dependencies, all_tasks, dep_original_names = parse_requirements(
|
| 328 |
+
dependencies_list
|
| 329 |
+
)
|
| 330 |
+
|
| 331 |
# Merge original names
|
| 332 |
all_original_names = {**task_original_names, **dep_original_names}
|
| 333 |
+
|
| 334 |
# Add any tasks that aren't in dependencies
|
| 335 |
for task in tasks:
|
| 336 |
if task not in all_tasks:
|
| 337 |
all_tasks.append(task)
|
| 338 |
+
|
| 339 |
# Create dependency summary
|
| 340 |
dep_summary = "**Parsed Dependencies:**\n"
|
| 341 |
if dependencies:
|
| 342 |
for task, deps in dependencies.items():
|
| 343 |
task_display = all_original_names.get(task, display_task_name(task))
|
| 344 |
+
deps_display = [
|
| 345 |
+
all_original_names.get(dep, display_task_name(dep)) for dep in deps
|
| 346 |
+
]
|
| 347 |
dep_summary += f"• {task_display} requires: {', '.join(deps_display)}\n"
|
| 348 |
+
|
| 349 |
dep_summary += f"\n**Total tasks:** {len(all_tasks)}\n"
|
| 350 |
+
task_displays = [
|
| 351 |
+
all_original_names.get(task, display_task_name(task))
|
| 352 |
+
for task in sorted(all_tasks)
|
| 353 |
+
]
|
| 354 |
dep_summary += f"**Tasks:** {', '.join(task_displays)}"
|
| 355 |
+
|
| 356 |
# Try to solve
|
| 357 |
result = solve_all_tasks(dependencies, all_tasks)
|
| 358 |
+
|
| 359 |
if result:
|
| 360 |
# All tasks can be executed
|
| 361 |
output = "✅ **All tasks can be executed!**\n\n"
|
| 362 |
output += "**Optimal execution order:**\n"
|
| 363 |
+
|
| 364 |
result_display = []
|
| 365 |
for i, task in enumerate(result, 1):
|
| 366 |
task_display = all_original_names.get(task, display_task_name(task))
|
| 367 |
output += f"{i}. {task_display}\n"
|
| 368 |
result_display.append(task_display)
|
| 369 |
+
|
| 370 |
json_output = json.dumps(result_display, indent=2)
|
| 371 |
gantt = generate_mermaid_gantt(result, all_original_names)
|
| 372 |
+
flowchart = generate_mermaid_flowchart(
|
| 373 |
+
dependencies, all_tasks, all_original_names
|
| 374 |
+
)
|
| 375 |
+
|
| 376 |
else:
|
| 377 |
# Try maximum subset
|
| 378 |
result = solve_maximum_subset(dependencies, all_tasks)
|
| 379 |
+
|
| 380 |
if result:
|
| 381 |
excluded_tasks = set(all_tasks) - set(result)
|
| 382 |
output = f"⚠️ **Circular dependencies detected!**\n\n"
|
| 383 |
+
output += (
|
| 384 |
+
f"**Maximum executable tasks ({len(result)}/{len(all_tasks)}):**\n"
|
| 385 |
+
)
|
| 386 |
+
|
| 387 |
result_display = []
|
| 388 |
for i, task in enumerate(result, 1):
|
| 389 |
task_display = all_original_names.get(task, display_task_name(task))
|
| 390 |
output += f"{i}. {task_display}\n"
|
| 391 |
result_display.append(task_display)
|
| 392 |
+
|
| 393 |
if excluded_tasks:
|
| 394 |
+
output += (
|
| 395 |
+
f"\n**❌ Excluded tasks (due to circular dependencies):**\n"
|
| 396 |
+
)
|
| 397 |
for task in sorted(excluded_tasks):
|
| 398 |
+
task_display = all_original_names.get(
|
| 399 |
+
task, display_task_name(task)
|
| 400 |
+
)
|
| 401 |
output += f"• {task_display}\n"
|
| 402 |
+
|
| 403 |
json_output = json.dumps(result_display, indent=2)
|
| 404 |
gantt = generate_mermaid_gantt(result, all_original_names)
|
| 405 |
+
flowchart = generate_mermaid_flowchart(
|
| 406 |
+
dependencies, all_tasks, all_original_names
|
| 407 |
+
)
|
| 408 |
else:
|
| 409 |
output = "❌ **No solution found!** There might be complex circular dependencies."
|
| 410 |
json_output = "[]"
|
| 411 |
gantt = generate_mermaid_gantt([], all_original_names)
|
| 412 |
+
flowchart = generate_mermaid_flowchart(
|
| 413 |
+
dependencies, all_tasks, all_original_names
|
| 414 |
+
)
|
| 415 |
+
|
| 416 |
return output, dep_summary, json_output, gantt, flowchart
|
| 417 |
+
|
| 418 |
except Exception as e:
|
| 419 |
return f"❌ **Error:** {str(e)}", "", "", "", ""
|
| 420 |
|
| 421 |
+
|
| 422 |
# Example tasks
|
| 423 |
example_tasks = """Sleep
|
| 424 |
Dinner
|
|
|
|
| 442 |
3. Click "Add Dependencies" to add them all
|
| 443 |
4. Click "Solve Dependencies" to get the optimal execution order with visualizations
|
| 444 |
""")
|
| 445 |
+
|
| 446 |
# State to store current dependencies
|
| 447 |
dependencies_state = gr.State([])
|
| 448 |
+
|
| 449 |
with gr.Row():
|
| 450 |
with gr.Column(scale=1):
|
| 451 |
gr.Markdown("### 📝 Step 1: Define Your Tasks")
|
|
|
|
| 454 |
placeholder="Enter your tasks like:\nSleep\nDinner\nPrep\nShopping",
|
| 455 |
lines=6,
|
| 456 |
value=example_tasks,
|
| 457 |
+
info="Case-insensitive: 'Sleep' and 'sleep' are treated the same",
|
| 458 |
)
|
| 459 |
+
|
| 460 |
gr.Markdown("### 🔗 Step 2: Build Dependencies")
|
| 461 |
task_dropdown = gr.Dropdown(
|
| 462 |
label="Select Task",
|
| 463 |
choices=[],
|
| 464 |
value="",
|
| 465 |
+
info="Choose the task that has requirements",
|
| 466 |
)
|
| 467 |
+
|
| 468 |
requirements_checkbox = gr.CheckboxGroup(
|
| 469 |
label="Select Requirements (can select multiple)",
|
| 470 |
choices=[],
|
| 471 |
value=[],
|
| 472 |
+
info="Choose what the task requires",
|
| 473 |
)
|
| 474 |
+
|
| 475 |
with gr.Row():
|
| 476 |
add_btn = gr.Button("➕ Add Dependencies", variant="primary", scale=2)
|
| 477 |
clear_all_btn = gr.Button("🗑️ Clear All", variant="secondary", scale=1)
|
| 478 |
+
|
| 479 |
add_status = gr.Markdown("")
|
| 480 |
+
|
| 481 |
with gr.Column(scale=1):
|
| 482 |
gr.Markdown("### 📋 Current Dependencies")
|
| 483 |
dependencies_display = gr.Markdown("No dependencies added yet.")
|
| 484 |
+
|
| 485 |
with gr.Accordion("🗑️ Remove Dependencies", open=False):
|
| 486 |
remove_deps = gr.CheckboxGroup(
|
| 487 |
+
label="Select dependencies to remove:", choices=[], value=[]
|
|
|
|
|
|
|
| 488 |
)
|
| 489 |
+
|
| 490 |
remove_btn = gr.Button("Remove Selected", variant="secondary")
|
| 491 |
remove_status = gr.Markdown("")
|
| 492 |
+
|
| 493 |
gr.Markdown("### 🚀 Step 3: Solve")
|
| 494 |
solve_btn = gr.Button("🎯 Solve Dependencies", variant="primary", size="lg")
|
| 495 |
+
|
| 496 |
with gr.Row():
|
| 497 |
with gr.Column(scale=1):
|
| 498 |
result_output = gr.Markdown()
|
| 499 |
+
|
| 500 |
with gr.Column(scale=1):
|
| 501 |
with gr.Accordion("📊 Analysis", open=False):
|
| 502 |
dep_analysis = gr.Markdown()
|
| 503 |
+
|
| 504 |
with gr.Accordion("💾 JSON Output", open=False):
|
| 505 |
json_output = gr.Code(language="json")
|
| 506 |
+
|
| 507 |
# Mermaid visualizations
|
| 508 |
gr.Markdown("### 📊 Visualizations")
|
| 509 |
with gr.Row():
|
|
|
|
| 512 |
gantt_chart = gr.Markdown(
|
| 513 |
value="gantt\n title Task Execution Timeline\n dateFormat YYYY-MM-DD\n section Tasks\n Click Solve to see timeline : 2024-01-01, 1d"
|
| 514 |
)
|
| 515 |
+
|
| 516 |
with gr.Column(scale=1):
|
| 517 |
gr.Markdown("#### 🔀 Dependency Flowchart")
|
| 518 |
dependency_flowchart = gr.Markdown(
|
| 519 |
value="flowchart TD\n A[Click Solve to see dependencies]"
|
| 520 |
)
|
| 521 |
+
|
| 522 |
# Examples section
|
| 523 |
with gr.Accordion("📚 Quick Examples", open=False):
|
| 524 |
with gr.Row():
|
| 525 |
morning_btn = gr.Button("🌅 Morning Routine")
|
| 526 |
cooking_btn = gr.Button("🍳 Cooking Tasks")
|
| 527 |
project_btn = gr.Button("💼 Project Tasks")
|
| 528 |
+
|
| 529 |
gr.Markdown("""
|
| 530 |
**Click the buttons above to load example task sets!**
|
| 531 |
|
|
|
|
| 533 |
|
| 534 |
**Note:** All matching is case-insensitive. You can mix cases freely!
|
| 535 |
""")
|
| 536 |
+
|
| 537 |
# Event handlers
|
| 538 |
def update_ui_after_tasks_change(tasks_text, current_deps):
|
| 539 |
"""Update dropdowns and dependency checkboxes when tasks change."""
|
| 540 |
tasks, original_names = parse_tasks(tasks_text)
|
| 541 |
+
|
| 542 |
# Create display choices (original casing)
|
| 543 |
+
task_display_choices = [""] + [
|
| 544 |
+
original_names.get(task, display_task_name(task)) for task in tasks
|
| 545 |
+
]
|
| 546 |
+
req_display_choices = [
|
| 547 |
+
original_names.get(task, display_task_name(task)) for task in tasks
|
| 548 |
+
]
|
| 549 |
+
|
| 550 |
# Update dependency removal checkboxes
|
| 551 |
checkbox_choices = current_deps if current_deps else []
|
| 552 |
+
|
| 553 |
return (
|
| 554 |
gr.Dropdown(choices=task_display_choices, value=""), # task_dropdown
|
| 555 |
+
gr.CheckboxGroup(
|
| 556 |
+
choices=req_display_choices, value=[]
|
| 557 |
+
), # requirements_checkbox
|
| 558 |
+
gr.CheckboxGroup(choices=checkbox_choices, value=[]), # remove_deps
|
| 559 |
)
|
| 560 |
+
|
| 561 |
def handle_add_dependencies(task, required_tasks, current_deps):
|
| 562 |
"""Handle adding dependencies and update UI."""
|
| 563 |
updated_deps, status = add_dependencies(task, required_tasks, current_deps)
|
| 564 |
display = format_dependencies_display(updated_deps)
|
| 565 |
checkbox_choices = updated_deps if updated_deps else []
|
| 566 |
+
|
| 567 |
return (
|
| 568 |
updated_deps, # dependencies_state
|
| 569 |
+
display, # dependencies_display
|
| 570 |
+
status, # add_status
|
| 571 |
gr.CheckboxGroup(choices=checkbox_choices, value=[]), # remove_deps
|
| 572 |
+
"", # task_dropdown (clear)
|
| 573 |
+
[], # requirements_checkbox (clear)
|
| 574 |
)
|
| 575 |
+
|
| 576 |
def handle_remove_dependencies(deps_to_remove, current_deps):
|
| 577 |
"""Handle removing dependencies and update UI."""
|
| 578 |
updated_deps, status = remove_dependency(deps_to_remove, current_deps)
|
| 579 |
display = format_dependencies_display(updated_deps)
|
| 580 |
checkbox_choices = updated_deps if updated_deps else []
|
| 581 |
+
|
| 582 |
return (
|
| 583 |
updated_deps, # dependencies_state
|
| 584 |
+
display, # dependencies_display
|
| 585 |
+
status, # remove_status
|
| 586 |
+
gr.CheckboxGroup(choices=checkbox_choices, value=[]), # remove_deps
|
| 587 |
)
|
| 588 |
+
|
| 589 |
def clear_all_dependencies():
|
| 590 |
"""Clear all dependencies."""
|
| 591 |
return (
|
| 592 |
[], # dependencies_state
|
| 593 |
"No dependencies added yet.", # dependencies_display
|
| 594 |
"✅ All dependencies cleared", # add_status
|
| 595 |
+
gr.CheckboxGroup(choices=[], value=[]), # remove_deps
|
| 596 |
)
|
| 597 |
+
|
| 598 |
# Example loaders
|
| 599 |
def load_morning_example():
|
| 600 |
tasks = "Wake_up\nShower\nBrush_teeth\nGet_dressed\nEat_breakfast\nMake_coffee\nLeave_house"
|
| 601 |
+
return (
|
| 602 |
+
tasks,
|
| 603 |
+
[],
|
| 604 |
+
"No dependencies added yet.",
|
| 605 |
+
gr.CheckboxGroup(choices=[], value=[]),
|
| 606 |
+
)
|
| 607 |
+
|
| 608 |
def load_cooking_example():
|
| 609 |
tasks = "Shop_ingredients\nPrep_vegetables\nPreheat_oven\nCook_main_dish\nMake_sauce\nPlate_food\nServe_dinner"
|
| 610 |
+
return (
|
| 611 |
+
tasks,
|
| 612 |
+
[],
|
| 613 |
+
"No dependencies added yet.",
|
| 614 |
+
gr.CheckboxGroup(choices=[], value=[]),
|
| 615 |
+
)
|
| 616 |
+
|
| 617 |
def load_project_example():
|
| 618 |
tasks = "Research\nPlan\nDesign\nDevelop\nTest\nDeploy\nDocument\nReview"
|
| 619 |
+
return (
|
| 620 |
+
tasks,
|
| 621 |
+
[],
|
| 622 |
+
"No dependencies added yet.",
|
| 623 |
+
gr.CheckboxGroup(choices=[], value=[]),
|
| 624 |
+
)
|
| 625 |
+
|
| 626 |
# Wire up events
|
| 627 |
tasks_input.change(
|
| 628 |
fn=update_ui_after_tasks_change,
|
| 629 |
inputs=[tasks_input, dependencies_state],
|
| 630 |
+
outputs=[task_dropdown, requirements_checkbox, remove_deps],
|
| 631 |
)
|
| 632 |
+
|
| 633 |
add_btn.click(
|
| 634 |
fn=handle_add_dependencies,
|
| 635 |
inputs=[task_dropdown, requirements_checkbox, dependencies_state],
|
| 636 |
+
outputs=[
|
| 637 |
+
dependencies_state,
|
| 638 |
+
dependencies_display,
|
| 639 |
+
add_status,
|
| 640 |
+
remove_deps,
|
| 641 |
+
task_dropdown,
|
| 642 |
+
requirements_checkbox,
|
| 643 |
+
],
|
| 644 |
)
|
| 645 |
+
|
| 646 |
remove_btn.click(
|
| 647 |
fn=handle_remove_dependencies,
|
| 648 |
inputs=[remove_deps, dependencies_state],
|
| 649 |
+
outputs=[dependencies_state, dependencies_display, remove_status, remove_deps],
|
| 650 |
)
|
| 651 |
+
|
| 652 |
clear_all_btn.click(
|
| 653 |
fn=clear_all_dependencies,
|
| 654 |
+
outputs=[dependencies_state, dependencies_display, add_status, remove_deps],
|
| 655 |
)
|
| 656 |
+
|
| 657 |
solve_btn.click(
|
| 658 |
fn=solve_dependencies,
|
| 659 |
inputs=[tasks_input, dependencies_state],
|
| 660 |
+
outputs=[
|
| 661 |
+
result_output,
|
| 662 |
+
dep_analysis,
|
| 663 |
+
json_output,
|
| 664 |
+
gantt_chart,
|
| 665 |
+
dependency_flowchart,
|
| 666 |
+
],
|
| 667 |
)
|
| 668 |
+
|
| 669 |
# Example buttons
|
| 670 |
morning_btn.click(
|
| 671 |
fn=load_morning_example,
|
| 672 |
+
outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps],
|
| 673 |
)
|
| 674 |
+
|
| 675 |
cooking_btn.click(
|
| 676 |
fn=load_cooking_example,
|
| 677 |
+
outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps],
|
| 678 |
)
|
| 679 |
+
|
| 680 |
project_btn.click(
|
| 681 |
fn=load_project_example,
|
| 682 |
+
outputs=[tasks_input, dependencies_state, dependencies_display, remove_deps],
|
| 683 |
)
|
| 684 |
+
|
| 685 |
# Initialize on load
|
| 686 |
demo.load(
|
| 687 |
fn=update_ui_after_tasks_change,
|
| 688 |
inputs=[tasks_input, dependencies_state],
|
| 689 |
+
outputs=[task_dropdown, requirements_checkbox, remove_deps],
|
| 690 |
)
|
| 691 |
|
| 692 |
if __name__ == "__main__":
|
pyproject.toml CHANGED
@@ -7,4 +7,5 @@ requires-python = ">=3.10"
 dependencies = [
     "gradio>=5.34.2",
     "ortools>=9.14.6206",
+    "pytest>=8.4.1",
 ]
test_app.py ADDED
@@ -0,0 +1,49 @@
+import pytest
+from app import parse_requirements, parse_tasks, solve_all_tasks, solve_maximum_subset
+
+def test_parse_requirements_with_spaces():
+    reqs = [
+        "take out trash requires bag",
+        "Dinner requires Shopping",
+        "Make sandwich requires bread",
+    ]
+    dependencies, all_tasks, original_names = parse_requirements(reqs)
+    assert set(all_tasks) == {
+        "take out trash",
+        "bag",
+        "dinner",
+        "shopping",
+        "make sandwich",
+        "bread",
+    }
+    assert dependencies["take out trash"] == ["bag"]
+    assert dependencies["dinner"] == ["shopping"]
+    assert dependencies["make sandwich"] == ["bread"]
+    assert original_names["take out trash"] == "take out trash"
+
+def test_parse_tasks_and_original_names():
+    txt = "Wash dishes\nTake out trash, Make sandwich "
+    tasks, originals = parse_tasks(txt)
+    assert set(tasks) == {"wash dishes", "take out trash", "make sandwich"}
+    assert originals["take out trash"] == "Take out trash"
+    assert originals["make sandwich"] == "Make sandwich"
+    assert originals["wash dishes"] == "Wash dishes"
+
+def test_solve_all_tasks_simple():
+    reqs = ["a requires b", "b requires c"]
+    dependencies, all_tasks, _ = parse_requirements(reqs)
+    result = solve_all_tasks(dependencies, all_tasks)
+    assert result == ["c", "b", "a"]
+
+def test_solve_all_tasks_with_spaces():
+    reqs = ["Do homework requires eat dinner", "eat dinner requires cook"]
+    dependencies, all_tasks, _ = parse_requirements(reqs)
+    result = solve_all_tasks(dependencies, all_tasks)
+    assert result == ["cook", "eat dinner", "do homework"]
+
+def test_solve_maximum_subset_cycle():
+    reqs = ["a requires b", "b requires c", "c requires a", "d requires c"]
+    dependencies, all_tasks, _ = parse_requirements(reqs)
+    result = solve_maximum_subset(dependencies, all_tasks)
+    # Only d can run because a<->b<->c form a cycle
+    assert result == ["d"]
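With pytest declared in pyproject.toml, the suite runs with a plain `pytest` (or `uv run pytest`) from the repository root; a programmatic invocation sketch, if that is ever needed:

```python
# Sketch: run the new test module programmatically (equivalent to `pytest -q test_app.py`).
import pytest

raise SystemExit(pytest.main(["-q", "test_app.py"]))
```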
uv.lock
CHANGED
|
@@ -390,6 +390,15 @@ wheels = [
|
|
| 390 |
{ url = "https://files.pythonhosted.org/packages/59/56/25ca7b848164b7d93dbd5fc97dd7751700c93e324fe854afbeb562ee2f98/immutabledict-4.2.1-py3-none-any.whl", hash = "sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373", size = 4700, upload-time = "2024-11-17T13:25:19.52Z" },
|
| 391 |
]
|
| 392 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 393 |
[[package]]
|
| 394 |
name = "jinja2"
|
| 395 |
version = "3.1.6"
|
|
@@ -616,12 +625,14 @@ source = { virtual = "." }
|
|
| 616 |
dependencies = [
|
| 617 |
{ name = "gradio" },
|
| 618 |
{ name = "ortools" },
|
|
|
|
| 619 |
]
|
| 620 |
|
| 621 |
[package.metadata]
|
| 622 |
requires-dist = [
|
| 623 |
{ name = "gradio", specifier = ">=5.34.2" },
|
| 624 |
{ name = "ortools", specifier = ">=9.14.6206" },
|
|
|
|
| 625 |
]
|
| 626 |
|
| 627 |
[[package]]
|
|
@@ -873,6 +884,15 @@ wheels = [
|
|
| 873 |
{ url = "https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751, upload-time = "2025-04-12T17:49:59.628Z" },
|
| 874 |
]
|
| 875 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 876 |
[[package]]
|
| 877 |
name = "protobuf"
|
| 878 |
version = "6.31.1"
|
|
@@ -1007,6 +1027,24 @@ wheels = [
|
|
| 1007 |
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
| 1008 |
]
|
| 1009 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1010 |
[[package]]
|
| 1011 |
name = "python-dateutil"
|
| 1012 |
version = "2.9.0.post0"
|
|
@@ -1195,6 +1233,45 @@ wheels = [
|
|
| 1195 |
{ url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" },
|
| 1196 |
]
|
| 1197 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1198 |
[[package]]
|
| 1199 |
name = "tomlkit"
|
| 1200 |
version = "0.13.3"
|
|
|
|
| 390 |
{ url = "https://files.pythonhosted.org/packages/59/56/25ca7b848164b7d93dbd5fc97dd7751700c93e324fe854afbeb562ee2f98/immutabledict-4.2.1-py3-none-any.whl", hash = "sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373", size = 4700, upload-time = "2024-11-17T13:25:19.52Z" },
|
| 391 |
]
|
| 392 |
|
| 393 |
+
[[package]]
|
| 394 |
+
name = "iniconfig"
|
| 395 |
+
version = "2.1.0"
|
| 396 |
+
source = { registry = "https://pypi.org/simple" }
|
| 397 |
+
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
|
| 398 |
+
wheels = [
|
| 399 |
+
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
|
| 400 |
+
]
|
| 401 |
+
|
| 402 |
[[package]]
|
| 403 |
name = "jinja2"
|
| 404 |
version = "3.1.6"
|
@@ -616,12 +625,14 @@ source = { virtual = "." }
| 625 |
dependencies = [
|
| 626 |
{ name = "gradio" },
|
| 627 |
{ name = "ortools" },
|
| 628 |
+
{ name = "pytest" },
|
| 629 |
]
|
| 630 |
|
| 631 |
[package.metadata]
|
| 632 |
requires-dist = [
|
| 633 |
{ name = "gradio", specifier = ">=5.34.2" },
|
| 634 |
{ name = "ortools", specifier = ">=9.14.6206" },
|
| 635 |
+
{ name = "pytest", specifier = ">=8.4.1" },
|
| 636 |
]
|
| 637 |
|
| 638 |
[[package]]
|
@@ -873,6 +884,15 @@ wheels = [
| 884 |
{ url = "https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751, upload-time = "2025-04-12T17:49:59.628Z" },
|
| 885 |
]
|
| 886 |
|
| 887 |
+
[[package]]
|
| 888 |
+
name = "pluggy"
|
| 889 |
+
version = "1.6.0"
|
| 890 |
+
source = { registry = "https://pypi.org/simple" }
|
| 891 |
+
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
| 892 |
+
wheels = [
|
| 893 |
+
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
| 894 |
+
]
|
| 895 |
+
|
| 896 |
[[package]]
|
| 897 |
name = "protobuf"
|
| 898 |
version = "6.31.1"
|
@@ -1007,6 +1027,24 @@ wheels = [
| 1027 |
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
| 1028 |
]
|
| 1029 |
|
| 1030 |
+
[[package]]
|
| 1031 |
+
name = "pytest"
|
| 1032 |
+
version = "8.4.1"
|
| 1033 |
+
source = { registry = "https://pypi.org/simple" }
|
| 1034 |
+
dependencies = [
|
| 1035 |
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
| 1036 |
+
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
| 1037 |
+
{ name = "iniconfig" },
|
| 1038 |
+
{ name = "packaging" },
|
| 1039 |
+
{ name = "pluggy" },
|
| 1040 |
+
{ name = "pygments" },
|
| 1041 |
+
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
| 1042 |
+
]
|
| 1043 |
+
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
|
| 1044 |
+
wheels = [
|
| 1045 |
+
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
|
| 1046 |
+
]
|
| 1047 |
+
|
| 1048 |
[[package]]
|
| 1049 |
name = "python-dateutil"
|
| 1050 |
version = "2.9.0.post0"
|
@@ -1195,6 +1233,45 @@ wheels = [
| 1233 |
{ url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" },
|
| 1234 |
]
|
| 1235 |
|
| 1236 |
+
[[package]]
|
| 1237 |
+
name = "tomli"
|
| 1238 |
+
version = "2.2.1"
|
| 1239 |
+
source = { registry = "https://pypi.org/simple" }
|
| 1240 |
+
sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" }
|
| 1241 |
+
wheels = [
|
| 1242 |
+
{ url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" },
|
| 1243 |
+
{ url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" },
|
| 1244 |
+
{ url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" },
|
| 1245 |
+
{ url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" },
|
| 1246 |
+
{ url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" },
|
| 1247 |
+
{ url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" },
|
| 1248 |
+
{ url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" },
|
| 1249 |
+
{ url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" },
|
| 1250 |
+
{ url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" },
|
| 1251 |
+
{ url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" },
|
| 1252 |
+
{ url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" },
|
| 1253 |
+
{ url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" },
|
| 1254 |
+
{ url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" },
|
| 1255 |
+
{ url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" },
|
| 1256 |
+
{ url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" },
|
| 1257 |
+
{ url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" },
|
| 1258 |
+
{ url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" },
|
| 1259 |
+
{ url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" },
|
| 1260 |
+
{ url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" },
|
| 1261 |
+
{ url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" },
|
| 1262 |
+
{ url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" },
|
| 1263 |
+
{ url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" },
|
| 1264 |
+
{ url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" },
|
| 1265 |
+
{ url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" },
|
| 1266 |
+
{ url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" },
|
| 1267 |
+
{ url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" },
|
| 1268 |
+
{ url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" },
|
| 1269 |
+
{ url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" },
|
| 1270 |
+
{ url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" },
|
| 1271 |
+
{ url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" },
|
| 1272 |
+
{ url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" },
|
| 1273 |
+
]
|
| 1274 |
+
|
| 1275 |
[[package]]
|
| 1276 |
name = "tomlkit"
|
| 1277 |
version = "0.13.3"
|
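Of the packages added above, only pytest is a direct project dependency; iniconfig, pluggy and tomli are pulled in transitively as pytest's own requirements (tomli only on Python < 3.11, per its marker). The matching pyproject.toml change is presumably a single `pytest>=8.4.1` entry in the project's dependency list; that exact spelling is an assumption, since the pyproject.toml hunk is not shown in this part of the diff.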