Comprehensive developer toolkit providing reusable skills for Java/Spring Boot, TypeScript/NestJS/React/Next.js, Python, PHP, AWS CloudFormation, AI/RAG, DevOps, and more.
89
89%
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Risky
Do not use without reviewing
#!/usr/bin/env python3
"""Tests for specs-task-tdd-red-phase.py."""
import importlib.util
import os
import subprocess
import sys
import tempfile
import textwrap
from pathlib import Path
from unittest import mock
# Locate the hook under test relative to this test file: the hook script lives
# one directory above the tests/ directory containing this file.
hooks_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
red_phase_path = os.path.join(hooks_dir, "specs-task-tdd-red-phase.py")
# The hook's filename contains dashes, so it cannot be imported with a normal
# `import` statement; load it explicitly through importlib instead.
spec = importlib.util.spec_from_file_location("specs_task_tdd_red_phase", red_phase_path)
task_tdd_red_phase = importlib.util.module_from_spec(spec)
# Register the module before executing it so that lookups by name
# (e.g. mock.patch targets) resolve against this same module object.
sys.modules["specs_task_tdd_red_phase"] = task_tdd_red_phase
spec.loader.exec_module(task_tdd_red_phase)
def write_task_file(directory: str, content: str) -> str:
    """Write *content* to ``TASK-010.md`` inside *directory* and return its path.

    The content is dedented and stripped, then terminated with a single
    trailing newline so task files are byte-stable regardless of how the
    triple-quoted source literal was indented.
    """
    normalized = textwrap.dedent(content).strip() + "\n"
    task_path = Path(directory) / "TASK-010.md"
    task_path.write_text(normalized, encoding="utf-8")
    return str(task_path)
def build_task(directory: str, lang: str) -> str:
    """Create a canonical TASK-010 task file for *lang* and return its path."""
    content = f"""
---
id: TASK-010
title: "RED Phase Verification"
spec: docs/specs/002-tdd-command/2026-04-05--tdd-command.md
lang: {lang}
dependencies: [TASK-003]
status: pending
---
# TASK-010: RED Phase Verification
## Test Instructions
**1. Mandatory Unit Tests:**
- [ ] Verify that generated tests fail in RED phase.
**2. Mandatory Integration Tests:**
- [ ] Verify that complete RED verification flow succeeds.
"""
    return write_task_file(directory, content)
def create_generated_test(project_root: str, relative_path: str) -> None:
    """Create a placeholder generated-test file at *relative_path* under *project_root*.

    Intermediate directories are created as needed; the file's content is a
    single ``placeholder`` line (only its existence matters to the hook).
    """
    target = Path(project_root) / relative_path
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text("placeholder\n", encoding="utf-8")
def test_resolve_runner_uses_pytest_for_python_tasks():
    """A python task resolves to a pytest runner targeting the generated test file."""
    resolved = task_tdd_red_phase.resolve_runner("python", ".", "tests/test_spec_behavior.py")
    assert resolved.framework == "pytest"
    command = resolved.command
    assert command[0] in {"pytest", "python3"}
    assert "tests/test_spec_behavior.py" in command
def test_resolve_runner_prefers_maven_wrapper_when_present():
    """When an executable ./mvnw exists in the project root, spring tasks use it."""
    with tempfile.TemporaryDirectory() as project_root:
        wrapper = Path(project_root) / "mvnw"
        wrapper.write_text("#!/bin/sh\nexit 0\n", encoding="utf-8")
        wrapper.chmod(0o755)
        resolved = task_tdd_red_phase.resolve_runner(
            "spring", project_root, "src/test/java/com/example/FooTest.java"
        )
        assert resolved.framework == "maven"
        assert resolved.command[0] == "./mvnw"
        assert "-Dtest=FooTest" in resolved.command
def test_verify_red_phase_confirms_failing_tests_and_captures_artifacts():
    """A failing test run is reported as red-confirmed, with stderr kept as artifacts."""
    with tempfile.TemporaryDirectory() as project_root:
        task_file = build_task(project_root, "python")
        create_generated_test(project_root, "tests/test_verification.py")
        generated = mock.Mock(
            language="python", output_path="tests/test_verification.py", warnings=[]
        )
        failing_run = subprocess.CompletedProcess(
            args=["pytest"],
            returncode=1,
            stdout="FAIL tests/test_verification.py::test_red\n",
            stderr="AssertionError: RED expected\nTraceback (most recent call last):\n",
        )
        with mock.patch.object(task_tdd_red_phase, "load_generator_module") as load_generator:
            with mock.patch.object(
                task_tdd_red_phase.subprocess, "run", return_value=failing_run
            ):
                load_generator.return_value.generate_from_task_file.return_value = generated
                result = task_tdd_red_phase.verify_red_phase(
                    task_file, project_root=project_root
                )
        assert result.status == "red-confirmed"
        assert result.red_confirmed is True
        assert result.returncode == 1
        assert "AssertionError" in "\n".join(result.failure_artifacts)
def test_verify_red_phase_reports_unexpected_pass_warning():
    """A passing run during RED yields unexpected-pass plus an explanatory warning."""
    with tempfile.TemporaryDirectory() as project_root:
        task_file = build_task(project_root, "python")
        create_generated_test(project_root, "tests/test_verification.py")
        generated = mock.Mock(
            language="python", output_path="tests/test_verification.py", warnings=[]
        )
        passing_run = subprocess.CompletedProcess(
            args=["pytest"], returncode=0, stdout="1 passed\n", stderr=""
        )
        with mock.patch.object(task_tdd_red_phase, "load_generator_module") as load_generator:
            with mock.patch.object(
                task_tdd_red_phase.subprocess, "run", return_value=passing_run
            ):
                load_generator.return_value.generate_from_task_file.return_value = generated
                result = task_tdd_red_phase.verify_red_phase(
                    task_file, project_root=project_root
                )
        assert result.status == "unexpected-pass"
        assert result.red_confirmed is False
        assert any("passed unexpectedly" in warning for warning in result.warnings)
def test_verify_red_phase_returns_timeout_result():
    """A runner timeout maps to execution-timeout with no return code and a warning."""
    with tempfile.TemporaryDirectory() as project_root:
        task_file = build_task(project_root, "python")
        create_generated_test(project_root, "tests/test_verification.py")
        generated = mock.Mock(
            language="python", output_path="tests/test_verification.py", warnings=[]
        )
        timeout_error = subprocess.TimeoutExpired(
            cmd=["pytest"], timeout=5, output="running\n", stderr="still running\n"
        )
        with mock.patch.object(task_tdd_red_phase, "load_generator_module") as load_generator:
            with mock.patch.object(
                task_tdd_red_phase.subprocess, "run", side_effect=timeout_error
            ):
                load_generator.return_value.generate_from_task_file.return_value = generated
                result = task_tdd_red_phase.verify_red_phase(
                    task_file,
                    project_root=project_root,
                    timeout_seconds=5,
                )
        assert result.status == "execution-timeout"
        assert result.returncode is None
        assert any("timed out" in warning for warning in result.warnings)
def test_resolve_runner_raises_e2_when_framework_is_missing():
    """A missing PHPUnit binary surfaces as RedPhaseError with code E2."""
    with mock.patch.object(task_tdd_red_phase, "command_exists", return_value=False):
        try:
            task_tdd_red_phase.resolve_runner("php", ".", "tests/SpecBehaviorTest.php")
        except task_tdd_red_phase.RedPhaseError as exc:
            assert exc.code == "E2"
            assert "PHPUnit" in str(exc)
        else:
            assert False, "Expected RedPhaseError"
def test_verify_red_phase_requires_generated_test_file_to_exist():
    """verify_red_phase raises RedPhaseError E1 when the generated test file is absent.

    No file is created under the project root here, so the generator's reported
    output_path does not exist on disk and verification must fail fast.
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        task_file = build_task(temp_dir, "python")
        generated = mock.Mock(language="python", output_path="tests/test_verification.py", warnings=[])
        with mock.patch.object(task_tdd_red_phase, "load_generator_module") as load_generator:
            load_generator.return_value.generate_from_task_file.return_value = generated
            try:
                task_tdd_red_phase.verify_red_phase(task_file, project_root=temp_dir)
                assert False, "Expected RedPhaseError"
            except task_tdd_red_phase.RedPhaseError as exc:
                assert exc.code == "E1"
                # Fixed: the original source had a stray "docs" token fused onto
                # this line ("str(exc)docs"), which is a SyntaxError.
                assert "Generated test file not found" in str(exc)
plugins
developer-kit-ai
developer-kit-aws
agents
docs
skills
aws
aws-cli-beast
aws-cost-optimization
aws-drawio-architecture-diagrams
aws-sam-bootstrap
aws-cloudformation
aws-cloudformation-auto-scaling
aws-cloudformation-bedrock
aws-cloudformation-cloudfront
aws-cloudformation-cloudwatch
aws-cloudformation-dynamodb
aws-cloudformation-ec2
aws-cloudformation-ecs
aws-cloudformation-elasticache
references
aws-cloudformation-iam
references
aws-cloudformation-lambda
aws-cloudformation-rds
aws-cloudformation-s3
aws-cloudformation-security
aws-cloudformation-task-ecs-deploy-gh
aws-cloudformation-vpc
references
developer-kit-core
agents
commands
skills
developer-kit-devops
developer-kit-java
agents
commands
docs
skills
aws-lambda-java-integration
aws-rds-spring-boot-integration
aws-sdk-java-v2-bedrock
aws-sdk-java-v2-core
aws-sdk-java-v2-dynamodb
aws-sdk-java-v2-kms
aws-sdk-java-v2-lambda
aws-sdk-java-v2-messaging
aws-sdk-java-v2-rds
aws-sdk-java-v2-s3
aws-sdk-java-v2-secrets-manager
clean-architecture
graalvm-native-image
langchain4j-ai-services-patterns
references
langchain4j-mcp-server-patterns
references
langchain4j-rag-implementation-patterns
references
langchain4j-spring-boot-integration
langchain4j-testing-strategies
langchain4j-tool-function-calling-patterns
langchain4j-vector-stores-configuration
references
qdrant
references
spring-ai-mcp-server-patterns
spring-boot-actuator
spring-boot-cache
spring-boot-crud-patterns
spring-boot-dependency-injection
spring-boot-event-driven-patterns
spring-boot-openapi-documentation
spring-boot-project-creator
spring-boot-resilience4j
spring-boot-rest-api-standards
spring-boot-saga-pattern
spring-boot-security-jwt
assets
references
scripts
spring-boot-test-patterns
spring-data-jpa
references
spring-data-neo4j
references
unit-test-application-events
unit-test-bean-validation
unit-test-boundary-conditions
unit-test-caching
unit-test-config-properties
references
unit-test-controller-layer
unit-test-exception-handler
references
unit-test-json-serialization
unit-test-mapper-converter
references
unit-test-parameterized
unit-test-scheduled-async
references
unit-test-service-layer
references
unit-test-utility-methods
unit-test-wiremock-rest-api
references
developer-kit-php
developer-kit-project-management
developer-kit-python
developer-kit-specs
commands
docs
hooks
test-templates
tests
skills
developer-kit-tools
developer-kit-typescript
agents
docs
hooks
rules
skills
aws-cdk
aws-lambda-typescript-integration
better-auth
clean-architecture
drizzle-orm-patterns
dynamodb-toolbox-patterns
references
nestjs
nestjs-best-practices
nestjs-code-review
nestjs-drizzle-crud-generator
nextjs-app-router
nextjs-authentication
nextjs-code-review
nextjs-data-fetching
nextjs-deployment
nextjs-performance
nx-monorepo
react-code-review
react-patterns
shadcn-ui
tailwind-css-patterns
tailwind-design-system
references
turborepo-monorepo
typescript-docs
typescript-security-review
zod-validation-utilities
references
github-spec-kit