Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions concore_cli/commands/init.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from pathlib import Path
from rich.panel import Panel

from .metadata import write_study_metadata

SAMPLE_GRAPHML = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd" xmlns:y="http://www.yworks.com/xml/graphml">
<key for="node" id="d6" yfiles.type="nodegraphics"/>
Expand Down Expand Up @@ -87,10 +89,25 @@ def init_project(name, template, console):
with open(readme_file, "w") as f:
f.write(README_TEMPLATE.format(project_name=name))

metadata_info = ""
try:
metadata_path = write_study_metadata(
project_path,
generated_by="concore init",
workflow_file=workflow_file,
)
metadata_info = f"Metadata:\n {metadata_path.name}\n\n"
except Exception as exc:
# Metadata is additive, so project creation should still succeed on failure.
console.print(
f"[yellow]Warning:[/yellow] Failed to write study metadata: {exc}"
)

console.print()
console.print(
Panel.fit(
f"[green]✓[/green] Project created successfully!\n\n"
f"{metadata_info}"
f"Next steps:\n"
f" cd {name}\n"
f" concore validate workflow.graphml\n"
Expand Down
77 changes: 77 additions & 0 deletions concore_cli/commands/metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import hashlib
import json
import platform
import shutil
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional

from concore_cli import __version__


def _checksum_file(path: Path) -> str:
hasher = hashlib.sha256()
with path.open("rb") as handle:
for chunk in iter(lambda: handle.read(8192), b""):
hasher.update(chunk)
return f"sha256:{hasher.hexdigest()}"


def _detect_tools() -> dict:
tool_candidates = {
"python": ["python", "python3"],
"g++": ["g++"],
"docker": ["docker"],
"octave": ["octave"],
"iverilog": ["iverilog"],
}
detected = {}
for tool_name, candidates in tool_candidates.items():
detected_path = None
for candidate in candidates:
detected_path = shutil.which(candidate)
if detected_path:
break
detected[tool_name] = detected_path or "not found"
return detected


def write_study_metadata(
    study_path: Path, generated_by: str, workflow_file: Optional[Path] = None
) -> Path:
    """Write a STUDY.json provenance file into *study_path*.

    The metadata records who/what generated the study, the concore and
    Python versions, the platform, which well-known tools are on PATH,
    and SHA-256 checksums of recognizable project files, so a study can
    later be audited or reproduced.

    Args:
        study_path: Directory the metadata file is written into; its
            name is recorded as the study name.
        generated_by: Human-readable origin of the study, e.g.
            ``"concore init"`` or ``"concore run"``.
        workflow_file: Optional workflow file (possibly located outside
            *study_path*) to include in the checksums. If a file of the
            same name also exists inside *study_path*, the in-study
            copy's checksum takes precedence.

    Returns:
        The path of the written ``STUDY.json`` file.
    """
    checksums = {}
    # Well-known project files worth fingerprinting when present.
    checksum_candidates = [
        "workflow.graphml",
        "docker-compose.yml",
        "concore.toml",
        "runner.py",
        "README.md",
        "build",
        "run",
        "build.bat",
        "run.bat",
    ]

    # Checksum the explicit workflow file first so an identically named
    # file inside study_path (handled below) deliberately overwrites it.
    if workflow_file is not None and workflow_file.exists():
        checksums[workflow_file.name] = _checksum_file(workflow_file)

    for relative_name in checksum_candidates:
        file_path = study_path / relative_name
        # is_file() guards against same-named directories (e.g. "build").
        if file_path.exists() and file_path.is_file():
            checksums[relative_name] = _checksum_file(file_path)

    metadata = {
        "generated_by": generated_by,
        "concore_version": __version__,
        # UTC, second precision, ISO-8601 — stable and timezone-aware.
        "timestamp": datetime.now(timezone.utc).replace(microsecond=0).isoformat(),
        "python_version": platform.python_version(),
        "platform": platform.platform(),
        "study_name": study_path.name,
        "working_directory": str(study_path.resolve()),
        "tools_detected": _detect_tools(),
        "checksums": checksums,
        "schema_version": 1,
    }

    metadata_path = study_path / "STUDY.json"
    metadata_path.write_text(json.dumps(metadata, indent=2) + "\n", encoding="utf-8")
    return metadata_path
16 changes: 16 additions & 0 deletions concore_cli/commands/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
from rich.panel import Panel
from rich.progress import Progress, SpinnerColumn, TextColumn

from .metadata import write_study_metadata


def _find_mkconcore_path():
for parent in Path(__file__).resolve().parents:
Expand Down Expand Up @@ -71,6 +73,20 @@ def run_workflow(workflow_file, source, output, exec_type, auto_build, console):
console.print(
f"[green]✓[/green] Workflow generated in [cyan]{output_path}[/cyan]"
)
try:
metadata_path = write_study_metadata(
output_path,
generated_by="concore run",
workflow_file=workflow_path,
)
console.print(
f"[green]✓[/green] Metadata written to [cyan]{metadata_path}[/cyan]"
)
except Exception as exc:
# Metadata is additive, so workflow generation should still succeed on failure.
console.print(
f"[yellow]Warning:[/yellow] Failed to write study metadata for [cyan]{output_path}[/cyan]: {exc}"
)

except subprocess.CalledProcessError as e:
progress.stop()
Expand Down
15 changes: 15 additions & 0 deletions tests/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import tempfile
import shutil
import os
import json
from pathlib import Path
from click.testing import CliRunner
from concore_cli.cli import cli
Expand Down Expand Up @@ -38,6 +39,13 @@ def test_init_command(self):
self.assertTrue((project_path / "src").exists())
self.assertTrue((project_path / "README.md").exists())
self.assertTrue((project_path / "src" / "script.py").exists())
self.assertTrue((project_path / "STUDY.json").exists())

metadata = json.loads((project_path / "STUDY.json").read_text())
self.assertEqual(metadata["generated_by"], "concore init")
self.assertEqual(metadata["study_name"], "test-project")
self.assertEqual(metadata["schema_version"], 1)
self.assertIn("workflow.graphml", metadata["checksums"])

def test_init_existing_directory(self):
with self.runner.isolated_filesystem(temp_dir=self.temp_dir):
Expand Down Expand Up @@ -108,6 +116,13 @@ def test_run_command_from_project_dir(self):
)
self.assertEqual(result.exit_code, 0)
self.assertTrue(Path("out/src/concore.py").exists())
self.assertTrue(Path("out/STUDY.json").exists())

metadata = json.loads(Path("out/STUDY.json").read_text())
self.assertEqual(metadata["generated_by"], "concore run")
self.assertEqual(metadata["study_name"], "out")
self.assertEqual(metadata["schema_version"], 1)
self.assertIn("workflow.graphml", metadata["checksums"])

def test_run_command_default_type(self):
with self.runner.isolated_filesystem(temp_dir=self.temp_dir):
Expand Down
26 changes: 21 additions & 5 deletions tests/test_openjupyter_security.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,33 @@
@pytest.fixture(autouse=True)
def reset_jupyter_process():
"""Reset the module-level jupyter_process before each test."""
import fri.server.main as mod
with patch.dict(
os.environ,
{
"CONCORE_API_KEY": TEST_API_KEY,
"FLASK_SECRET_KEY": "test-flask-secret-key",
},
clear=False,
):
import fri.server.main as mod

mod.jupyter_process = None
yield
mod.jupyter_process = None
mod.API_KEY = TEST_API_KEY
mod.jupyter_process = None
yield
mod.jupyter_process = None


@pytest.fixture
def client():
"""Create a Flask test client with the API key configured."""
with patch.dict(os.environ, {"CONCORE_API_KEY": TEST_API_KEY}):
with patch.dict(
os.environ,
{
"CONCORE_API_KEY": TEST_API_KEY,
"FLASK_SECRET_KEY": "test-flask-secret-key",
},
clear=False,
):
# Re-read env var after patching
import fri.server.main as mod

Expand Down
Loading