diff --git a/LLM/requirements.txt b/LLM/requirements.txt index 9417894..6cac674 100644 --- a/LLM/requirements.txt +++ b/LLM/requirements.txt @@ -1,3 +1,5 @@ openai>=1.0.0 anthropic>=0.18.0 +packaging>=23.1 +requests>=2.31.0 PyYAML>=6.0 diff --git a/README.md b/README.md index fc7b80c..9f56ca1 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,513 @@ +# Cortex Linux + +> **The AI-Native Operating System** - Linux that understands you. No documentation required. + +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) +[![Python](https://img.shields.io/badge/Python-3.10%2B-blue.svg)](https://python.org) +[![Status](https://img.shields.io/badge/Status-Alpha-orange.svg)]() +[![Discord](https://img.shields.io/discord/1234567890?color=7289da&label=Discord)](https://discord.gg/uCqHvxjU83) + +```bash +$ cortex install oracle-23-ai --optimize-gpu + Analyzing system: NVIDIA RTX 4090 detected + Installing CUDA 12.3 + dependencies + Configuring Oracle for GPU acceleration + Running validation tests + Oracle 23 AI ready at localhost:1521 (4m 23s) +``` + +--- + +## Table of Contents + +- [The Problem](#the-problem) +- [The Solution](#the-solution) +- [Features](#features) +- [Quick Start](#quick-start) +- [Installation](#installation) +- [Usage](#usage) +- [Configuration](#configuration) +- [Architecture](#architecture) +- [Development](#development) +- [Contributing](#contributing) +- [Roadmap](#roadmap) +- [FAQ](#faq) +- [Community](#community) +- [License](#license) + +--- + +## The Problem + +Installing complex software on Linux is broken: + +- **47 Stack Overflow tabs** to install CUDA drivers +- **Dependency hell** that wastes days +- **Configuration files** written in ancient runes +- **"Works on my machine"** syndrome + +**Developers spend 30% of their time fighting the OS instead of building.** + +## The Solution + +Cortex Linux embeds AI at the operating system level. 
Tell it what you need in plain English - it handles everything: + +| Feature | Description | +|---------|-------------| +| **Natural Language Commands** | System understands intent, not syntax | +| **Hardware-Aware Optimization** | Automatically configures for your GPU/CPU | +| **Self-Healing Configuration** | Fixes broken dependencies automatically | +| **Enterprise-Grade Security** | AI actions are sandboxed and validated | +| **Installation History** | Track and rollback any installation | + +--- + +## Features + +### Core Capabilities + +- **Natural Language Parsing** - "Install Python for machine learning" just works +- **Multi-Provider LLM Support** - Claude (Anthropic) and OpenAI GPT-4 +- **Intelligent Package Management** - Wraps apt/yum/dnf with semantic understanding +- **Hardware Detection** - Automatic GPU, CPU, RAM, storage profiling +- **Sandboxed Execution** - Firejail-based isolation for all commands +- **Installation Rollback** - Undo any installation with one command +- **Error Analysis** - AI-powered error diagnosis and fix suggestions + +### Supported Software (32+ Categories) + +| Category | Examples | +|----------|----------| +| Languages | Python, Node.js, Go, Rust | +| Databases | PostgreSQL, MySQL, MongoDB, Redis | +| Web Servers | Nginx, Apache | +| Containers | Docker, Kubernetes | +| DevOps | Terraform, Ansible | +| ML/AI | CUDA, TensorFlow, PyTorch | + +--- + +## Quick Start + +```bash +# Install cortex +pip install cortex-linux + +# Set your API key (choose one) +export ANTHROPIC_API_KEY="your-key-here" +# or +export OPENAI_API_KEY="your-key-here" + +# Install software with natural language +cortex install docker +cortex install "python for data science" +cortex install "web development environment" + +# Execute the installation +cortex install docker --execute + +# Preview without executing +cortex install nginx --dry-run +``` + +--- + +## Installation + +### Prerequisites + +| Requirement | Version | Notes | +|-------------|---------|-------| +| **OS** | Ubuntu 24.04 LTS | Other Debian-based coming soon | +| **Python** | 3.10+ | Required | +| **Firejail** | Latest | Recommended for sandboxing | +| **API Key** | - | Anthropic or OpenAI | + +### Step-by-Step Installation + +```bash +# 1. Install system dependencies +sudo apt update +sudo apt install -y python3 python3-pip python3-venv firejail + +# 2. Create virtual environment (recommended) +python3 -m venv ~/.cortex-venv +source ~/.cortex-venv/bin/activate + +# 3. Install Cortex +pip install cortex-linux + +# 4. Configure API key +echo 'export ANTHROPIC_API_KEY="your-key"' >> ~/.bashrc +source ~/.bashrc + +# 5. Verify installation +cortex --help +``` + +### From Source + +```bash +git clone https://github.com/cortexlinux/cortex.git +cd cortex +pip install -e .
+``` + +--- + +## Usage + +### Basic Commands + +```bash +# Install software +cortex install <software> # Show commands only +cortex install <software> --execute # Execute installation +cortex install <software> --dry-run # Preview mode + +# Installation history +cortex history # List recent installations +cortex history show <id> # Show installation details + +# Rollback +cortex rollback <id> # Undo an installation +cortex rollback <id> --dry-run # Preview rollback +``` + +### Examples + +```bash +# Simple installations +cortex install docker --execute +cortex install postgresql --execute +cortex install nginx --execute + +# Natural language requests +cortex install "python with machine learning libraries" --execute +cortex install "web development stack with nodejs and npm" --execute +cortex install "database tools for postgresql" --execute + +# Complex requests +cortex install "cuda drivers for nvidia gpu" --execute +cortex install "complete devops toolchain" --execute +``` + +### Environment Variables + +| Variable | Description | Required | +|----------|-------------|----------| +| `ANTHROPIC_API_KEY` | Anthropic Claude API key | One of these two | +| `OPENAI_API_KEY` | OpenAI GPT-4 API key | One of these two | +| `MOONSHOT_API_KEY` | Kimi K2 API key | Optional | +| `CORTEX_LOG_LEVEL` | Logging level (DEBUG, INFO, WARNING) | No | +| `CORTEX_DATA_DIR` | Data directory path | No | + +--- + +## Configuration + +### Configuration File + +Create `~/.config/cortex/config.yaml`: + +```yaml +# LLM Provider Settings +llm: + default_provider: claude # claude, openai, kimi + temperature: 0.3 + max_tokens: 1000 + +# Security Settings +security: + enable_sandbox: true + require_confirmation: true + allowed_directories: + - /tmp + - ~/.local + +# Logging +logging: + level: INFO + file: ~/.local/share/cortex/cortex.log +``` + +--- + +## Architecture + +``` + User Input + + Natural Language + + Cortex CLI + + +--------+--------+ + | | + LLM Router Hardware + | Profiler + | + +-------+-------+ + | | | +Claude GPT-4 Kimi K2 + | + Command Generator + | + Security Validator + | + Sandbox Executor + | + +-------+-------+ + | | +apt/yum/dnf Verifier + | + Installation + History +``` + +### Key Components + +| Component | File | Purpose | +|-----------|------|---------| +| CLI | `cortex/cli.py` | Command-line interface | +| Coordinator | `cortex/coordinator.py` | Installation orchestration | +| LLM Interpreter | `LLM/interpreter.py` | Natural language to commands | +| Package Manager | `cortex/packages.py` | Package manager abstraction | +| Sandbox | `src/sandbox_executor.py` | Secure command execution | +| Hardware Profiler | `src/hwprofiler.py` | System hardware detection | +| History | `installation_history.py` | Installation tracking | +| Error Parser | `error_parser.py` | Error analysis and fixes | + +--- + +## Development + +### Setup Development Environment + +```bash +# Clone repository +git clone https://github.com/cortexlinux/cortex.git +cd cortex + +# Create virtual environment +python3 -m venv venv +source venv/bin/activate + +# Install dependencies +pip install -r requirements.txt +pip install -r requirements-dev.txt + +# Install in development mode +pip install -e .
+ +# Run tests +pytest test/ -v + +# Run with coverage +pytest test/ --cov=cortex --cov-report=html +``` + +### Code Style + +```bash +# Format code +black cortex/ + +# Lint +pylint cortex/ + +# Type checking +mypy cortex/ +``` + +### Project Structure + +``` +cortex/ + cortex/ # Core Python package + __init__.py + cli.py # CLI entry point + coordinator.py # Installation coordinator + packages.py # Package manager wrapper + LLM/ # LLM integration + interpreter.py # Command interpreter + requirements.txt + src/ # Additional modules + sandbox_executor.py + hwprofiler.py + progress_tracker.py + test/ # Unit tests + docs/ # Documentation + examples/ # Usage examples + .github/ # CI/CD workflows + requirements.txt # Dependencies + setup.py # Package config +``` + +--- + +## Contributing + +We welcome contributions! See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines. + +### Quick Contribution Guide + +1. **Fork** the repository +2. **Create** a feature branch (`git checkout -b feature/amazing-feature`) +3. **Commit** your changes (`git commit -m 'Add amazing feature'`) +4. **Push** to the branch (`git push origin feature/amazing-feature`) +5. **Open** a Pull Request + +### Phase 1: Foundation (Weeks 1-2) + +- ✅ LLM integration layer (PR #5 by @Sahilbhatane) +- ✅ Safe command execution sandbox (PR #6 by @dhvil) +- ✅ Hardware detection (PR #4 by @dhvil) +- ✅ Package manager AI wrapper +- ✅ Installation history & rollback +- [ ] Basic multi-step orchestration + +### Bounty Program + +Cash bounties on merge: + +| Tier | Amount | Examples | +|------|--------|----------| +| Critical | $150-200 | Security fixes, core features | +| Standard | $75-150 | New features, integrations | +| Testing | $25-75 | Tests, documentation | + +**Payment methods:** Bitcoin, USDC, PayPal + +See [Bounties.md](Bounties.md) for available bounties. + +--- + +## Roadmap + +### Current Status: Alpha (Phase 1) + +- LLM integration layer +- Safe command execution sandbox +- Hardware detection +- Installation history & rollback +- Error parsing & suggestions +- Multi-provider LLM support + +### Coming Soon (Phase 2) + +- Advanced dependency resolution +- Configuration file generation +- Multi-step installation orchestration +- Plugin architecture + +### Future (Phase 3) + +- Enterprise deployment tools +- Security hardening & audit logging +- Role-based access control +- Air-gapped deployment support + +See [ROADMAP.md](ROADMAP.md) for detailed plans. + +--- + +## FAQ +
+**What operating systems are supported?** + +Currently Ubuntu 24.04 LTS. Other Debian-based distributions coming soon. + +**How do I keep Cortex up to date?** + +- Cortex automatically checks for new releases (stable by default) when you run `cortex install ...`. Disable with `CORTEX_UPDATE_CHECK=0`. +- See the current channel or switch tracks: + - `cortex channel show` + - `cortex channel set beta` +- Upgrade in-place with release notes, checksum verification, and automatic rollback on failure: + - `cortex update` (use `--dry-run` to preview, `--channel beta` to override per run); see the example below. +- Update metadata lives in `~/.config/cortex/updater/`, including logs and last upgrade state.
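+For example, a typical update session with the commands listed above (actual output depends on the release that is available):
+
+```bash
+# Check which channel you are tracking
+cortex channel show
+
+# Optionally follow pre-release builds
+cortex channel set beta
+
+# Preview the available release and its notes, then upgrade in place
+cortex update --dry-run
+cortex update
+```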
+ +
+**Is it free?** + +Yes! Community edition is free and open source (Apache 2.0). Enterprise subscriptions will be available for advanced features. +
+ +
+**Is it secure?** + +Yes. All commands are validated and executed in a Firejail sandbox with AppArmor policies. AI-generated commands are checked against a security allowlist. +
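+If you want to see exactly what Cortex intends to run before anything touches your system, the documented flags make that straightforward; the `firejail` check simply confirms the sandbox is installed:
+
+```bash
+# Confirm the sandbox is available
+firejail --version
+
+# Review the generated commands without executing them
+cortex install nginx --dry-run
+
+# Execute only after reviewing the plan
+cortex install nginx --execute
+```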
+ +
+**Can I use my own LLM?** + +Cortex currently supports Claude (Anthropic) and OpenAI. Local LLM support is planned for future releases. +
+ +
+**What if something goes wrong?** + +Every installation is tracked and can be rolled back with `cortex rollback <id>`. +
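+A minimal rollback session with the documented commands looks like this, where `<id>` stands for the installation ID shown by `cortex history`:
+
+```bash
+cortex history                  # list recent installations and their IDs
+cortex rollback <id> --dry-run  # preview what would be undone
+cortex rollback <id>            # perform the rollback
+```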
+ +See [FAQ.md](FAQ.md) for more questions. + +--- + +## Community + +### Get Help + +- **Discord:** [Join our server](https://discord.gg/uCqHvxjU83) +- **GitHub Issues:** [Report bugs](https://github.com/cortexlinux/cortex/issues) +- **Discussions:** [Ask questions](https://github.com/cortexlinux/cortex/discussions) + +### Stay Updated + +- Star this repository +- Follow [@cortexlinux](https://twitter.com/cortexlinux) on Twitter +- Subscribe to our [newsletter](https://cortexlinux.com) + +--- + +## License + +This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details. + +--- + +## Acknowledgments + +- Built with [Claude](https://anthropic.com) and [OpenAI](https://openai.com) +- Sandbox powered by [Firejail](https://firejail.wordpress.com/) +- Inspired by the pain of every developer who spent hours on Stack Overflow diff --git a/README_DEPENDENCIES (1).md b/README_DEPENDENCIES (1).md deleted file mode 100644 index 30e5580..0000000 --- a/README_DEPENDENCIES (1).md +++ /dev/null @@ -1,249 +0,0 @@ -# Dependency Resolution System - -AI-powered dependency detection and resolution for Cortex Linux. - -## Features - -- ✅ Automatic dependency detection via apt-cache -- ✅ Predefined patterns for 8+ common packages -- ✅ Transitive dependency resolution -- ✅ Conflict detection -- ✅ Optimal installation order calculation -- ✅ Installation plan generation -- ✅ Dependency tree visualization -- ✅ JSON export for automation - -## Usage - -### Show Dependency Tree - -```bash -python3 dependency_resolver.py docker --tree -``` - -Output: -``` -📦 Dependency tree for docker: -============================================================ -❌ docker - ❌ containerd - Required dependency - ❌ docker-ce-cli - Required dependency - ❌ docker-buildx-plugin - Required dependency - ✅ iptables (1.8.7-1) - System dependency - ✅ ca-certificates (20230311) - System dependency -``` - -### Generate Installation Plan - -```bash -python3 dependency_resolver.py postgresql --plan -``` - -Output: -``` -📋 Installation plan for postgresql: -============================================================ - -Package: postgresql -Total dependencies: 5 -✅ Already satisfied: 2 -❌ Need to install: 3 - -📝 Installation order: - 1. ❌ postgresql-common - 2. ❌ postgresql-client - 3. 
❌ postgresql - -⏱️ Estimated time: 1.5 minutes - -💻 Commands to run: - sudo apt-get update - sudo apt-get install -y postgresql-common - sudo apt-get install -y postgresql-client - sudo apt-get install -y postgresql -``` - -### Show Missing Dependencies Only - -```bash -python3 dependency_resolver.py nginx --missing -``` - -### Export to JSON - -```bash -python3 dependency_resolver.py redis-server --export redis-deps.json -``` - -## Programmatic Usage - -```python -from dependency_resolver import DependencyResolver - -resolver = DependencyResolver() - -# Get dependency graph -graph = resolver.resolve_dependencies('docker') - -print(f"Total dependencies: {len(graph.all_dependencies)}") -print(f"Installation order: {graph.installation_order}") - -# Check for conflicts -if graph.conflicts: - print("⚠️ Conflicts detected:") - for pkg1, pkg2 in graph.conflicts: - print(f" {pkg1} <-> {pkg2}") - -# Get missing dependencies -missing = resolver.get_missing_dependencies('docker') -for dep in missing: - print(f"Need to install: {dep.name} ({dep.reason})") - -# Generate installation plan -plan = resolver.generate_install_plan('nginx') -print(f"Estimated install time: {plan['estimated_time_minutes']} minutes") - -# Execute installation commands -for cmd in plan['install_commands']: - print(f"Run: {cmd}") -``` - -## Supported Packages - -Predefined dependency patterns for: -- docker -- postgresql -- mysql-server -- nginx -- apache2 -- nodejs -- redis-server -- python3-pip - -For other packages, uses apt-cache dependency data. - -## Architecture - -### Dependency Class -Represents a single package dependency: -- `name`: Package name -- `version`: Required version (optional) -- `reason`: Why this dependency exists -- `is_satisfied`: Whether already installed -- `installed_version`: Current version if installed - -### DependencyGraph Class -Complete dependency information: -- `package_name`: Target package -- `direct_dependencies`: Immediate dependencies -- `all_dependencies`: Including transitive deps -- `conflicts`: Conflicting packages -- `installation_order`: Optimal install sequence - -### DependencyResolver Class -Main resolver with: -- **Dependency Detection**: Via apt-cache and predefined patterns -- **Conflict Detection**: Identifies incompatible packages -- **Installation Planning**: Generates optimal install sequence -- **Caching**: Speeds up repeated queries - -## Conflict Detection - -Detects known conflicts: -- mysql-server ↔ mariadb-server -- apache2 ↔ nginx (port conflicts) - -Example: -```python -resolver = DependencyResolver() -graph = resolver.resolve_dependencies('mysql-server') - -if graph.conflicts: - print("Cannot install - conflicts detected!") -``` - -## Installation Order - -Uses intelligent ordering: -1. System libraries (libc, libssl, etc.) -2. Base dependencies (ca-certificates, curl, etc.) -3. Package-specific dependencies -4. Target package - -This minimizes installation failures. 
- -## Integration with Cortex - -```python -# In cortex install command -from dependency_resolver import DependencyResolver - -resolver = DependencyResolver() - -# Get installation plan -plan = resolver.generate_install_plan(package_name) - -# Check for conflicts -if plan['conflicts']: - raise InstallationError(f"Conflicts: {plan['conflicts']}") - -# Execute in order -for package in plan['installation_order']: - if not resolver.is_package_installed(package): - install_package(package) -``` - -## Testing - -```bash -python3 test_dependency_resolver.py -``` - -## Performance - -- **Cache**: Dependency graphs are cached per session -- **Speed**: ~0.5s per package for apt-cache queries -- **Memory**: <50MB for typical dependency graphs - -## Future Enhancements - -- [ ] Support for pip/npm dependencies -- [ ] AI-powered dependency suggestions -- [ ] Version constraint resolution -- [ ] Automatic conflict resolution -- [ ] PPA repository detection -- [ ] Circular dependency detection -- [ ] Parallel installation planning - -## Example: Complete Workflow - -```python -from dependency_resolver import DependencyResolver -from installation_verifier import InstallationVerifier - -# Step 1: Resolve dependencies -resolver = DependencyResolver() -plan = resolver.generate_install_plan('docker') - -# Step 2: Check conflicts -if plan['conflicts']: - print("⚠️ Resolve conflicts first") - exit(1) - -# Step 3: Install in order -for package in plan['installation_order']: - if not resolver.is_package_installed(package): - print(f"Installing {package}...") - # execute: apt-get install package - -# Step 4: Verify installation -verifier = InstallationVerifier() -result = verifier.verify_package('docker') - -if result.status == VerificationStatus.SUCCESS: - print("✅ Installation complete and verified!") -``` - -## License - -MIT License - Part of Cortex Linux diff --git a/cortex/__init__.py b/cortex/__init__.py index 1f824b2..4aab677 100644 --- a/cortex/__init__.py +++ b/cortex/__init__.py @@ -1,6 +1,12 @@ +from importlib import metadata + from .cli import main from .packages import PackageManager, PackageManagerType +try: + __version__ = metadata.version("cortex-linux") +except metadata.PackageNotFoundError: + __version__ = "0.1.0" + +__all__ = ["__version__", "main", "PackageManager", "PackageManagerType"] -__version__ = "0.1.0" -__all__ = ["main", "PackageManager", "PackageManagerType"] diff --git a/cortex/cli.py b/cortex/cli.py index b3981a9..f27235d 100644 --- a/cortex/cli.py +++ b/cortex/cli.py @@ -10,6 +10,8 @@ from LLM.interpreter import CommandInterpreter from cortex.coordinator import InstallationCoordinator, StepStatus +from cortex.update_manifest import UpdateChannel +from cortex.updater import ChecksumMismatch, InstallError, UpdateError, UpdateService from installation_history import ( InstallationHistory, InstallationType, @@ -26,6 +28,7 @@ class CortexCLI: def __init__(self): self.spinner_chars = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'] self.spinner_idx = 0 + self.update_service = UpdateService() self.prefs_manager = None # Lazy initialization def _get_api_key(self) -> Optional[str]: @@ -62,6 +65,7 @@ def _clear_line(self): sys.stdout.flush() def install(self, software: str, execute: bool = False, dry_run: bool = False): + self._notify_update_if_available() api_key = self._get_api_key() if not api_key: return 1 @@ -188,6 +192,85 @@ def progress_callback(current, total, step): self._print_error(f"Unexpected error: {str(e)}") return 1 + def update(self, channel: Optional[str] = None, force: 
bool = False, dry_run: bool = False): + try: + channel_enum = UpdateChannel.from_string(channel) if channel else self.update_service.get_channel() + except ValueError as exc: + self._print_error(str(exc)) + return 1 + + try: + result = self.update_service.perform_update(force=force, channel=channel_enum, dry_run=dry_run) + except ChecksumMismatch as exc: + self._print_error(f"Security check failed: {exc}") + return 1 + except InstallError as exc: + self._print_error(f"Installer error: {exc}") + return 1 + except UpdateError as exc: + self._print_error(f"Update failed: {exc}") + return 1 + except Exception as exc: + self._print_error(f"Unexpected update failure: {exc}") + return 1 + + if not result.release: + self._print_status("ℹ️", result.message or "Cortex is already up to date.") + return 0 + + release = result.release + + if not result.updated: + self._print_status("🔔", f"Update available: {release.version.raw} ({release.channel.value})") + if release.release_notes: + self._print_status("🆕", "What's new:") + for line in release.release_notes.strip().splitlines(): + print(f" {line}") + self._print_status("ℹ️", result.message or "Dry run complete.") + return 0 + + self._print_success(f"Update complete! {result.previous_version.raw} → {release.version.raw}") + self._print_status("🗂️", f"Log saved to {result.log_path}") + if release.release_notes: + self._print_status("🆕", "What's new:") + for line in release.release_notes.strip().splitlines(): + print(f" {line}") + + return 0 + + def _notify_update_if_available(self): + if os.environ.get("CORTEX_UPDATE_CHECK", "1") in ("0", "false", "False"): + return + + try: + result = self.update_service.check_for_updates() + except Exception: + return + + if result.update_available and result.release: + release = result.release + print( + f"\n🔔 Cortex update available: {release.version.raw} " + f"({result.channel.value} channel)\n" + " Run 'cortex update' to learn more.\n" + ) + + def show_channel(self): + channel = self.update_service.get_channel() + self._print_status("ℹ️", f"Current update channel: {channel.value}") + return 0 + + def set_channel(self, channel: str): + try: + channel_enum = UpdateChannel.from_string(channel) + except ValueError as exc: + self._print_error(str(exc)) + return 1 + + self.update_service.set_channel(channel_enum) + self._print_success(f"Update channel set to '{channel_enum.value}'") + return 0 + def history(self, limit: int = 20, status: Optional[str] = None, show_id: Optional[str] = None): """Show installation history""" history = InstallationHistory() @@ -521,6 +604,17 @@ def main(): install_parser.add_argument('--execute', action='store_true', help='Execute the generated commands') install_parser.add_argument('--dry-run', action='store_true', help='Show commands without executing') + update_parser = subparsers.add_parser('update', help='Check for Cortex updates or upgrade') + update_parser.add_argument('--channel', choices=[c.value for c in UpdateChannel], help='Update channel to use') + update_parser.add_argument('--force', action='store_true', help='Force network check') + update_parser.add_argument('--dry-run', action='store_true', help='Show details without installing') + + channel_parser = subparsers.add_parser('channel', help='Manage Cortex update channel') + channel_sub = channel_parser.add_subparsers(dest='channel_command', required=True) + channel_sub.add_parser('show', help='Display current update channel') + channel_set_parser = channel_sub.add_parser('set', help='Set update channel') + 
channel_set_parser.add_argument('channel', choices=[c.value for c in UpdateChannel], help='Channel to use') + # History command history_parser = subparsers.add_parser('history', help='View installation history') history_parser.add_argument('--limit', type=int, default=20, help='Number of records to show') @@ -557,6 +651,13 @@ def main(): try: if args.command == 'install': return cli.install(args.software, execute=args.execute, dry_run=args.dry_run) + elif args.command == 'update': + return cli.update(channel=args.channel, force=args.force, dry_run=args.dry_run) + elif args.command == 'channel': + if args.channel_command == 'show': + return cli.show_channel() + if args.channel_command == 'set': + return cli.set_channel(args.channel) elif args.command == 'history': return cli.history(limit=args.limit, status=args.status, show_id=args.show_id) elif args.command == 'rollback': diff --git a/cortex/update_manifest.py b/cortex/update_manifest.py new file mode 100644 index 0000000..f6526c4 --- /dev/null +++ b/cortex/update_manifest.py @@ -0,0 +1,178 @@ +""" +Structures and helpers for Cortex update manifests. +""" + +from __future__ import annotations + +import platform +from dataclasses import dataclass, field +from enum import Enum +from typing import Any, Dict, Iterable, List, Optional + +from packaging.specifiers import InvalidSpecifier, SpecifierSet +from packaging.version import Version + +from cortex.versioning import CortexVersion, is_newer_version + + +class UpdateChannel(str, Enum): + STABLE = "stable" + BETA = "beta" + + @classmethod + def from_string(cls, raw: str) -> "UpdateChannel": + try: + return cls(raw.lower()) + except ValueError as exc: + valid = ", ".join(c.value for c in cls) + raise ValueError(f"Unknown update channel '{raw}'. Valid options: {valid}") from exc + + +@dataclass +class SystemInfo: + python_version: Version + os_name: str + architecture: str + distro: Optional[str] = None + + @classmethod + def current(cls) -> "SystemInfo": + return cls( + python_version=Version(platform.python_version()), + os_name=platform.system().lower(), + architecture=platform.machine().lower(), + distro=_detect_distro(), + ) + + +def _detect_distro() -> Optional[str]: + try: + import distro # type: ignore + + return distro.id() + except Exception: + return None + + +@dataclass +class CompatibilityRule: + python_spec: Optional[SpecifierSet] = None + os_names: List[str] = field(default_factory=list) + architectures: List[str] = field(default_factory=list) + distros: List[str] = field(default_factory=list) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "CompatibilityRule": + specifier_value = data.get("python") + specifier = None + if specifier_value: + try: + specifier = SpecifierSet(specifier_value) + except InvalidSpecifier as exc: + raise ValueError(f"Invalid python specifier '{specifier_value}'") from exc + + return cls( + python_spec=specifier, + os_names=[name.lower() for name in data.get("os", [])], + architectures=[arch.lower() for arch in data.get("arch", [])], + distros=[dist.lower() for dist in data.get("distro", [])], + ) + + def is_compatible(self, system: SystemInfo) -> bool: + if self.python_spec and system.python_version not in self.python_spec: + return False + + if self.os_names and system.os_name not in self.os_names: + return False + + if self.architectures and system.architecture not in self.architectures: + return False + + if self.distros and system.distro not in self.distros: + return False + + return True + + +@dataclass +class ReleaseEntry: + 
version: CortexVersion + channel: UpdateChannel + download_url: str + sha256: str + release_notes: str + published_at: Optional[str] = None + compatibility: List[CompatibilityRule] = field(default_factory=list) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ReleaseEntry": + compatibility_data = data.get("compatibility", []) + compatibility = [CompatibilityRule.from_dict(entry) for entry in compatibility_data] + + return cls( + version=CortexVersion.from_string(data["version"]), + channel=UpdateChannel.from_string(data.get("channel", UpdateChannel.STABLE.value)), + download_url=data["download_url"], + sha256=data["sha256"], + release_notes=data.get("release_notes", ""), + published_at=data.get("published_at"), + compatibility=compatibility, + ) + + def is_compatible(self, system: SystemInfo) -> bool: + if not self.compatibility: + return True + + return any(rule.is_compatible(system) for rule in self.compatibility) + + +@dataclass +class UpdateManifest: + releases: List[ReleaseEntry] + signature: Optional[str] = None + generated_at: Optional[str] = None + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "UpdateManifest": + releases_data = data.get("releases", []) + releases = [ReleaseEntry.from_dict(entry) for entry in releases_data] + return cls( + releases=releases, + signature=data.get("signature"), + generated_at=data.get("generated_at"), + ) + + def iter_releases( + self, + *, + channel: Optional[UpdateChannel] = None, + system: Optional[SystemInfo] = None, + ) -> Iterable[ReleaseEntry]: + for release in self.releases: + if channel and release.channel != channel: + continue + if system and not release.is_compatible(system): + continue + yield release + + def find_latest( + self, + *, + current_version: CortexVersion, + channel: UpdateChannel, + system: Optional[SystemInfo] = None, + ) -> Optional[ReleaseEntry]: + system_info = system or SystemInfo.current() + + eligible = [ + release + for release in self.iter_releases(channel=channel, system=system_info) + if is_newer_version(current_version, release.version) + ] + + if not eligible: + return None + + eligible.sort(key=lambda release: release.version.parsed, reverse=True) + return eligible[0] + diff --git a/cortex/updater.py b/cortex/updater.py new file mode 100644 index 0000000..66c3ea8 --- /dev/null +++ b/cortex/updater.py @@ -0,0 +1,320 @@ +""" +Update checking and coordination for Cortex. 
+""" + +from __future__ import annotations + +import hashlib +import json +import os +import shutil +import subprocess +import sys +import tempfile +from dataclasses import dataclass +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Any, Dict, Optional, Tuple + +import requests +from cortex.update_manifest import ( + ReleaseEntry, + SystemInfo, + UpdateChannel, + UpdateManifest, +) +from cortex.versioning import PACKAGE_NAME, CortexVersion, get_installed_version + +DEFAULT_MANIFEST_URL = "https://updates.cortexlinux.com/manifest.json" +STATE_DIR = Path.home() / ".config" / "cortex" / "updater" +STATE_FILE = STATE_DIR / "state.json" +DEFAULT_LOG_FILE = STATE_DIR / "update.log" +CACHE_TTL = timedelta(hours=6) + + +@dataclass +class UpdateCheckResult: + update_available: bool + release: Optional[ReleaseEntry] + channel: UpdateChannel + last_checked: datetime + from_cache: bool = False + + +@dataclass +class UpdatePerformResult: + success: bool + updated: bool + release: Optional[ReleaseEntry] + previous_version: CortexVersion + current_version: CortexVersion + log_path: Path + message: Optional[str] = None + + +class UpdateError(Exception): + """Generic update failure.""" + + +class ChecksumMismatch(UpdateError): + """Raised when downloaded artifacts do not match expected checksum.""" + + +class InstallError(UpdateError): + """Raised when pip install fails.""" + + +class UpdateService: + def __init__( + self, + *, + manifest_url: Optional[str] = None, + state_file: Optional[Path] = None, + system_info: Optional[SystemInfo] = None, + log_file: Optional[Path] = None, + ) -> None: + self.manifest_url = manifest_url or os.environ.get("CORTEX_UPDATE_MANIFEST_URL", DEFAULT_MANIFEST_URL) + self.state_file = state_file or STATE_FILE + self.system_info = system_info or SystemInfo.current() + self.log_file = log_file or DEFAULT_LOG_FILE + self.state_file.parent.mkdir(parents=True, exist_ok=True) + self.log_file.parent.mkdir(parents=True, exist_ok=True) + + # ------------------------------------------------------------------ State + def _load_state(self) -> Dict[str, Any]: + if not self.state_file.exists(): + return {} + try: + with self.state_file.open("r", encoding="utf-8") as fh: + return json.load(fh) + except Exception: + return {} + + def _save_state(self, state: Dict[str, Any]) -> None: + tmp_path = self.state_file.with_suffix(".tmp") + with tmp_path.open("w", encoding="utf-8") as fh: + json.dump(state, fh, indent=2) + tmp_path.replace(self.state_file) + + # ---------------------------------------------------------------- Channels + def get_channel(self) -> UpdateChannel: + state = self._load_state() + channel_raw = state.get("channel", UpdateChannel.STABLE.value) + try: + return UpdateChannel.from_string(channel_raw) + except ValueError: + return UpdateChannel.STABLE + + def set_channel(self, channel: UpdateChannel) -> None: + state = self._load_state() + state["channel"] = channel.value + self._save_state(state) + + # --------------------------------------------------------------- Manifest + def _fetch_manifest(self) -> UpdateManifest: + response = requests.get(self.manifest_url, timeout=10) + response.raise_for_status() + payload = response.json() + return UpdateManifest.from_dict(payload) + + def _should_use_cache(self, last_checked: Optional[str]) -> bool: + if not last_checked: + return False + try: + last_dt = datetime.fromisoformat(last_checked) + except ValueError: + return False + return datetime.now(timezone.utc) - last_dt < CACHE_TTL + + 
# --------------------------------------------------------------- Checking + def check_for_updates( + self, + *, + force: bool = False, + channel: Optional[UpdateChannel] = None, + current_version: Optional[CortexVersion] = None, + ) -> UpdateCheckResult: + state = self._load_state() + resolved_channel = channel or self.get_channel() + current = current_version or get_installed_version() + + if not force and self._should_use_cache(state.get("last_checked")): + cached_release = state.get("cached_release") + release = ReleaseEntry.from_dict(cached_release) if cached_release else None + last_checked = datetime.fromisoformat(state.get("last_checked")).astimezone(timezone.utc) + return UpdateCheckResult( + update_available=bool(release), + release=release, + channel=resolved_channel, + last_checked=last_checked, + from_cache=True, + ) + + manifest = self._fetch_manifest() + release = manifest.find_latest( + current_version=current, + channel=resolved_channel, + system=self.system_info, + ) + + last_checked = datetime.now(timezone.utc) + state["last_checked"] = last_checked.isoformat() + state["cached_release"] = _release_to_dict(release) if release else None + state["channel"] = resolved_channel.value + self._save_state(state) + + return UpdateCheckResult( + update_available=release is not None, + release=release, + channel=resolved_channel, + last_checked=last_checked, + from_cache=False, + ) + + # --------------------------------------------------------------- Upgrades + def perform_update( + self, + *, + force: bool = False, + channel: Optional[UpdateChannel] = None, + dry_run: bool = False, + ) -> UpdatePerformResult: + current_version = get_installed_version() + check_result = self.check_for_updates(force=force, channel=channel, current_version=current_version) + + if not check_result.update_available or not check_result.release: + return UpdatePerformResult( + success=True, + updated=False, + release=None, + previous_version=current_version, + current_version=current_version, + log_path=self.log_file, + message="Already up to date.", + ) + + release = check_result.release + + if dry_run: + return UpdatePerformResult( + success=True, + updated=False, + release=release, + previous_version=current_version, + current_version=current_version, + log_path=self.log_file, + message=f"Update available (dry run): {release.version.raw}", + ) + + temp_dir: Optional[Path] = None + try: + artifact_path, temp_dir = self._download_release(release) + self._log(f"Installing Cortex {release.version.raw} from {artifact_path}") + self._install_artifact(artifact_path) + self._record_last_upgrade(previous=current_version, new_version=release.version) + + return UpdatePerformResult( + success=True, + updated=True, + release=release, + previous_version=current_version, + current_version=release.version, + log_path=self.log_file, + message=f"Updated to {release.version.raw}", + ) + except UpdateError as exc: + self._log(f"Update error: {exc}. 
Rolling back to {current_version.raw}.") + self._rollback(previous=current_version) + raise + finally: + if temp_dir: + shutil.rmtree(temp_dir, ignore_errors=True) + + # ----------------------------------------------------------- Implementation + def _download_release(self, release: ReleaseEntry) -> Tuple[Path, Path]: + temp_dir = Path(tempfile.mkdtemp(prefix="cortex-update-")) + artifact_name = release.download_url.split("/")[-1] or f"cortex-{release.version.raw}.whl" + artifact_path = temp_dir / artifact_name + + with requests.get(release.download_url, stream=True, timeout=60) as response: + response.raise_for_status() + with artifact_path.open("wb") as fh: + for chunk in response.iter_content(chunk_size=1024 * 1024): + if chunk: + fh.write(chunk) + + self._log(f"Downloaded release to {artifact_path}") + self._verify_checksum(artifact_path, release.sha256) + return artifact_path, temp_dir + + def _verify_checksum(self, path: Path, expected_sha256: str) -> None: + sha256 = hashlib.sha256() + with path.open("rb") as fh: + for chunk in iter(lambda: fh.read(1024 * 1024), b""): + sha256.update(chunk) + computed = sha256.hexdigest() + if computed.lower() != expected_sha256.lower(): + raise ChecksumMismatch( + f"Checksum mismatch for {path.name}: expected {expected_sha256}, got {computed}" + ) + self._log(f"Checksum verified for {path.name}") + + def _install_artifact(self, artifact_path: Path) -> None: + self._log(f"Running pip install for {artifact_path}") + self._run_pip(["install", str(artifact_path)]) + + def _rollback(self, previous: CortexVersion) -> None: + self._log(f"Rolling back to Cortex {previous.raw}") + self._run_pip(["install", f"{PACKAGE_NAME}=={previous.raw}"]) + + def _run_pip(self, args: list[str]) -> None: + cmd = [sys.executable, "-m", "pip"] + args + self._log(f"Executing command: {' '.join(cmd)}") + try: + result = subprocess.run( + cmd, + check=True, + capture_output=True, + text=True, + ) + self._log(f"Pip output: {result.stdout.strip()}") + except subprocess.CalledProcessError as exc: + self._log(f"Pip failed: {exc.stderr}") + raise InstallError(f"pip exited with code {exc.returncode}") from exc + + def _record_last_upgrade(self, *, previous: CortexVersion, new_version: CortexVersion) -> None: + state = self._load_state() + state["last_success_version"] = new_version.raw + state["previous_version"] = previous.raw + state["last_upgrade_at"] = datetime.now(timezone.utc).isoformat() + self._save_state(state) + + def _log(self, message: str) -> None: + timestamp = datetime.now(timezone.utc).isoformat() + log_line = f"[{timestamp}] {message}\n" + with self.log_file.open("a", encoding="utf-8") as fh: + fh.write(log_line) + + +def _release_to_dict(release: Optional[ReleaseEntry]) -> Optional[Dict[str, Any]]: + if not release: + return None + + return { + "version": release.version.raw, + "channel": release.channel.value, + "download_url": release.download_url, + "sha256": release.sha256, + "release_notes": release.release_notes, + "published_at": release.published_at, + "compatibility": [ + { + "python": str(rule.python_spec) if rule.python_spec else None, + "os": rule.os_names, + "arch": rule.architectures, + "distro": rule.distros, + } + for rule in release.compatibility + ], + } + diff --git a/cortex/versioning.py b/cortex/versioning.py new file mode 100644 index 0000000..eac45da --- /dev/null +++ b/cortex/versioning.py @@ -0,0 +1,68 @@ +""" +Utilities for working with Cortex package versions. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass +from importlib import metadata +from typing import Optional + +from packaging.version import InvalidVersion, Version + +PACKAGE_NAME = "cortex-linux" +__all__ = [ + "PACKAGE_NAME", + "CortexVersion", + "get_installed_version", + "is_newer_version", +] + + +@dataclass(frozen=True) +class CortexVersion: + """Wrapper that keeps both raw and parsed versions.""" + + raw: str + parsed: Version + + @classmethod + def from_string(cls, raw_version: str) -> "CortexVersion": + try: + parsed = Version(raw_version) + except InvalidVersion as exc: + raise ValueError(f"Invalid Cortex version string: {raw_version}") from exc + return cls(raw=raw_version, parsed=parsed) + + def __str__(self) -> str: + return self.raw + + +def get_installed_version() -> CortexVersion: + """ + Return the version of Cortex that is currently installed. + + Falls back to the package's __version__ attribute when metadata is unavailable. + """ + + raw_version: Optional[str] = None + + try: + raw_version = metadata.version(PACKAGE_NAME) + except metadata.PackageNotFoundError: + try: + from cortex import __version__ as package_version # type: ignore + + raw_version = package_version + except Exception: + raw_version = "0.0.0" + + return CortexVersion.from_string(raw_version) + + +def is_newer_version(current: CortexVersion, candidate: CortexVersion) -> bool: + """Return True when ``candidate`` is newer than ``current``.""" + + return candidate.parsed > current.parsed + + diff --git a/test/test_update_service.py b/test/test_update_service.py new file mode 100644 index 0000000..5b491db --- /dev/null +++ b/test/test_update_service.py @@ -0,0 +1,112 @@ +import json + +from packaging.version import Version + +from cortex.update_manifest import UpdateChannel, UpdateManifest, SystemInfo +from cortex.versioning import CortexVersion +from cortex.updater import UpdateService + + +def make_manifest(version: str = "0.2.0", channel: str = "stable"): + return UpdateManifest.from_dict( + { + "releases": [ + { + "version": version, + "channel": channel, + "download_url": "https://example.com/cortex.whl", + "sha256": "0" * 64, + "release_notes": "Test release", + "compatibility": [ + { + "python": ">=3.8", + "os": ["linux"], + "arch": ["x86_64"], + } + ], + } + ] + } + ) + + +def current_system(): + return SystemInfo( + python_version=Version("3.10.0"), + os_name="linux", + architecture="x86_64", + distro="ubuntu", + ) + + +def test_manifest_selects_newer_release(): + manifest = UpdateManifest.from_dict( + { + "releases": [ + { + "version": "0.1.5", + "channel": "stable", + "download_url": "https://example.com/old.whl", + "sha256": "1" * 64, + }, + { + "version": "0.2.0", + "channel": "stable", + "download_url": "https://example.com/new.whl", + "sha256": "2" * 64, + }, + ] + } + ) + current = CortexVersion.from_string("0.1.0") + latest = manifest.find_latest(current_version=current, channel=UpdateChannel.STABLE, system=current_system()) + + assert latest is not None + assert latest.version.raw == "0.2.0" + + +def test_update_service_persists_channel_choice(tmp_path): + state_file = tmp_path / "state.json" + log_file = tmp_path / "update.log" + + service = UpdateService( + manifest_url="https://invalid.local", + state_file=state_file, + log_file=log_file, + system_info=current_system(), + ) + + service.set_channel(UpdateChannel.BETA) + assert service.get_channel() == UpdateChannel.BETA + + service.set_channel(UpdateChannel.STABLE) + assert service.get_channel() == 
UpdateChannel.STABLE + + with state_file.open() as fh: + data = json.load(fh) + assert data["channel"] == "stable" + + +def test_perform_update_dry_run(monkeypatch, tmp_path): + state_file = tmp_path / "state.json" + log_file = tmp_path / "update.log" + + service = UpdateService( + manifest_url="https://invalid.local", + state_file=state_file, + log_file=log_file, + system_info=current_system(), + ) + + manifest = make_manifest() + + monkeypatch.setattr("cortex.updater.get_installed_version", lambda: CortexVersion.from_string("0.1.0")) + monkeypatch.setattr(UpdateService, "_fetch_manifest", lambda self: manifest) + + result = service.perform_update(dry_run=True) + + assert result.release is not None + assert result.updated is False + assert result.release.version.raw == "0.2.0" + assert "dry run" in (result.message or "").lower() +
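For reference, the `UpdateManifest.from_dict` and `ReleaseEntry.from_dict` parsers introduced above imply a manifest shaped like the sketch below. The values (version numbers, URL, checksum, dates, notes) are illustrative placeholders rather than a published manifest; only the field names come from the code:

```json
{
  "generated_at": "2025-01-01T00:00:00Z",
  "releases": [
    {
      "version": "0.2.0",
      "channel": "stable",
      "download_url": "https://updates.cortexlinux.com/cortex_linux-0.2.0-py3-none-any.whl",
      "sha256": "<hex digest of the wheel>",
      "release_notes": "Adds the self-update service and channel management.",
      "published_at": "2025-01-01T00:00:00Z",
      "compatibility": [
        { "python": ">=3.10", "os": ["linux"], "arch": ["x86_64"], "distro": ["ubuntu"] }
      ]
    }
  ]
}
```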