diff --git a/samples/microsoft/python/migration/.dockerignore b/samples/microsoft/python/migration/.dockerignore new file mode 100644 index 00000000..b0d79fa3 --- /dev/null +++ b/samples/microsoft/python/migration/.dockerignore @@ -0,0 +1,13 @@ +# Docker-related files +.env +output/ +*.log + +# Python cache +__pycache__/ +*.pyc +*.pyo + +# Migration output +migration_*.json +agent_*.json \ No newline at end of file diff --git a/samples/microsoft/python/migration/.env.example b/samples/microsoft/python/migration/.env.example new file mode 100644 index 00000000..ccc54696 --- /dev/null +++ b/samples/microsoft/python/migration/.env.example @@ -0,0 +1,53 @@ +# V1 to V2 Migration Environment Configuration +# Copy this file to .env and fill in your values + +# Azure Project Configuration (choose one method) +# Method 1: Project Endpoint +PROJECT_ENDPOINT_URL=https://your-project-name.cognitiveservices.azure.com + +# Method 2: Project Connection String +# PROJECT_CONNECTION_STRING=endpoint=https://your-project.cognitiveservices.azure.com;subscriptionid=your-sub-id;resourcegroupname=your-rg;projectname=your-project + +# Cosmos DB Configuration (optional - for Cosmos input/output) +# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://your-cosmos.documents.azure.com:443/;AccountKey=your-key==; +# COSMOS_DB_DATABASE_NAME=assistants +# COSMOS_DB_CONTAINER_NAME=v1_assistants + +# OpenAI v1 API Configuration (optional - for v1 API input) +# ASSISTANT_API_BASE=https://api.openai.com/v1 +# ASSISTANT_API_KEY=sk-your-openai-key +# ASSISTANT_API_VERSION=v1 + +# Azure OpenAI v1 Configuration (optional - alternative to OpenAI) +# ASSISTANT_API_BASE=https://your-aoai.openai.azure.com +# ASSISTANT_API_KEY=your-azure-openai-key +# ASSISTANT_API_VERSION=2024-02-15-preview + +# v2 API Configuration (optional - for v2 API output) +# V2_API_BASE=https://your-v2-api.cognitiveservices.azure.com +# V2_API_KEY=your-v2-api-key +# V2_API_VERSION=2024-05-01-preview + +# Azure Authentication 
(optional - for service principal auth) +# AZURE_TENANT_ID=your-tenant-id +# AZURE_CLIENT_ID=your-client-id +# AZURE_CLIENT_SECRET=your-client-secret +# AZURE_SUBSCRIPTION_ID=your-subscription-id +# AZURE_RESOURCE_GROUP=your-resource-group +# AZURE_PROJECT_NAME=your-project-name + +# Example configurations for common scenarios: + +# Scenario 1: Migrate from Azure AI Project to v2 API +# PROJECT_ENDPOINT_URL=https://myproject-eastus.cognitiveservices.azure.com +# V2_API_BASE=https://myproject-v2-eastus.cognitiveservices.azure.com +# V2_API_KEY=your-v2-key + +# Scenario 2: Migrate from OpenAI to Cosmos DB +# ASSISTANT_API_BASE=https://api.openai.com/v1 +# ASSISTANT_API_KEY=sk-your-key +# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...; + +# Scenario 3: Migrate from Cosmos DB to Azure AI Project v2 +# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...; +# PROJECT_ENDPOINT_URL=https://myproject.cognitiveservices.azure.com \ No newline at end of file diff --git a/samples/microsoft/python/migration/Dockerfile b/samples/microsoft/python/migration/Dockerfile new file mode 100644 index 00000000..7773c824 --- /dev/null +++ b/samples/microsoft/python/migration/Dockerfile @@ -0,0 +1,58 @@ +# Use Python 3.11 slim image +FROM python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + gnupg \ + lsb-release \ + && rm -rf /var/lib/apt/lists/* + +# Install Azure CLI +RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash + +# Copy requirements file +COPY requirements.txt . + +# Install Python dependencies from requirements.txt +RUN pip install -r requirements.txt + +# Copy the migration script and supporting files +COPY v1_to_v2_migration.py . +COPY read_cosmos_data.py . 
+ +# Create a non-root user for security +RUN useradd -m -u 1000 migration + +# Create Azure CLI directory with proper permissions for the migration user +RUN mkdir -p /home/migration/.azure && chown -R migration:migration /home/migration/.azure + +# Install gosu for safe user switching (before creating entrypoint) +RUN apt-get update && apt-get install -y gosu && rm -rf /var/lib/apt/lists/* + +# Create entrypoint script that runs as root for package installation, then drops to migration user +RUN echo '#!/bin/bash\n\ +set -e\n\ +if [ "$NEED_BETA_VERSION" = "true" ]; then\n\ + echo "๐Ÿ”ง Installing azure-ai-projects beta version for connection string support..."\n\ + pip install --quiet --upgrade azure-ai-projects==1.0.0b10\n\ + echo "โœ… Beta version 1.0.0b10 installed"\n\ +else\n\ + echo "โœ… Using standard azure-ai-projects version 1.0.0"\n\ +fi\n\ +echo "๐Ÿ” Switching to migration user..."\n\ +chown -R migration:migration /app\n\ +exec gosu migration python v1_to_v2_migration.py "$@"\n\ +' > /app/entrypoint.sh && chmod +x /app/entrypoint.sh + +# Set environment variables +ENV PYTHONUNBUFFERED=1 +ENV PYTHONPATH=/app +ENV AZURE_CONFIG_DIR=/home/migration/.azure + +# Default command - entrypoint runs as root for package management, then switches to migration user +ENTRYPOINT ["/app/entrypoint.sh"] +CMD ["--help"] \ No newline at end of file diff --git a/samples/microsoft/python/migration/README-Docker.md b/samples/microsoft/python/migration/README-Docker.md new file mode 100644 index 00000000..486e4c91 --- /dev/null +++ b/samples/microsoft/python/migration/README-Docker.md @@ -0,0 +1,141 @@ +# V1 to V2 Agent Migration - Docker Container + +This directory contains the containerized version of the v1 to v2 agent migration script. 
+ +## Quick Start + +### Prerequisites +- Docker installed and running +- Azure CLI installed and authenticated (`az login`) +- Access to source data (Cosmos DB, API, or Project) + +### Build and Run + +#### Option 1: Using the helper scripts (Recommended) + +**Linux/macOS:** +```bash +# Make the script executable +chmod +x run-migration.sh + +# Run migration with arguments +./run-migration.sh --help +./run-migration.sh --use-api --use-v2-api +./run-migration.sh asst_abc123 --project-connection-string "eastus.api.azureml.ms;...;...;..." +``` + +**Windows:** +```cmd +# Run migration with arguments +run-migration.bat --help +run-migration.bat --use-api --use-v2-api +run-migration.bat asst_abc123 --project-connection-string "eastus.api.azureml.ms;...;...;..." +``` + +#### Option 2: Using Docker directly + +```bash +# Build the image +docker build -t v1-to-v2-migration . + +# Run with arguments +docker run --rm -it \ + --network host \ + -v ~/.azure:/home/migration/.azure:ro \ + -v "$(pwd)/output:/app/output" \ + -e COSMOS_CONNECTION_STRING \ + v1-to-v2-migration --help +``` + +#### Option 3: Using Docker Compose + +```bash +# Create .env file from example +cp .env.example .env +# Edit .env with your values + +# Run with Docker Compose +docker-compose run --rm v1-to-v2-migration --help +docker-compose run --rm v1-to-v2-migration --use-api --use-v2-api +``` + +## Configuration + +### Environment Variables + +The container supports all the same environment variables as the standalone script: + +- `COSMOS_CONNECTION_STRING`: Cosmos DB connection string +- `AGENTS_HOST`: API host (default: eastus.api.azureml.ms) +- `AGENTS_SUBSCRIPTION`: Azure subscription ID +- `AGENTS_RESOURCE_GROUP`: Resource group for v1 API +- `AGENTS_RESOURCE_GROUP_V2`: Resource group for v2 API +- `AGENTS_WORKSPACE`: Workspace for v1 API +- `AGENTS_WORKSPACE_V2`: Workspace for v2 API +- `AGENTS_API_VERSION`: API version (default: 2025-05-15-preview) +- `AZ_TOKEN`: Optional Azure token (will use 
Azure CLI if not provided) + +### Volume Mounts + +- `~/.azure:/home/migration/.azure:ro`: Azure CLI configuration (read-only) +- `./output:/app/output`: Output directory for logs and results + +## Usage Examples + +### Test Tool Injection +```bash +# Test all tool types +./run-migration.sh --add-test-function --add-test-mcp --add-test-computer --add-test-imagegen --add-test-azurefunction --use-api + +# Test specific tool combinations +./run-migration.sh --add-test-mcp --add-test-computer --project-connection-string "eastus.api.azureml.ms;...;...;..." +``` + +### Migration Workflows +```bash +# Migrate all assistants from API to v2 API +./run-migration.sh --use-api --use-v2-api + +# Migrate specific assistant from Cosmos DB to Cosmos DB +./run-migration.sh asst_abc123 + +# Migrate from project connection to v2 API +./run-migration.sh --project-connection-string "eastus.api.azureml.ms;...;...;..." --use-v2-api +``` + +## Features + +โœ… **Complete Migration Pipeline**: All 4 input methods, 2 output methods +โœ… **Tool Testing**: 5 different test tool types for validation +โœ… **Azure CLI Integration**: Automatic token management +โœ… **Security**: Non-root user, read-only mounts +โœ… **Cross-Platform**: Works on Linux, macOS, and Windows +โœ… **Flexible Configuration**: Environment variables, volume mounts +โœ… **Network Access**: Host networking for localhost v2 API access + +## Troubleshooting + +### Docker Issues +- Ensure Docker is running: `docker info` +- Check image build: `docker images | grep v1-to-v2-migration` + +### Authentication Issues +- Verify Azure CLI: `az account show` +- Check token: `az account get-access-token --scope https://ai.azure.com/.default` + +### Network Issues +- For localhost v2 API, ensure `--network host` is used +- Check firewall settings for container network access + +### Permission Issues +- Ensure Azure CLI config is readable: `ls -la ~/.azure` +- Check output directory permissions: `ls -la ./output` + +## Development + +To 
modify the container: + +1. Edit the migration script: `v1_to_v2_migration.py` +2. Update dependencies: `requirements.txt` +3. Rebuild the image: `docker build -t v1-to-v2-migration .` +4. Test changes: `./run-migration.sh --help` \ No newline at end of file diff --git a/samples/microsoft/python/migration/README.md b/samples/microsoft/python/migration/README.md new file mode 100644 index 00000000..83dce814 --- /dev/null +++ b/samples/microsoft/python/migration/README.md @@ -0,0 +1,486 @@ +# V1 to V2 Assistant Migration Tool + +A comprehensive tool for migrating OpenAI v1 assistants to Azure AI v2 agents with Docker containerization support, extensive test capabilities, and cross-platform compatibility. + +## ๐Ÿš€ Quick Start + +### Prerequisites + +- **Docker Desktop** installed and running +- **Azure CLI** installed on your host system +- **Azure account** with appropriate permissions +- **Python 3.11+** (if running locally without Docker) + +### Authentication Setup + +Choose your platform and run the authentication setup script: + +**Windows (PowerShell):** +```powershell +.\setup-azure-auth.bat +``` + +**Linux/macOS (Bash):** +```bash +./setup-azure-auth.sh +``` + +This script will: +- โœ… Verify Docker is running +- โœ… Check Azure CLI installation +- โœ… Handle Azure authentication +- โœ… Build the Docker image +- โœ… Test authentication in container + +### Running Migrations + +After authentication setup, use the platform-specific runner: + +**Windows (PowerShell):** +```powershell +# Regular output +.\run-migration.bat --help + +# Verbose output +.\run-migration-verbose.bat --help + +# Docker-based with automatic authentication (recommended) +.\run-migration-docker-auth.ps1 --help +``` + +**Linux/macOS (Bash):** +```bash +./run-migration.sh --help +``` + +## ๐Ÿ“‹ Usage Examples + +### 1. 
Production Migration with Dual-Tenant Authentication (REQUIRED) + +**All migrations require production parameters:** + +```powershell +# Windows PowerShell - Migrate from v1 API to production v2 API +.\run-migration-docker-auth.ps1 ` + --use-api ` + --source-tenant "72f988bf-86f1-41af-91ab-2d7cd011db47" ` + --production-resource "nextgen-eastus" ` + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" ` + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" ` + asst_abc123def456 + +# Linux/macOS - Same command structure +./run-migration-docker-auth.sh \ + --use-api \ + --source-tenant "72f988bf-86f1-41af-91ab-2d7cd011db47" \ + --production-resource "nextgen-eastus" \ + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" \ + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" \ + asst_abc123def456 +``` + +**Required Parameters:** +- `--production-resource`: Azure AI resource name (e.g., "nextgen-eastus") +- `--production-subscription`: Subscription ID for production tenant +- `--production-tenant`: Production tenant ID for writing agents +- `--source-tenant`: Source tenant ID for reading assistants (optional, defaults to Microsoft tenant) + +### 2. 
Migrate Using Project Connection String (Beta) +```bash +# Connection string format: region.api.azureml.ms;subscription-id;resource-group;project-name +./run-migration-docker-auth.sh \ + --project-connection-string "eastus.api.azureml.ms;abc-123;my-rg;my-project" \ + --production-resource "nextgen-eastus" \ + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" \ + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" \ + asst_abc123def456 + +# Windows PowerShell +.\run-migration-docker-auth.ps1 ` + --project-connection-string "eastus.api.azureml.ms;abc-123;my-rg;my-project" ` + --production-resource "nextgen-eastus" ` + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" ` + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" ` + asst_abc123def456 +``` +> **Note**: Connection string support requires `azure-ai-projects==1.0.0b10` (beta). The script automatically detects and installs the correct version. Production parameters are always required. + +### 3. Migrate from Project Endpoint to Production v2 API +```bash +# Using project endpoint (production parameters required) +./run-migration-docker-auth.sh \ + --project-endpoint "https://your-project.cognitiveservices.azure.com" \ + --production-resource "nextgen-eastus" \ + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" \ + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" \ + assistant-id +``` + +### 4. 
Add Test Tools to Migration +```bash +# Add function calling test (production parameters required) +./run-migration-docker-auth.sh \ + --use-api \ + --add-test-function \ + --production-resource "nextgen-eastus" \ + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" \ + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" \ + assistant-id + +# Add multiple test tools +./run-migration-docker-auth.sh \ + --use-api \ + --add-test-function \ + --add-test-mcp \ + --add-test-computer \ + --production-resource "nextgen-eastus" \ + --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" \ + --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" \ + assistant-id +``` + +### 5. Environment Variables Support +Create a `.env` file in the project directory: +```env +# Azure Project Configuration +PROJECT_ENDPOINT_URL=https://your-project.cognitiveservices.azure.com +PROJECT_CONNECTION_STRING=your-connection-string + +# Cosmos DB Configuration (optional) +# Use either COSMOS_CONNECTION_STRING (recommended) or individual parameters +COSMOS_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...; +# OR (legacy - still supported) +COSMOS_DB_CONNECTION_STRING=your-cosmos-connection-string +COSMOS_DB_DATABASE_NAME=your-database +COSMOS_DB_CONTAINER_NAME=your-container + +# v1 API Configuration (optional) +ASSISTANT_API_BASE=https://api.openai.com/v1 +ASSISTANT_API_KEY=your-openai-key +ASSISTANT_API_VERSION=v1 + +# v2 API Configuration (optional) +V2_API_BASE=https://your-v2-api.cognitiveservices.azure.com +V2_API_KEY=your-v2-key +V2_API_VERSION=2024-05-01-preview + +# Azure Authentication (optional) +AZURE_TENANT_ID=your-tenant-id +AZURE_CLIENT_ID=your-client-id +AZURE_CLIENT_SECRET=your-client-secret +AZURE_SUBSCRIPTION_ID=your-subscription-id +AZURE_RESOURCE_GROUP=your-resource-group +AZURE_PROJECT_NAME=your-project-name +``` + +## ๐Ÿ› ๏ธ Command Line Options + +### Input Methods (choose one) +- `--use-api` - Read from v1 API (recommended) +- 
`--project-endpoint URL` - Use Azure AI Project endpoint +- `--project-connection-string STRING` - Use Azure AI Project connection string +- `--cosmos` - Read from Cosmos DB (legacy) + +### Output Methods +- Always uses **production v2 API** (requires production parameters) + +### Test Tool Options (optional, can use multiple) +- `--add-test-tool function` - Add function calling test +- `--add-test-tool mcp` - Add Model Context Protocol test +- `--add-test-tool computer-use` - Add computer use test +- `--add-test-tool image-gen` - Add image generation test +- `--add-test-tool azure-function` - Add Azure Function test + +### Production Migration Options (REQUIRED for Docker Auth Scripts) +- `--production-resource RESOURCE_NAME` - **REQUIRED** Production Azure AI resource name (e.g., "nextgen-eastus") +- `--production-subscription SUBSCRIPTION_ID` - **REQUIRED** Production subscription ID +- `--production-tenant TENANT_ID` - **REQUIRED** Production tenant for writing agents +- `--source-tenant TENANT_ID` - *Optional* Source tenant for reading assistants (defaults to Microsoft tenant: 72f988bf-86f1-41af-91ab-2d7cd011db47) + +### Configuration Options +- `--v1-api-version VERSION` - v1 API version (default: v1) +- `--v2-api-version VERSION` - v2 API version (default: 2024-05-01-preview) +- `--cosmos-database DATABASE` - Cosmos database name +- `--cosmos-container CONTAINER` - Cosmos container name + +## ๏ฟฝ Unsupported Classic Assistant Features + +The migration tool will **continue migration** for classic assistants (v1) that use features not supported in new agents (v2), but will **skip the unsupported tools** and display warnings: + +### Connected Agent Tool +``` +โš ๏ธ WARNING: Your classic agent includes connected agents, which aren't supported in the new experience. +โ„น๏ธ These connected agents won't be carried over when you create the new agent. +๐Ÿ’ก To orchestrate multiple agents, use a workflow instead. 
+๐Ÿ“‹ Unsupported tools that will be skipped: connected_agent +``` +**What happens**: The connected_agent tool is skipped during migration. The new agent is created successfully without this tool. + +**Recommendation**: Use **new agent workflows** to connect multiple agents together. + +### Event Binding Tool +``` +โš ๏ธ WARNING: Your classic agent uses 'event_binding' which isn't supported in the new experience. +โ„น๏ธ This tool won't be carried over when you create the new agent. +๐Ÿ“‹ Unsupported tools that will be skipped: event_binding +``` +**What happens**: The event_binding tool is skipped during migration. The new agent is created successfully without this tool. + +**Recommendation**: This feature has no direct equivalent in new agents. + +### Output Binding Tool +``` +โš ๏ธ WARNING: Your classic agent uses 'output_binding' which isn't supported in the new experience. +โ„น๏ธ This tool won't be carried over when you create the new agent. +๐Ÿ’ก Consider using 'capture_structured_outputs' in your new agent instead. +๐Ÿ“‹ Unsupported tools that will be skipped: output_binding +``` +**What happens**: The output_binding tool is skipped during migration. The new agent is created successfully without this tool. + +**Recommendation**: Use **`capture_structured_outputs`** in new agents for structured output capture. + +> **Note**: Migration completes successfully even when unsupported tools are present. Only the unsupported tools are excluded from the new agent. All other tools and properties are migrated normally. 
+ +## ๏ฟฝ๐Ÿณ Docker Architecture + +### Container Features +- **Base Image**: Python 3.11-slim with Azure CLI +- **Non-root User**: Runs as `migration` user for security +- **Volume Mounts**: Azure CLI config directory for authentication +- **Network**: Host networking for localhost API access +- **Environment**: Comprehensive environment variable support + +### Dynamic Package Installation +The container automatically installs the correct `azure-ai-projects` package version based on your usage: + +- **Standard version (1.0.0)**: Used for project endpoints and most scenarios +- **Beta version (1.0.0b10)**: Automatically installed when using `--project-connection-string` + - Required for `from_connection_string()` method support + - Detection and installation happens at container startup + - No manual intervention needed + +The script detects connection string usage and sets the `NEED_BETA_VERSION` flag automatically. + +### Dual-Tenant Authentication (REQUIRED) +The `run-migration-docker-auth.ps1` and `run-migration-docker-auth.sh` scripts require production parameters for all migrations: + +- **Production-First Architecture**: All migrations write to production v2 API (no localhost mode) +- **Required Production Parameters**: Must specify production resource, subscription, and tenant +- **Source Tenant Authentication**: Reads assistants from source tenant (defaults to Microsoft tenant) +- **Production Tenant Authentication**: Writes agents to production tenant +- **Automatic Token Management**: Generates and manages separate tokens for each tenant +- **Seamless Tenant Switching**: Handles Azure CLI tenant switching automatically +- **Token Isolation**: Source and production tokens are isolated for security +- **Cross-Platform Support**: Both PowerShell (Windows) and Bash (Linux/macOS) versions + +### Security Considerations +- Non-root container execution +- Read-write Azure CLI directory for token management +- Environment variable injection for sensitive data 
+- Host network isolation when possible + +## ๐Ÿงช Test Tool Capabilities + +The migration tool can inject various test tools into migrated agents: + +### Function Calling Test +```python +def get_weather(location: str) -> str: + """Get current weather for a location.""" + return f"Weather in {location}: 72ยฐF, sunny" +``` + +### Model Context Protocol (MCP) Test +```python +def mcp_filesystem_tool(action: str, path: str) -> str: + """MCP filesystem operations.""" + return f"MCP {action} operation on {path} completed" +``` + +### Computer Use Test +```python +def computer_screenshot() -> str: + """Take a screenshot of the current screen.""" + return "Screenshot taken: desktop_1024x768.png" +``` + +### Image Generation Test +```python +def generate_image(prompt: str) -> str: + """Generate an image from a text prompt.""" + return f"Generated image for: {prompt}" +``` + +### Azure Function Test +```python +def azure_function_call(function_name: str, data: dict) -> str: + """Call an Azure Function.""" + return f"Azure Function {function_name} called with {data}" +``` + +## ๐Ÿ—๏ธ Architecture Details + +### Core Components + +1. **Migration Engine** (`v1_to_v2_migration.py`) + - Smart parameter extraction from endpoints + - Multi-input/output method support + - Comprehensive error handling + - Test tool injection capabilities + +2. **Docker Container** (`Dockerfile`) + - Multi-stage build process + - Azure CLI integration + - Proper user permissions + - Environment configuration + +3. 
**Platform Scripts** + - Windows: `setup-azure-auth.bat`, `run-migration.bat`, `run-migration-verbose.bat` + - Linux/macOS: `setup-azure-auth.sh`, `run-migration.sh` + +### Data Flow + +``` +Input Sources โ†’ Migration Engine โ†’ Output Destinations + โ†“ โ†“ โ†“ +- Cosmos DB โ†’ Transform v1โ†’v2 โ†’ - Cosmos DB +- v1 API โ†’ Add test tools โ†’ - v2 API +- Project EP โ†’ Parameter extract +- Connection โ†’ Error handling +``` + +### Parameter Extraction + +The tool automatically extracts Azure AI project parameters from endpoints and connection strings: + +**From Project Endpoint:** +``` +https://projectname-region.cognitiveservices.azure.com +โ†’ subscription_id, resource_group_name, project_name +``` + +**From Connection String:** +``` +endpoint=https://...;subscriptionid=...;resourcegroupname=...;projectname=... +โ†’ Parsed individual components +``` + +## ๐Ÿ”ง Troubleshooting + +### Common Issues + +1. **Docker Not Running** + ``` + โŒ Docker is not running. Please start Docker and try again. + ``` + **Solution**: Start Docker Desktop + +2. **Azure CLI Not Authenticated** + ``` + โš ๏ธ Not authenticated to Azure CLI + ``` + **Solution**: Run `az login` or use the setup script + +3. **Unsupported Tool Types** + ``` + WARNING: Your classic agent includes connected agents... + ``` + **Solution**: Migration will continue but unsupported tools will be skipped. See the "Unsupported Classic Assistant Features" section above for details and alternatives + +4. **Connection String Format** + ``` + Failed to parse connection string + ``` + **Solution**: Use format `region.api.azureml.ms;subscription-id;resource-group;project-name` + +5. **Dual-Tenant Authentication Issues** + ``` + Token tenant does not match resource tenant + ``` + **Solution**: Ensure correct source and production tenant IDs are specified + +6. 
**Agent Name Case Sensitivity** + ``` + 400 Bad Request on production endpoint + ``` + **Solution**: Agent names are automatically converted to lowercase with proper formatting + +5. **Container Authentication Fails** + ``` + Authentication test failed + ``` + **Solution**: Ensure Azure CLI directory has proper permissions + +6. **Network Connection Issues** + ``` + Connection refused to localhost + ``` + **Solution**: Use `--network host` flag (included in scripts) + +### Debug Mode + +Use verbose scripts for detailed output: +```powershell +# Windows +.\run-migration-verbose.bat --help + +# Linux/macOS +./run-migration.sh --help # Already verbose +``` + +### Manual Docker Commands + +If scripts fail, run manually: +```bash +# Build image +docker build -t v1-to-v2-migration . + +# Run with debugging +docker run --rm -it \ + --network host \ + -v ~/.azure:/home/migration/.azure \ + v1-to-v2-migration \ + /bin/bash +``` + +## ๐Ÿ“š API Compatibility + +### Supported Azure AI Project APIs +- **2024-05-01-preview** (default) +- **2024-02-15-preview** +- **2023-12-01-preview** + +### Supported OpenAI v1 APIs +- **OpenAI API v1** +- **Azure OpenAI v1** +- **Compatible third-party APIs** + +## ๐Ÿค Contributing + +### Development Setup +1. Clone repository +2. Install dependencies: `pip install -r requirements.txt` +3. Run tests: `python -m pytest` +4. Build Docker: `docker build -t v1-to-v2-migration .` + +### Adding New Test Tools +1. Add tool definition to `TEST_TOOLS` dictionary +2. Update `inject_test_tools()` function +3. Add command-line option handling +4. Update documentation + +## ๐Ÿ“„ License + +MIT License - see LICENSE file for details + +## ๐Ÿ†˜ Support + +For issues and questions: +1. Check troubleshooting section +2. Run with verbose output +3. Check Docker and Azure CLI status +4. 
Verify authentication setup + +--- + +**Made with โค๏ธ for seamless AI agent migration** \ No newline at end of file diff --git a/samples/microsoft/python/migration/requirements.txt b/samples/microsoft/python/migration/requirements.txt new file mode 100644 index 00000000..23809e43 --- /dev/null +++ b/samples/microsoft/python/migration/requirements.txt @@ -0,0 +1,10 @@ +# Core dependencies +azure-cosmos==4.5.1 +azure-identity==1.15.0 +azure-ai-projects==1.0.0 +requests==2.31.0 +pandas==2.1.4 +urllib3==2.1.0 + +# Optional dependencies for Azure Function tools +azure-ai-agents \ No newline at end of file diff --git a/samples/microsoft/python/migration/run-migration-docker-auth.ps1 b/samples/microsoft/python/migration/run-migration-docker-auth.ps1 new file mode 100644 index 00000000..03bffc22 --- /dev/null +++ b/samples/microsoft/python/migration/run-migration-docker-auth.ps1 @@ -0,0 +1,321 @@ +#!/usr/bin/env pwsh +# Docker migration runner with automatic token authentication +# This script handles token generation and Docker execution automatically +# REQUIRES: --production-resource, --production-subscription, --production-tenant + +param( + [Parameter(ValueFromRemainingArguments)] + [string[]]$Arguments +) + +# Colors for output +$Green = "`e[32m" +$Blue = "`e[34m" +$Yellow = "`e[33m" +$Red = "`e[31m" +$Reset = "`e[0m" + +Write-Host "${Blue}๐Ÿณ Running v1 to v2 assistant migration in DOCKER with automatic authentication${Reset}" +Write-Host "======================================================================================" + +# Check if Docker is running +try { + docker info | Out-Null + Write-Host "${Green}โœ… Docker is running${Reset}" +} catch { + Write-Host "${Red}โŒ Docker is not running. 
Please start Docker Desktop and try again.${Reset}" + exit 1 +} + +# Check if Azure CLI is available and authenticated +try { + $account = az account show 2>$null | ConvertFrom-Json + if ($account) { + Write-Host "${Green}โœ… Azure CLI authenticated as: $($account.user.name)${Reset}" + } else { + Write-Host "${Red}โŒ Azure CLI not authenticated. Run 'az login' first.${Reset}" + exit 1 + } +} catch { + Write-Host "${Red}โŒ Azure CLI not found. Install from https://docs.microsoft.com/cli/azure/${Reset}" + exit 1 +} + +# Parse arguments - production parameters are REQUIRED +$productionTenant = $null +$sourceTenant = $null +$productionResource = $null +$productionSubscription = $null +$useConnectionString = $false + +for ($i = 0; $i -lt $Arguments.Length; $i++) { + if ($Arguments[$i] -eq "--production-tenant" -and ($i + 1) -lt $Arguments.Length) { + $productionTenant = $Arguments[$i + 1] + } + if ($Arguments[$i] -eq "--source-tenant" -and ($i + 1) -lt $Arguments.Length) { + $sourceTenant = $Arguments[$i + 1] + } + if ($Arguments[$i] -eq "--production-resource" -and ($i + 1) -lt $Arguments.Length) { + $productionResource = $Arguments[$i + 1] + } + if ($Arguments[$i] -eq "--production-subscription" -and ($i + 1) -lt $Arguments.Length) { + $productionSubscription = $Arguments[$i + 1] + } + if ($Arguments[$i] -eq "--project-connection-string") { + $useConnectionString = $true + Write-Host "${Blue}๐Ÿ“ Detected project connection string usage${Reset}" + } +} + +# Validate required production parameters +if (-not $productionResource -or -not $productionSubscription -or -not $productionTenant) { + Write-Host "${Red}โŒ Missing required production parameters!${Reset}" + Write-Host "" + Write-Host "REQUIRED parameters:" + Write-Host " --production-resource (e.g., nextgen-eastus)" + Write-Host " --production-subscription (e.g., b1615458-c1ea-49bc-8526-cafc948d3c25)" + Write-Host " --production-tenant (e.g., 33e577a9-b1b8-4126-87c0-673f197bf624)" + Write-Host "" + Write-Host 
"Example:" + Write-Host " .\run-migration-docker-auth.ps1 --use-api \" -ForegroundColor Yellow + Write-Host " --source-tenant 72f988bf-86f1-41af-91ab-2d7cd011db47 \" -ForegroundColor Yellow + Write-Host " --production-resource nextgen-eastus \" -ForegroundColor Yellow + Write-Host " --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \" -ForegroundColor Yellow + Write-Host " --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 \" -ForegroundColor Yellow + Write-Host " asst_wBMH6Khnqbo1J7W1G6w3p1rN" -ForegroundColor Yellow + exit 1 +} + +# Generate source token (still needed even with connection string - SDK requires credential) +$sourceToken = $null +if ($useConnectionString -and $sourceTenant) { + Write-Host "${Blue}๐Ÿ”‘ Generating source Azure AI token for project connection string (tenant: $sourceTenant)${Reset}" + try { + # Check current tenant + $currentTenant = az account show --query tenantId -o tsv 2>$null + + if ($currentTenant -ne $sourceTenant) { + Write-Host "${Yellow}๐Ÿ”„ Switching to source tenant: $sourceTenant${Reset}" + az login --tenant $sourceTenant --only-show-errors + if ($LASTEXITCODE -ne 0) { + Write-Host "${Red}โŒ Failed to authenticate with source tenant${Reset}" + exit 1 + } + } + + $sourceToken = az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv + if ($sourceToken -and $sourceToken.Length -gt 100) { + Write-Host "${Green}โœ… Source token generated successfully (length: $($sourceToken.Length))${Reset}" + } else { + Write-Host "${Red}โŒ Failed to generate source token or token is invalid${Reset}" + exit 1 + } + } catch { + Write-Host "${Red}โŒ Failed to generate source Azure token: $_${Reset}" + exit 1 + } +} elseif ($useConnectionString) { + Write-Host "${Blue}๐Ÿ”‘ Generating source Azure AI token for project connection string (current tenant)${Reset}" + try { + $sourceToken = az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv + if 
($sourceToken -and $sourceToken.Length -gt 100) { + Write-Host "${Green}โœ… Source token generated successfully (length: $($sourceToken.Length))${Reset}" + } else { + Write-Host "${Red}โŒ Failed to generate source token or token is invalid${Reset}" + exit 1 + } + } catch { + Write-Host "${Red}โŒ Failed to generate source Azure token: $_${Reset}" + exit 1 + } +} elseif ($sourceTenant) { + Write-Host "${Blue}๐Ÿ”‘ Generating source Azure AI token for tenant: $sourceTenant${Reset}" + try { + # Check current tenant + $currentTenant = az account show --query tenantId -o tsv 2>$null + + if ($currentTenant -ne $sourceTenant) { + Write-Host "${Yellow}๐Ÿ”„ Switching to source tenant: $sourceTenant${Reset}" + az login --tenant $sourceTenant --only-show-errors + if ($LASTEXITCODE -ne 0) { + Write-Host "${Red}โŒ Failed to authenticate with source tenant${Reset}" + exit 1 + } + } + + $sourceToken = az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv + if ($sourceToken -and $sourceToken.Length -gt 100) { + Write-Host "${Green}โœ… Source token generated successfully (length: $($sourceToken.Length))${Reset}" + } else { + Write-Host "${Red}โŒ Failed to generate source token or token is invalid${Reset}" + exit 1 + } + } catch { + Write-Host "${Red}โŒ Failed to generate source Azure token: $_${Reset}" + exit 1 + } +} else { + Write-Host "${Blue}๐Ÿ”‘ Generating source Azure AI token (current tenant)...${Reset}" + try { + $sourceToken = az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv + if ($sourceToken -and $sourceToken.Length -gt 100) { + Write-Host "${Green}โœ… Source token generated successfully (length: $($sourceToken.Length))${Reset}" + } else { + Write-Host "${Red}โŒ Failed to generate source token or token is invalid${Reset}" + exit 1 + } + } catch { + Write-Host "${Red}โŒ Failed to generate source Azure token: $_${Reset}" + exit 1 + } +} + +# Handle production authentication (REQUIRED) 
+$productionToken = $null
+Write-Host "${Blue}๐Ÿญ Production v2 API Configuration:${Reset}"
+Write-Host "${Blue}   ๐ŸŽฏ Resource: $productionResource${Reset}"
+Write-Host "${Blue}   ๐Ÿ“‹ Subscription: $productionSubscription${Reset}"
+Write-Host "${Blue}   ๐Ÿ” Tenant: $productionTenant${Reset}"
+
+Write-Host "${Blue}๐Ÿ” Switching to production tenant: $productionTenant${Reset}"
+
+try {
+    # Check current tenant
+    $currentTenant = az account show --query tenantId -o tsv 2>$null
+
+    if ($currentTenant -eq $productionTenant) {
+        Write-Host "${Green}โœ… Already authenticated with production tenant${Reset}"
+    } else {
+        Write-Host "${Yellow}๐Ÿ”„ Switching from tenant $currentTenant to $productionTenant${Reset}"
+        az login --tenant $productionTenant --only-show-errors
+        if ($LASTEXITCODE -ne 0) {
+            Write-Host "${Red}โŒ Failed to authenticate with production tenant${Reset}"
+            exit 1
+        }
+    }
+
+    # Generate production token
+    Write-Host "${Blue}๐Ÿ”‘ Generating production Azure AI token...${Reset}"
+    $productionToken = az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv
+    if ($productionToken -and $productionToken.Length -gt 100) {
+        Write-Host "${Green}โœ… Production token generated successfully (length: $($productionToken.Length))${Reset}"
+    } else {
+        Write-Host "${Red}โŒ Failed to generate production token${Reset}"
+        exit 1
+    }
+
+    # Switch back to source tenant if different (for reading v1 assistants)
+    if ($sourceTenant -and $currentTenant -ne $productionTenant) {
+        Write-Host "${Blue}๐Ÿ”„ Switching back to source tenant for reading operations: $sourceTenant${Reset}"
+        az login --tenant $sourceTenant --only-show-errors
+        if ($LASTEXITCODE -ne 0) {
+            Write-Host "${Yellow}โš ๏ธ Warning: Could not switch back to source tenant${Reset}"
+        }
+    } elseif ($currentTenant -ne $productionTenant) {
+        Write-Host "${Blue}๐Ÿ”„ Switching back to original tenant for reading operations${Reset}"
+        az login --tenant $currentTenant --only-show-errors
+ if ($LASTEXITCODE -ne 0) { + Write-Host "${Yellow}โš ๏ธ Warning: Could not switch back to original tenant${Reset}" + } + } + +} catch { + Write-Host "${Red}โŒ Failed during production tenant authentication: $_${Reset}" + exit 1 +} + +# Check if image exists +if (!(docker image inspect v1-to-v2-migration 2>$null)) { + Write-Host "${Yellow}โš ๏ธ Docker image 'v1-to-v2-migration' not found.${Reset}" + Write-Host "${Blue}๐Ÿ”จ Building Docker image...${Reset}" + docker build -t v1-to-v2-migration . +} + +# Load environment variables from .env if it exists +if (Test-Path ".env") { + Write-Host "${Green}โœ… Loading environment variables from .env file${Reset}" + Get-Content ".env" | ForEach-Object { + if ($_ -match "^([^#=]+)=(.*)$") { + $name = $matches[1].Trim() + $value = $matches[2].Trim() + [Environment]::SetEnvironmentVariable($name, $value, "Process") + } + } +} else { + Write-Host "${Yellow}โš ๏ธ No .env file found. Using environment variables or defaults.${Reset}" +} + +# Run the container with token authentication +Write-Host "${Green}๐Ÿƒ Running migration in Docker container with token authentication...${Reset}" +Write-Host "${Yellow}Arguments: $Arguments${Reset}" +Write-Host "" + +try { + # Prepare Docker environment variables + $dockerEnvVars = @( + "--network", "host" + "-e", "DOCKER_CONTAINER=true" + "-e", "AZ_TOKEN=$sourceToken" + ) + + # Add production token (required) + $dockerEnvVars += "-e", "PRODUCTION_TOKEN=$productionToken" + Write-Host "${Green}๐Ÿญ Passing both source and production tokens to container${Reset}" + + # Check if we need the beta version for project connection string + $needsBetaVersion = $false + for ($i = 0; $i -lt $Arguments.Length; $i++) { + if ($Arguments[$i] -eq "--project-connection-string") { + $needsBetaVersion = $true + Write-Host "${Blue}๐Ÿ” Detected project connection string usage - beta version required${Reset}" + break + } + } + + # All arguments are now passed to Python (source-tenant is supported) + 
$filteredArguments = $Arguments + + # Add environment variable to indicate if beta version is needed + if ($needsBetaVersion) { + $dockerEnvVars += "-e", "NEED_BETA_VERSION=true" + Write-Host "${Blue}๐Ÿ”ง Passing beta version requirement to container${Reset}" + } + + docker run --rm -it ` + @dockerEnvVars ` + -e COSMOS_DB_CONNECTION_STRING="$env:COSMOS_DB_CONNECTION_STRING" ` + -e COSMOS_DB_DATABASE_NAME="$env:COSMOS_DB_DATABASE_NAME" ` + -e COSMOS_DB_CONTAINER_NAME="$env:COSMOS_DB_CONTAINER_NAME" ` + -e ASSISTANT_API_BASE="$env:ASSISTANT_API_BASE" ` + -e ASSISTANT_API_VERSION="$env:ASSISTANT_API_VERSION" ` + -e ASSISTANT_API_KEY="$env:ASSISTANT_API_KEY" ` + -e PROJECT_ENDPOINT_URL="$env:PROJECT_ENDPOINT_URL" ` + -e PROJECT_CONNECTION_STRING="$env:PROJECT_CONNECTION_STRING" ` + -e V2_API_BASE="$env:V2_API_BASE" ` + -e V2_API_VERSION="$env:V2_API_VERSION" ` + -e V2_API_KEY="$env:V2_API_KEY" ` + -e AZURE_TENANT_ID="$env:AZURE_TENANT_ID" ` + -e AZURE_CLIENT_ID="$env:AZURE_CLIENT_ID" ` + -e AZURE_CLIENT_SECRET="$env:AZURE_CLIENT_SECRET" ` + -e AZURE_SUBSCRIPTION_ID="$env:AZURE_SUBSCRIPTION_ID" ` + -e AZURE_RESOURCE_GROUP="$env:AZURE_RESOURCE_GROUP" ` + -e AZURE_PROJECT_NAME="$env:AZURE_PROJECT_NAME" ` + -v "$env:USERPROFILE\.azure:/home/migration/.azure" ` + v1-to-v2-migration ` + @filteredArguments + + $exitCode = $LASTEXITCODE + + if ($exitCode -eq 0) { + Write-Host "" + Write-Host "${Green}โœ… Migration completed successfully!${Reset}" + } else { + Write-Host "" + Write-Host "${Red}โŒ Migration failed with exit code: $exitCode${Reset}" + } + + exit $exitCode +} catch { + Write-Host "${Red}โŒ Failed to run Docker container: $_${Reset}" + exit 1 +} \ No newline at end of file diff --git a/samples/microsoft/python/migration/run-migration-docker-auth.sh b/samples/microsoft/python/migration/run-migration-docker-auth.sh new file mode 100644 index 00000000..e47e7252 --- /dev/null +++ b/samples/microsoft/python/migration/run-migration-docker-auth.sh @@ -0,0 +1,245 @@ 
+#!/bin/bash +# Docker migration runner with automatic token authentication (Unix/Linux/macOS) +# This script handles token generation and Docker execution automatically +# REQUIRES: --production-resource, --production-subscription, --production-tenant + +set -e + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +PURPLE='\033[0;35m' +NC='\033[0m' # No Color + +echo -e "${BLUE}๐Ÿณ Running v1 to v2 assistant migration in DOCKER with automatic authentication${NC}" +echo "======================================================================================" + +# Check if Docker is running +if ! docker info > /dev/null 2>&1; then + echo -e "${RED}โŒ Docker is not running. Please start Docker and try again.${NC}" + exit 1 +fi +echo -e "${GREEN}โœ… Docker is running${NC}" + +# Check if Azure CLI is available and authenticated +if ! command -v az &> /dev/null; then + echo -e "${RED}โŒ Azure CLI not found. Install from https://docs.microsoft.com/cli/azure/${NC}" + exit 1 +fi + +# Check Azure CLI authentication +if ! az account show > /dev/null 2>&1; then + echo -e "${RED}โŒ Azure CLI not authenticated. 
Run 'az login' first.${NC}" + exit 1 +fi + +ACCOUNT_INFO=$(az account show --query "user.name" -o tsv 2>/dev/null) +echo -e "${GREEN}โœ… Azure CLI authenticated as: ${ACCOUNT_INFO}${NC}" + +# Parse arguments to check for project connection string and production parameters +NEED_BETA_VERSION="false" +SOURCE_TENANT="" +PRODUCTION_RESOURCE="" +PRODUCTION_SUBSCRIPTION="" +PRODUCTION_TENANT="" + +for arg in "$@"; do + if [[ "$arg" == *"--project-connection-string"* ]]; then + NEED_BETA_VERSION="true" + elif [[ "$arg" == --source-tenant* ]]; then + SOURCE_TENANT="${arg#*=}" + if [ -z "$SOURCE_TENANT" ]; then + shift + SOURCE_TENANT="$1" + fi + elif [[ "$arg" == --production-resource* ]]; then + PRODUCTION_RESOURCE="${arg#*=}" + if [ -z "$PRODUCTION_RESOURCE" ]; then + shift + PRODUCTION_RESOURCE="$1" + fi + elif [[ "$arg" == --production-subscription* ]]; then + PRODUCTION_SUBSCRIPTION="${arg#*=}" + if [ -z "$PRODUCTION_SUBSCRIPTION" ]; then + shift + PRODUCTION_SUBSCRIPTION="$1" + fi + elif [[ "$arg" == --production-tenant* ]]; then + PRODUCTION_TENANT="${arg#*=}" + if [ -z "$PRODUCTION_TENANT" ]; then + shift + PRODUCTION_TENANT="$1" + fi + fi +done + +# Validate required production parameters +if [ -z "$PRODUCTION_RESOURCE" ] || [ -z "$PRODUCTION_SUBSCRIPTION" ] || [ -z "$PRODUCTION_TENANT" ]; then + echo -e "${RED}โŒ Missing required production parameters!${NC}" + echo "" + echo "REQUIRED parameters:" + echo " --production-resource (e.g., nextgen-eastus)" + echo " --production-subscription (e.g., b1615458-c1ea-49bc-8526-cafc948d3c25)" + echo " --production-tenant (e.g., 33e577a9-b1b8-4126-87c0-673f197bf624)" + echo "" + echo "Example:" + echo " ./run-migration-docker-auth.sh --use-api \\" + echo " --source-tenant 72f988bf-86f1-41af-91ab-2d7cd011db47 \\" + echo " --production-resource nextgen-eastus \\" + echo " --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\" + echo " --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 \\" + echo " 
asst_wBMH6Khnqbo1J7W1G6w3p1rN" + exit 1 +fi + +# Production parameters are required - always in production mode +PRODUCTION_MODE="true" + +# Generate source token +if [ -n "$SOURCE_TENANT" ]; then + echo -e "${BLUE}๐Ÿ”‘ Generating source Azure AI token for tenant: ${SOURCE_TENANT}${NC}" + SOURCE_TOKEN=$(az account get-access-token --tenant "$SOURCE_TENANT" --scope https://ai.azure.com/.default --query accessToken -o tsv 2>/dev/null) + + if [ $? -ne 0 ] || [ -z "$SOURCE_TOKEN" ] || [ ${#SOURCE_TOKEN} -lt 100 ]; then + echo -e "${RED}โŒ Failed to generate source token or token is invalid${NC}" + exit 1 + fi + echo -e "${GREEN}โœ… Source token generated successfully (length: ${#SOURCE_TOKEN})${NC}" +else + echo -e "${BLUE}๐Ÿ”‘ Generating Azure AI token...${NC}" + SOURCE_TOKEN=$(az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv 2>/dev/null) + + if [ $? -ne 0 ] || [ -z "$SOURCE_TOKEN" ] || [ ${#SOURCE_TOKEN} -lt 100 ]; then + echo -e "${RED}โŒ Failed to generate token or token is invalid${NC}" + exit 1 + fi + echo -e "${GREEN}โœ… Token generated successfully (length: ${#SOURCE_TOKEN})${NC}" +fi + +# Generate production token (REQUIRED) +PRODUCTION_TOKEN="" +echo -e "${BLUE}๐Ÿญ Production v2 API Configuration:${NC}" +echo -e "${BLUE} ๐ŸŽฏ Resource: ${PRODUCTION_RESOURCE}${NC}" +echo -e "${BLUE} ๐Ÿ“‹ Subscription: ${PRODUCTION_SUBSCRIPTION}${NC}" +echo -e "${BLUE} ๐Ÿ” Tenant: ${PRODUCTION_TENANT}${NC}" +echo -e "${BLUE}๐Ÿ” Switching to production tenant: ${PRODUCTION_TENANT}${NC}" + +# Switch to production tenant +CURRENT_TENANT=$(az account show --query "tenantId" -o tsv 2>/dev/null) +if [ "$CURRENT_TENANT" = "$PRODUCTION_TENANT" ]; then + echo -e "${GREEN}โœ… Already authenticated with production tenant${NC}" +else + echo -e "${YELLOW}๐Ÿ”„ Switching from tenant ${CURRENT_TENANT} to ${PRODUCTION_TENANT}${NC}" + az login --tenant "$PRODUCTION_TENANT" --only-show-errors 2>&1 + if [ $? 
-ne 0 ]; then + echo -e "${RED}โŒ Failed to authenticate with production tenant${NC}" + exit 1 + fi +fi + +echo -e "${BLUE}๐Ÿ”‘ Generating production Azure AI token...${NC}" +PRODUCTION_TOKEN=$(az account get-access-token --scope https://ai.azure.com/.default --query accessToken -o tsv 2>/dev/null) + +if [ $? -ne 0 ] || [ -z "$PRODUCTION_TOKEN" ] || [ ${#PRODUCTION_TOKEN} -lt 100 ]; then + echo -e "${RED}โŒ Failed to generate production token or token is invalid${NC}" + exit 1 +fi +echo -e "${GREEN}โœ… Production token generated successfully (length: ${#PRODUCTION_TOKEN})${NC}" + +# Switch back to source tenant if different (for reading v1 assistants) +if [ -n "$SOURCE_TENANT" ] && [ "$SOURCE_TENANT" != "$PRODUCTION_TENANT" ]; then + echo -e "${BLUE}๐Ÿ”„ Switching back to source tenant for reading operations: ${SOURCE_TENANT}${NC}" + az login --tenant "$SOURCE_TENANT" --only-show-errors 2>&1 + if [ $? -ne 0 ]; then + echo -e "${YELLOW}โš ๏ธ Warning: Could not switch back to source tenant${NC}" + fi +elif [ "$CURRENT_TENANT" != "$PRODUCTION_TENANT" ]; then + echo -e "${BLUE}๐Ÿ”„ Switching back to original tenant for reading operations${NC}" + az login --tenant "$CURRENT_TENANT" --only-show-errors 2>&1 + if [ $? -ne 0 ]; then + echo -e "${YELLOW}โš ๏ธ Warning: Could not switch back to original tenant${NC}" + fi +fi + +# Check if image exists +if ! docker image inspect v1-to-v2-migration > /dev/null 2>&1; then + echo -e "${YELLOW}โš ๏ธ Docker image 'v1-to-v2-migration' not found.${NC}" + echo -e "${BLUE}๐Ÿ”จ Building Docker image...${NC}" + docker build -t v1-to-v2-migration . +fi + +# Load environment variables from .env if it exists +if [ -f .env ]; then + echo -e "${GREEN}โœ… Loading environment variables from .env file${NC}" + export $(cat .env | grep -v '^#' | grep -v '^$' | xargs) +else + echo -e "${YELLOW}โš ๏ธ No .env file found. 
Using environment variables or defaults.${NC}" +fi + +# Run the container with token authentication +echo -e "${GREEN}๐Ÿƒ Running migration in Docker container with token authentication...${NC}" +echo -e "${YELLOW}Arguments: $@${NC}" + +# Build Docker run command with required production tokens +DOCKER_CMD="docker run --rm -it \ + --network host \ + -e DOCKER_CONTAINER=true \ + -e LOCAL_HOST=host.docker.internal:5001 \ + -e COSMOS_CONNECTION_STRING=\"$COSMOS_CONNECTION_STRING\" \ + -e COSMOS_DB_CONNECTION_STRING=\"$COSMOS_DB_CONNECTION_STRING\" \ + -e COSMOS_DB_DATABASE_NAME=\"$COSMOS_DB_DATABASE_NAME\" \ + -e COSMOS_DB_CONTAINER_NAME=\"$COSMOS_DB_CONTAINER_NAME\" \ + -e ASSISTANT_API_BASE=\"$ASSISTANT_API_BASE\" \ + -e ASSISTANT_API_VERSION=\"$ASSISTANT_API_VERSION\" \ + -e ASSISTANT_API_KEY=\"$ASSISTANT_API_KEY\" \ + -e PROJECT_ENDPOINT_URL=\"$PROJECT_ENDPOINT_URL\" \ + -e PROJECT_CONNECTION_STRING=\"$PROJECT_CONNECTION_STRING\" \ + -e V2_API_BASE=\"$V2_API_BASE\" \ + -e V2_API_VERSION=\"$V2_API_VERSION\" \ + -e V2_API_KEY=\"$V2_API_KEY\" \ + -e AZURE_TENANT_ID=\"$AZURE_TENANT_ID\" \ + -e AZURE_CLIENT_ID=\"$AZURE_CLIENT_ID\" \ + -e AZURE_CLIENT_SECRET=\"$AZURE_CLIENT_SECRET\" \ + -e AZURE_SUBSCRIPTION_ID=\"$AZURE_SUBSCRIPTION_ID\" \ + -e AZURE_RESOURCE_GROUP=\"$AZURE_RESOURCE_GROUP\" \ + -e AZURE_PROJECT_NAME=\"$AZURE_PROJECT_NAME\" \ + -e NEED_BETA_VERSION=\"$NEED_BETA_VERSION\" \ + -e AZ_TOKEN=\"$SOURCE_TOKEN\" \ + -e PRODUCTION_TOKEN=\"$PRODUCTION_TOKEN\"" + +echo -e "${GREEN}๐Ÿญ Passing both source and production tokens to container${NC}" + +# Add beta version flag message +if [ "$NEED_BETA_VERSION" = "true" ]; then + echo -e "${BLUE}๐Ÿ“ฆ Using azure-ai-projects version 1.0.0b10 (beta - for connection string support)${NC}" +else + echo -e "${GREEN}โœ… Using standard azure-ai-projects version 1.0.0${NC}" +fi + +# Complete the Docker command +DOCKER_CMD="$DOCKER_CMD \ + -v \"$HOME/.azure:/home/migration/.azure\" \ + v1-to-v2-migration" + +# Add script 
arguments +for arg in "$@"; do + DOCKER_CMD="$DOCKER_CMD \"$arg\"" +done + +# Execute Docker command +echo "" +eval $DOCKER_CMD + +EXIT_CODE=$? + +if [ $EXIT_CODE -eq 0 ]; then + echo "" + echo -e "${GREEN}โœ… Migration completed successfully!${NC}" +else + echo "" + echo -e "${RED}โŒ Migration failed with exit code: $EXIT_CODE${NC}" +fi + +exit $EXIT_CODE \ No newline at end of file diff --git a/samples/microsoft/python/migration/v1_to_v2_migration.py b/samples/microsoft/python/migration/v1_to_v2_migration.py new file mode 100644 index 00000000..aac682d5 --- /dev/null +++ b/samples/microsoft/python/migration/v1_to_v2_migration.py @@ -0,0 +1,1933 @@ +import os, sys, time, json, argparse, subprocess, requests +from typing import List, Dict, Any, Optional +from azure.cosmos import CosmosClient, exceptions +from read_cosmos_data import fetch_data +from azure.ai.agents.models import AzureFunctionStorageQueue, AzureFunctionTool + +# Import AIProjectClient for project endpoint support +try: + from azure.ai.projects import AIProjectClient + from azure.identity import DefaultAzureCredential, AzureCliCredential + from azure.core.credentials import AccessToken + PROJECT_CLIENT_AVAILABLE = True +except ImportError: + PROJECT_CLIENT_AVAILABLE = False + print("โš ๏ธ Warning: azure-ai-projects package not available. 
Project endpoint functionality disabled.") + +# Cosmos DB Configuration +COSMOS_CONNECTION_STRING = os.getenv("COSMOS_CONNECTION_STRING") or None +DATABASE_NAME = "testDB2" +WRITE_DATABASE_NAME = "agents" +SOURCE_CONTAINER = "testContainer1" # Where v1 assistants and agents are stored +TARGET_CONTAINER = "agent-definitions" # Where v2 agents will be stored + +# API Configuration +HOST = os.getenv("AGENTS_HOST") or "eastus.api.azureml.ms" +# Use host.docker.internal for Docker containers to access Windows host +LOCAL_HOST = os.getenv("LOCAL_HOST") or "host.docker.internal:5001" #"localhost:5001"# +SUBSCRIPTION_ID = os.getenv("AGENTS_SUBSCRIPTION") or "921496dc-987f-410f-bd57-426eb2611356" +RESOURCE_GROUP = os.getenv("AGENTS_RESOURCE_GROUP") or "agents-e2e-tests-eastus" +RESOURCE_GROUP_V2 = os.getenv("AGENTS_RESOURCE_GROUP_V2") or "agents-e2e-tests-westus2" +WORKSPACE = os.getenv("AGENTS_WORKSPACE") or "basicaccountjqqa@e2e-tests@AML" +WORKSPACE_V2 = os.getenv("AGENTS_WORKSPACE_V2") or "e2e-tests-westus2-account@e2e-tests-westus2@AML" +API_VERSION = os.getenv("AGENTS_API_VERSION") or "2025-05-15-preview" +TOKEN = os.getenv("AZ_TOKEN") + +# Source Tenant Configuration (for reading v1 assistants from source tenant) +SOURCE_TENANT = os.getenv("SOURCE_TENANT") or os.getenv("AGENTS_TENANT") or "72f988bf-86f1-41af-91ab-2d7cd011db47" # Microsoft tenant + +# Production Resource Configuration +PRODUCTION_RESOURCE = os.getenv("PRODUCTION_RESOURCE") # e.g., "nextgen-eastus" +PRODUCTION_SUBSCRIPTION = os.getenv("PRODUCTION_SUBSCRIPTION") # e.g., "b1615458-c1ea-49bc-8526-cafc948d3c25" +PRODUCTION_TENANT = os.getenv("PRODUCTION_TENANT") # e.g., "33e577a9-b1b8-4126-87c0-673f197bf624" +PRODUCTION_TOKEN = os.getenv("PRODUCTION_TOKEN") # Production token from PowerShell script + +# v1 API base URL +BASE_V1 = f"https://{HOST}/agents/v1.0/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{RESOURCE_GROUP}/providers/Microsoft.MachineLearningServices/workspaces/{WORKSPACE}" +# v2 API base URL 
- will be determined based on production vs local mode +BASE_V2 = None # Will be set dynamically based on production resource configuration + +def create_cosmos_client_from_connection_string(connection_string: str): + """ + Create a Cosmos DB client using a connection string. + """ + try: + return CosmosClient.from_connection_string(connection_string) + except Exception as e: + print(f"Failed to create Cosmos client from connection string: {e}") + raise + +def ensure_database_and_container(client, database_name: str, container_name: str): + """ + Ensure the database and container exist, create them if they don't. + """ + try: + database = client.get_database_client(database_name) + print(f"Database '{database_name}' found") + except exceptions.CosmosResourceNotFoundError: + print(f"Creating database '{database_name}'") + database = client.create_database_if_not_exists(id=database_name) + + try: + container = database.get_container_client(container_name) + print(f"Container '{container_name}' found") + except exceptions.CosmosResourceNotFoundError: + print(f"Creating container '{container_name}'") + container = database.create_container_if_not_exists( + id=container_name, + partition_key={'paths': ['/id'], 'kind': 'Hash'} + ) + + return database, container + +def get_production_v2_base_url(resource_name: str, subscription_id: str, project_name: str) -> str: + """ + Build the production v2 API base URL for Azure AI services. 
+ + Args: + resource_name: The Azure AI resource name (e.g., "nextgen-eastus") + subscription_id: The subscription ID for production + project_name: The project name (e.g., "nextgen-eastus") + + Returns: + The production v2 API base URL + """ + # Production format: https://{resource}-resource.services.ai.azure.com/api/projects/{project}/agents/{agent}/versions + return f"https://{resource_name}-resource.services.ai.azure.com/api/projects/{project_name}" + +# Production token handling removed - now handled by PowerShell wrapper +# which provides PRODUCTION_TOKEN environment variable + +# Production authentication is now handled by the PowerShell wrapper +# which generates both AZ_TOKEN and PRODUCTION_TOKEN environment variables + +def get_token_from_az(tenant_id: Optional[str] = None) -> Optional[str]: + """ + Runs the az CLI to get an access token for the AI resource scope. + Returns the token string on success, or None on failure. + + Args: + tenant_id: Optional tenant ID to authenticate with + """ + try: + cmd = [ + "az", "account", "get-access-token", + "--scope", "https://ai.azure.com/.default", + "--query", "accessToken", + "-o", "tsv" + ] + + # Add tenant parameter if provided + if tenant_id: + cmd.extend(["--tenant", tenant_id]) + print(f"๐Ÿ” Requesting token for tenant: {tenant_id}") + + # capture output (shell=True needed for Windows) + proc = subprocess.run(cmd, capture_output=True, text=True, shell=True) + if proc.returncode != 0: + print("az CLI returned non-zero exit code when fetching token:", proc.stderr.strip()) + return None + + # Clean the token output - get only the last non-empty line that looks like a token + lines = [line.strip() for line in proc.stdout.strip().split('\n') if line.strip()] + if not lines: + print("az CLI returned empty token.") + return None + + # JWT tokens start with 'ey' or are long strings (>100 chars) + token = None + for line in reversed(lines): + if line.startswith('ey') or len(line) > 100: + token = line + break + + 
if not token: + # Fallback to the last line if no obvious token found + token = lines[-1] + + return token + except FileNotFoundError: + print("az CLI not found on PATH. Please install Azure CLI or set AZ_TOKEN env var.") + return None + except Exception as ex: + print("Unexpected error while running az CLI:", ex) + return None + +class ManualAzureCliCredential: + """ + A custom credential class that uses our manual az CLI token extraction. + This works around issues with the azure-identity AzureCliCredential in containers. + """ + def get_token(self, *scopes, **kwargs): + """Get an access token using az CLI.""" + try: + # Try different scopes based on what's requested + if scopes: + scope = scopes[0] + else: + # Default to Azure AI scope for Azure AI Projects (confirmed correct audience) + scope = "https://ai.azure.com/.default" + + cmd = [ + "az", "account", "get-access-token", + "--scope", scope, + "--query", "accessToken", + "-o", "tsv" + ] + proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True, shell=True) + if proc.returncode != 0: + # Try without suppressing stderr to get the actual error + proc_debug = subprocess.run(cmd, capture_output=True, text=True, shell=True) + raise Exception(f"az CLI returned error: {proc_debug.stderr.strip()}") + + # Clean the token output - get only the last non-empty line (the actual token) + lines = [line.strip() for line in proc.stdout.strip().split('\n') if line.strip()] + if not lines: + raise Exception("az CLI returned empty token") + + # The token should be the last line that looks like a token (starts with ey) + token = None + for line in reversed(lines): + if line.startswith('ey') or len(line) > 50: # JWT tokens start with 'ey' or are long strings + token = line + break + + if not token: + # Fallback to the last line if no obvious token found + token = lines[-1] + + # Return a proper AccessToken object + import time + # Token expires in 1 hour (3600 seconds) + expires_on = 
int(time.time()) + 3600 + return AccessToken(token, expires_on) + + except Exception as e: + raise Exception(f"Failed to get token via az CLI: {e}") + +class StaticTokenCredential: + """ + A credential class that uses a pre-provided static token. + Useful when we have a token from AZ_TOKEN environment variable. + """ + def __init__(self, token: str): + self.token = token + + def get_token(self, *scopes, **kwargs): + """Return the static token.""" + import time + # Assume token expires in 1 hour (3600 seconds) + expires_on = int(time.time()) + 3600 + return AccessToken(self.token, expires_on) + +def get_azure_credential(): + """ + Get the appropriate Azure credential for the current environment. + Prefers static token credential when AZ_TOKEN is available. + """ + if not PROJECT_CLIENT_AVAILABLE: + raise ImportError("azure-identity package is required for credential functionality") + + # Check if we have a static token from environment variable (highest priority) + static_token = os.environ.get('AZ_TOKEN') + if static_token: + print("๐Ÿ”‘ Using static token from AZ_TOKEN environment variable") + return StaticTokenCredential(static_token) + + # Check if we're likely in a container environment + is_container = os.path.exists('/.dockerenv') or os.environ.get('DOCKER_CONTAINER') == 'true' + + if is_container: + # In container, use DefaultAzureCredential which has better fallback handling for version mismatches + try: + print("๐Ÿณ Container environment detected, using DefaultAzureCredential for better compatibility") + return DefaultAzureCredential() + except Exception as e: + print(f"โš ๏ธ DefaultAzureCredential failed: {e}") + print("๐Ÿ’ก Falling back to manual Azure CLI credential") + try: + return ManualAzureCliCredential() + except Exception as e2: + print(f"โš ๏ธ Manual Azure CLI credential also failed: {e2}") + print("๐Ÿ’ก This might be due to Azure CLI version mismatch between host and container") + raise Exception(f"All credential methods failed. 
Host CLI: 2.77.0, Container CLI: 2.78.0. Try: az upgrade") + else: + # On host system, use default credential chain + print("๐Ÿ–ฅ๏ธ Host environment detected, using default credential chain") + return DefaultAzureCredential() + +def set_api_token(force_refresh: bool = False, tenant_id: Optional[str] = None) -> bool: + """ + Ensure we have a valid bearer token for API calls. + Returns True if a token is set, False otherwise. + + Args: + force_refresh: If True, ignore existing tokens and get a fresh one from az CLI + tenant_id: Optional tenant ID to authenticate with (uses SOURCE_TENANT if not provided) + """ + global TOKEN + + # If force refresh is requested, skip environment variable and get fresh token + if not force_refresh: + # Check environment variable first + env_token = os.getenv("AZ_TOKEN") + if env_token: + TOKEN = env_token + return True + + # Use provided tenant or default to SOURCE_TENANT + if tenant_id is None: + tenant_id = SOURCE_TENANT + + # Try az CLI (either forced or as fallback) with tenant + token = get_token_from_az(tenant_id) + if token: + TOKEN = token + print(f"๐Ÿ”„ Token refreshed from az CLI for tenant: {tenant_id}") + return True + return False + +def do_api_request_with_token(method: str, url: str, token: str, **kwargs) -> requests.Response: + """ + Wrapper around requests.request with specific token authentication. 
+ """ + headers = kwargs.pop("headers", {}) + headers["Authorization"] = f"Bearer {token}" + headers["Accept"] = "application/json" + kwargs["headers"] = headers + + # Set longer timeout for localhost/local development (servers may be slower) + if "localhost" in url or "host.docker.internal" in url: + kwargs["timeout"] = 120 # 2 minutes for local development + kwargs["verify"] = False + # Suppress the SSL warning for localhost/local development + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + host_type = "localhost" if "localhost" in url else "host.docker.internal (Docker)" + print(f"๐Ÿ  Making request to {host_type} with extended timeout and no SSL verification: {url}") + elif "timeout" not in kwargs: + kwargs["timeout"] = 30 + + try: + resp = requests.request(method, url, **kwargs) + resp.raise_for_status() + return resp + + except requests.exceptions.Timeout as e: + print(f"โฐ Request timed out: {e}") + print("๐Ÿ’ก This usually means:") + print(" - The server is not running") + print(" - The server is overloaded") + print(" - The endpoint doesn't exist") + if "localhost" in url or "host.docker.internal" in url: + print(" - Check if the v2 API server is running") + raise + except requests.exceptions.ConnectionError as e: + print(f"๐Ÿ”Œ Connection failed: {e}") + if "localhost" in url or "host.docker.internal" in url: + print("๐Ÿ’ก Make sure the v2 API server is running") + raise + except requests.exceptions.RequestException as e: + print(f"โŒ API request failed: {e}") + raise + +def do_api_request(method: str, url: str, **kwargs) -> requests.Response: + """ + Wrapper around requests.request with authentication and retry logic. 
+ """ + headers = kwargs.pop("headers", {}) + if TOKEN: + headers["Authorization"] = f"Bearer {TOKEN}" + headers["Accept"] = "application/json" + kwargs["headers"] = headers + + # Set longer timeout for localhost/local development (servers may be slower) + if "localhost" in url or "host.docker.internal" in url: + kwargs["timeout"] = 120 # 2 minutes for local development + kwargs["verify"] = False + # Suppress the SSL warning for localhost/local development + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + host_type = "localhost" if "localhost" in url else "host.docker.internal (Docker)" + print(f"๐Ÿ  Making request to {host_type} with extended timeout and no SSL verification: {url}") + elif "timeout" not in kwargs: + kwargs["timeout"] = 30 + + try: + resp = requests.request(method, url, **kwargs) + if resp.status_code == 401: + print("Received 401 Unauthorized. Trying to refresh token...") + time.sleep(5) + if set_api_token(force_refresh=True): # Force refresh from az CLI on 401 + headers["Authorization"] = f"Bearer {TOKEN}" + kwargs["headers"] = headers + resp = requests.request(method, url, **kwargs) + else: + print("Token refresh failed.") + + resp.raise_for_status() + return resp + + except requests.exceptions.Timeout as e: + print(f"โฐ Request timed out: {e}") + print("๐Ÿ’ก This usually means:") + print(" - The server is not running") + print(" - The server is overloaded") + print(" - The endpoint doesn't exist") + if "localhost" in url or "host.docker.internal" in url: + print(" - Check if the v2 API server is running") + raise + except requests.exceptions.ConnectionError as e: + print(f"๐Ÿ”Œ Connection failed: {e}") + if "localhost" in url or "host.docker.internal" in url: + print("๐Ÿ’ก Make sure the v2 API server is running") + raise + except requests.exceptions.RequestException as e: + print(f"โŒ API request failed: {e}") + raise + +def test_v2_api_connectivity() -> bool: + """Test if the local v2 API server is 
reachable.""" + # Build local development URL for testing + local_base = f"https://{LOCAL_HOST}/agents/v2.0/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{RESOURCE_GROUP_V2}/providers/Microsoft.MachineLearningServices/workspaces/{WORKSPACE_V2}" + + try: + # Try a simple GET request to the base URL + print(f"๐Ÿ” Testing connectivity to {local_base}...") + response = requests.get(local_base, verify=False, timeout=10) + print(f"โœ… Server responded with status code: {response.status_code}") + return True + except requests.exceptions.Timeout: + print(f"โฐ Timeout connecting to {local_base}") + print("๐Ÿ’ก The server might not be running or is too slow to respond") + return False + except requests.exceptions.ConnectionError: + print(f"๐Ÿ”Œ Cannot connect to {local_base}") + print("๐Ÿ’ก Make sure the v2 API server is running") + return False + except Exception as e: + print(f"โŒ Unexpected error testing connectivity: {e}") + return False + +def get_assistant_from_api(assistant_id: str) -> Dict[str, Any]: + """Get v1 assistant details from API including internal metadata.""" + url = f"{BASE_V1}/assistants/{assistant_id}" + params = {"api-version": API_VERSION, "include[]": "internal_metadata"} + r = do_api_request("GET", url, params=params) + return r.json() + +def list_assistants_from_api() -> List[Dict[str, Any]]: + """List all v1 assistants from API.""" + url = f"{BASE_V1}/assistants" + params = {"api-version": API_VERSION, "limit": "100", "include[]": "internal_metadata"} + r = do_api_request("GET", url, params=params) + response_data = r.json() + + # Handle different response formats + if isinstance(response_data, dict): + if "data" in response_data: + return response_data["data"] + elif "assistants" in response_data: + return response_data["assistants"] + elif "items" in response_data: + return response_data["items"] + elif isinstance(response_data, list): + return response_data + + # If we can't find a list, return empty + print(f"Warning: Unexpected API 
response format: {type(response_data)}") + return [] + +def ensure_project_connection_package(): + """Ensure the correct azure-ai-projects version is installed for project connection string functionality.""" + try: + # Test if we have the from_connection_string method + from azure.ai.projects import AIProjectClient + if hasattr(AIProjectClient, 'from_connection_string'): + print("โœ… Correct azure-ai-projects version already installed (1.0.0b10)") + return True + else: + print("โš ๏ธ Current azure-ai-projects version doesn't support from_connection_string") + print("๐Ÿ”„ Upgrading to azure-ai-projects==1.0.0b10...") + + import subprocess + import sys + + # Upgrade to the beta version + result = subprocess.run([ + sys.executable, "-m", "pip", "install", "--upgrade", "azure-ai-projects==1.0.0b10" + ], capture_output=True, text=True) + + if result.returncode == 0: + print("โœ… Successfully upgraded to azure-ai-projects==1.0.0b10") + # Force reimport after upgrade + import importlib + import azure.ai.projects + importlib.reload(azure.ai.projects) + return True + else: + print(f"โŒ Failed to upgrade package: {result.stderr}") + return False + + except ImportError: + print("โŒ azure-ai-projects package not found") + print("๐Ÿ”„ Installing azure-ai-projects==1.0.0b10...") + + import subprocess + import sys + + result = subprocess.run([ + sys.executable, "-m", "pip", "install", "azure-ai-projects==1.0.0b10" + ], capture_output=True, text=True) + + if result.returncode == 0: + print("โœ… Successfully installed azure-ai-projects==1.0.0b10") + return True + else: + print(f"โŒ Failed to install package: {result.stderr}") + return False + +def get_assistant_from_project_connection(project_connection_string: str, assistant_id: str) -> Dict[str, Any]: + """Get v1 assistant details from AIProjectClient using connection string.""" + global AIProjectClient, PROJECT_CLIENT_AVAILABLE + + if not PROJECT_CLIENT_AVAILABLE: + print("โŒ azure-ai-projects package is required for 
project connection string functionality") + print("๐Ÿ”„ Attempting to install the correct version...") + if not ensure_project_connection_package(): + raise ImportError("Failed to install azure-ai-projects==1.0.0b10") + + # Re-import after installation + try: + from azure.ai.projects import AIProjectClient + PROJECT_CLIENT_AVAILABLE = True + except ImportError: + raise ImportError("Failed to import AIProjectClient after installation") # Ensure we have the correct version + if not ensure_project_connection_package(): + raise ImportError("azure-ai-projects==1.0.0b10 is required for project connection string functionality") + + # Try to use from_connection_string method (available in beta versions) + try: + project_client = AIProjectClient.from_connection_string( + credential=get_azure_credential(), + conn_str=project_connection_string + ) + print("โœ… Using AIProjectClient.from_connection_string method") + except AttributeError: + # This shouldn't happen now, but keep as fallback + print("โš ๏ธ from_connection_string not available after upgrade") + raise ImportError("azure-ai-projects==1.0.0b10 is required for project connection string functionality") + + with project_client: + agent = project_client.agents.get_agent(assistant_id) + # Convert the agent object to dictionary format with proper JSON serialization + if hasattr(agent, 'model_dump'): + return json.loads(json.dumps(agent.model_dump(), default=str)) + else: + return json.loads(json.dumps(dict(agent), default=str)) + +def list_assistants_from_project_connection(project_connection_string: str) -> List[Dict[str, Any]]: + """List all v1 assistants from AIProjectClient using connection string.""" + global AIProjectClient, PROJECT_CLIENT_AVAILABLE + + if not PROJECT_CLIENT_AVAILABLE: + print("โŒ azure-ai-projects package is required for project connection string functionality") + print("๐Ÿ”„ Attempting to install the correct version...") + if not ensure_project_connection_package(): + raise ImportError("Failed 
to install azure-ai-projects==1.0.0b10") + + # Re-import after installation + try: + from azure.ai.projects import AIProjectClient + PROJECT_CLIENT_AVAILABLE = True + except ImportError: + raise ImportError("Failed to import AIProjectClient after installation") # Ensure we have the correct version + if not ensure_project_connection_package(): + raise ImportError("azure-ai-projects==1.0.0b10 is required for project connection string functionality") + + # Try to use from_connection_string method (available in beta versions) + try: + project_client = AIProjectClient.from_connection_string( + credential=get_azure_credential(), + conn_str=project_connection_string + ) + print("โœ… Using AIProjectClient.from_connection_string method") + except AttributeError: + # This shouldn't happen now, but keep as fallback + print("โš ๏ธ from_connection_string not available after upgrade") + raise ImportError("azure-ai-projects==1.0.0b10 is required for project connection string functionality") + + with project_client: + agents = project_client.agents.list_agents() + # Convert agent objects to dictionary format with proper JSON serialization + agent_list = [] + for agent in agents: + if hasattr(agent, 'model_dump'): + agent_dict = json.loads(json.dumps(agent.model_dump(), default=str)) + else: + agent_dict = json.loads(json.dumps(dict(agent), default=str)) + agent_list.append(agent_dict) + return agent_list + +def get_assistant_from_project(project_endpoint: str, assistant_id: str, subscription_id: Optional[str] = None, resource_group_name: Optional[str] = None, project_name: Optional[str] = None) -> Dict[str, Any]: + """Get v1 assistant details from project endpoint using direct API calls (bypassing AIProjectClient SDK bug).""" + + # Since direct API calls work and AIProjectClient has issues, use direct REST API + print(f" ๐ŸŒ Using direct API call to project endpoint (bypassing AIProjectClient SDK)") + + # Build the direct API URL + if not project_endpoint.endswith('/'): + 
project_endpoint = project_endpoint + '/' + + # Remove trailing slash if present, then add the assistants path + api_url = project_endpoint.rstrip('/') + f'/assistants/{assistant_id}' + + # Add API version parameter + params = {"api-version": API_VERSION} + + print(f" ๐Ÿ“ž Making direct API call to: {api_url}") + print(f" ๐Ÿ”ง Using API version: {API_VERSION}") + + try: + # Make the direct API request + response = do_api_request("GET", api_url, params=params) + result = response.json() + + print(f" โœ… Successfully retrieved assistant via direct API call") + print(f" ๐Ÿ“‹ Assistant ID: {result.get('id', 'N/A')}") + print(f" ๐Ÿ“‹ Assistant Name: {result.get('name', 'N/A')}") + + return result + + except Exception as e: + print(f" โŒ Direct API call failed: {e}") + + # Fallback to AIProjectClient if available (for debugging) + if PROJECT_CLIENT_AVAILABLE: + print(f" ๐Ÿ”„ Attempting fallback to AIProjectClient...") + + # Extract project information from endpoint if not provided + if not subscription_id or not resource_group_name or not project_name: + print(f" ๐Ÿ” Some project parameters missing, attempting to extract from endpoint or environment...") + + # Use environment variables as fallbacks + subscription_id = subscription_id or os.getenv("AGENTS_SUBSCRIPTION") or "921496dc-987f-410f-bd57-426eb2611356" + resource_group_name = resource_group_name or os.getenv("AGENTS_RESOURCE_GROUP") or "agents-e2e-tests-eastus" + + # Try to extract project name from endpoint URL + if not project_name: + import re + project_match = re.search(r'/projects/([^/?]+)', project_endpoint) + if project_match: + project_name = project_match.group(1) + print(f" ๐Ÿ“ Extracted project name from endpoint: {project_name}") + else: + project_name = "default-project" + print(f" โš ๏ธ Could not extract project name from endpoint, using default: {project_name}") + + print(f" ๐Ÿ“‹ Using: subscription={subscription_id[:8]}..., resource_group={resource_group_name}, project={project_name}") + + # 
Initialize AIProjectClient with all required parameters + try: + project_client = AIProjectClient( + endpoint=project_endpoint, + credential=get_azure_credential(), + subscription_id=subscription_id, + resource_group_name=resource_group_name, + project_name=project_name + ) + + with project_client: + agent = project_client.agents.get_agent(assistant_id) + # Convert the agent object to dictionary format with proper JSON serialization + if hasattr(agent, 'model_dump'): + return json.loads(json.dumps(agent.model_dump(), default=str)) + else: + return json.loads(json.dumps(dict(agent), default=str)) + + except Exception as client_error: + print(f" โŒ AIProjectClient fallback also failed: {client_error}") + raise RuntimeError(f"Both direct API call and AIProjectClient failed. Direct API error: {e}, AIProjectClient error: {client_error}") + else: + raise + +def list_assistants_from_project(project_endpoint: str, subscription_id: Optional[str] = None, resource_group_name: Optional[str] = None, project_name: Optional[str] = None) -> List[Dict[str, Any]]: + """List all v1 assistants from project endpoint using direct API calls (bypassing AIProjectClient SDK bug).""" + + # Since direct API calls work and AIProjectClient has issues, use direct REST API + print(f" ๐ŸŒ Using direct API call to project endpoint (bypassing AIProjectClient SDK)") + + # Build the direct API URL + if not project_endpoint.endswith('/'): + project_endpoint = project_endpoint + '/' + + # Remove trailing slash if present, then add the assistants path + api_url = project_endpoint.rstrip('/') + '/assistants' + + # Add API version parameter + params = {"api-version": API_VERSION, "limit": "100"} + + print(f" ๐Ÿ“ž Making direct API call to: {api_url}") + print(f" ๐Ÿ”ง Using API version: {API_VERSION}") + + try: + # Make the direct API request + response = do_api_request("GET", api_url, params=params) + result = response.json() + + # Handle different response formats (same logic as 
list_assistants_from_api) + if isinstance(result, dict): + if "data" in result: + agent_list = result["data"] + elif "assistants" in result: + agent_list = result["assistants"] + elif "items" in result: + agent_list = result["items"] + else: + # If we can't find a list, return empty + print(f" โš ๏ธ Unexpected API response format: {type(result)}") + agent_list = [] + elif isinstance(result, list): + agent_list = result + else: + # If we can't find a list, return empty + print(f" โš ๏ธ Unexpected API response format: {type(result)}") + agent_list = [] + + print(f" โœ… Successfully retrieved {len(agent_list)} assistants via direct API call") + + return agent_list + + except Exception as e: + print(f" โŒ Direct API call failed: {e}") + + # Fallback to AIProjectClient if available (for debugging) + if PROJECT_CLIENT_AVAILABLE: + print(f" ๐Ÿ”„ Attempting fallback to AIProjectClient...") + + # Try different AIProjectClient constructor patterns for different versions + try: + # Try the newer constructor with additional parameters (if provided) + if subscription_id and resource_group_name and project_name: + project_client = AIProjectClient( + endpoint=project_endpoint, + credential=get_azure_credential(), + subscription_id=subscription_id, + resource_group_name=resource_group_name, + project_name=project_name + ) + else: + # Fallback to the original constructor (should work with most versions) + project_client = AIProjectClient( + endpoint=project_endpoint, + credential=get_azure_credential(), + ) + except TypeError as type_error: + # If that fails, try with just endpoint and credential + print(f" โš ๏ธ Trying alternative AIProjectClient constructor due to: {type_error}") + try: + project_client = AIProjectClient( + endpoint=project_endpoint, + credential=get_azure_credential(), + ) + except Exception as fallback_error: + raise RuntimeError(f"Could not initialize AIProjectClient with any constructor pattern. 
Original error: {type_error}, Fallback error: {fallback_error}") + + with project_client: + agents = project_client.agents.list_agents() + # Convert agent objects to dictionary format with proper JSON serialization + agent_list = [] + for agent in agents: + if hasattr(agent, 'model_dump'): + agent_dict = json.loads(json.dumps(agent.model_dump(), default=str)) + else: + agent_dict = json.loads(json.dumps(dict(agent), default=str)) + agent_list.append(agent_dict) + return agent_list + else: + raise + +def create_agent_version_via_api(agent_name: str, agent_version_data: Dict[str, Any], production_resource: Optional[str] = None, production_subscription: Optional[str] = None, production_token: Optional[str] = None) -> Dict[str, Any]: + """ + Create a v2 agent version using the v2 API endpoint. + + Args: + agent_name: The agent name (without version) + agent_version_data: The agent version payload matching v2 API format + production_resource: Optional production resource name (e.g., "nextgen-eastus") + production_subscription: Optional production subscription ID + production_token: Optional production token for authentication + + Returns: + API response data + """ + # Build the v2 API endpoint URL based on mode (production vs local) + agent_name = agent_name.lower()[:len(agent_name)-1] + "f" + + if production_resource and production_subscription: + # Production mode: use Azure AI services endpoint format + base_url = get_production_v2_base_url(production_resource, production_subscription, production_resource) + url = f"{base_url}/agents/{agent_name}/versions" + print(f"๐Ÿญ Using PRODUCTION endpoint") + else: + # Local development mode: use the existing BASE_V2 format + if BASE_V2 is None: + # Fallback to local development URL if not set + local_base = f"https://{LOCAL_HOST}/agents/v2.0/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{RESOURCE_GROUP_V2}/providers/Microsoft.MachineLearningServices/workspaces/{WORKSPACE_V2}" + url = 
f"{local_base}/agents/{agent_name}/versions" + else: + url = f"{BASE_V2}/agents/{agent_name}/versions" + print(f"๐Ÿ  Using LOCAL development endpoint") + + params = {"api-version": API_VERSION} + + print(f"๐ŸŒ Creating agent version via v2 API:") + print(f" URL: {url}") + print(f" Agent Name: {agent_name}") + print(f" API Version: {API_VERSION}") + print(f" Full params: {params}") + + # Debug: Show the actual request body + print(f"๐Ÿ” Request Body Debug:") + print(f" Type: {type(agent_version_data)}") + print(f" Keys: {list(agent_version_data.keys()) if isinstance(agent_version_data, dict) else 'Not a dict'}") + if isinstance(agent_version_data, dict): + import json + print(f" Full JSON payload:") + print(json.dumps(agent_version_data, indent=2, default=str)[:2000] + "..." if len(str(agent_version_data)) > 2000 else json.dumps(agent_version_data, indent=2, default=str)) + + try: + # Make the POST request to create the agent version with appropriate token + # Use production token from environment if available and production resource is specified + if production_resource and PRODUCTION_TOKEN: + print(f" ๐Ÿ”‘ Using production token for authentication") + response = do_api_request_with_token("POST", url, PRODUCTION_TOKEN, params=params, json=agent_version_data) + else: + print(f" ๐Ÿ”‘ Using standard token for authentication") + response = do_api_request("POST", url, params=params, json=agent_version_data) + result = response.json() + + print(f"โœ… Successfully created agent version via v2 API") + print(f" Response ID: {result.get('id', 'N/A')}") + + return result + + except requests.exceptions.HTTPError as e: + print(f"โŒ Failed to create agent version via v2 API: {e}") + if hasattr(e, 'response') and e.response: + print(f"๐Ÿ” Response Status Code: {e.response.status_code}") + try: + error_response = e.response.json() + print(f"๐Ÿ” Error Response JSON:") + import json + print(json.dumps(error_response, indent=2)) + except: + print(f"๐Ÿ” Error Response Text: 
{e.response.text[:1000]}") + raise + except Exception as e: + print(f"โŒ Failed to create agent version via v2 API: {e}") + raise + +def prepare_v2_api_payload(v2_agent_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Prepare the payload for v2 API from our transformed agent data. + Converts our internal format to the v2 API expected format and includes flattened migration metadata. + All metadata values are converted to strings as required by the API. + """ + agent_version = v2_agent_data['v2_agent_version'] + migration_notes = v2_agent_data['migration_notes'] + + # Start with the existing metadata and enhance it with migration info + enhanced_metadata = agent_version.get("metadata", {}).copy() + + # Convert any existing metadata values to strings + string_metadata = {} + for key, value in enhanced_metadata.items(): + if key == "feature_flags" and isinstance(value, dict): + # Convert feature flags to comma-separated string + flag_list = [f"{k}={v}" for k, v in value.items()] + string_metadata[key] = ",".join(flag_list) + elif isinstance(value, (dict, list)): + # Convert complex objects to JSON strings + string_metadata[key] = json.dumps(value) + else: + # Convert everything else to string + string_metadata[key] = str(value) if value is not None else "" + + # Add flattened migration information to metadata (all as strings) + current_timestamp = int(time.time() * 1000) # Milliseconds + string_metadata.update({ + "migrated_from": "v1_assistant_via_api_migration_script", # Combined migration_source and migrated_from + "migration_timestamp": str(current_timestamp), + "original_v1_id": str(migration_notes['original_v1_id']), + "new_v2_format": str(migration_notes['new_v2_format']), + "migration_changes": ",".join(migration_notes['changes']) + # Removed migrated_at as requested + }) + + # Extract the core fields that the v2 API expects + api_payload = { + "description": agent_version.get("description"), + "metadata": string_metadata, + "definition": 
agent_version.get("definition", {}) + } + + # Remove None values to keep payload clean + api_payload = {k: v for k, v in api_payload.items() if v is not None} + + print(f"๐Ÿ”ง Prepared v2 API payload:") + print(f" Description: {api_payload.get('description', 'N/A')}") + print(f" Metadata keys: {list(api_payload.get('metadata', {}).keys())}") + print(f" Definition kind: {api_payload.get('definition', {}).get('kind', 'N/A')}") + print(f" Migration info: Original v1 ID = {migration_notes['original_v1_id']}") + print(f" All metadata values converted to strings") + + return api_payload + +def determine_agent_kind(v1_assistant: Dict[str, Any]) -> str: + """ + Determine the appropriate v2 agent kind based on v1 assistant properties. + + Possible v2 kinds: + - "prompt": Standard conversational agent (default) + - "hosted": Hosted external service + - "container_app": Container-based agent + - "workflow": Multi-step workflow agent + """ + # For now, all assistants will be migrated as "prompt" agents + # Uncomment the detection logic below if you need to differentiate agent kinds in the future + + # # Check for workflow indicators + # tools = v1_assistant.get("tools", []) + # if any(tool.get("type") == "function" for tool in tools if isinstance(tool, dict)): + # # If it has function tools, it might be a workflow + # if len(tools) > 3: # Arbitrary threshold for complex workflows + # return "workflow" + # + # # Check for hosted service indicators + # metadata = v1_assistant.get("metadata", {}) + # if metadata.get("service_type") == "hosted" or metadata.get("external_service"): + # return "hosted" + # + # # Check for container indicators + # if metadata.get("deployment_type") == "container" or metadata.get("container_image"): + # return "container_app" + + # Default to prompt agent for all assistants (test assumption: all are prompt agents) + return "prompt" + +def v1_assistant_to_v2_agent(v1_assistant: Dict[str, Any], agent_name: Optional[str] = None, version: str = "1") -> 
Dict[str, Any]: + """ + Transform a v1 assistant object to v2 agent structure. + Based on the migration document mapping from v1 Agent to v2 AgentObject + AgentVersionObject. + """ + # Validate tools - check for unsupported tool types + v1_tools = v1_assistant.get("tools", []) + + # Handle string-encoded tools (from project client serialization) + if isinstance(v1_tools, str): + try: + v1_tools = json.loads(v1_tools) + except json.JSONDecodeError: + print(f" โš ๏ธ Warning: Could not parse tools string: {v1_tools}") + v1_tools = [] + + # Ensure v1_tools is a list + if not isinstance(v1_tools, list): + v1_tools = [] + + # Check for unsupported tool types and log warnings + assistant_id = v1_assistant.get("id", "unknown") + assistant_name = v1_assistant.get("name", "unknown") + unsupported_tools = [] + + for tool in v1_tools: + if not isinstance(tool, dict): + continue + + tool_type = tool.get("type") + + if tool_type == "connected_agent": + unsupported_tools.append(tool_type) + print(f" โš ๏ธ WARNING: Your classic agent includes connected agents, which aren't supported in the new experience.") + print(f" โ„น๏ธ These connected agents won't be carried over when you create the new agent.") + print(f" ๐Ÿ’ก To orchestrate multiple agents, use a workflow instead.") + elif tool_type == "event_binding": + unsupported_tools.append(tool_type) + print(f" โš ๏ธ WARNING: Your classic agent uses 'event_binding' which isn't supported in the new experience.") + print(f" โ„น๏ธ This tool won't be carried over when you create the new agent.") + elif tool_type == "output_binding": + unsupported_tools.append(tool_type) + print(f" โš ๏ธ WARNING: Your classic agent uses 'output_binding' which isn't supported in the new experience.") + print(f" โ„น๏ธ This tool won't be carried over when you create the new agent.") + print(f" ๐Ÿ’ก Consider using 'capture_structured_outputs' in your new agent instead.") + + if unsupported_tools: + print(f" ๐Ÿ“‹ Unsupported tools that will be skipped: 
{', '.join(unsupported_tools)}") + + # Derive agent name if not provided + if not agent_name: + agent_name = v1_assistant.get("name") or f"agent_{v1_assistant.get('id', 'unknown')}" + + # Determine the appropriate agent kind + agent_kind = determine_agent_kind(v1_assistant) + + # Extract and preserve feature flags from v1 data + v1_metadata = v1_assistant.get("metadata", {}) + + # Ensure v1_metadata is a dictionary (defensive programming) + if not isinstance(v1_metadata, dict): + print(f" โš ๏ธ Warning: metadata is not a dict (type: {type(v1_metadata)}), using empty dict") + v1_metadata = {} + + feature_flags = {} + + # Look for feature flags in various locations + if isinstance(v1_metadata, dict) and "feature_flags" in v1_metadata: + potential_flags = v1_metadata.get("feature_flags", {}) + if isinstance(potential_flags, dict): + feature_flags = potential_flags + elif "internal_metadata" in v1_assistant and isinstance(v1_assistant["internal_metadata"], dict): + potential_flags = v1_assistant["internal_metadata"].get("feature_flags", {}) + if isinstance(potential_flags, dict): + feature_flags = potential_flags + + # Build enhanced metadata for v2 that includes feature flags + enhanced_metadata = v1_metadata.copy() if isinstance(v1_metadata, dict) else {} + if feature_flags and isinstance(feature_flags, dict): + enhanced_metadata["feature_flags"] = feature_flags + print(f" ๐Ÿšฉ Preserving {len(feature_flags)} feature flags: {list(feature_flags.keys())}") + + # Create the v2 AgentObject (metadata level) + agent_object = { + "object": "agent", # Changed from "assistant" to "agent" + "id": f"{agent_name}:{version}", # New format: {name}:{version} + "name": agent_name, + "labels": [] # New: Label associations (empty for now) + } + + # Transform tools and merge with tool_resources + # Note: v1_tools already validated and parsed above during connected_agent check + v1_tool_resources = v1_assistant.get("tool_resources", {}) + + # Handle string-encoded tool_resources (from 
project client serialization) + if isinstance(v1_tool_resources, str): + try: + v1_tool_resources = json.loads(v1_tool_resources) + except json.JSONDecodeError: + # Try eval as fallback for string representations + try: + v1_tool_resources = eval(v1_tool_resources) if v1_tool_resources.strip().startswith('{') else {} + except: + print(f" โš ๏ธ Could not parse tool_resources string: {v1_tool_resources}") + v1_tool_resources = {} + + # Ensure v1_tool_resources is a dict + if not isinstance(v1_tool_resources, dict): + v1_tool_resources = {} + + # DEBUG: Print the actual tools and tool_resources structure + print(f"๐Ÿ”ง DEBUG - Tools transformation:") + print(f" v1_tools: {v1_tools}") + print(f" v1_tools type: {type(v1_tools)}") + print(f" v1_tool_resources: {v1_tool_resources}") + print(f" v1_tool_resources type: {type(v1_tool_resources)}") + + # Transform tools to v2 format by merging with tool_resources + transformed_tools = [] + for i, tool in enumerate(v1_tools): + print(f" Processing tool {i}: {tool} (type: {type(tool)})") + # Handle string-encoded individual tools + if isinstance(tool, str): + try: + tool = json.loads(tool) + except json.JSONDecodeError: + try: + tool = eval(tool) if tool.strip().startswith('{') else {} + except: + print(f" โš ๏ธ Could not parse tool string: {tool}") + continue + + if isinstance(tool, dict): + tool_type = tool.get("type") + + # Skip unsupported tools + if tool_type in ["connected_agent", "event_binding", "output_binding"]: + print(f" โญ๏ธ Skipping unsupported tool type: {tool_type}") + continue + transformed_tool = {"type": tool_type} + + # Handle file_search tool + if tool_type == "file_search" and "file_search" in v1_tool_resources: + file_search_resources = v1_tool_resources["file_search"] + print(f" Found file_search resources: {file_search_resources}") + if "vector_store_ids" in file_search_resources: + transformed_tool["vector_store_ids"] = file_search_resources["vector_store_ids"] + print(f" Added vector_store_ids: 
{file_search_resources['vector_store_ids']}") + + # Handle code_interpreter tool + elif tool_type == "code_interpreter" and "code_interpreter" in v1_tool_resources: + code_resources = v1_tool_resources["code_interpreter"] + print(f" Found code_interpreter resources: {code_resources}") + if "file_ids" in code_resources: + # Add container with auto type and file_ids for v2 format + transformed_tool["container"] = { + "type": "auto", + "file_ids": code_resources["file_ids"] + } + print(f" Added container with auto type and file_ids: {code_resources['file_ids']}") + else: + # If no file_ids, still add container with auto type + transformed_tool["container"] = {"type": "auto"} + print(f" Added container with auto type (no file_ids)") + + # Handle code_interpreter tool without resources + elif tool_type == "code_interpreter": + # If no tool_resources, still add container with auto type + transformed_tool["container"] = {"type": "auto"} + print(f" Added container with auto type (no resources)") + + # Handle function tools (no resources typically) + elif tool_type == "function": + # Copy function definition if present + if "function" in tool: + transformed_tool["function"] = tool["function"] + + # Handle MCP tools + elif tool_type == "mcp": + # Copy all MCP-specific properties that actually exist (don't copy None/null values) + for key in ["server_label", "server_description", "server_url", "require_approval", "project_connection_id"]: + if key in tool and tool[key] is not None: + transformed_tool[key] = tool[key] + print(f" Added MCP tool properties: {[k for k in tool.keys() if k != 'type' and tool[k] is not None]}") + + # Handle computer_use_preview tools + elif tool_type == "computer_use_preview": + # Copy all computer use specific properties + for key in ["display_width", "display_height", "environment"]: + if key in tool: + transformed_tool[key] = tool[key] + print(f" Added computer use tool properties: {[k for k in tool.keys() if k != 'type']}") + + # Handle 
image_generation tools
        elif tool_type == "image_generation":
            # Copy any image generation specific properties (currently none, but future-proof)
            for key, value in tool.items():
                if key != "type":
                    transformed_tool[key] = value
            print(f"    Added image generation tool properties: {[k for k in tool.keys() if k != 'type']}")

        # Handle azure_function tools
        elif tool_type == "azure_function":
            # Copy all azure function specific properties
            # (whitelist copy: only these keys are carried over, unlike the generic branch below)
            for key in ["name", "description", "parameters", "input_queue", "output_queue"]:
                if key in tool:
                    transformed_tool[key] = tool[key]
            print(f"    Added Azure Function tool properties: {[k for k in tool.keys() if k != 'type']}")

        # Handle any other tool types by copying all properties except 'type'
        else:
            for key, value in tool.items():
                if key != "type":
                    transformed_tool[key] = value
            print(f"    Added generic tool properties for {tool_type}: {[k for k in tool.keys() if k != 'type']}")

        transformed_tools.append(transformed_tool)

    print(f"  Final transformed_tools: {transformed_tools}")
    print(f"  Transformed tools count: {len(transformed_tools)}")

    # Create the v2 AgentVersionObject (definition level)
    agent_version = {
        "object": "agent.version",  # New object type
        "id": f"{agent_name}:{version}",
        "name": agent_name,
        "version": version,
        "created_at": v1_assistant.get("created_at"),
        "description": v1_assistant.get("description"),
        "metadata": enhanced_metadata,  # Use enhanced metadata with feature flags
        "labels": [],  # Associated labels for this version
        "status": "active",  # New: Agent status tracking
        "definition": {
            "kind": agent_kind,  # Dynamically determined based on v1 assistant properties
            "model": v1_assistant.get("model"),
            "instructions": v1_assistant.get("instructions"),
            "tools": transformed_tools,  # Use transformed tools with embedded resources
            "temperature": v1_assistant.get("temperature"),
            "top_p": v1_assistant.get("top_p"),
            "response_format": v1_assistant.get("response_format")
        }
    }

    # Handle tool_resources - this is a breaking change in v2
    # if "tool_resources" in v1_assistant:
    #     agent_version["definition"]["tool_resources_legacy"] = v1_assistant["tool_resources"]

    # Remove None values from definition to keep it clean
    definition = agent_version["definition"]
    agent_version["definition"] = {k: v for k, v in definition.items() if v is not None}

    return {
        "v2_agent_object": agent_object,
        "v2_agent_version": agent_version,
        "migration_notes": {
            "original_v1_id": v1_assistant.get("id"),
            "new_v2_format": f"{agent_name}:{version}",
            "migrated_at": int(time.time()),
            "changes": [
                "Object type changed from 'assistant' to 'agent'",
                "ID format changed to name:version",
                "Definition fields moved to nested definition object",
                "Tool resources structure changed (stored as legacy)",
                "Added versioning and labeling support"
            ]
        }
    }

def save_v2_agent_to_cosmos(v2_agent_data: Dict[str, Any], connection_string: str, database_name: str, container_name: str, project_id: Optional[str] = None, feature_flags: Optional[Dict[str, Any]] = None):
    """
    Save the v2 agent data to Cosmos DB with proper partition key structure.
    Matches existing container format with composite partition key: /object.project_id, /object.agent_name

    Args:
        v2_agent_data: Dict produced by the v1->v2 conversion; must contain
            'v2_agent_object', 'v2_agent_version' and 'migration_notes' keys.
        connection_string: Cosmos DB account connection string.
        database_name: Target Cosmos database name (must already exist).
        container_name: Target container name (must already exist with the
            composite partition key described above; no container is created here).
        project_id: Optional partition-key project id; defaults to a hard-coded
            test-environment value when omitted (see below).
        feature_flags: Optional feature-flag dict; recorded only in the
            document-level migration metadata, not in the agent definition.

    Returns:
        Dict with 'agent_version' and 'migration' keys holding the upserted
        documents as returned by Cosmos.

    Raises:
        Re-raises any exception from the Cosmos upsert calls after printing
        diagnostic output (both documents are dumped to stdout on failure).
    """
    client = create_cosmos_client_from_connection_string(connection_string)

    # Don't create container - use existing one with composite partition key
    database = client.get_database_client(database_name)
    container = database.get_container_client(container_name)

    # Use default project_id if not provided (matching existing data format)
    # NOTE(review): this default is a test-environment value — confirm before
    # pointing this function at a production container.
    if not project_id:
        project_id = "e2e-tests-westus2-account@e2e-tests-westus2@AML"  # Default from example

    # Get agent info - restore colon format to match existing data
    agent_name = v2_agent_data['v2_agent_object']['name']
    version = v2_agent_data['v2_agent_version']['version']
    agent_id_with_version = f"{agent_name}:{version}"

    # Create AgentVersionObject document matching existing format
    v2_data = v2_agent_data['v2_agent_version'].copy()  # Make a copy to avoid modifying original

    # Build the object structure with all fields including object_type
    object_structure = {
        "id": agent_id_with_version,  # ID inside object
        "metadata": v2_data.get("metadata", {}),  # Original agent metadata (without v1 ID)
        "description": v2_data.get("description"),
        "definition": v2_data.get("definition"),
        "agent_name": agent_name,  # Required for partition key
        "version": v2_data.get("version"),
        "project_id": project_id,  # Required for partition key
        "object_type": "agent.version"  # object_type inside object
    }

    # Build the document with object containing all data
    current_timestamp = int(time.time() * 1000)  # Milliseconds like in example
    agent_version_doc = {
        "id": agent_id_with_version,  # Top-level ID for document
        "info": {
            "created_at": current_timestamp,
            "updated_at": current_timestamp,
            "deleted": False
        },
        "metadata": {
            "migration_info": {
                "migrated_from": "v1_assistant_via_api_migration_script",  # Combined source info
                "migration_timestamp": current_timestamp,
                "original_v1_id": v2_agent_data['migration_notes']['original_v1_id'],
                "has_feature_flags": bool(feature_flags) if feature_flags else False,
                "feature_flag_count": len(feature_flags) if feature_flags else 0,
                "feature_flags": feature_flags if feature_flags else {}
            }
        },  # Document-level metadata with migration info
        "object": object_structure,  # All data inside object
        "migrated_at": int(time.time())  # Keep our migration timestamp too
    }

    print(f"๐Ÿ” Document structure for partition key:")
    print(f"   - id: {agent_version_doc['id']}")
    print(f"   - object: {agent_version_doc['object']}")
    print(f"   - object type: {type(agent_version_doc['object'])}")
    if isinstance(agent_version_doc['object'], dict):
        print(f"   - object.project_id: {agent_version_doc['object']['project_id']}")
        print(f"   - object.agent_name: {agent_version_doc['object']['agent_name']}")
        print(f"   - object.object_type: {agent_version_doc['object']['object_type']}")
    else:
        print(f"   โŒ ERROR: 'object' field is not a dict: {agent_version_doc['object']}")

    # Also save migration metadata (optional)
    # Second document records the migration itself so the original v1 id can be
    # traced back from the container.
    migration_timestamp = int(time.time() * 1000)  # Milliseconds like in example
    migration_doc = {
        "id": f"migration_{v2_agent_data['migration_notes']['original_v1_id']}",
        "info": {
            "created_at": migration_timestamp,
            "updated_at": migration_timestamp,
            "deleted": False
        },
        "metadata": {},  # Empty metadata object at same level as object
        "object": {
            "project_id": project_id,
            "agent_name": f"migration_{agent_name}",
            "object_type": "migration_metadata",  # object_type inside object
            "original_v1_id": v2_agent_data['migration_notes']['original_v1_id'],
            "new_v2_format": v2_agent_data['migration_notes']['new_v2_format'],
            "migrated_at": int(time.time()),
            "data": v2_agent_data['migration_notes']
        }
    }

    try:
        # Debug: Print document IDs and partition key values
        print(f"๐Ÿ” Attempting to save documents:")
        print(f"   - Agent Version ID: {agent_version_doc['id']}")
        print(f"   - Migration ID: {migration_doc['id']}")

        # Save documents one by one with error handling
        # NOTE(review): the two upserts are not atomic — a failure on the second
        # leaves the agent-version document saved without its migration record.
        print("   - Saving Agent Version (main document)...")
        agent_version_result = container.upsert_item(agent_version_doc)
        print("   โœ… Agent Version saved")

        print("   - Saving Migration Metadata...")
        migration_result = container.upsert_item(migration_doc)
        print("   โœ… Migration Metadata saved")

        print(f"โœ… Successfully saved v2 agent '{v2_agent_data['v2_agent_object']['name']}' to Cosmos DB")
        print(f"   - Agent Version: {agent_version_doc['id']}")
        print(f"   - Migration Metadata: {migration_doc['id']}")

        return {
            "agent_version": agent_version_result,
            "migration": migration_result
        }
    except Exception as e:
        print(f"โŒ Failed to save v2 agent to Cosmos DB: {e}")
        print(f"โŒ Error type: {type(e)}")
        print(f"โŒ Document that failed:")
        print(f"   Agent Version Doc: {agent_version_doc}")
        print(f"   Migration Doc: {migration_doc}")
        raise

def process_v1_assistants_to_v2_agents(args=None, assistant_id: Optional[str] = None, cosmos_connection_string: Optional[str] = None, use_api: bool = False, project_endpoint: Optional[str] = None, project_connection_string: Optional[str] = None, project_subscription: Optional[str] = None, project_resource_group: Optional[str] = None, project_name: Optional[str] = None, production_resource: Optional[str] = None, production_subscription: Optional[str] = None, production_tenant: Optional[str] = None, source_tenant: Optional[str] = None):
    """
    Main processing function that reads v1 assistants from Cosmos DB, API, Project endpoint, or Project connection string,
    converts them to v2 agents, and saves via v2 API.

    Args:
        assistant_id: Optional specific assistant ID to migrate (if not provided, migrates all)
        cosmos_connection_string: Optional Cosmos connection string (if not provided, uses environment variable)
        use_api: If True, read v1 assistants from API instead of Cosmos DB
        project_endpoint: Optional project endpoint for AIProjectClient (e.g., "https://...api/projects/p-3")
        project_connection_string: Optional project connection string for AIProjectClient (e.g., "eastus.api.azureml.ms;...;...;...")
        source_tenant: Optional source tenant ID for authentication when reading v1 assistants

    Additional args (not in original docstring):
        args: Optional argparse namespace; only the add_test_* flags are read, to
            inject synthetic tools into each assistant before conversion.
        project_subscription / project_resource_group / project_name: Passed
            through to the project-endpoint helpers when required by the
            installed azure-ai-projects version.
        production_resource / production_subscription / production_tenant:
            Target production v2 resource; when production_resource is set a
            PRODUCTION_TOKEN environment variable must also be present or the
            v2 API save for each assistant is skipped.

    Returns:
        None. Progress and a summary are printed to stdout; the function may
        call sys.exit(1) on missing prerequisites or return early when no
        source assistants are found.
    """

    # Handle package version management based on usage
    need_beta_version = os.environ.get('NEED_BETA_VERSION') == 'true' or project_connection_string is not None

    if need_beta_version:
        print("๐Ÿ”ง Project connection string detected - ensuring beta version is installed...")
        if not ensure_project_connection_package():
            print("โŒ Failed to install required beta version")
            sys.exit(1)
    # Source selection precedence: project connection string > project endpoint
    # > v1 API > Cosmos DB. All four branches populate the same v1_assistants list.
    if project_connection_string:
        print(f"๐Ÿข Reading v1 assistants from Project Connection String")
        if not PROJECT_CLIENT_AVAILABLE:
            print("โŒ Error: azure-ai-projects package is required for project connection string functionality")
            print("Install with: pip install azure-ai-projects==1.0.0b10")
            sys.exit(1)

        # Get assistants from Project Client using connection string
        if assistant_id:
            print(f"๐ŸŽฏ Fetching specific assistant from project connection: {assistant_id}")
            try:
                assistant_data = get_assistant_from_project_connection(project_connection_string, assistant_id)
                v1_assistants = [assistant_data]
            except Exception as e:
                print(f"โŒ Failed to fetch assistant {assistant_id} from project connection: {e}")
                return
        else:
            print("๐Ÿ“Š Fetching all assistants from project connection")
            try:
                v1_assistants = list_assistants_from_project_connection(project_connection_string)
            except Exception as e:
                print(f"โŒ Failed to fetch assistants from project connection: {e}")
                return

        if not v1_assistants:
            print("โŒ No v1 assistants found from project connection")
            return

        print(f"๐Ÿ“Š Found {len(v1_assistants)} v1 assistant records from project connection")

    elif project_endpoint:
        print(f"๐Ÿข Reading v1 assistants from Project Endpoint: {project_endpoint}")
        if not PROJECT_CLIENT_AVAILABLE:
            print("โŒ Error: azure-ai-projects package is required for project endpoint functionality")
            print("Install with: pip install azure-ai-projects")
            sys.exit(1)

        # Get assistants from Project Client
        if assistant_id:
            print(f"๐ŸŽฏ Fetching specific assistant from project: {assistant_id}")
            try:
                assistant_data = get_assistant_from_project(project_endpoint, assistant_id, project_subscription, project_resource_group, project_name)
                v1_assistants = [assistant_data]
            except Exception as e:
                print(f"โŒ Failed to fetch assistant {assistant_id} from project: {e}")
                return
        else:
            print("๐Ÿ“Š Fetching all assistants from project")
            try:
                v1_assistants = list_assistants_from_project(project_endpoint, project_subscription, project_resource_group, project_name)
            except Exception as e:
                print(f"โŒ Failed to fetch assistants from project: {e}")
                return

        if not v1_assistants:
            print("โŒ No v1 assistants found from project")
            return

        print(f"๐Ÿ“Š Found {len(v1_assistants)} v1 assistant records from project")

    elif use_api:
        print("๐ŸŒ Reading v1 assistants from API")
        # Ensure we have API authentication
        if not TOKEN and not set_api_token():
            print("โŒ Error: Unable to obtain API authentication token")
            print("Set AZ_TOKEN env var or ensure az CLI is installed and logged in")
            sys.exit(1)

        # Get assistants from API
        if assistant_id:
            print(f"๐ŸŽฏ Fetching specific assistant from API: {assistant_id}")
            try:
                assistant_data = get_assistant_from_api(assistant_id)
                v1_assistants = [assistant_data]
            except Exception as e:
                print(f"โŒ Failed to fetch assistant {assistant_id} from API: {e}")
                return
        else:
            print("๐Ÿ“Š Fetching all assistants from API")
            try:
                v1_assistants = list_assistants_from_api()
            except Exception as e:
                print(f"โŒ Failed to fetch assistants from API: {e}")
                return

        if not v1_assistants:
            print("โŒ No v1 assistants found from API")
            return

        print(f"๐Ÿ“Š Found {len(v1_assistants)} v1 assistant records from API")

    else:
        print(f"๐Ÿ“– Reading v1 assistants from Cosmos DB: {DATABASE_NAME}/{SOURCE_CONTAINER}")
        # Use provided connection string or fall back to environment variable
        connection_string = cosmos_connection_string or COSMOS_CONNECTION_STRING

        if not connection_string:
            print("Error: COSMOS_CONNECTION_STRING environment variable must be set or provided as parameter")
            print("Set it with: $env:COSMOS_CONNECTION_STRING='AccountEndpoint=...;AccountKey=...'")
            print("Or provide it as command line argument: python v1_to_v2_migration.py ")
            sys.exit(1)

        # Build query - filter by assistant_id if provided
        # NOTE(review): assistant_id is interpolated directly into the SQL text.
        # Cosmos supports parameterized queries (@param + parameters list) —
        # prefer that to avoid query injection / quoting bugs if assistant_id
        # ever comes from untrusted input. TODO confirm and switch.
        if assistant_id:
            query = f"SELECT * FROM c WHERE c.object_type = 'v1_assistant' AND c.data.id = '{assistant_id}'"
            print(f"๐ŸŽฏ Filtering for specific assistant ID: {assistant_id}")
        else:
            query = "SELECT * FROM c WHERE c.object_type = 'v1_assistant'"
            print("๐Ÿ“Š Processing all v1 assistants")

        # Read v1 assistant data from source container
        v1_data = fetch_data(
            database_name=DATABASE_NAME,
            container_name=SOURCE_CONTAINER,
            connection_string=connection_string,
            query=query
        )

        if v1_data is None or v1_data.empty:
            print("โŒ No v1 assistant data found in source container")
            return

        print(f"๐Ÿ“Š Found {len(v1_data)} v1 assistant records from Cosmos DB")

        # Convert pandas DataFrame to list for uniform processing
        v1_assistants = []
        for idx, (index, row) in enumerate(v1_data.iterrows()):
            # Process Cosmos DB row format (same logic as before)
            v1_assistant = {}

            # Check if we have flattened 'data.*' columns
            data_columns = [col for col in row.keys() if col.startswith('data.')]

            if data_columns:
                # Reconstruct nested structure
                for col in data_columns:
                    field_name = col[5:]  # Remove 'data.' (5 characters)
                    value = row[col]

                    # Handle nested fields like 'internal_metadata.feature_flags'
                    if '.' in field_name:
                        parts = field_name.split('.')
                        current = v1_assistant
                        for part in parts[:-1]:
                            if part not in current:
                                current[part] = {}
                            current = current[part]
                        current[parts[-1]] = value
                    else:
                        v1_assistant[field_name] = value

            elif 'data' in row and row['data'] is not None:
                raw_data = row['data']
                if isinstance(raw_data, str):
                    v1_assistant = json.loads(raw_data)
                elif isinstance(raw_data, dict):
                    v1_assistant = raw_data
                else:
                    continue
            else:
                continue

            # Clean up None values
            v1_assistant = {k: v for k, v in v1_assistant.items() if v is not None}
            v1_assistants.append(v1_assistant)

    # Ensure we have API authentication for v2 API saving
    # Use source tenant for authentication (for reading v1 assistants)
    # Force refresh if we have production resource (might have switched tenants)
    force_refresh = production_resource is not None
    tenant_for_auth = source_tenant if source_tenant else SOURCE_TENANT
    if not TOKEN and not set_api_token(force_refresh=force_refresh, tenant_id=tenant_for_auth):
        print("โŒ Error: Unable to obtain API authentication token for v2 API saving")
        print("Set AZ_TOKEN env var or ensure az CLI is installed and logged in")
        sys.exit(1)

    # Now we have uniform v1_assistants list regardless of source
    # Process each v1 assistant
    processed_count = 0
    for idx, v1_assistant in enumerate(v1_assistants):
        try:
            print(f"\n๐Ÿ”„ Processing record {idx + 1}/{len(v1_assistants)}")

            if project_connection_string:
                print(f"   โœ… Processing Project Connection data for assistant: {v1_assistant.get('id', 'unknown')}")
            elif project_endpoint:
                print(f"   โœ… Processing Project Endpoint data for assistant: {v1_assistant.get('id', 'unknown')}")
            elif use_api:
                print(f"   โœ… Processing API data for assistant: {v1_assistant.get('id', 'unknown')}")
            else:
                print(f"   โœ… Processing Cosmos DB data for assistant: {v1_assistant.get('id', 'unknown')}")

            # Clean up None values
            v1_assistant = {k: v for k, v in v1_assistant.items() if v is not None}

            # Helper function to ensure tools array exists and is properly formatted
            # (closure over the current v1_assistant; normalizes missing /
            # string-encoded / non-list 'tools' to a list)
            def ensure_tools_array():
                if "tools" not in v1_assistant:
                    v1_assistant["tools"] = []
                elif isinstance(v1_assistant["tools"], str):
                    # Handle string-encoded tools
                    try:
                        v1_assistant["tools"] = json.loads(v1_assistant["tools"])
                    # NOTE(review): bare except — would also swallow
                    # KeyboardInterrupt; json.JSONDecodeError would be the
                    # precise catch here.
                    except:
                        v1_assistant["tools"] = []

                # Ensure tools is a list
                if not isinstance(v1_assistant["tools"], list):
                    v1_assistant["tools"] = []

            # Add test tools if requested
            if args:
                # Add test function tool
                if hasattr(args, 'add_test_function') and args.add_test_function:
                    print("๐Ÿงช Adding test function tool for testing...")
                    test_function_tool = {
                        "type": "function",
                        "function": {
                            "name": "get_current_temperature",
                            "description": "Get the current temperature for a specific location",
                            "parameters": {
                                "type": "object",
                                "properties": {
                                    "location": {
                                        "type": "string",
                                        "description": "The city and state, e.g., San Francisco, CA"
                                    },
                                    "unit": {
                                        "type": "string",
                                        "enum": ["Celsius", "Fahrenheit"],
                                        "description": "The temperature unit to use. Infer this from the user's location."
                                    }
                                },
                                "required": ["location", "unit"]
                            }
                        }
                    }
                    ensure_tools_array()
                    v1_assistant["tools"].append(test_function_tool)
                    print(f"   โœ… Added test function tool: {test_function_tool['function']['name']}")

                # Add test MCP tool
                if hasattr(args, 'add_test_mcp') and args.add_test_mcp:
                    print("๐Ÿงช Adding test MCP tool for testing...")
                    test_mcp_tool = {
                        "type": "mcp",
                        "server_label": "dmcp",
                        "server_description": "A Dungeons and Dragons MCP server to assist with dice rolling.",
                        "server_url": "https://dmcp-server.deno.dev/sse",
                        "require_approval": "never",
                    }
                    ensure_tools_array()
                    v1_assistant["tools"].append(test_mcp_tool)
                    print(f"   โœ… Added test MCP tool: {test_mcp_tool['server_label']}")

                # Add test image generation tool
                if hasattr(args, 'add_test_imagegen') and args.add_test_imagegen:
                    print("๐Ÿงช Adding test image generation tool for testing...")
                    test_imagegen_tool = {
                        "type": "image_generation"
                    }
                    ensure_tools_array()
                    v1_assistant["tools"].append(test_imagegen_tool)
                    print(f"   โœ… Added test image generation tool")

                # Add test computer use tool
                if hasattr(args, 'add_test_computer') and args.add_test_computer:
                    print("๐Ÿงช Adding test computer use tool for testing...")
                    test_computer_tool = {
                        "type": "computer_use_preview",
                        "display_width": 1024,
                        "display_height": 768,
                        "environment": "browser"  # other possible values: "mac", "windows", "ubuntu"
                    }
                    ensure_tools_array()
                    v1_assistant["tools"].append(test_computer_tool)
                    print(f"   โœ… Added test computer use tool: {test_computer_tool['environment']} environment")

                # Add test Azure Function tool
                if hasattr(args, 'add_test_azurefunction') and args.add_test_azurefunction:
                    print("๐Ÿงช Adding test Azure Function tool for testing...")
                    # Using your local Azurite instance
                    storage_service_endpoint = "https://127.0.0.1:8001"
                    test_azurefunction_tool = {
                        "type": "azure_function",
                        "name": "foo",
                        "description": "Get answers from the foo bot.",
                        "parameters": {
                            "type": "object",
                            "properties": {
                                "query": {
                                    "type": "string",
                                    "description": "The question to ask."
                                },
                                "outputqueueuri": {
                                    "type": "string",
                                    "description": "The full output queue URI."
                                }
                            },
                            "required": ["query"]
                        },
                        "input_queue": {
                            "queue_name": "azure-function-foo-input",
                            "storage_service_endpoint": storage_service_endpoint
                        },
                        "output_queue": {
                            "queue_name": "azure-function-foo-output",
                            "storage_service_endpoint": storage_service_endpoint
                        }
                    }
                    ensure_tools_array()
                    v1_assistant["tools"].append(test_azurefunction_tool)
                    print(f"   โœ… Added test Azure Function tool: {test_azurefunction_tool['name']} (using Azurite at {storage_service_endpoint})")

            # Pretty print the full v1 object for inspection
            print(f"\n๐Ÿ“‹ Full v1 Assistant Object:")
            print("=" * 60)
            import pprint
            pprint.pprint(v1_assistant, indent=2, width=80)
            print("=" * 60)

            # NOTE(review): this rebinds the function parameter 'assistant_id'
            # inside the loop; harmless today (parameter is not read after this
            # point) but fragile — a local name would be safer.
            assistant_id = v1_assistant.get('id', 'unknown')

            print(f"   Assistant ID: {assistant_id}")
            print(f"   Assistant Name: {v1_assistant.get('name', 'N/A')}")
            print(f"   Assistant Model: {v1_assistant.get('model', 'N/A')}")

            # Preview the detected agent kind
            detected_kind = determine_agent_kind(v1_assistant)
            print(f"   ๐Ÿ” Detected Agent Kind: {detected_kind}")

            # Convert v1 to v2
            v2_agent = v1_assistant_to_v2_agent(v1_assistant)

            # Save to target container with proper project_id
            # You can customize this project_id as needed
            project_id = "e2e-tests-westus2-account@e2e-tests-westus2@AML"  # Match existing data format

            # Extract feature flags to pass to save function
            # NOTE(review): assistant_feature_flags (and project_id above) are
            # computed but not referenced again in this function — the save path
            # below goes through the v2 API, not save_v2_agent_to_cosmos.
            # Confirm whether these are dead leftovers from the Cosmos path.
            v1_metadata = v1_assistant.get("metadata", {})
            assistant_feature_flags = {}
            if "feature_flags" in v1_metadata:
                assistant_feature_flags = v1_metadata.get("feature_flags", {})
            elif "internal_metadata" in v1_assistant and isinstance(v1_assistant["internal_metadata"], dict):
                assistant_feature_flags = v1_assistant["internal_metadata"].get("feature_flags", {})

            # Save the v2 agent via v2 API
            print("๐ŸŒ Saving via v2 API...")
            # Extract agent name (without version) for the API endpoint
            agent_name = v2_agent['v2_agent_object']['name']

            # Prepare the payload for v2 API
            api_payload = prepare_v2_api_payload(v2_agent)

            # Create the agent version via v2 API
            # Production token is provided via environment variable
            if production_resource and not PRODUCTION_TOKEN:
                print(f"โŒ Production resource specified but no PRODUCTION_TOKEN environment variable found. Skipping v2 API save.")
                print("๐Ÿ’ก Use run-migration-docker-auth.ps1 for automatic dual-token authentication")
                continue

            api_result = create_agent_version_via_api(agent_name, api_payload, production_resource, production_subscription)
            print(f"โœ… Agent version created via v2 API: {api_result.get('id', 'N/A')}")

            processed_count += 1

        except KeyError as ke:
            print(f"โŒ KeyError processing record {idx + 1}: {ke}")
            print(f"   Assistant data keys: {list(v1_assistant.keys()) if v1_assistant else 'N/A'}")
            continue
        except json.JSONDecodeError as je:
            print(f"โŒ JSON decode error processing record {idx + 1}: {je}")
            continue
        except Exception as e:
            print(f"โŒ Error processing record {idx + 1}: {e}")
            print(f"   Error type: {type(e)}")
            import traceback
            traceback.print_exc()
            continue

    print(f"\n๐ŸŽ‰ Migration completed!")
    print(f"   Total records processed: {processed_count}/{len(v1_assistants)}")
    if project_connection_string:
        print(f"   Source: Project Connection String")
    elif project_endpoint:
        print(f"   Source: Project Endpoint ({project_endpoint})")
    elif use_api:
        print(f"   Source: API ({HOST})")
    else:
        print(f"   Source: Cosmos DB ({DATABASE_NAME}/{SOURCE_CONTAINER})")

    # Always using v2 API
    print(f"   Target: v2 API ({BASE_V2})")

def main():
    """
    Main function to orchestrate the v1 to v2 migration.
+ """ + parser = argparse.ArgumentParser( + description="Migrate v1 OpenAI Assistants to v2 Azure ML Agents", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Migrate from v1 API to production v2 API (REQUIRED production parameters) + python v1_to_v2_migration.py --use-api \\ + --source-tenant 72f988bf-86f1-41af-91ab-2d7cd011db47 \\ + --production-resource nextgen-eastus \\ + --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\ + --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 \\ + asst_wBMH6Khnqbo1J7W1G6w3p1rN + + # Migrate all assistants from Cosmos DB to production v2 API + python v1_to_v2_migration.py \\ + --production-resource nextgen-eastus \\ + --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\ + --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 + + # Migrate from project endpoint to production v2 API + python v1_to_v2_migration.py \\ + --project-endpoint "https://your-project.api.azure.com/api/projects/p-3" \\ + --production-resource nextgen-eastus \\ + --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\ + --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 \\ + asst_abc123 + + # Note: Use run-migration-docker-auth.ps1 for automatic dual-tenant authentication + + # Read from project connection string (requires azure-ai-projects==1.0.0b10) + python v1_to_v2_migration.py --project-connection-string "eastus.api.azureml.ms;subscription-id;resource-group;project-name" + python v1_to_v2_migration.py asst_abc123 --project-connection-string "eastus.api.azureml.ms;subscription-id;resource-group;project-name" + + # Production deployment: migrate to production Azure AI resource + python v1_to_v2_migration.py --project-endpoint "https://source-project.api.azure.com/api/projects/p-3" --production-resource "nextgen-eastus" --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" asst_abc123 + """ + ) + + 
parser.add_argument( + 'assistant_id', + nargs='?', + default=None, + help='Optional: Specific assistant ID to migrate (e.g., asst_abc123). If not provided, migrates all assistants.' + ) + + parser.add_argument( + 'cosmos_endpoint', + nargs='?', + default=None, + help='Optional: Cosmos DB connection string. If not provided, uses COSMOS_CONNECTION_STRING environment variable.' + ) + + parser.add_argument( + '--use-api', + action='store_true', + help='Read v1 assistants from v1 API instead of Cosmos DB.' + ) + + parser.add_argument( + '--project-endpoint', + type=str, + help='Project endpoint for AIProjectClient (e.g., "https://...api/projects/p-3"). If provided, reads assistants from project instead of API or Cosmos DB.' + ) + + parser.add_argument( + '--project-subscription', + type=str, + help='Azure subscription ID for project endpoint (optional, only needed for certain azure-ai-projects versions).' + ) + + parser.add_argument( + '--project-resource-group', + type=str, + help='Azure resource group name for project endpoint (optional, only needed for certain azure-ai-projects versions).' + ) + + parser.add_argument( + '--project-name', + type=str, + help='Project name for project endpoint (optional, only needed for certain azure-ai-projects versions).' + ) + + parser.add_argument( + '--project-connection-string', + type=str, + help='Project connection string for AIProjectClient (e.g., "eastus.api.azureml.ms;...;...;..."). Requires azure-ai-projects==1.0.0b10. If provided, reads assistants from project connection instead of other methods.' + ) + + parser.add_argument( + '--add-test-function', + action='store_true', + help='Add a test function tool to the assistant for testing function tool transformation. Adds get_current_temperature function.' + ) + + parser.add_argument( + '--add-test-mcp', + action='store_true', + help='Add a test MCP tool to the assistant for testing MCP tool transformation. Adds D&D dice rolling MCP server.' 
+ ) + + parser.add_argument( + '--add-test-imagegen', + action='store_true', + help='Add a test image generation tool to the assistant for testing image generation tool transformation.' + ) + + parser.add_argument( + '--add-test-computer', + action='store_true', + help='Add a test computer use tool to the assistant for testing computer use tool transformation.' + ) + + parser.add_argument( + '--add-test-azurefunction', + action='store_true', + help='Add a test Azure Function tool to the assistant for testing Azure Function tool transformation.' + ) + + # Production Resource Arguments (REQUIRED for v2 API) + parser.add_argument( + '--production-resource', + type=str, + required=True, + help='Production Azure AI resource name (REQUIRED). Example: "nextgen-eastus"' + ) + + parser.add_argument( + '--production-subscription', + type=str, + required=True, + help='Production subscription ID (REQUIRED). Example: "b1615458-c1ea-49bc-8526-cafc948d3c25"' + ) + + parser.add_argument( + '--production-tenant', + type=str, + required=True, + help='Production tenant ID for Azure authentication (REQUIRED). Example: "33e577a9-b1b8-4126-87c0-673f197bf624"' + ) + + parser.add_argument( + '--source-tenant', + type=str, + help='Source tenant ID for reading v1 assistants. If not provided, uses SOURCE_TENANT environment variable or defaults to Microsoft tenant (72f988bf-86f1-41af-91ab-2d7cd011db47). 
Example: "72f988bf-86f1-41af-91ab-2d7cd011db47"' + ) + + args = parser.parse_args() + + # Handle empty string as None for assistant_id + assistant_id = args.assistant_id if args.assistant_id and args.assistant_id.strip() else None + cosmos_connection_string = args.cosmos_endpoint if args.cosmos_endpoint and args.cosmos_endpoint.strip() else None + + # Production arguments are now required, so no additional validation needed + + print("๐Ÿš€ Starting v1 to v2 Agent Migration") + print("=" * 50) + + # Production parameters are required + print(f"๐Ÿญ Production v2 API Configuration:") + print(f" ๐ŸŽฏ Resource: {args.production_resource}") + print(f" ๐Ÿ“‹ Subscription: {args.production_subscription}") + print(f" ๐Ÿ” Tenant: {args.production_tenant}") + + if PRODUCTION_TOKEN: + print(f" โœ… Production token available (length: {len(PRODUCTION_TOKEN)})") + else: + print(" โš ๏ธ No PRODUCTION_TOKEN environment variable found") + print(" ๐Ÿ’ก Use run-migration-docker-auth.ps1 for automatic dual-token authentication") + + if assistant_id: + print(f"๐ŸŽฏ Target Assistant ID: {assistant_id}") + else: + print("๐Ÿ“Š Processing all assistants") + + if cosmos_connection_string: + print("๐Ÿ”— Using provided Cosmos connection string") + else: + print("๐Ÿ”— Using COSMOS_CONNECTION_STRING environment variable") + + if args.project_connection_string: + print(f"๐Ÿข Reading assistants from Project Connection String") + elif args.project_endpoint: + print(f"๐Ÿข Reading assistants from Project Endpoint: {args.project_endpoint}") + elif args.use_api: + print("๐ŸŒ Reading assistants from v1 API") + else: + print("๐Ÿ’พ Reading assistants from Cosmos DB") + + # Always using v2 API (required) + if args.production_resource: + print(f"๐Ÿญ Saving agents via PRODUCTION v2 API (resource: {args.production_resource})") + print(f" ๐Ÿ“‹ Production subscription: {args.production_subscription}") + else: + print("๐Ÿš€ Saving agents via PROD v2 API") + + print("=" * 50) + + 
process_v1_assistants_to_v2_agents( + args, assistant_id, cosmos_connection_string, args.use_api, + args.project_endpoint, args.project_connection_string, args.project_subscription, + args.project_resource_group, args.project_name, args.production_resource, + args.production_subscription, args.production_tenant, args.source_tenant + ) + +if __name__ == "__main__": + main() \ No newline at end of file