## Rusty Schema Diff - Function Documentation

### Schema Analysis API

#### Analyze Schema Compatibility

**Functionality:**
Analyze compatibility between two schema versions, detecting breaking changes and generating detailed reports. Supports multiple schema formats including JSON Schema, OpenAPI, Protobuf, and SQL DDL.

**Parameters:**

- **old_schema (Schema, required):**
  The original schema version to compare against.

- **new_schema (Schema, required):**
  The new schema version being analyzed.

**Response:**

- **compatibility_report (CompatibilityReport):**
  Detailed report containing:
  - **is_compatible (bool):** Overall compatibility status
  - **compatibility_score (u32):** Score from 0-100
  - **changes (Vec<SchemaChange>):** List of detected changes
  - **issues (Vec<CompatibilityIssue>):** Any compatibility issues found

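**Usage Example:**

As an illustration of the response fields listed above, here is a minimal sketch of reading a returned `CompatibilityReport`. The field names are taken from the description above; the assumptions that the prelude exports `CompatibilityReport` and that `CompatibilityIssue` can be printed with `{:?}` are ours.

```rust
use rusty_schema_diff::prelude::*;

// Sketch only: `report` is a CompatibilityReport produced by
// analyze_compatibility (see the analyzer examples below).
fn print_report(report: &CompatibilityReport) {
    println!("Compatible: {}", report.is_compatible);
    println!("Score: {}/100", report.compatibility_score);

    for change in &report.changes {
        println!("Change: {}", change.description);
    }

    // Assumes CompatibilityIssue derives Debug.
    for issue in &report.issues {
        println!("Issue: {:?}", issue);
    }
}
```
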
### JSON Schema Analysis

#### Analyze JSON Schema Changes

**Functionality:**
Analyze changes between JSON Schema versions, with support for complex nested structures and references.

**Usage Example:**

```rust
use rusty_schema_diff::{Schema, SchemaFormat, JsonSchemaAnalyzer, SchemaAnalyzer};
use semver::Version;

// Original schema: an object with "name" and "age" properties.
let old_schema = Schema::new(
    SchemaFormat::JsonSchema,
    r#"{
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "age": {"type": "integer"}
        }
    }"#.to_string(),
    Version::parse("1.0.0").unwrap()
);

// Updated schema: adds an optional "email" property.
let new_schema = Schema::new(
    SchemaFormat::JsonSchema,
    r#"{
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "age": {"type": "integer"},
            "email": {"type": "string", "format": "email"}
        }
    }"#.to_string(),
    Version::parse("1.1.0").unwrap()
);

let analyzer = JsonSchemaAnalyzer;
// `?` assumes the surrounding function returns a Result.
let report = analyzer.analyze_compatibility(&old_schema, &new_schema)?;

println!("Compatibility Score: {}", report.compatibility_score);
```

### OpenAPI Analysis

#### Analyze OpenAPI Changes

**Functionality:**
Analyze changes between OpenAPI specifications, including endpoints, parameters, request bodies, and responses.

**Usage Example:**

```rust
use rusty_schema_diff::prelude::*;

let old_api = Schema::new(
    SchemaFormat::OpenAPI,
    // Your OpenAPI spec here
    openapi_yaml.to_string(),
    Version::parse("1.0.0").unwrap()
);

// `new_api` is the updated specification, constructed the same way as `old_api`.
let analyzer = OpenApiAnalyzer;
let report = analyzer.analyze_compatibility(&old_api, &new_api)?;

// Check for breaking changes in endpoints
for change in report.changes {
    if change.is_breaking {
        println!("Breaking change in {}: {}", change.location, change.description);
    }
}
```

### SQL DDL Analysis

#### Analyze SQL Schema Changes

**Functionality:**
Analyze changes between SQL DDL schemas, including table structures, columns, constraints, and indexes.

**Usage Example:**

```rust
use rusty_schema_diff::prelude::*;

let old_ddl = Schema::new(
    SchemaFormat::SqlDDL,
    r#"
    CREATE TABLE users (
        id INTEGER PRIMARY KEY,
        name VARCHAR(255) NOT NULL
    );
    "#.to_string(),
    Version::parse("1.0.0").unwrap()
);

// `new_ddl` is the updated DDL, constructed the same way as `old_ddl`.
let analyzer = SqlAnalyzer;
let report = analyzer.analyze_compatibility(&old_ddl, &new_ddl)?;

// Generate migration SQL
let plan = analyzer.generate_migration_path(&old_ddl, &new_ddl)?;
for step in plan.steps {
    println!("Migration SQL: {}", step);
}
```

### Protobuf Analysis

#### Analyze Protobuf Changes

**Functionality:**
Analyze changes between Protobuf schemas, including messages, fields, and services.

**Usage Example:**

```rust
use rusty_schema_diff::prelude::*;

let old_proto = Schema::new(
    SchemaFormat::Protobuf,
    r#"
    syntax = "proto3";
    message User {
        string name = 1;
        int32 age = 2;
    }
    "#.to_string(),
    Version::parse("1.0.0").unwrap()
);

// `new_proto` is the updated schema, constructed the same way as `old_proto`.
let analyzer = ProtobufAnalyzer;
let report = analyzer.analyze_compatibility(&old_proto, &new_proto)?;

// Check compatibility
if report.is_compatible {
    println!("Schemas are compatible");
    for change in report.changes {
        println!("Change: {}", change.description);
    }
}
```

### Migration Plan Generation

#### Generate Migration Path

**Functionality:**
Generate step-by-step migration plans between schema versions.

**Parameters:**

- **old_schema (Schema, required):**
  The source schema version.

- **new_schema (Schema, required):**
  The target schema version.

**Response:**

- **migration_plan (MigrationPlan):**
  Contains:
  - **steps (Vec<String>):** Ordered list of migration steps
  - **metadata (HashMap<String, String>):** Additional migration information

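**Usage Example:**

A minimal sketch of walking a generated plan, reusing `SqlAnalyzer` and the `old_ddl`/`new_ddl` values from the SQL example above. The `steps` and `metadata` names come from the response description; treating them as public, directly iterable fields is our assumption.

```rust
use rusty_schema_diff::prelude::*;

// Sketch only: old_ddl and new_ddl are the Schema values built in the SQL example.
let analyzer = SqlAnalyzer;
let plan = analyzer.generate_migration_path(&old_ddl, &new_ddl)?;

// Ordered migration steps.
for step in &plan.steps {
    println!("Step: {}", step);
}

// Additional migration information attached to the plan.
for (key, value) in &plan.metadata {
    println!("{} = {}", key, value);
}
```
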
### Error Handling

The library uses a custom error type `SchemaDiffError` that covers various error cases:

```rust
pub enum SchemaDiffError {
    ParseError(String),
    ComparisonError(String),
    InvalidFormat(String),
    IoError(std::io::Error),
    JsonError(serde_json::Error),
    ProtobufError(String),
}
```

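A short sketch of matching on these variants explicitly; it assumes `analyze_compatibility` returns `Result<CompatibilityReport, SchemaDiffError>` (consistent with the `?` usage in the examples above), that the prelude re-exports `SchemaDiffError`, and that the error type implements `Debug`.

```rust
use rusty_schema_diff::prelude::*;

// Sketch only: analyzer, old_schema and new_schema are built as in the earlier examples.
match analyzer.analyze_compatibility(&old_schema, &new_schema) {
    Ok(report) => println!("Compatibility Score: {}", report.compatibility_score),
    Err(SchemaDiffError::ParseError(msg)) => eprintln!("Could not parse schema: {}", msg),
    Err(SchemaDiffError::InvalidFormat(msg)) => eprintln!("Unsupported schema format: {}", msg),
    // Assumes SchemaDiffError implements Debug.
    Err(other) => eprintln!("Schema analysis failed: {:?}", other),
}
```
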
### Best Practices

1. **Version Management:**
   - Always use semantic versioning for your schemas
   - Include version information in schema metadata

2. **Compatibility Analysis:**
   - Run compatibility checks before deploying schema changes
   - Review all breaking changes carefully
   - Consider backward compatibility requirements

3. **Migration Planning:**
   - Generate and review migration plans before implementation
   - Test migrations in a staging environment
   - Have rollback plans ready

4. **Error Handling:**
   - Implement proper error handling for all schema operations
   - Log and monitor schema analysis results
   - Validate schemas before analysis