@@ -59,17 +59,21 @@ def test_api_register_schema_incompatible(kafka_cluster, load_file):

    """
    sr = kafka_cluster.schema_registry()
-    schema1 = Schema(load_file('adv_schema.avsc'), schema_type='AVRO')
-    schema2 = Schema(load_file('basic_schema.avsc'), schema_type='AVRO')
+    schema1 = Schema(load_file('basic_schema.avsc'), schema_type='AVRO')
+    schema2 = Schema(load_file('adv_schema.avsc'), schema_type='AVRO')
    subject = _subject_name('test_register_incompatible')

    sr.register_schema(subject, schema1)

    with pytest.raises(SchemaRegistryError, match="Schema being registered is"
                                                  " incompatible with an"
                                                  " earlier schema") as e:
+        # The default Schema Registry compatibility type is BACKWARD.
+        # This allows 1) fields to be deleted and 2) optional fields to
+        # be added. schema2 adds non-optional fields to schema1, so
+        # registering schema2 after schema1 should fail.
        sr.register_schema(subject, schema2)
-    assert e.value.http_status_code == 409
+    assert e.value.http_status_code == 409  # conflict
    assert e.value.error_code == 409
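A minimal sketch of the behavior the new comments describe, assuming a registry at http://localhost:8081; the inline schemas and the 'user-value' subject are hypothetical stand-ins, since the basic_schema.avsc and adv_schema.avsc fixtures are not shown in this diff:

from confluent_kafka.schema_registry import Schema, SchemaRegistryClient
from confluent_kafka.schema_registry.error import SchemaRegistryError

# Hypothetical stand-in for basic_schema.avsc: one required field.
schema_v1 = Schema('{"type": "record", "name": "User", "fields": ['
                   '{"name": "name", "type": "string"}]}',
                   schema_type='AVRO')

# Hypothetical stand-in for adv_schema.avsc: adds a required field with no
# default. A reader using this new schema cannot fill that field in for old
# records, so it violates BACKWARD compatibility.
schema_v2 = Schema('{"type": "record", "name": "User", "fields": ['
                   '{"name": "name", "type": "string"},'
                   '{"name": "age", "type": "int"}]}',
                   schema_type='AVRO')

sr = SchemaRegistryClient({'url': 'http://localhost:8081'})  # assumed local registry
sr.register_schema('user-value', schema_v1)

try:
    sr.register_schema('user-value', schema_v2)
except SchemaRegistryError as e:
    # The registry refuses the incompatible schema with HTTP 409 (conflict).
    assert e.http_status_code == 409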
@@ -237,6 +241,9 @@ def test_api_get_subject_versions(kafka_cluster, load_file):
        assert registered_schema.subject == subject
        assert registered_schema.version in versions

+    # revert global compatibility level back to the default.
+    sr.set_compatibility(level="BACKWARD")
+

def test_api_delete_subject(kafka_cluster, load_file):
    """
@@ -416,6 +423,9 @@ def test_api_config_update(kafka_cluster):
        sr.set_compatibility(level=level)
        assert sr.get_compatibility() == level

+    # revert global compatibility level back to the default.
+    sr.set_compatibility(level="BACKWARD")
+

def test_api_register_logical_schema(kafka_cluster, load_file):
    sr = kafka_cluster.schema_registry()
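Both revert hunks exist because set_compatibility(level=...) with no subject argument changes the registry-wide default, which would otherwise leak into later tests. A minimal sketch of the global versus per-subject forms, assuming a registry at http://localhost:8081 and a hypothetical 'user-value' subject (the per-subject subject_name keyword is not exercised in this diff):

from confluent_kafka.schema_registry import SchemaRegistryClient

sr = SchemaRegistryClient({'url': 'http://localhost:8081'})  # assumed local registry

# No subject given: this reads/writes the registry-wide default that every
# subject without its own setting inherits.
sr.set_compatibility(level="NONE")
assert sr.get_compatibility() == "NONE"

# With subject_name the setting applies to that one subject only.
sr.set_compatibility(subject_name="user-value", level="FULL")
assert sr.get_compatibility(subject_name="user-value") == "FULL"

# Restore the shipped default so unrelated tests still see BACKWARD.
sr.set_compatibility(level="BACKWARD")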