Skip to content

Commit 34b6f03

Browse files
committed
test(provider): add unleash integration tests
Signed-off-by: Kiki L Hakiem <[email protected]>
1 parent 0a00ea7 commit 34b6f03

File tree

2 files changed

+333
-0
lines changed

2 files changed

+333
-0
lines changed

providers/openfeature-provider-unleash/pyproject.toml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,11 @@ pretty = true
6262
strict = true
6363
disallow_any_generics = false
6464

65+
[tool.pytest.ini_options]
66+
markers = [
67+
"integration: marks tests as integration tests (deselect with '-m \"not integration\"')",
68+
]
69+
6570
[project.scripts]
6671
# workaround while UV doesn't support scripts directly in the pyproject.toml
6772
# see: https://github.com/astral-sh/uv/issues/5903
Lines changed: 328 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,328 @@
1+
"""Integration tests for Unleash provider using a running Unleash instance."""
2+
3+
from openfeature import api
4+
from openfeature.contrib.provider.unleash import UnleashProvider
5+
from openfeature.evaluation_context import EvaluationContext
6+
import pytest
7+
import requests
8+
9+
10+
UNLEASH_URL = "http://0.0.0.0:4242/api"
11+
API_TOKEN = "default:development.unleash-insecure-api-token"
12+
ADMIN_TOKEN = "user:76672ac99726f8e48a1bbba16b7094a50d1eee3583d1e8457e12187a"
13+
14+
15+
def create_test_flags():
    """Provision the integration-test feature flags on the Unleash instance.

    Best-effort setup: failures for one flag are reported and do not stop
    the remaining flags from being created.
    """
    # All flags share type="release" and enabled=True; only name/description
    # differ, so keep just those in a table (dict preserves insertion order).
    flag_descriptions = {
        "integration-boolean-flag": "Boolean feature flag for testing",
        "integration-string-flag": "String feature flag for testing",
        "integration-float-flag": "Float feature flag for testing",
        "integration-object-flag": "Object feature flag for testing",
        "integration-integer-flag": "Integer feature flag for testing",
        "integration-targeting-flag": "Targeting feature flag for testing",
    }

    headers = {"Authorization": ADMIN_TOKEN, "Content-Type": "application/json"}

    for name, description in flag_descriptions.items():
        payload = {
            "name": name,
            "description": description,
            "type": "release",
            "enabled": True,
        }
        try:
            resp = requests.post(
                f"{UNLEASH_URL}/admin/projects/default/features",
                headers=headers,
                json=payload,
                timeout=10,
            )
            # 409 means the flag already exists (e.g. a previous run) — still
            # proceed to attach the strategy and enable the flag.
            if resp.status_code in (200, 201, 409):
                print(f"Flag '{name}' created")
                add_strategy_with_variants(name, headers)
                enable_flag(name, headers)
            else:
                print(f"Failed to create flag '{name}': {resp.status_code}")
        except Exception as exc:
            print(f"Error creating flag '{name}': {exc}")
74+
75+
76+
def add_strategy_with_variants(flag_name: str, headers: dict):
    """Attach a 100%-rollout strategy (with one full-weight variant) to a flag.

    ``flag_name`` is the feature name; ``headers`` must carry the admin
    authorization header. Results are reported via ``print``.
    """
    # Per-flag variant spec: (variant name, payload type, payload value).
    # Every variant gets the full weight (1000) so evaluation is deterministic.
    variant_specs = {
        "integration-boolean-flag": ("true", "string", "true"),
        "integration-string-flag": ("my-string", "string", "my-string"),
        "integration-float-flag": ("9000.5", "string", "9000.5"),
        "integration-object-flag": ("object-variant", "json", '{"foo": "bar"}'),
        "integration-integer-flag": ("42", "string", "42"),
        "integration-targeting-flag": ("targeted-true", "string", "true"),
    }

    variants = []
    if flag_name in variant_specs:
        variant_name, payload_type, payload_value = variant_specs[flag_name]
        variants = [
            {
                "stickiness": "default",
                "name": variant_name,
                "weight": 1000,
                "payload": {"type": payload_type, "value": payload_value},
                "weightType": "variable",
            }
        ]

    # Only the targeting flag constrains evaluation to a specific user id.
    if flag_name == "integration-targeting-flag":
        constraints = [
            {
                "contextName": "userId",
                "operator": "IN",
                "values": ["targeted-user"],
            }
        ]
    else:
        constraints = []

    strategy_payload = {
        "name": "flexibleRollout",
        "constraints": constraints,
        "parameters": {
            "rollout": "100",
            "stickiness": "default",
            "groupId": flag_name,
        },
        "variants": variants,
        "segments": [],
        "disabled": False,
    }

    resp = requests.post(
        f"{UNLEASH_URL}/admin/projects/default/features/{flag_name}/environments/development/strategies",
        headers=headers,
        json=strategy_payload,
        timeout=10,
    )
    if resp.status_code in (200, 201):
        print(f"Strategy with variants added to '{flag_name}'")
    else:
        print(
            f"Failed to add strategy to '{flag_name}': {resp.status_code}"
        )
171+
172+
173+
def enable_flag(flag_name: str, headers: dict):
    """Turn ``flag_name`` on in the 'development' environment (best-effort)."""
    url = (
        f"{UNLEASH_URL}/admin/projects/default/features/"
        f"{flag_name}/environments/development/on"
    )
    try:
        resp = requests.post(url, headers=headers, timeout=10)
        if resp.status_code in (200, 201):
            print(f"Flag '{flag_name}' enabled in development environment")
        else:
            print(f"Failed to enable flag '{flag_name}': {resp.status_code}")
    except Exception as exc:
        # Setup keeps going even if one flag fails to enable.
        print(f"Error enabling flag '{flag_name}': {exc}")
187+
188+
189+
@pytest.fixture(scope="session", autouse=True)
190+
def setup_test_flags():
191+
"""Setup test flags before running any tests."""
192+
print("Creating test flags in Unleash...")
193+
create_test_flags()
194+
print("Test flags setup completed")
195+
196+
197+
@pytest.fixture
def unleash_provider():
    """Yield an initialized UnleashProvider; shut it down on teardown."""
    prov = UnleashProvider(
        url=UNLEASH_URL,
        app_name="test-app",
        api_token=API_TOKEN,
    )
    prov.initialize()
    yield prov
    # Teardown: stop the embedded UnleashClient so instances don't
    # accumulate across tests.
    prov.shutdown()
209+
210+
211+
@pytest.fixture
def client(unleash_provider):
    """Return an OpenFeature client backed by the Unleash provider."""
    # Register the provider globally before handing out a client.
    api.set_provider(unleash_provider)
    return api.get_client()
217+
218+
219+
@pytest.mark.integration
def test_integration_health_check():
    """The Unleash instance must answer its /health endpoint with 200."""
    base_url = UNLEASH_URL.replace("/api", "")
    resp = requests.get(f"{base_url}/health", timeout=5)
    assert resp.status_code == 200
224+
225+
226+
@pytest.mark.integration
def test_integration_provider_initialization(unleash_provider):
    """An initialized provider exposes a live underlying client."""
    assert unleash_provider is not None
    assert unleash_provider.client is not None
231+
232+
233+
@pytest.mark.integration
def test_integration_provider_metadata(unleash_provider):
    """The provider identifies itself via its metadata name."""
    assert unleash_provider.get_metadata().name == "Unleash Provider"
238+
239+
240+
@pytest.mark.integration
def test_integration_flag_details_resolution(unleash_provider):
    """Resolution yields a fully-populated details object for a known flag."""
    result = unleash_provider.resolve_boolean_details(
        "integration-boolean-flag", False
    )
    assert result is not None
    # The details object must carry the standard OpenFeature fields.
    for attr in ("value", "reason", "variant"):
        assert hasattr(result, attr)
    assert result.value is True
251+
252+
253+
@pytest.mark.integration
def test_integration_provider_status(unleash_provider):
    """A running provider reports READY status."""
    assert unleash_provider.get_status().value == "READY"
258+
259+
260+
@pytest.mark.integration
def test_integration_boolean_flag_resolution(unleash_provider):
    """The boolean flag's variant payload resolves to True."""
    result = unleash_provider.resolve_boolean_details(
        "integration-boolean-flag", False
    )
    assert result.value is True
267+
268+
269+
@pytest.mark.integration
def test_integration_string_flag_resolution(unleash_provider):
    """The string flag resolves to its configured variant payload."""
    result = unleash_provider.resolve_string_details(
        "integration-string-flag", "default"
    )
    assert result.value == "my-string"
276+
277+
278+
@pytest.mark.integration
def test_integration_integer_flag_resolution(unleash_provider):
    """The integer flag's string payload resolves to the int 42."""
    result = unleash_provider.resolve_integer_details("integration-integer-flag", 0)
    assert result.value == 42
283+
284+
285+
@pytest.mark.integration
def test_integration_float_flag_resolution(unleash_provider):
    """The float flag's string payload resolves to the float 9000.5."""
    result = unleash_provider.resolve_float_details("integration-float-flag", 0.0)
    assert result.value == 9000.5
290+
291+
292+
@pytest.mark.integration
def test_integration_object_flag_resolution(unleash_provider):
    """The object flag's JSON payload resolves to a dict."""
    result = unleash_provider.resolve_object_details("integration-object-flag", {})
    assert result.value == {"foo": "bar"}
297+
298+
299+
@pytest.mark.integration
def test_integration_nonexistent_flag(unleash_provider):
    """Unknown flags fall back to the default value with reason DEFAULT."""
    result = unleash_provider.resolve_boolean_details("test-nonexistent-flag", False)
    assert result.value is False
    assert result.reason.value == "DEFAULT"
305+
306+
307+
@pytest.mark.integration
def test_integration_targeting_positive_case(unleash_provider):
    """A user id matching the flag's userId constraint evaluates to True."""
    ctx = EvaluationContext(targeting_key="targeted-user")
    result = unleash_provider.resolve_boolean_details(
        "integration-targeting-flag", False, ctx
    )
    assert result.value is True
    assert isinstance(result.value, bool)
318+
319+
@pytest.mark.integration
def test_integration_targeting_negative_case(unleash_provider):
    """A user id outside the userId constraint evaluates to False."""
    ctx = EvaluationContext(targeting_key="non-targeted-user")
    result = unleash_provider.resolve_boolean_details(
        "integration-targeting-flag", False, ctx
    )
    assert result.value is False
    assert isinstance(result.value, bool)

0 commit comments

Comments
 (0)