Strip Unused Dependencies #354

Merged · 7 commits · Aug 11, 2025
causal_testing/specification/variable.py · 14 changes: 0 additions & 14 deletions
@@ -5,7 +5,6 @@
from collections.abc import Callable
from typing import TypeVar

import lhsmdu
from pandas import DataFrame
from scipy.stats._distn_infrastructure import rv_generic

@@ -38,19 +37,6 @@ def __init__(self, name: str, datatype: T, distribution: rv_generic = None, hidd
def __repr__(self):
return f"{self.typestring()}: {self.name}::{self.datatype.__name__}"

def sample(self, n_samples: int) -> [T]:
"""Generate a Latin Hypercube Sample of size n_samples according to the
Variable's distribution.

:param int n_samples: The number of samples to generate.
:return: A list of samples
:rtype: List[T]

"""
assert self.distribution is not None, "Sampling requires a distribution to be specified."
lhs = lhsmdu.sample(1, n_samples).tolist()[0]
return lhsmdu.inverseTransformSample(self.distribution, lhs).tolist()

def typestring(self) -> str:
"""Return the type of the Variable, e.g. INPUT, or OUTPUT. Note that
this is NOT the datatype (int, str, etc.).
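Note for downstream users of the removed `Variable.sample`: the Latin Hypercube Sampling it provided can be reproduced with scipy's `qmc` module instead of `lhsmdu`. A minimal sketch, assuming scipy remains available (variable.py still imports from `scipy.stats`); the helper name is hypothetical and not part of this PR:

```python
# Sketch only (not part of this PR): reproducing the removed
# Variable.sample behaviour without lhsmdu, via scipy's qmc module.
from scipy.stats import norm, qmc


def lhs_sample(distribution, n_samples: int) -> list:
    """Draw a Latin Hypercube Sample and map it through the distribution's inverse CDF."""
    sampler = qmc.LatinHypercube(d=1)
    uniform = sampler.random(n=n_samples)[:, 0]  # stratified values in [0, 1)
    return distribution.ppf(uniform).tolist()


samples = lhs_sample(norm, 10)  # e.g. 10 samples from a standard normal
```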
causal_testing/testing/metamorphic_relation.py · 12 changes: 8 additions & 4 deletions
@@ -39,14 +39,14 @@ class ShouldCause(MetamorphicRelation):

def to_json_stub(
self,
skip: bool = True,
skip: bool = False,
estimate_type: str = "coefficient",
effect_type: str = "direct",
estimator: str = "LinearRegressionEstimator",
) -> dict:
"""
Convert to a JSON frontend stub string for user customisation.
:param skip: Whether to skip the test
:param skip: Whether to skip the test (default False).
:param effect_type: The type of causal effect to consider (total or direct)
:param estimate_type: The estimate type to use when evaluating tests
:param estimator: The name of the estimator class to use when evaluating the test
@@ -77,14 +77,14 @@ class ShouldNotCause(MetamorphicRelation):

def to_json_stub(
self,
skip: bool = True,
skip: bool = False,
estimate_type: str = "coefficient",
effect_type: str = "direct",
estimator: str = "LinearRegressionEstimator",
) -> dict:
"""
Convert to a JSON frontend stub string for user customisation.
:param skip: Whether to skip the test
:param skip: Whether to skip the test (default False).
:param effect_type: The type of causal effect to consider (total or direct)
:param estimate_type: The estimate type to use when evaluating tests
:param estimator: The name of the estimator class to use when evaluating the test
@@ -244,6 +244,10 @@ def generate_causal_tests(
if len(list(causal_dag.predecessors(relation.base_test_case.outcome_variable))) > 0
]

logger.warning("The skip parameter is hard-coded to False during test generation for better integration with the "
"causal testing component (python -m causal_testing test ...)"
"Please carefully review the generated tests and decide which to skip.")

logger.info(f"Generated {len(tests)} tests. Saving to {output_path}.")
with open(output_path, "w", encoding="utf-8") as f:
json.dump({"tests": tests}, f, indent=2)
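With the default flipped to False, stubs produced by `to_json_stub` are now active unless a caller opts out explicitly. A minimal sketch of the opt-out, where the `BaseTestCase` import path and the adjustment set are assumptions for illustration:

```python
# Sketch only: opting back into skipped stubs now that the default is False.
from causal_testing.testing.base_test_case import BaseTestCase  # path assumed
from causal_testing.testing.metamorphic_relation import ShouldCause

relation = ShouldCause(BaseTestCase("X1", "Z"), ["X2"])  # adjustment set assumed
stub = relation.to_json_stub(skip=True)  # explicit opt-out
assert stub["skip"] is True
```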
pyproject.toml · 2 changes: 0 additions & 2 deletions
@@ -15,9 +15,7 @@ requires-python = ">=3.10"
license = { text = "MIT" }
keywords = ["causal inference", "verification"]
dependencies = [
"fitter~=1.7",
"lifelines~=0.29.0",
"lhsmdu~=1.1",
"networkx>=3.4,<3.5",
"numpy~=1.26",
"pandas>=2.1",
tests/specification_tests/test_variable.py · 4 changes: 0 additions & 4 deletions
@@ -13,10 +13,6 @@ class TestVariable(unittest.TestCase):
def setUp(self) -> None:
pass

def test_sample_flakey(self):
ip = Input("ip", float, norm)
self.assertGreater(kstest(ip.sample(10), norm.cdf).pvalue, 0.95)

def test_typestring(self):
class Var(Variable):
pass
tests/testing_tests/test_metamorphic_relations.py · 14 changes: 7 additions & 7 deletions
@@ -62,7 +62,7 @@ def test_should_not_cause_json_stub(self):
"name": "X1 _||_ Z",
"formula": "Z ~ X1",
"alpha": 0.05,
"skip": True,
"skip": False,
},
)

@@ -86,7 +86,7 @@ def test_should_not_cause_logistic_json_stub(self):
"name": "X1 _||_ Z",
"formula": "Z ~ X1",
"alpha": 0.05,
"skip": True,
"skip": False,
},
)

@@ -107,7 +107,7 @@ def test_should_cause_json_stub(self):
"formula": "Z ~ X1",
"treatment_variable": "X1",
"name": "X1 --> Z",
"skip": True,
"skip": False,
},
)

@@ -120,7 +120,7 @@ def test_should_cause_logistic_json_stub(self):
should_cause_mr = ShouldCause(BaseTestCase("X1", "Z"), adj_set)
self.assertEqual(
should_cause_mr.to_json_stub(
effect_type="total", estimate_type="unit_odds_ratio", estimator="LogisticRegressionEstimator", skip=True
effect_type="total", estimate_type="unit_odds_ratio", estimator="LogisticRegressionEstimator", skip=False
),
{
"effect": "total",
@@ -130,7 +130,7 @@ def test_should_cause_logistic_json_stub(self):
"formula": "Z ~ X1",
"treatment_variable": "X1",
"name": "X1 --> Z",
"skip": True,
"skip": False,
},
)

@@ -263,7 +263,7 @@ def test_generate_causal_tests_ignore_cycles(self):
tests = json.load(f)
expected = list(
map(
lambda x: x.to_json_stub(skip=True),
lambda x: x.to_json_stub(skip=False),
filter(
lambda relation: len(list(dcg.predecessors(relation.base_test_case.outcome_variable))) > 0,
relations,
@@ -282,7 +282,7 @@ def test_generate_causal_tests(self):
tests = json.load(f)
expected = list(
map(
lambda x: x.to_json_stub(skip=True),
lambda x: x.to_json_stub(skip=False),
filter(
lambda relation: len(list(dag.predecessors(relation.base_test_case.outcome_variable))) > 0,
relations,