@@ -712,7 +712,7 @@ class DeterministicInference(ForwardInference):
 >>> from torch.distributions import Bernoulli
 >>> from torch_concepts import InputVariable, EndogenousVariable
 >>> from torch_concepts.distributions import Delta
->>> from torch_concepts.nn import DeterministicInference, ParametricCPD, ProbabilisticModel
+>>> from torch_concepts.nn import DeterministicInference, ParametricCPD, ProbabilisticModel, LinearCC
 >>>
 >>> # Create a simple PGM: latent -> A -> B
 >>> input_var = InputVariable('input', parents=[], distribution=Delta, size=10)
@@ -723,7 +723,7 @@ class DeterministicInference(ForwardInference):
 >>> from torch.nn import Identity, Linear
 >>> cpd_emb = ParametricCPD('input', parametrization=Identity())
 >>> cpd_A = ParametricCPD('A', parametrization=Linear(10, 1))
->>> cpd_B = ParametricCPD('B', parametrization=Linear(1, 1))
+>>> cpd_B = ParametricCPD('B', parametrization=LinearCC(1, 1))
 >>>
 >>> # Create probabilistic model
 >>> pgm = ProbabilisticModel(
@@ -743,7 +743,7 @@ class DeterministicInference(ForwardInference):
 >>> print(results['B'].shape) # torch.Size([4, 1]) - endogenous, not {0,1}
 >>>
 >>> # Query specific concepts - returns concatenated endogenous
->>> output = inference.query(['B', 'A'], evidence={'embedding': x})
+>>> output = inference.query(['B', 'A'], evidence={'input': x})
 >>> print(output.shape) # torch.Size([4, 2])
 >>> # output contains [logit_B, logit_A] for each sample
 >>>
@@ -794,6 +794,8 @@ class AncestralSamplingInference(ForwardInference):
 >>> from torch_concepts import InputVariable
 >>> from torch_concepts.distributions import Delta
 >>> from torch_concepts.nn import AncestralSamplingInference, ParametricCPD, ProbabilisticModel
+>>> from torch_concepts import EndogenousVariable
+>>> from torch_concepts.nn import LinearCC
 >>>
 >>> # Create a simple PGM: embedding -> A -> B
 >>> embedding_var = InputVariable('embedding', parents=[], distribution=Delta, size=10)
@@ -804,7 +806,7 @@ class AncestralSamplingInference(ForwardInference):
 >>> from torch.nn import Identity, Linear
 >>> cpd_emb = ParametricCPD('embedding', parametrization=Identity())
 >>> cpd_A = ParametricCPD('A', parametrization=Linear(10, 1))
->>> cpd_B = ParametricCPD('B', parametrization=Linear(1, 1))
+>>> cpd_B = ParametricCPD('B', parametrization=LinearCC(1, 1))
 >>>
 >>> # Create probabilistic model
 >>> pgm = ProbabilisticModel(
@@ -838,8 +840,8 @@ class AncestralSamplingInference(ForwardInference):
 >>>
 >>> # With relaxed distributions (requires temperature)
 >>> from torch.distributions import RelaxedBernoulli
->>> var_A_relaxed = Variable('A', parents=['embedding'],
-... distribution=RelaxedBernoulli, size=1)
+>>> var_A_relaxed = EndogenousVariable('A', parents=['embedding'],
+... distribution=RelaxedBernoulli, size=1)
 >>> pgm = ProbabilisticModel(
 ... variables=[embedding_var, var_A_relaxed, var_B],
 ... parametric_cpds=[cpd_emb, cpd_A, cpd_B]