Commit c31f330

deploy: 3543414

facebook-github-bot committed Jul 19, 2024
1 parent 4fb269d commit c31f330

Showing 18 changed files with 646 additions and 226 deletions.
364 changes: 276 additions & 88 deletions api/_modules/captum/attr/_core/feature_ablation.html

Large diffs are not rendered by default.

364 changes: 276 additions & 88 deletions api/_modules/captum/attr/_core/feature_ablation/index.html

Large diffs are not rendered by default.

@@ -31,7 +31,7 @@ Source code for captum.attr._core.layer.layer_feature_permutation
 #!/usr/bin/env python3
-from typing import Any, Callable, List, Tuple, Union
+from typing import Any, Callable, cast, List, Tuple, Union

 import torch
 from captum._utils.common import (
@@ -233,7 +233,11 @@ Source code for captum.attr._core.layer.layer_feature_permutation
             finally:
                 if hook is not None:
                     hook.remove()
-            return eval
+
+            # _run_forward may return future of Tensor,
+            # but we don't support it here now
+            # And it will fail before here.
+            return cast(Tensor, eval)

         with torch.no_grad():
             inputs = _format_tensor_into_tuples(inputs)
(The same diff is repeated for a second copy of this page.)
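The recurring `return cast(Tensor, eval)` edits above are static-typing fixes only: `typing.cast` performs no runtime conversion, it simply narrows a value whose declared type may be either a `Tensor` or a `Future` down to `Tensor` for the type checker. A minimal sketch of the pattern, assuming a hypothetical `run_forward` helper (not Captum's actual `_run_forward`):

```python
from typing import Union, cast

import torch
from torch import Tensor
from torch.futures import Future


def run_forward(model: torch.nn.Module, inp: Tensor) -> Union[Tensor, Future]:
    # Hypothetical stand-in for a helper whose signature says it may return
    # either a Tensor or a Future (e.g. when the forward pass runs asynchronously).
    return model(inp)


def scalar_eval(model: torch.nn.Module, inp: Tensor) -> Tensor:
    out = run_forward(model, inp)
    # This code path never uses the async variant, so narrow the type for
    # mypy/pyre; typing.cast has no runtime effect or cost.
    return cast(Tensor, out)


if __name__ == "__main__":
    model = torch.nn.Linear(4, 1)
    print(scalar_eval(model, torch.randn(2, 4)).shape)  # torch.Size([2, 1])
```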
@@ -33,7 +33,7 @@ Source code for captum.attr._core.layer.layer_integrated_gradients
 #!/usr/bin/env python3
 import functools
 import warnings
-from typing import Any, Callable, List, overload, Tuple, Union
+from typing import Any, Callable, cast, List, overload, Tuple, Union

 import torch
 from captum._utils.common import (
@@ -136,7 +136,8 @@ Source code for captum.attr._core.layer.layer_integrated_gradients
                 "Multiple layers provided. Please ensure that each layer is"
                 "**not** solely dependent on the outputs of"
                 "another layer. Please refer to the documentation for more"
-                "detail."
+                "detail.",
+                stacklevel=2,
             )

     @overload
@@ -503,13 +504,17 @@ Source code for captum.attr._core.layer.layer_integrated_gradients
                 # the inputs is an empty tuple
                 # coz it is prepended into additional_forward_args
                 output = _run_forward(
-                    self.forward_func, tuple(), target_ind, additional_forward_args
+                    self.forward_func, (), target_ind, additional_forward_args
                 )
             finally:
                 for hook in hooks:
                     if hook is not None:
                         hook.remove()

+            # _run_forward may return future of Tensor,
+            # but we don't support it here now
+            # And it will fail before here.
+            output = cast(Tensor, output)
             assert output[0].numel() == 1, (
                 "Target not provided when necessary, cannot"
                 " take gradient with respect to multiple outputs."
(The same diff is repeated for a second copy of this page.)
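The `stacklevel=2` change in layer_integrated_gradients only affects where the warning is reported: it attributes the `UserWarning` to the caller rather than to the `warnings.warn` call inside the library. A small self-contained illustration with hypothetical names:

```python
import warnings


def configure(layers):
    if len(layers) > 1:
        # With stacklevel=2 the reported filename/line is the caller's,
        # which is more useful than pointing inside this function.
        warnings.warn(
            "Multiple layers provided. Please ensure that each layer is "
            "**not** solely dependent on the outputs of another layer.",
            stacklevel=2,
        )


def user_code():
    configure(["layer1", "layer2"])  # the warning is attributed to this line


if __name__ == "__main__":
    user_code()
```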
6 changes: 5 additions & 1 deletion api/_modules/captum/attr/_core/lrp.html
@@ -401,7 +401,11 @@ Source code for captum.attr._core.lrp
         # adjustments as inputs to the layers with adjusted weights. This procedure
         # is important for graph generation in the 2nd forward pass.
         self._register_pre_hooks()
-        return output
+
+        # _run_forward may return future of Tensor,
+        # but we don't support it here now
+        # And it will fail before here.
+        return cast(Tensor, output)

     def _remove_forward_hooks(self) -> None:
         for forward_handle in self.forward_handles:
6 changes: 5 additions & 1 deletion api/_modules/captum/attr/_core/lrp/index.html
(Identical diff to the lrp.html page above.)
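Both the LRP page and the layer-attribution pages above wrap their hooked forward passes in `try`/`finally` so that temporary hooks are removed even if the forward pass raises. A minimal sketch of that pattern, with an illustrative model and hook (not Captum's internals):

```python
import torch
from torch import Tensor


def forward_with_capture(
    model: torch.nn.Module, layer: torch.nn.Module, inp: Tensor
) -> Tensor:
    captured = {}

    def hook(module, args, output):
        # Store the intermediate activation produced by `layer`.
        captured["activation"] = output

    handle = layer.register_forward_hook(hook)
    try:
        model(inp)
    finally:
        # Always detach the hook, even if the forward pass raises,
        # so the model is left in its original state.
        handle.remove()
    return captured["activation"]


if __name__ == "__main__":
    model = torch.nn.Sequential(
        torch.nn.Linear(4, 8), torch.nn.ReLU(), torch.nn.Linear(8, 1)
    )
    activation = forward_with_capture(model, model[0], torch.randn(2, 4))
    print(activation.shape)  # torch.Size([2, 8])
```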
6 changes: 3 additions & 3 deletions api/_modules/captum/attr/_core/shapley_value.html
@@ -35,7 +35,7 @@ Source code for captum.attr._core.shapley_value
 import itertools
 import math
 import warnings
-from typing import Any, Callable, Iterable, Sequence, Tuple, Union
+from typing import Any, Callable, cast, Iterable, Sequence, Tuple, Union

 import torch
 from captum._utils.common import (
@@ -59,7 +59,7 @@ Source code for captum.attr._core.shapley_value
     _tensorize_baseline,
 )
 from captum.log import log_usage
-from torch import Tensor
+from torch import dtype, Tensor


 def _all_perm_generator(num_features: int, num_samples: int) -> Iterable[Sequence[int]]:
@@ -588,7 +588,7 @@ Source code for captum.attr._core.shapley_value
         # using python built-in type as torch dtype
         # int -> torch.int64, float -> torch.float64
         # ref: https://github.com/pytorch/pytorch/pull/21215
-        return torch.tensor([forward_output], dtype=output_type)
+        return torch.tensor([forward_output], dtype=cast(dtype, output_type))
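The shapley_value change is likewise behaviour-preserving: per the comment in the source, `torch.tensor` accepts Python built-in types such as `int` and `float` for `dtype` at runtime (mapping them to `torch.int64` / `torch.float64`, see pytorch/pytorch#21215), but the parameter is annotated as `torch.dtype`, so `cast` only silences the type checker. A rough sketch with a hypothetical helper name:

```python
from typing import cast

import torch
from torch import dtype as torch_dtype


def to_output_tensor(forward_output, output_type: type) -> torch.Tensor:
    # At runtime torch.tensor accepts Python builtins for dtype
    # (int -> torch.int64, float -> torch.float64), but the parameter is
    # annotated as torch.dtype, so cast() keeps the type checker happy
    # without changing behaviour.
    return torch.tensor([forward_output], dtype=cast(torch_dtype, output_type))


if __name__ == "__main__":
    print(to_output_tensor(3, int).dtype)      # torch.int64
    print(to_output_tensor(3.5, float).dtype)  # torch.float64
```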