Commit 3c14b14
Add comments to empty blocks to address csharpsquid:S108, #672 (#1032)
paulirwin authored Nov 19, 2024
1 parent 18bd8bf commit 3c14b14
Showing 54 changed files with 234 additions and 184 deletions.
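
The fix pattern is the same across all 54 files: every intentionally empty block gains a comment stating that the emptiness is deliberate, which satisfies Sonar's csharpsquid:S108 rule ("nested blocks of code should not be left empty"). A minimal sketch of the before/after shape in C# (the DrainAll helper is illustrative, not code from this commit):

using System.IO;

public static class S108FixSketch
{
    // Before: an empty body on one line, flagged by csharpsquid:S108.
    //     while (reader.Read() != -1) { }
    //
    // After: the same loop, with a comment documenting intent.
    public static void DrainAll(TextReader reader)
    {
        while (reader.Read() != -1)
        {
            // intentionally empty: reading only for the side effect
        }
    }
}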
@@ -25,12 +25,12 @@ namespace Lucene.Net.Analysis.Miscellaneous
/// a replacement for the maximum field length setting inside <see cref="Index.IndexWriter"/>.
/// <para>
/// By default, this filter ignores any tokens in the wrapped <see cref="TokenStream"/>
/// once the limit has been reached, which can result in <see cref="Reset"/> being
/// called prior to <see cref="IncrementToken"/> returning <c>false</c>. For most
/// <see cref="TokenStream"/> implementations this should be acceptable, and faster
/// then consuming the full stream. If you are wrapping a <see cref="TokenStream"/>
/// which requires that the full stream of tokens be exhausted in order to
/// function properly, use the
/// <see cref="LimitTokenCountFilter.LimitTokenCountFilter(TokenStream,int,bool)"/> consumeAllTokens
/// option.
/// </para>
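
As a hedged usage sketch of the consumeAllTokens option described above (the tokenizer, version, and limit below are assumptions, not taken from this diff):

using System.IO;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Core;
using Lucene.Net.Analysis.Miscellaneous;
using Lucene.Net.Util;

// Keep only the first 100 tokens. Passing consumeAllTokens = true makes the
// filter drain the wrapped stream (the empty while loop changed below), so a
// wrapped stream that must see end-of-input still functions correctly.
TokenStream stream = new WhitespaceTokenizer(LuceneVersion.LUCENE_48, new StringReader("some input text"));
stream = new LimitTokenCountFilter(stream, 100, consumeAllTokens: true);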
@@ -91,8 +91,9 @@ public override bool IncrementToken()
}
else
{
-while (consumeAllTokens && m_input.IncrementToken()) // NOOP
+while (consumeAllTokens && m_input.IncrementToken())
{
+    // NOOP
}
return false;
}
@@ -105,4 +106,4 @@ public override void Reset()
exhausted = false;
}
}
}
@@ -26,12 +26,12 @@ namespace Lucene.Net.Analysis.Miscellaneous
/// are not greater than the configured limit.
/// <para>
/// By default, this filter ignores any tokens in the wrapped <see cref="TokenStream"/>
/// once the limit has been exceeded, which can result in <see cref="Reset"/> being
/// called prior to <see cref="IncrementToken"/> returning <c>false</c>. For most
/// <see cref="TokenStream"/> implementations this should be acceptable, and faster
/// then consuming the full stream. If you are wrapping a <see cref="TokenStream"/>
/// which requires that the full stream of tokens be exhausted in order to
/// function properly, use the
/// <see cref="LimitTokenPositionFilter(TokenStream,int,bool)"/> consumeAllTokens
/// option.
/// </para>
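
LimitTokenPositionFilter caps tokens by position rather than by count, so tokens stacked at the same position (synonyms, for example) survive or are dropped together. A sketch under the same assumptions as the previous example:

// Emit only tokens at positions 1..10. With consumeAllTokens = false the
// wrapped stream is abandoned as soon as the position limit is exceeded.
TokenStream stream = new WhitespaceTokenizer(LuceneVersion.LUCENE_48, new StringReader("some input text"));
stream = new LimitTokenPositionFilter(stream, 10, consumeAllTokens: false);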
@@ -91,8 +91,9 @@ public override bool IncrementToken()
}
else
{
-while (consumeAllTokens && m_input.IncrementToken()) // NOOP
+while (consumeAllTokens && m_input.IncrementToken())
{
+    // NOOP
}
exhausted = true;
return false;
@@ -112,4 +113,4 @@ public override void Reset()
exhausted = false;
}
}
}
@@ -39,7 +39,7 @@ public sealed class TrimFilter : TokenFilter
/// <param name="version"> the Lucene match version </param>
/// <param name="in"> the stream to consume </param>
/// <param name="updateOffsets"> whether to update offsets </param>
/// @deprecated Offset updates are not supported anymore as of Lucene 4.4.
[Obsolete("Offset updates are not supported anymore as of Lucene 4.4.")]
public TrimFilter(LuceneVersion version, TokenStream @in, bool updateOffsets)
: base(@in)
@@ -84,6 +84,7 @@ public override bool IncrementToken()
// eat the first characters
for (start = 0; start < len && char.IsWhiteSpace(termBuffer[start]); start++)
{
+// LUCENENET: intentionally empty
}
// eat the end characters
for (end = len; end >= start && char.IsWhiteSpace(termBuffer[end - 1]); end--)
@@ -111,4 +112,4 @@ public override bool IncrementToken()
return true;
}
}
}
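
The two loops above scan for the first and last non-whitespace characters; the trimmed term is the span between them. A standalone sketch of the same bounds computation (names are illustrative):

// Returns the [start, end) bounds of the trimmed region of buffer[0..len).
static (int Start, int End) TrimBounds(char[] buffer, int len)
{
    int start = 0;
    while (start < len && char.IsWhiteSpace(buffer[start]))
    {
        start++; // eat leading whitespace
    }
    int end = len;
    while (end > start && char.IsWhiteSpace(buffer[end - 1]))
    {
        end--; // eat trailing whitespace
    }
    return (start, end);
}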
@@ -35,16 +35,16 @@ namespace Lucene.Net.Analysis.Sinks
/// TeeSinkTokenFilter source1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(version, reader1));
/// TeeSinkTokenFilter.SinkTokenStream sink1 = source1.NewSinkTokenStream();
/// TeeSinkTokenFilter.SinkTokenStream sink2 = source1.NewSinkTokenStream();
///
/// TeeSinkTokenFilter source2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(version, reader2));
/// source2.AddSinkTokenStream(sink1);
/// source2.AddSinkTokenStream(sink2);
///
/// TokenStream final1 = new LowerCaseFilter(version, source1);
/// TokenStream final2 = source2;
/// TokenStream final3 = new EntityDetect(sink1);
/// TokenStream final4 = new URLDetect(sink2);
///
/// d.Add(new TextField("f1", final1, Field.Store.NO));
/// d.Add(new TextField("f2", final2, Field.Store.NO));
/// d.Add(new TextField("f3", final3, Field.Store.NO));
@@ -131,6 +131,7 @@ public void ConsumeAllTokens()
{
while (IncrementToken())
{
+// LUCENENET: intentionally empty
}
}

@@ -182,7 +183,7 @@ public abstract class SinkFilter
{
/// <summary>
/// Returns true, iff the current state of the passed-in <see cref="AttributeSource"/> shall be stored
/// in the sink.
/// </summary>
public abstract bool Accept(AttributeSource source);
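
Accept is consulted once per token with the tee's AttributeSource; returning true captures the current state into the sink. A hypothetical filter that admits only longer terms might look like this (the class name and length cutoff are assumptions):

using Lucene.Net.Analysis.Sinks;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

// Hypothetical: admit only tokens longer than three characters into the sink.
public sealed class LongTokenSinkFilter : TeeSinkTokenFilter.SinkFilter
{
    public override bool Accept(AttributeSource source)
    {
        var termAtt = source.GetAttribute<ICharTermAttribute>();
        return termAtt.Length > 3;
    }
}

Such a filter would be passed to NewSinkTokenStream so that the resulting sink records only the accepted tokens.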

@@ -271,4 +272,4 @@ public override bool Accept(AttributeSource source)
}
}
}
}
@@ -1007,7 +1007,7 @@ public override bool Stem()
lab2:
m_cursor = m_limit - v_2;
goto replab1;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab1:
// repeat, line 143
@@ -1036,7 +1036,7 @@ public override bool Stem()
lab4:
m_cursor = m_limit - v_3;
goto replab3;
-end_of_outer_loop_2: { }
+end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
}
replab3:
// do, line 144
@@ -1066,4 +1066,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}
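
The end_of_outer_loop labels above appear in machine-generated Snowball stemmer code, which emulates Snowball's repeat construct with goto. Because C# requires a statement after a label, the generator emits an empty { } block, exactly the shape S108 flags; this commit fills each with a comment. A compilable sketch of that control-flow shape (the digit-counting step is illustrative, not stemmer logic):

public static class SnowballLoopShape
{
    // Counts leading digits of s using the label/goto structure that the
    // Snowball code generator emits for its "repeat" construct.
    public static int CountLeadingDigits(string s)
    {
        int cursor = 0;
        while (true)
        {
            int v1 = cursor; // save the position before attempting the step
            do
            {
                if (cursor < s.Length && char.IsDigit(s[cursor]))
                {
                    cursor++;
                    goto end_of_outer_loop; // step succeeded: repeat
                }
            } while (false);
            cursor = v1;  // step failed: restore the position...
            goto replab;  // ...and stop repeating
        end_of_outer_loop: { /* intentionally empty: loop-continuation target */ }
        }
    replab:
        return cursor;
    }
}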
@@ -840,7 +840,7 @@ private bool r_cleaning()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -1162,4 +1162,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}
@@ -191,7 +191,7 @@ private bool r_prelude()
lab1:
m_cursor = v_2;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
m_cursor = v_1;
@@ -301,7 +301,7 @@ private bool r_prelude()
lab4:
m_cursor = v_4;
goto replab3;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab3:
return true;
@@ -475,7 +475,7 @@ private bool r_postlude()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -1015,4 +1015,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}
@@ -321,7 +321,7 @@ private bool r_prelude()
lab4:
m_cursor = v_4;
goto replab3;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab3: {/* LUCENENET: intentionally blank */}
} while (false);
@@ -1318,7 +1318,7 @@ private bool r_postlude()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -1553,4 +1553,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}
@@ -395,7 +395,7 @@ private bool r_prelude()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -655,7 +655,7 @@ private bool r_postlude()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -1522,7 +1522,7 @@ private bool r_un_accent()
} while (false);
lab1:
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
if (v_1 > 0)
@@ -1773,4 +1773,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}
@@ -210,7 +210,7 @@ private bool r_prelude()
lab1:
m_cursor = v_2;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
m_cursor = v_1;
@@ -292,7 +292,7 @@ private bool r_prelude()
lab7:
m_cursor = v_5;
goto replab6;
-end_of_outer_loop_2: { }
+end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
}
replab6:
return true;
@@ -497,7 +497,7 @@ private bool r_postlude()
lab1:
m_cursor = v_1;
goto replab0;
-end_of_outer_loop: { }
+end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
}
replab0:
return true;
@@ -893,4 +893,4 @@ public override int GetHashCode()
return this.GetType().FullName.GetHashCode();
}
}
}