编辑:经过仔细检查,这似乎不是一个好主意。在原始哈希集中有少于60个元素时,下面的方法似乎比创建新哈希集更慢。
免责声明:这似乎有效,但使用风险自负。如果您将序列化克隆的哈希集,您可能需要复制SerializationInfo m_siInfo。
我也遇到了这个问题,并尝试解决它。以下是一个扩展方法,它使用FieldInfo.GetValue和SetValue来复制所需的字段。它比使用HashSet(IEnumerable)构造函数更快,快多少取决于原始哈希集中的元素数量:对于1,000个元素,差异约为7倍;对于100,000个元素,差异约为3倍。
还有其他可能更快的方法,但现在这个方法已经消除了我的瓶颈。我尝试使用表达式树和发射,但遇到了障碍,如果我让它们工作,我会更新此帖子。
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.Serialization;
/// <summary>
/// Provides a fast <see cref="HashSet{T}"/> deep-copy via reflection over the
/// set's private fields, avoiding the per-element rehashing cost of the
/// <c>HashSet&lt;T&gt;(IEnumerable&lt;T&gt;)</c> constructor.
/// </summary>
/// <remarks>
/// The reflection fast path relies on the .NET Framework internal field names
/// (<c>m_buckets</c>, <c>m_slots</c>, ...). On runtimes where those fields do
/// not exist (.NET Core / .NET 5+ renamed and restructured them), this class
/// transparently falls back to the supported copy constructor instead of
/// throwing <see cref="NullReferenceException"/>.
/// </remarks>
public static class HashSetExtensions
{
    /// <summary>
    /// Creates a shallow clone of <paramref name="original"/>: a new set with
    /// the same elements and the same <see cref="IEqualityComparer{T}"/>.
    /// Element references are shared; the internal storage is independent.
    /// </summary>
    /// <param name="original">The set to clone. Must not be null.</param>
    /// <returns>A new, independent <see cref="HashSet{T}"/> with equal contents.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="original"/> is null.</exception>
    public static HashSet<T> Clone<T>(this HashSet<T> original)
    {
        if (original is null)
        {
            throw new ArgumentNullException(nameof(original));
        }

        // If the expected private fields are not present on this runtime,
        // use the supported (slower but always correct) copy constructor,
        // which also preserves the comparer.
        if (!Fields<T>.IsSupported)
        {
            return new HashSet<T>(original, original.Comparer);
        }

        // Skip the constructor entirely; every field we care about is set below.
        var clone = (HashSet<T>)FormatterServices.GetUninitializedObject(typeof(HashSet<T>));
        Copy(Fields<T>.comparer, original, clone);
        if (original.Count == 0)
        {
            // Empty set: buckets/slots stay null, matching a freshly
            // constructed HashSet<T>; only the free-list sentinel is needed.
            Fields<T>.freeList.SetValue(clone, -1);
        }
        else
        {
            Fields<T>.count.SetValue(clone, original.Count);
            // The arrays must be cloned (not shared) so that mutating one
            // set cannot corrupt the other.
            CloneArray(Fields<T>.buckets, original, clone);
            CloneArray(Fields<T>.slots, original, clone);
            Copy(Fields<T>.freeList, original, clone);
            Copy(Fields<T>.lastIndex, original, clone);
            Copy(Fields<T>.version, original, clone);
        }
        return clone;
    }

    // Copies a scalar field value from source to target verbatim.
    static void Copy<T>(FieldInfo field, HashSet<T> source, HashSet<T> target)
    {
        field.SetValue(target, field.GetValue(source));
    }

    // Copies an array-typed field, cloning the array so the two sets do not
    // share backing storage. (Array.Clone is shallow, which is correct here:
    // slot structs are value types and bucket entries are ints.)
    static void CloneArray<T>(FieldInfo field, HashSet<T> source, HashSet<T> target)
    {
        field.SetValue(target, ((Array)field.GetValue(source)).Clone());
    }

    // Caches the FieldInfo lookups once per element type T.
    static class Fields<T>
    {
        public static readonly FieldInfo freeList = GetFieldInfo("m_freeList");
        public static readonly FieldInfo buckets = GetFieldInfo("m_buckets");
        public static readonly FieldInfo slots = GetFieldInfo("m_slots");
        public static readonly FieldInfo count = GetFieldInfo("m_count");
        public static readonly FieldInfo lastIndex = GetFieldInfo("m_lastIndex");
        public static readonly FieldInfo version = GetFieldInfo("m_version");
        public static readonly FieldInfo comparer = GetFieldInfo("m_comparer");

        // True only when every field the fast path touches was found on this
        // runtime's HashSet<T> implementation.
        public static bool IsSupported =>
            freeList != null && buckets != null && slots != null &&
            count != null && lastIndex != null && version != null &&
            comparer != null;

        static FieldInfo GetFieldInfo(string name)
        {
            // Returns null (rather than throwing) when the field is absent,
            // which IsSupported uses to select the fallback path.
            return typeof(HashSet<T>).GetField(name, BindingFlags.Instance | BindingFlags.NonPublic);
        }
    }
}