Commit ba7ae5e7 authored by Paul Hatcher

Revert back to 3143 - work done on wrong branch

SVN: branches/1.2.x@3183
Parent fbcea312
......@@ -3,193 +3,208 @@
namespace NHibernate.Search.Tests.Bridge
{
[Indexed]
public class Cloud
{
private int id;
private long? long1;
private int? int1;
private double? double1;
private float? float1;
private long long2;
private double double2;
private float float2;
private int int2;
private string string1;
private DateTime? dateTime;
private DateTime? dateTimeYear;
private DateTime? dateTimeMonth;
private DateTime? dateTimeDay;
private DateTime? dateTimeHour;
private DateTime? dateTimeMinute;
private DateTime? dateTimeSecond;
private DateTime? dateTimeMillisecond;
private String customFieldBridge;
private String customStringBridge;
private CloudType type;
private bool storm;
[Field(Index.Tokenized, Store = Store.Yes)]
[FieldBridge(typeof(TruncateFieldBridge))]
public virtual string CustomFieldBridge
{
get { return customFieldBridge; }
set { customFieldBridge = value; }
}
[Field(Index.Tokenized, Store = Store.Yes)]
[FieldBridge(typeof(TruncateStringBridge), 4)]
public virtual string CustomStringBridge
{
get { return customStringBridge; }
set { customStringBridge = value; }
}
[DocumentId]
public virtual int Id
{
get { return id; }
set { id = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual long? Long1
{
get { return long1; }
set { long1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual int? Int1
{
get { return int1; }
set { int1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual double? Double1
{
get { return double1; }
set { double1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual float? Float1
{
get { return float1; }
set { float1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual long Long2
{
get { return long2; }
set { long2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual int Int2
{
get { return int2; }
set { int2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual double Double2
{
get { return double2; }
set { double2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual float Float2
{
get { return float2; }
set { float2 = value; }
}
[Field(Index.Tokenized, Store = Store.Yes)]
public virtual string String1
{
get { return string1; }
set { string1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual DateTime? DateTime
{
get { return dateTime; }
set { dateTime = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Year)]
public virtual DateTime? DateTimeYear
{
get { return dateTimeYear; }
set { dateTimeYear = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Month)]
public virtual DateTime? DateTimeMonth
{
get { return dateTimeMonth; }
set { dateTimeMonth = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Day)]
public virtual DateTime? DateTimeDay
{
get { return dateTimeDay; }
set { dateTimeDay = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Hour)]
public virtual DateTime? DateTimeHour
{
get { return dateTimeHour; }
set { dateTimeHour = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Minute)]
public virtual DateTime? DateTimeMinute
{
get { return dateTimeMinute; }
set { dateTimeMinute = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Second)]
public virtual DateTime? DateTimeSecond
{
get { return dateTimeSecond; }
set { dateTimeSecond = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Millisecond)]
public virtual DateTime? DateTimeMillisecond
{
get { return dateTimeMillisecond; }
set { dateTimeMillisecond = value; }
}
[Field(Index.Tokenized)]
public virtual CloudType Type
{
get { return type; }
set { type = value; }
}
[Field(Index.Tokenized)]
public virtual bool Storm
{
get { return storm; }
set { storm = value; }
}
}
[Indexed]
public class Cloud
{
private int id;
#if NET_2_0
private long? long1;
private int? int1;
private double? double1;
private float? float1;
#endif
private long long2;
private double double2;
private float float2;
private int int2;
private string string1;
#if NET_2_0
private DateTime? dateTime;
private DateTime? dateTimeYear;
private DateTime? dateTimeMonth;
private DateTime? dateTimeDay;
private DateTime? dateTimeHour;
private DateTime? dateTimeMinute;
private DateTime? dateTimeSecond;
private DateTime? dateTimeMillisecond;
#endif
private String customFieldBridge;
private String customStringBridge;
private CloudType type;
private bool storm;
[Field(Index.Tokenized, Store = Store.Yes)]
[FieldBridge(typeof (TruncateFieldBridge))]
public virtual string CustomFieldBridge
{
get { return customFieldBridge; }
set { this.customFieldBridge = value; }
}
[Field(Index.Tokenized, Store = Store.Yes)]
[FieldBridge(typeof (TruncateStringBridge), 4)]
public virtual string CustomStringBridge
{
get { return customStringBridge; }
set { this.customStringBridge = value; }
}
[DocumentId]
public virtual int Id
{
get { return id; }
set { this.id = value; }
}
#if NET_2_0
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual long? Long1
{
get { return long1; }
set { this.long1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual int? Int1
{
get { return int1; }
set { this.int1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual double? Double1
{
get { return double1; }
set { this.double1 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual float? Float1
{
get { return float1; }
set { this.float1 = value; }
}
#endif
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual long Long2
{
get { return long2; }
set { this.long2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual int Int2
{
get { return int2; }
set { this.int2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual double Double2
{
get { return double2; }
set { this.double2 = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual float Float2
{
get { return float2; }
set { this.float2 = value; }
}
[Field(Index.Tokenized, Store = Store.Yes)]
public virtual string String1
{
get { return string1; }
set { this.string1 = value; }
}
#if NET_2_0
[Field(Index.UnTokenized, Store = Store.Yes)]
public virtual DateTime? DateTime
{
get { return dateTime; }
set { this.dateTime = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Year)]
public virtual DateTime? DateTimeYear
{
get { return dateTimeYear; }
set { this.dateTimeYear = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Month)]
public virtual DateTime? DateTimeMonth
{
get { return dateTimeMonth; }
set { this.dateTimeMonth = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Day)]
public virtual DateTime? DateTimeDay
{
get { return dateTimeDay; }
set { this.dateTimeDay = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Hour)]
public virtual DateTime? DateTimeHour
{
get { return dateTimeHour; }
set { this.dateTimeHour = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Minute)]
public virtual DateTime? DateTimeMinute
{
get { return dateTimeMinute; }
set { this.dateTimeMinute = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Second)]
public virtual DateTime? DateTimeSecond
{
get { return dateTimeSecond; }
set { this.dateTimeSecond = value; }
}
[Field(Index.UnTokenized, Store = Store.Yes)]
[DateBridge(Resolution.Millisecond)]
public virtual DateTime? DateTimeMillisecond
{
get { return dateTimeMillisecond; }
set { this.dateTimeMillisecond = value; }
}
#endif
[Field(Index.Tokenized)]
public virtual CloudType Type
{
get { return type; }
set { this.type = value; }
}
[Field(Index.Tokenized)]
public virtual bool Storm
{
get { return storm; }
set { this.storm = value; }
}
}
}
\ No newline at end of file
......@@ -106,7 +106,11 @@ protected void TearDown()
Directory.Delete("./lucenedirs/",true);
}
#if NET_2_0
protected override void Configure(IList<Configuration> cfg)
#else
protected override void Configure(IList cfg)
#endif
{
//master
cfg[0].SetProperty("hibernate.search.default.sourceBase", "./lucenedirs/master/copy");
......
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.Reflection;
using NHibernate.Cfg;
using NHibernate.Tool.hbm2ddl;
......@@ -9,12 +11,21 @@ namespace NHibernate.Search.Tests.DirectoryProvider
{
public abstract class MultiplySessionFactoriesTestCase
{
private readonly List<ISessionFactory> sessionFactories = new List<ISessionFactory>();
#if NET_2_0
private List<ISessionFactory> sessionFactories = new List<ISessionFactory>();
private List<Configuration> configurations;
#else
private IList sessionFactories = new ArrayList();
private IList configurations;
#endif
protected abstract int NumberOfSessionFactories { get; }
#if NET_2_0
protected IList<ISessionFactory> SessionFactories
#else
protected IList SessionFactories
#endif
{
get { return sessionFactories; }
}
......@@ -47,7 +58,11 @@ public void BuildSessionFactories()
private void Configure()
{
#if NET_2_0
configurations = new List<Configuration>();
#else
configurations = new ArrayList();
#endif
for (int i = 0; i < NumberOfSessionFactories; i++)
{
configurations.Add(CreateConfiguration());
......@@ -67,7 +82,11 @@ private Configuration CreateConfiguration()
return cfg;
}
#if NET_2_0
protected abstract void Configure(IList<Configuration> cfg);
#else
protected abstract void Configure(IList cfg);
#endif
protected abstract IList Mappings
{
......
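The hunks above and below alternate between generic (IList&lt;Configuration&gt;, List&lt;ISessionFactory&gt;) and non-generic (IList, ArrayList) code paths behind a NET_2_0 symbol, which is the theme of this whole revision. A minimal sketch of the same pattern, assuming the symbol is supplied to the compiler for the 2.0 build (for example csc /define:NET_2_0); the abstract member here is illustrative only.

#if NET_2_0
using System.Collections.Generic;
#else
using System.Collections;
#endif
using NHibernate.Cfg;

public abstract class ConditionalListExample
{
#if NET_2_0
    // .NET 2.0 build: strongly typed generic collection
    protected abstract void Configure(IList<Configuration> cfg);
#else
    // .NET 1.1 build: fall back to the non-generic collection
    protected abstract void Configure(IList cfg);
#endif
}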
using System;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.IO;
using System.Text;
using System.Threading;
using NHibernate.Search.Storage;
using NUnit.Framework;
namespace NHibernate.Search.Tests
{
[TestFixture]
public class FileHleperTestCase
{
[SetUp]
public void SetUp()
{
DirectoryInfo dir = new DirectoryInfo("./filehelpersrc");
dir.Create();
WriteFile(dir, "a");
WriteFile(dir, "b");
dir = new DirectoryInfo(Path.Combine(dir.FullName, "subdir"));
dir.Create();
WriteFile(dir, "c");
}
[TestFixture]
public class FileHleperTestCase
{
[SetUp]
public void SetUp()
{
DirectoryInfo dir = new DirectoryInfo("./filehelpersrc");
dir.Create();
WriteFile(dir, "a");
WriteFile(dir, "b");
dir = new DirectoryInfo(Path.Combine(dir.FullName, "subdir"));
dir.Create();
WriteFile(dir, "c");
}
private static void WriteFile(DirectoryInfo dir, String name)
{
FileInfo a = new FileInfo(Path.Combine(dir.FullName, name));
StreamWriter os = a.CreateText();
os.WriteLine(1);
os.WriteLine(2);
os.WriteLine(3);
os.Flush();
os.Close();
}
private void WriteFile(DirectoryInfo dir, String name)
{
FileInfo a = new FileInfo(Path.Combine(dir.FullName, name));
StreamWriter os = a.CreateText();
os.WriteLine(1);
os.WriteLine(2);
os.WriteLine(3);
os.Flush();
os.Close();
}
[TearDown]
protected void TearDown()
{
DirectoryInfo dir = new DirectoryInfo("./filehelpersrc");
dir.Delete(true);
dir = new DirectoryInfo("./filehelperdest");
dir.Delete(true);
}
[TearDown]
protected void TearDown()
{
DirectoryInfo dir = new DirectoryInfo("./filehelpersrc");
dir.Delete(true);
dir = new DirectoryInfo("./filehelperdest");
dir.Delete(true);
}
[Test]
public void Synchronize()
{
DirectoryInfo src = new DirectoryInfo("./filehelpersrc");
DirectoryInfo dest = new DirectoryInfo("./filehelperdest");
FileHelper.Synchronize(src, dest, true);
Assert.IsTrue(File.Exists(Path.Combine(dest.FullName, "b")));
[Test]
public void Synchronize()
{
DirectoryInfo src = new DirectoryInfo("./filehelpersrc");
DirectoryInfo dest = new DirectoryInfo("./filehelperdest");
FileHelper.Synchronize(src, dest, true);
Assert.IsTrue(File.Exists(Path.Combine(dest.FullName, "b")));
string path = Path.Combine(dest.FullName, Path.Combine("subdir", "c"));
Assert.IsTrue(File.Exists(path));
string path = Path.Combine(dest.FullName, Path.Combine("subdir", "c"));
Assert.IsTrue(File.Exists(path));
//change
Thread.Sleep(2*2000);
StreamWriter os = File.CreateText(Path.Combine(src.FullName, "c"));
os.WriteLine(1);
os.WriteLine(2);
os.WriteLine(3);
os.Flush();
os.Close();
FileInfo test = new FileInfo(Path.Combine(src.FullName, "c"));
FileInfo destTest = new FileInfo(Path.Combine(dest.FullName, "c"));
Assert.AreNotSame(test.LastWriteTime, destTest.LastWriteTime);
FileHelper.Synchronize(src, dest, true);
destTest.Refresh();
Assert.AreEqual(test.LastWriteTime, destTest.LastWriteTime);
Assert.AreEqual(test.Length, destTest.Length);
//change
Thread.Sleep(2*2000);
StreamWriter os = File.CreateText(Path.Combine(src.FullName, "c"));
os.WriteLine(1);
os.WriteLine(2);
os.WriteLine(3);
os.Flush();
os.Close();
FileInfo test = new FileInfo(Path.Combine(src.FullName, "c"));
FileInfo destTest = new FileInfo(Path.Combine(dest.FullName, "c"));
Assert.AreNotSame(test.LastWriteTime, destTest.LastWriteTime);
FileHelper.Synchronize(src, dest, true);
destTest.Refresh();
Assert.AreEqual(test.LastWriteTime, destTest.LastWriteTime);
Assert.AreEqual(test.Length, destTest.Length);
//delete file
test.Delete();
FileHelper.Synchronize(src, dest, true);
destTest.Refresh();
Assert.IsTrue(! destTest.Exists);
}
}
//delete file
test.Delete();
FileHelper.Synchronize(src, dest, true);
destTest.Refresh();
Assert.IsTrue(! destTest.Exists);
}
}
}
\ No newline at end of file
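The fixture above pins down FileHelper.Synchronize's behaviour: new and modified files are copied from the source tree into the destination and, with the boolean flag passed as in the test, files deleted from the source are removed from the destination as well. A minimal usage sketch under those assumptions; the directory names are illustrative.

using System.IO;
using NHibernate.Search.Storage;

public static class SynchronizeExample
{
    public static void Main()
    {
        DirectoryInfo source = new DirectoryInfo("./master-index"); // illustrative paths
        DirectoryInfo target = new DirectoryInfo("./slave-index");

        // Mirrors source into target: copies new/changed files and, with the
        // flag set as in the test above, removes files that vanished from source.
        FileHelper.Synchronize(source, target, true);
    }
}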
......@@ -33,7 +33,12 @@ public static DocumentIdAttribute GetDocumentId(MemberInfo member)
if (objects.Length == 0)
return null;
DocumentIdAttribute documentIdAttribute = (DocumentIdAttribute) objects[0];
#if NET_2_0
documentIdAttribute.Name = documentIdAttribute.Name ?? member.Name;
#else
if (documentIdAttribute.Name == null)
documentIdAttribute.Name = member.Name;
#endif
return documentIdAttribute;
}
......@@ -43,7 +48,12 @@ public static FieldAttribute GetField(MemberInfo member)
if (objects.Length == 0)
return null;
FieldAttribute fieldAttribute = (FieldAttribute) objects[0];
#if NET_2_0
fieldAttribute.Name = fieldAttribute.Name ?? member.Name;
#else
if (fieldAttribute.Name == null)
fieldAttribute.Name = member.Name;
#endif
return fieldAttribute;
}
......
using System;
namespace NHibernate.Search.Attributes
{
/// <summary>
/// Describe the owning entity as being part of the target entity's
/// index (to be more accurate, being part of the indexed object graph)
///
/// Only necessary when an @Indexed class is used as a <see cref="IndexedEmbeddedAttribute" />
/// target class. ContainedIn must mark the property pointing back
/// to the IndexedEmbedded owning Entity
///
/// Not necessary if the class is an Embeddable class.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = false)]
public class ContainedInAttribute : Attribute
{
}
}
\ No newline at end of file
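The summary above says a ContainedIn property must point back from an [IndexedEmbedded] target class to its owning indexed entity. A minimal sketch of that pairing, using hypothetical Place/Address classes that are not part of this commit.

using NHibernate.Search.Attributes;

[Indexed]
public class Place
{
    private int id;
    private Address address;

    [DocumentId]
    public virtual int Id
    {
        get { return id; }
        set { id = value; }
    }

    // Address fields are folded into Place's Lucene document.
    [IndexedEmbedded]
    public virtual Address Address
    {
        get { return address; }
        set { address = value; }
    }
}

[Indexed]
public class Address
{
    private int id;
    private string city;
    private Place place;

    [DocumentId]
    public virtual int Id
    {
        get { return id; }
        set { id = value; }
    }

    [Field(Index.Tokenized)]
    public virtual string City
    {
        get { return city; }
        set { city = value; }
    }

    // Points back to the owning entity, so a change to an Address can trigger
    // re-indexing of the Place documents that embed it.
    [ContainedIn]
    public virtual Place Place
    {
        get { return place; }
        set { place = value; }
    }
}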
namespace NHibernate.Search.Attributes
{
/// <summary>
/// The choices of how to index a field
/// </summary>
public enum Index
{
/// <summary>
/// Index the field's value without an Analyzer, and disable
/// the storing of norms. No norms means that index-time boosting
/// and field length normalization will be disabled. The benefit is
/// less memory usage as norms take up one byte per indexed field
/// for every document in the index.
/// </summary>
NoNormalization,
/// <summary>
/// Do not index the field value. This field can thus not be searched,
/// but one can still access its contents provided it is
/// </summary>
No,
/// <summary>
/// Index the field's value so it can be searched. An Analyzer will be used
/// to tokenize and possibly further normalize the text before its
/// terms will be stored in the index. This is useful for common text.
/// </summary>
Tokenized,
/// <summary>
/// Index the field's value without using an Analyzer, so it can be searched.
/// As no analyzer is used the value will be stored as a single term. This is
/// useful for unique Ids like product numbers.
/// </summary>
UnTokenized
}
}
\ No newline at end of file
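This enum mirrors Lucene's per-field indexing modes; elsewhere in this commit DocumentBuilder converts it through a GetIndex(Index) helper whose body is not shown. A hedged sketch of what that mapping presumably looks like against the Lucene.Net Field.Index constants of this era (UN_TOKENIZED appears verbatim later in the diff, NO_NORMS is assumed).

using Lucene.Net.Documents;
using NHibernate.Search.Attributes;

internal static class IndexMapping
{
    // Presumed translation from the NHibernate.Search enum to Lucene.Net constants.
    internal static Field.Index ToLucene(Index index)
    {
        switch (index)
        {
            case Index.No:              return Field.Index.NO;
            case Index.NoNormalization: return Field.Index.NO_NORMS;
            case Index.UnTokenized:     return Field.Index.UN_TOKENIZED;
            default:                    return Field.Index.TOKENIZED; // Index.Tokenized
        }
    }
}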
using System;
using System.Text;
namespace NHibernate.Search.Attributes
{
......@@ -9,4 +10,4 @@ namespace NHibernate.Search.Attributes
public class IndexedEmbeddedAttribute : Attribute
{
}
}
\ No newline at end of file
}
namespace NHibernate.Search.Attributes
{
/// <summary>
/// Whether or not the value is stored in the document
/// </summary>
public enum Store
{
Yes,
No,
Compress
}
}
\ No newline at end of file
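The same kind of translation presumably exists for Store; Field.Store.YES and Field.Store.NO appear verbatim later in this diff, while Compress is assumed to map onto Lucene's compressed stored fields.

using Lucene.Net.Documents;
using NHibernate.Search.Attributes;

internal static class StoreMapping
{
    // Presumed translation; only YES and NO are exercised by the code in this commit.
    internal static Field.Store ToLucene(Store store)
    {
        switch (store)
        {
            case Store.No:       return Field.Store.NO;
            case Store.Compress: return Field.Store.COMPRESS;
            default:             return Field.Store.YES;
        }
    }
}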
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.Threading;
namespace NHibernate.Search.Backend
......@@ -13,11 +15,20 @@ public interface IBackendQueueProcessorFactory
{
void Initialize(IDictionary props, SearchFactory searchFactory);
#if NET_2_0
/// <summary>
/// Return a runnable implementation responsible for processing the queue to a given backend
/// </summary>
/// <param name="queue"></param>
/// <returns></returns>
WaitCallback GetProcessor(List<LuceneWork> queue);
#else
/// <summary>
/// Return a runnable implementation responsible for processing the queue to a given backend
/// </summary>
/// <param name="queue"></param>
/// <returns></returns>
WaitCallback GetProcessor(IList queue);
#endif
}
}
\ No newline at end of file
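GetProcessor hands back a WaitCallback, so callers can flush the queue either inline or on the thread pool; BatchedQueueingProcessor later in this diff does exactly that depending on its sync flag, though its sync branch is truncated here. A small dispatch sketch under that assumption; it also assumes LuceneWork lives alongside the factory in NHibernate.Search.Backend.

using System.Collections.Generic;
using System.Threading;
using NHibernate.Search.Backend;

public class QueueDispatcher
{
    private readonly IBackendQueueProcessorFactory factory;
    private readonly bool sync;

    public QueueDispatcher(IBackendQueueProcessorFactory factory, bool sync)
    {
        this.factory = factory;
        this.sync = sync;
    }

    public void Flush(List<LuceneWork> queue)
    {
        WaitCallback processor = factory.GetProcessor(queue);
        if (sync)
            processor(null);                          // run on the calling thread
        else
            ThreadPool.QueueUserWorkItem(processor);  // hand off to a pool thread
    }
}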
#if NET_2_0
using System.Collections.Generic;
#else
using System.Collections;
#endif
namespace NHibernate.Search.Backend
{
......@@ -9,6 +13,7 @@ namespace NHibernate.Search.Backend
/// </summary>
public interface IQueueingProcessor
{
#if NET_2_0
/// <summary>
/// Performs all the work in the queue
/// </summary>
......@@ -20,5 +25,18 @@ public interface IQueueingProcessor
/// </summary>
/// <param name="queue"></param>
void CancelWork(List<LuceneWork> queue);
#else
/// <summary>
/// Performs all the work in the queue
/// </summary>
/// <param name="queue">The queue.</param>
void PerformWork(IList queue);
/// <summary>
/// Rollback
/// </summary>
/// <param name="queue"></param>
void CancelWork(IList queue);
#endif
}
}
\ No newline at end of file
using System;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.Threading;
using NHibernate.Search.Backend.Impl.Lucene;
using NHibernate.Search.Impl;
......@@ -23,12 +25,19 @@ public class BatchedQueueingProcessor : IQueueingProcessor
{
this.searchFactory = searchFactory;
//default to sync if none defined
#if NET_2_0
sync =
!"async".Equals((string) properties[Environment.WorkerExecution],
StringComparison.InvariantCultureIgnoreCase);
string backend = (string) properties[Environment.WorkerBackend];
if (StringHelper.IsEmpty(backend) || "lucene".Equals(backend, StringComparison.InvariantCultureIgnoreCase))
#else
sync = !"async".Equals(((string) properties[Environment.WorkerExecution]).ToLower());
string backend = (string) properties[Environment.WorkerBackend];
if (StringHelper.IsEmpty(backend) || "lucene".Equals(backend.ToLower()))
#endif
{
backendQueueProcessorFactory = new LuceneBackendQueueProcessorFactory();
}
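The constructor above reads Environment.WorkerExecution and Environment.WorkerBackend from the configuration properties, defaulting to synchronous execution and the Lucene backend. A sketch of setting those properties before building the SearchFactory; it assumes the Environment constants live in NHibernate.Search and that the literal values "async"/"lucene" are the ones recognised, as the comparisons above suggest.

using NHibernate.Cfg;
using SearchEnvironment = NHibernate.Search.Environment; // assumed location of the constants

public static class WorkerConfigExample
{
    public static Configuration ConfigureWorker(Configuration cfg)
    {
        // "async" pushes the Lucene work queue onto a background thread;
        // any other value (or none) keeps the default synchronous behaviour.
        cfg.SetProperty(SearchEnvironment.WorkerExecution, "async");
        // Only the built-in "lucene" backend is wired up in this revision.
        cfg.SetProperty(SearchEnvironment.WorkerBackend, "lucene");
        return cfg;
    }
}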
......@@ -51,7 +60,11 @@ public class BatchedQueueingProcessor : IQueueingProcessor
//TODO implements parallel batchWorkers (one per Directory)
#if NET_2_0
public void PerformWork(List<LuceneWork> luceneQueue)
#else
public void PerformWork(IList luceneQueue)
#endif
{
WaitCallback processor = backendQueueProcessorFactory.GetProcessor(luceneQueue);
if (sync)
......@@ -64,7 +77,11 @@ public void PerformWork(List<LuceneWork> luceneQueue)
}
}
#if NET_2_0
public void CancelWork(List<LuceneWork> queue)
#else
public void CancelWork(IList queue)
#endif
{
queue.Clear();
}
......
using System;
#if NET_2_0
using System.Collections.Generic;
#else
using System.Collections;
#endif
using NHibernate.Search.Impl;
namespace NHibernate.Search.Backend.Impl.Lucene
......@@ -11,6 +15,7 @@ public class LuceneBackendQueueProcessor
{
private readonly SearchFactory searchFactory;
#if NET_2_0
private readonly List<LuceneWork> queue;
public LuceneBackendQueueProcessor(List<LuceneWork> queue, SearchFactory searchFactory)
......@@ -39,6 +44,27 @@ private static void SortQueueToAvoidDeadLocks(List<LuceneWork> queue, Workspace
else return 1;
});
}
#else
private readonly IList queue;
public LuceneBackendQueueProcessor(IList queue, SearchFactory searchFactory)
{
this.queue = queue;
this.searchFactory = searchFactory;
}
/// <summary>
/// one must lock the directory providers in the exact same order to avoid
/// dead lock between concurrent threads or processes
/// To achieve that, the work will be done per directory provider
/// We rely on the both the DocumentBuilder.GetHashCode() and the GetWorkHashCode() to
/// sort them by predictive order at all times, and to put deletes before adds
/// </summary>
private static void SortQueueToAvoidDeadLocks(IList queue, Workspace luceneWorkspace)
{
throw new NotImplementedException("Need to sort this");
}
#endif
private static long GetWorkHashCode(LuceneWork luceneWork, Workspace luceneWorkspace)
{
......
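The comment above spells out the ordering goal: work must be sorted deterministically (per DocumentBuilder hash and per-work hash, deletes before adds) so that every process locks the directory providers in the same order; the non-generic branch simply throws NotImplementedException, and the generic comparison body is truncated here. A hedged sketch of one way such an ordering could be expressed, with the hash computation passed in since GetWorkHashCode's body is not part of this hunk.

using System;
using System.Collections.Generic;

internal static class QueueOrdering
{
    // Illustrative only: orders the queue by a per-work hash so that concurrent
    // threads/processes always take the directory-provider locks in the same order.
    internal static void Sort(List<LuceneWork> queue, Converter<LuceneWork, long> workHash)
    {
        queue.Sort(delegate(LuceneWork left, LuceneWork right)
        {
            return workHash(left).CompareTo(workHash(right));
        });
    }
}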
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.Threading;
namespace NHibernate.Search.Backend.Impl.Lucene
......@@ -13,9 +15,17 @@ public void Initialize(IDictionary props, SearchFactory searchFactory)
this.searchFactory = searchFactory;
}
#if NET_2_0
public WaitCallback GetProcessor(List<LuceneWork> queue)
{
return new LuceneBackendQueueProcessor(queue, searchFactory).Run;
}
#else
public WaitCallback GetProcessor(IList queue)
{
LuceneBackendQueueProcessor proc = new LuceneBackendQueueProcessor(queue, searchFactory);
return new WaitCallback(proc.Run);
}
#endif
}
}
\ No newline at end of file
namespace NHibernate.Search.Backend
{
/// <summary>
/// Wrapper class around the Lucene indexing parameters <i>mergeFactor</i>, <i>maxMergeDocs</i> and
/// <i>maxBufferedDocs</i>.
/// <p>
/// There are two sets of these parameters. One is for regular indexing the other is for batch indexing
/// triggered by <code>FullTextSessoin.index(Object entity)</code>
/// </summary>
public class LuceneIndexingParameters
{
private int transactionMergeFactor = 10;
private int transactionMaxMergeDocs = int.MaxValue;
private int transactionMaxBufferedDocs = 10;
private int batchMergeFactor = 10;
private int batchMaxMergeDocs = int.MinValue;
private int batchMaxBufferedDocs = 10;
// the defaults settings
private const int DEFAULT_MERGE_FACTOR = 10;
private const int DEFAULT_MAX_MERGE_DOCS = int.MinValue;
private const int DEFAULT_MAX_BUFFERED_DOCS = 10;
#region Constructors
/// <summary>
/// Constructor which instantiates a new parameter object with the the default values.
/// </summary>
public LuceneIndexingParameters()
{
transactionMergeFactor = DEFAULT_MERGE_FACTOR;
batchMergeFactor = DEFAULT_MERGE_FACTOR;
transactionMaxMergeDocs = DEFAULT_MAX_MERGE_DOCS;
batchMaxMergeDocs = DEFAULT_MAX_MERGE_DOCS;
transactionMaxBufferedDocs = DEFAULT_MAX_BUFFERED_DOCS;
batchMaxBufferedDocs = DEFAULT_MAX_BUFFERED_DOCS;
}
#endregion
#region Property methods
/// <summary>
///
/// </summary>
public int TransactionMaxMergeDocs
{
get { return transactionMaxMergeDocs; }
set { transactionMaxMergeDocs = value; }
}
/// <summary>
///
/// </summary>
public int TransactionMergeFactor
{
get { return transactionMergeFactor; }
set { transactionMergeFactor = value; }
}
/// <summary>
///
/// </summary>
public int BatchMaxMergeDocs
{
get { return batchMaxMergeDocs; }
set { batchMaxMergeDocs = value; }
}
/// <summary>
///
/// </summary>
public int BatchMergeFactor
{
get { return batchMergeFactor; }
set { batchMergeFactor = value; }
}
/// <summary>
///
/// </summary>
public int BatchMaxBufferedDocs
{
get { return batchMaxBufferedDocs; }
set { batchMaxBufferedDocs = value; }
}
/// <summary>
///
/// </summary>
public int TransactionMaxBufferedDocs
{
get { return transactionMaxBufferedDocs; }
set { transactionMaxBufferedDocs = value; }
}
#endregion
}
}
\ No newline at end of file
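A quick usage sketch separating the transaction-scoped knobs (regular indexing) from the batch-scoped ones used by FullTextSession.Index(entity); the values are illustrative.

using NHibernate.Search.Backend;

public static class IndexingParametersExample
{
    public static LuceneIndexingParameters Build()
    {
        LuceneIndexingParameters p = new LuceneIndexingParameters();

        // Regular, per-transaction indexing: keep the defaults modest.
        p.TransactionMergeFactor = 10;
        p.TransactionMaxBufferedDocs = 10;

        // Batch (re)indexing: trade memory for throughput.
        p.BatchMergeFactor = 30;
        p.BatchMaxBufferedDocs = 1000;

        return p;
    }
}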
using System;
#if NET_2_0
using System.Collections.Generic;
#else
using System.Collections;
#endif
using System.IO;
using System.Threading;
using log4net;
......@@ -10,161 +14,179 @@
namespace NHibernate.Search.Impl
{
//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene use abstract classes, not interfaces
/// <summary>
/// Lucene workspace
/// This is not intended to be used in a multithreaded environment
/// <p/>
/// One cannot execute modification through an IndexReader when an IndexWriter has been acquired on the same underlying directory
/// One cannot get an IndexWriter when an IndexReader have been acquired and modificed on the same underlying directory
/// The recommended approach is to execute all the modifications on the IndexReaders, {@link #Dispose()} }, and acquire the
/// index writers
/// </summary>
public class Workspace : IDisposable
{
private static readonly ILog log = LogManager.GetLogger(typeof(Workspace));
/// <summary>
/// Lucene workspace
/// This is not intended to be used in a multithreaded environment
/// <p/>
/// One cannot execute modification through an IndexReader when an IndexWriter has been acquired on the same underlying directory
/// One cannot get an IndexWriter when an IndexReader have been acquired and modificed on the same underlying directory
/// The recommended approach is to execute all the modifications on the IndexReaders, {@link #Dispose()} }, and acquire the
/// index writers
/// </summary>
public class Workspace : IDisposable
{
private static ILog log = LogManager.GetLogger(typeof(Workspace));
#if NET_2_0
private Dictionary<IDirectoryProvider, IndexReader> readers = new Dictionary<IDirectoryProvider, IndexReader>();
private Dictionary<IDirectoryProvider, IndexWriter> writers = new Dictionary<IDirectoryProvider, IndexWriter>();
private List<IDirectoryProvider> lockedProviders = new List<IDirectoryProvider>();
#else
private Hashtable readers = new Hashtable();
private Hashtable writers = new Hashtable();
private IList lockedProviders = new ArrayList();
#endif
private SearchFactory searchFactory;
private readonly Dictionary<IDirectoryProvider, IndexReader> readers = new Dictionary<IDirectoryProvider, IndexReader>();
private readonly Dictionary<IDirectoryProvider, IndexWriter> writers = new Dictionary<IDirectoryProvider, IndexWriter>();
private readonly List<IDirectoryProvider> lockedProviders = new List<IDirectoryProvider>();
private readonly SearchFactory searchFactory;
public Workspace(SearchFactory searchFactory)
{
this.searchFactory = searchFactory;
}
public Workspace(SearchFactory searchFactory)
{
this.searchFactory = searchFactory;
}
public DocumentBuilder GetDocumentBuilder(System.Type entity)
{
return searchFactory.GetDocumentBuilder(entity);
}
public DocumentBuilder GetDocumentBuilder(System.Type entity)
{
return searchFactory.GetDocumentBuilder(entity);
}
public IndexReader GetIndexReader(System.Type entity)
{
//TODO NPEs
IDirectoryProvider provider = searchFactory.GetDirectoryProvider(entity);
//one cannot access a reader for update after a writer has been accessed
if (writers.ContainsKey(provider))
throw new AssertionFailure("Tries to read for update a index while a writer is accessed" + entity);
IndexReader reader = null;
readers.TryGetValue(provider, out reader);
public IndexReader GetIndexReader(System.Type entity)
{
//TODO NPEs
IDirectoryProvider provider = searchFactory.GetDirectoryProvider(entity);
//one cannot access a reader for update after a writer has been accessed
if (writers.ContainsKey(provider))
throw new AssertionFailure("Tries to read for update a index while a writer is accessed" + entity);
IndexReader reader = null;
#if NET_2_0
readers.TryGetValue(provider, out reader);
#else
if (readers.ContainsKey(provider))
reader = (IndexReader) readers[provider];
#endif
if (reader != null) return reader;
LockProvider(provider);
try
{
reader = IndexReader.Open(provider.Directory);
readers.Add(provider, reader);
}
catch (IOException e)
{
CleanUp(new SearchException("Unable to open IndexReader for " + entity, e));
}
return reader;
}
if (reader != null) return reader;
LockProvider(provider);
try
{
reader = IndexReader.Open(provider.Directory);
readers.Add(provider, reader);
}
catch (IOException e)
{
CleanUp(new SearchException("Unable to open IndexReader for " + entity, e));
}
return reader;
}
public IndexWriter GetIndexWriter(System.Type entity)
{
IDirectoryProvider provider = searchFactory.GetDirectoryProvider(entity);
//one has to close a reader for update before a writer is accessed
IndexReader reader = null;
#if NET_2_0
readers.TryGetValue(provider, out reader);
#else
if (readers.ContainsKey(provider))
reader = (IndexReader) readers[provider];
#endif
if (reader != null)
{
try
{
reader.Close();
}
catch (IOException e)
{
throw new SearchException("Exception while closing IndexReader", e);
}
readers.Remove(provider);
}
IndexWriter writer = null;
#if NET_2_0
writers.TryGetValue(provider, out writer);
#else
if (writers.ContainsKey(provider))
writer = (IndexWriter) writers[provider];
#endif
if (writer != null) return writer;
LockProvider(provider);
try
{
writer = new IndexWriter(
provider.Directory, searchFactory.GetDocumentBuilder(entity).Analyzer, false
); //have been created at init time
writers.Add(provider, writer);
}
catch (IOException e)
{
CleanUp(new SearchException("Unable to open IndexWriter for " + entity, e));
}
return writer;
}
public IndexWriter GetIndexWriter(System.Type entity)
{
IDirectoryProvider provider = searchFactory.GetDirectoryProvider(entity);
//one has to close a reader for update before a writer is accessed
IndexReader reader = null;
readers.TryGetValue(provider, out reader);
private void LockProvider(IDirectoryProvider provider)
{
//make sure to use a semaphore
object syncLock = searchFactory.GetLockObjForDirectoryProvider(provider);
Monitor.Enter(syncLock);
lockedProviders.Add(provider);
}
if (reader != null)
{
try
{
reader.Close();
}
catch (IOException e)
{
throw new SearchException("Exception while closing IndexReader", e);
}
readers.Remove(provider);
}
IndexWriter writer;
writers.TryGetValue(provider, out writer);
private void CleanUp(SearchException originalException)
{
//release all readers and writers, then release locks
SearchException raisedException = originalException;
foreach (IndexReader reader in readers.Values)
{
try
{
reader.Close();
}
catch (IOException e)
{
if (raisedException != null)
{
log.Error("Subsequent Exception while closing IndexReader", e);
}
else
{
raisedException = new SearchException("Exception while closing IndexReader", e);
}
}
}
foreach (IndexWriter writer in writers.Values)
{
try
{
writer.Close();
}
catch (IOException e)
{
if (raisedException != null)
{
log.Error("Subsequent Exception while closing IndexWriter", e);
}
else
{
raisedException = new SearchException("Exception while closing IndexWriter", e);
}
}
}
foreach (IDirectoryProvider provider in lockedProviders)
{
object syncLock = searchFactory.GetLockObjForDirectoryProvider(provider);
Monitor.Exit(syncLock);
}
readers.Clear();
writers.Clear();
lockedProviders.Clear();
if (raisedException != null) throw raisedException;
}
if (writer != null) return writer;
LockProvider(provider);
try
{
writer = new IndexWriter(
provider.Directory, searchFactory.GetDocumentBuilder(entity).Analyzer, false
); //have been created at init time
writers.Add(provider, writer);
}
catch (IOException e)
{
CleanUp(new SearchException("Unable to open IndexWriter for " + entity, e));
}
return writer;
}
private void LockProvider(IDirectoryProvider provider)
{
//make sure to use a semaphore
object syncLock = searchFactory.GetLockObjForDirectoryProvider(provider);
Monitor.Enter(syncLock);
lockedProviders.Add(provider);
}
private void CleanUp(SearchException originalException)
{
//release all readers and writers, then release locks
SearchException raisedException = originalException;
foreach (IndexReader reader in readers.Values)
{
try
{
reader.Close();
}
catch (IOException e)
{
if (raisedException != null)
{
log.Error("Subsequent Exception while closing IndexReader", e);
}
else
{
raisedException = new SearchException("Exception while closing IndexReader", e);
}
}
}
foreach (IndexWriter writer in writers.Values)
{
try
{
writer.Close();
}
catch (IOException e)
{
if (raisedException != null)
{
log.Error("Subsequent Exception while closing IndexWriter", e);
}
else
{
raisedException = new SearchException("Exception while closing IndexWriter", e);
}
}
}
foreach (IDirectoryProvider provider in lockedProviders)
{
object syncLock = searchFactory.GetLockObjForDirectoryProvider(provider);
Monitor.Exit(syncLock);
}
readers.Clear();
writers.Clear();
lockedProviders.Clear();
if (raisedException != null) throw raisedException;
}
/// <summary>
/// release resources consumed in the workspace if any
/// </summary>
public void Dispose()
{
CleanUp(null);
}
}
/// <summary>
/// release resources consumed in the workspace if any
/// </summary>
public void Dispose()
{
CleanUp(null);
}
}
}
\ No newline at end of file
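The class comment insists that all IndexReader work happens before any IndexWriter is taken on the same directory, and that Dispose releases whatever was opened. A minimal usage sketch under that discipline; the delete step is left as a comment because the exact Lucene.Net reader API in use is not visible in this diff.

using Lucene.Net.Index;
using NHibernate.Search;
using NHibernate.Search.Impl;

public static class WorkspaceExample
{
    public static void Reindex(SearchFactory searchFactory, object entity, object id)
    {
        Workspace workspace = new Workspace(searchFactory);
        try
        {
            System.Type clazz = entity.GetType();

            // 1. Reader-side work first: the builder supplies the id term that
            //    identifies the stale document.
            IndexReader reader = workspace.GetIndexReader(clazz);
            Term stale = workspace.GetDocumentBuilder(clazz).GetTerm(id);
            // ... delete the stale document through 'reader' here ...

            // 2. Only then ask for the writer; GetIndexWriter closes the open
            //    reader for that provider before handing the writer out.
            IndexWriter writer = workspace.GetIndexWriter(clazz);
            writer.AddDocument(workspace.GetDocumentBuilder(clazz).GetDocument(entity, id));
        }
        finally
        {
            // Closes any remaining readers/writers and releases the provider locks.
            workspace.Dispose();
        }
    }
}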
using System;
#if NET_2_0
using System.Collections.Generic;
#else
using System.Collections;
#endif
using System.Reflection;
using Lucene.Net.Documents;
using NHibernate.Search.Attributes;
......@@ -7,20 +11,37 @@
namespace NHibernate.Search.Bridge
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class BridgeFactory
{
#if NET_2_0
private static readonly Dictionary<String, IFieldBridge> builtInBridges = new Dictionary<String, IFieldBridge>();
#else
private static readonly Hashtable builtInBridges = new Hashtable();
#endif
private BridgeFactory()
{
}
#if NET_2_0
public static readonly ITwoWayFieldBridge DOUBLE = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<double>());
public static readonly ITwoWayFieldBridge FLOAT = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<float>());
public static readonly ITwoWayFieldBridge SHORT = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<short>());
public static readonly ITwoWayFieldBridge INTEGER = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<int>());
public static readonly ITwoWayFieldBridge LONG = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<long>());
public static readonly ITwoWayFieldBridge BOOLEAN = new TwoWayString2FieldBridgeAdaptor(new ValueTypeBridge<bool>());
#else
public static readonly ITwoWayFieldBridge DOUBLE = new TwoWayString2FieldBridgeAdaptor(new DoubleBridge());
public static readonly ITwoWayFieldBridge FLOAT = new TwoWayString2FieldBridgeAdaptor(new FloatBridge());
public static readonly ITwoWayFieldBridge SHORT = new TwoWayString2FieldBridgeAdaptor(new ShortBridge());
public static readonly ITwoWayFieldBridge INTEGER = new TwoWayString2FieldBridgeAdaptor(new IntBridge());
public static readonly ITwoWayFieldBridge LONG = new TwoWayString2FieldBridgeAdaptor(new LongBridge());
public static readonly ITwoWayFieldBridge BOOLEAN = new TwoWayString2FieldBridgeAdaptor(new BoolBridge());
#endif
public static readonly ITwoWayFieldBridge STRING = new TwoWayString2FieldBridgeAdaptor(new StringBridge());
public static readonly IFieldBridge DATE_YEAR = new String2FieldBridgeAdaptor(DateBridge.DATE_YEAR);
......@@ -90,9 +111,13 @@ public static IFieldBridge GuessType(MemberInfo member)
{
//find in built-ins
System.Type returnType = GetMemberType(member);
#if NET_2_0
if (IsNullable(returnType))
returnType = returnType.GetGenericArguments()[0];
builtInBridges.TryGetValue(returnType.Name, out bridge);
#else
bridge = (IFieldBridge) (builtInBridges.ContainsKey(returnType.Name) ? builtInBridges[returnType.Name] : null);
#endif
if (bridge == null && returnType.IsEnum)
{
bridge = new TwoWayString2FieldBridgeAdaptor(
......@@ -107,7 +132,12 @@ public static IFieldBridge GuessType(MemberInfo member)
private static bool IsNullable(System.Type returnType)
{
#if NET_2_0
return returnType.IsGenericType && typeof(Nullable<>) == returnType.GetGenericTypeDefinition();
#else
// TODO: Work out if this is adequate
return returnType.IsClass;
#endif
}
private static System.Type GetMemberType(MemberInfo member)
......
......@@ -7,8 +7,16 @@ namespace NHibernate.Search.Bridge
/// <summary>
/// Put an object inside the document.
/// </summary>
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public interface IFieldBridge
{
#if NET_2_0
void Set(string idKeywordName, object id, Document doc, Field.Store store, Field.Index index, float? boost);
#else
void Set(string idKeywordName, object id, Document doc, Field.Store store, Field.Index index, float boost);
#endif
}
}
\ No newline at end of file
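String2FieldBridgeAdaptor below shows the pattern a bridge follows: turn the value into a string, build a Lucene Field with the requested store/index flags, apply the optional boost and add it to the document. A hedged custom bridge sketch along the same lines; the lower-casing behaviour is invented for illustration and is not the TruncateFieldBridge used in the tests.

using Lucene.Net.Documents;
using NHibernate.Search.Bridge;

// Illustrative custom bridge: indexes the value lower-cased.
public class LowerCaseFieldBridge : IFieldBridge
{
    public void Set(string name, object value, Document document,
                    Field.Store store, Field.Index index, float? boost)
    {
        if (value == null) return; // nothing to index

        Field field = new Field(name, value.ToString().ToLowerInvariant(), store, index);
        if (boost != null)
            field.SetBoost(boost.Value);
        document.Add(field);
    }
}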
......@@ -4,6 +4,10 @@
namespace NHibernate.Search.Bridge
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public interface ITwoWayFieldBridge : IFieldBridge
{
object Get(string value, Document document);
......
using System;
using Lucene.Net.Documents;
using NHibernate.Search.Bridge;
using NHibernate.Util;
namespace NHibernate.Search.Bridge
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class String2FieldBridgeAdaptor : IFieldBridge
{
private readonly IStringBridge stringBridge;
......@@ -13,16 +18,25 @@ public String2FieldBridgeAdaptor(IStringBridge stringBridge)
this.stringBridge = stringBridge;
}
public void Set(String name, Object value, Document document, Field.Store store, Field.Index index, float? boost)
#if NET_2_0
public void Set(String name, Object value, Document document, Field.Store store, Field.Index index, float? boost)
#else
public void Set(String name, Object value, Document document, Field.Store store, Field.Index index, float boost)
#endif
{
String indexedString = stringBridge.ObjectToString(value);
//Do not add fields on empty strings, seems a sensible default in most situations
if (StringHelper.IsNotEmpty(indexedString))
{
Field field = new Field(name, indexedString, store, index);
#if NET_2_0
if (boost != null)
field.SetBoost(boost.Value);
document.Add(field);
#else
if (boost != 0F)
field.SetBoost(boost);
#endif
document.Add(field);
}
}
}
......
......@@ -3,6 +3,10 @@
namespace NHibernate.Search.Bridge
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class TwoWayString2FieldBridgeAdaptor : String2FieldBridgeAdaptor, ITwoWayFieldBridge
{
private readonly ITwoWayStringBridge stringBridge;
......
using System;
using System.Collections;
using System.IO;
#if NET_2_0
using System.Security.AccessControl;
#endif
using log4net;
using NHibernate.Search.Impl;
using NHibernate.Util;
namespace NHibernate.Search
{
public class DirectoryProviderHelper
{
private static readonly ILog log = LogManager.GetLogger(typeof(DirectoryProviderHelper));
public class DirectoryProviderHelper
{
private static ILog log = LogManager.GetLogger(typeof (DirectoryProviderHelper));
/// <summary>
/// Build a directory name out of a root and relative path, guessing the significant part
/// and checking for the file availability
/// </summary>
public static String GetSourceDirectory(
String rootPropertyName, String relativePropertyName,
String directoryProviderName, IDictionary properties)
{
//TODO check that it's a directory
String root = (string) properties[rootPropertyName];
String relative = (string) properties[relativePropertyName];
if (log.IsDebugEnabled)
{
log.Debug(
"Guess source directory from " + rootPropertyName + " " + root != null
? root : "<null>" + " and " + relativePropertyName + " " + (relative != null ? relative: "<null>")
);
}
if (relative == null) relative = directoryProviderName;
if (StringHelper.IsEmpty(root))
{
log.Debug("No root directory, go with relative " + relative);
DirectoryInfo sourceFile = new DirectoryInfo(relative);
if (! sourceFile.Exists)
{
throw new HibernateException("Unable to read source directory: " + relative);
}
//else keep source as it
}
else
{
DirectoryInfo rootDir = new DirectoryInfo(root);
if (rootDir.Exists)
{
DirectoryInfo sourceFile = new DirectoryInfo(Path.Combine(root, relative));
if (! sourceFile.Exists) sourceFile.Create();
log.Debug("Get directory from root + relative");
try
{
relative = sourceFile.FullName;
}
catch (IOException e)
{
throw new AssertionFailure("Unable to get canonical path: " + root + " + " + relative);
}
}
else
{
throw new SearchException(rootPropertyName + " does not exist");
}
}
return relative;
}
/// <summary>
/// Build a directory name out of a root and relative path, guessing the significant part
/// and checking for the file availability
/// </summary>
public static String GetSourceDirectory(
String rootPropertyName, String relativePropertyName,
String directoryProviderName, IDictionary properties)
{
//TODO check that it's a directory
String root = (string) properties[rootPropertyName];
String relative = (string) properties[relativePropertyName];
if (log.IsDebugEnabled)
{
log.Debug(
"Guess source directory from " + rootPropertyName + " " + root != null
? root
: "<null>" + " and " + relativePropertyName + " " + (relative != null ? relative : "<null>")
);
}
if (relative == null) relative = directoryProviderName;
if (StringHelper.IsEmpty(root))
{
log.Debug("No root directory, go with relative " + relative);
DirectoryInfo sourceFile = new DirectoryInfo(relative);
if (! sourceFile.Exists)
{
throw new HibernateException("Unable to read source directory: " + relative);
}
//else keep source as it
}
else
{
DirectoryInfo rootDir = new DirectoryInfo(root);
if (rootDir.Exists)
{
DirectoryInfo sourceFile = new DirectoryInfo(Path.Combine(root, relative));
if (! sourceFile.Exists) sourceFile.Create();
log.Debug("Get directory from root + relative");
try
{
relative = sourceFile.FullName;
}
catch (IOException e)
{
throw new AssertionFailure("Unable to get canonical path: " + root + " + " + relative);
}
}
else
{
throw new SearchException(rootPropertyName + " does not exist");
}
}
return relative;
}
public static DirectoryInfo DetermineIndexDir(String directoryProviderName, IDictionary properties)
{
bool createIfMissing;
#if NET_2_0
string indexBase = (string) properties["indexBase"] ?? ".";
string shouldCreate = (string)properties["indexBase.create"] ?? "false";
bool.TryParse(shouldCreate, out createIfMissing);
#else
string indexBase = (string) properties["indexBase"] != null ? (string) properties["indexBase"] : ".";
string shouldCreate = (string) properties["indexBase.create"] != null ? (string) properties["indexBase.create"] : "false";
try
{
createIfMissing = bool.Parse(shouldCreate);
}
catch
{
createIfMissing = false;
}
#endif
//We need this to allow using the search from the web, where the "." directory is
//somewhere in the system root.
indexBase = indexBase.Replace("~", AppDomain.CurrentDomain.BaseDirectory);
DirectoryInfo indexDir = new DirectoryInfo(indexBase);
if (!(indexDir.Exists))
{
if(!createIfMissing)
throw new HibernateException(String.Format("Index directory does not exists: {0}", indexBase));
indexDir.Create();
}
public static DirectoryInfo DetermineIndexDir(String directoryProviderName, IDictionary properties)
{
bool createIfMissing;
string indexBase = (string) properties["indexBase"] ?? ".";
string shouldCreate = (string) properties["indexBase.create"] ?? "false";
bool.TryParse(shouldCreate, out createIfMissing);
if (!HasWriteAccess(indexDir))
{
throw new HibernateException("Cannot write into index directory: " + indexBase);
}
//We need this to allow using the search from the web, where the "." directory is
//somewhere in the system root.
indexBase = indexBase.Replace("~", AppDomain.CurrentDomain.BaseDirectory);
DirectoryInfo indexDir = new DirectoryInfo(indexBase);
if (!(indexDir.Exists))
{
if (!createIfMissing)
throw new HibernateException(String.Format("Index directory does not exists: {0}", indexBase));
indexDir.Create();
}
indexDir = new DirectoryInfo(Path.Combine(indexDir.FullName, directoryProviderName));
return indexDir;
}
if (!HasWriteAccess(indexDir))
{
throw new HibernateException("Cannot write into index directory: " + indexBase);
}
indexDir = new DirectoryInfo(Path.Combine(indexDir.FullName, directoryProviderName));
return indexDir;
}
private static bool HasWriteAccess(DirectoryInfo indexDir)
{
string tempFileName = Path.Combine(indexDir.FullName, Guid.NewGuid().ToString());
//Yuck! but it is the simplest way
try
{
File.CreateText(tempFileName).Close();
}
catch (UnauthorizedAccessException)
{
return false;
}
try
{
File.Delete(tempFileName);
}
catch (UnauthorizedAccessException)
{
//we may have permissions to create but not delete, ignoring
}
return true;
}
}
private static bool HasWriteAccess(DirectoryInfo indexDir)
{
string tempFileName = Path.Combine(indexDir.FullName, Guid.NewGuid().ToString());
//Yuck! but it is the simplest way
try
{
File.CreateText(tempFileName).Close();
}
catch (UnauthorizedAccessException)
{
return false;
}
try
{
File.Delete(tempFileName);
}
catch (UnauthorizedAccessException)
{
//we may have permissions to create but not delete, ignoring
}
return true;
}
}
}
\ No newline at end of file
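DetermineIndexDir reads "indexBase" and "indexBase.create" from the provider properties, expands "~" to the application base directory, optionally creates the directory, probes write access with a throwaway file and finally appends the provider name. A small usage sketch; the property values and provider name are illustrative.

using System.Collections;
using System.IO;
using NHibernate.Search;

public static class IndexDirExample
{
    public static DirectoryInfo Resolve()
    {
        IDictionary properties = new Hashtable();
        properties["indexBase"] = "~/lucene-indexes"; // "~" expands to the AppDomain base directory
        properties["indexBase.create"] = "true";      // create the directory if it is missing

        // Returns <indexBase>/<providerName> after the write-access probe.
        return DirectoryProviderHelper.DetermineIndexDir("Documents", properties);
    }
}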
using System;
using System.Collections.Generic;
using System.Reflection;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using System.Collections;
using Iesi.Collections;
#endif
using log4net;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using NHibernate.Mapping;
using NHibernate.Search.Attributes;
using NHibernate.Search.Backend;
using NHibernate.Search.Bridge;
using NHibernate.Search.Impl;
using NHibernate.Search.Storage;
using NHibernate.Util;
using FieldInfo=System.Reflection.FieldInfo;
using FieldInfo = System.Reflection.FieldInfo;
namespace NHibernate.Search.Engine
{
/// <summary>
/// Set up and provide a manager for indexes classes
/// </summary>
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class DocumentBuilder
{
private static readonly ILog log = LogManager.GetLogger(typeof(DocumentBuilder));
......@@ -29,24 +39,36 @@ public class DocumentBuilder
private String idKeywordName;
private MemberInfo idGetter;
private readonly Analyzer analyzer;
#if NET_2_0
private float? idBoost;
#else
private float idBoost;
#endif
public const string CLASS_FIELDNAME = "_hibernate_class";
private ITwoWayFieldBridge idBridge;
#if NET_2_0
private ISet<System.Type> mappedSubclasses = new HashedSet<System.Type>();
#else
private ISet mappedSubclasses = new HashedSet();
#endif
private int level = 0;
private int maxLevel = int.MaxValue;
public DocumentBuilder(System.Type clazz, Analyzer analyzer, IDirectoryProvider directory)
{
beanClass = clazz;
this.beanClass = clazz;
this.analyzer = analyzer;
directoryProvider = directory;
this.directoryProvider = directory;
if (clazz == null) throw new AssertionFailure("Unable to build a DocumemntBuilder with a null class");
rootPropertiesMetadata.boost = GetBoost(clazz);
#if NET_2_0
Set<System.Type> processedClasses = new HashedSet<System.Type>();
#else
ISet processedClasses = new HashedSet();
#endif
processedClasses.Add(clazz);
InitializeMembers(clazz, rootPropertiesMetadata, true, "", processedClasses);
//processedClasses.remove( clazz ); for the sake of completness
......@@ -57,9 +79,15 @@ public DocumentBuilder(System.Type clazz, Analyzer analyzer, IDirectoryProvider
}
}
#if NET_2_0
private void InitializeMembers(
System.Type clazz, PropertiesMetadata propertiesMetadata, bool isRoot, String prefix,
Set<System.Type> processedClasses)
#else
private void InitializeMembers(
System.Type clazz, PropertiesMetadata propertiesMetadata, bool isRoot, String prefix,
ISet processedClasses)
#endif
{
PropertyInfo[] propertyInfos = clazz.GetProperties();
foreach (PropertyInfo propertyInfo in propertyInfos)
......@@ -67,8 +95,8 @@ public DocumentBuilder(System.Type clazz, Analyzer analyzer, IDirectoryProvider
InitializeMember(propertyInfo, propertiesMetadata, isRoot, prefix, processedClasses);
}
FieldInfo[] fields = clazz.GetFields(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo fieldInfo in fields)
System.Reflection.FieldInfo[] fields = clazz.GetFields(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance);
foreach (System.Reflection.FieldInfo fieldInfo in fields)
{
InitializeMember(fieldInfo, propertiesMetadata, isRoot, prefix, processedClasses);
}
......@@ -76,7 +104,11 @@ public DocumentBuilder(System.Type clazz, Analyzer analyzer, IDirectoryProvider
private void InitializeMember(
MemberInfo member, PropertiesMetadata propertiesMetadata, bool isRoot,
#if NET_2_0
String prefix, ISet<System.Type> processedClasses)
#else
String prefix, ISet processedClasses)
#endif
{
DocumentIdAttribute documentIdAnn = AttributeUtil.GetDocumentId(member);
if (isRoot && documentIdAnn != null)
......@@ -91,7 +123,7 @@ public DocumentBuilder(System.Type clazz, Analyzer analyzer, IDirectoryProvider
IFieldBridge fieldBridge = BridgeFactory.GuessType(member);
if (fieldBridge is ITwoWayFieldBridge)
{
idBridge = (ITwoWayFieldBridge) fieldBridge;
idBridge = (ITwoWayFieldBridge)fieldBridge;
}
else
{
......@@ -147,6 +179,7 @@ private static Field.Index GetIndex(Index index)
}
}
#if NET_2_0
private static float? GetBoost(MemberInfo element)
{
if (element == null) return null;
......@@ -155,6 +188,16 @@ private static Field.Index GetIndex(Index index)
return null;
return boost.Value;
}
#else
private static float GetBoost(MemberInfo element)
{
if (element == null) return 0;
BoostAttribute boost = AttributeUtil.GetBoost(element);
if (boost == null)
return 0;
return boost.Value;
}
#endif
private static object GetMemberValue(Object instnace, MemberInfo getter)
{
......@@ -162,14 +205,17 @@ private static object GetMemberValue(Object instnace, MemberInfo getter)
if (info != null)
return info.GetValue(instnace, null);
else
return ((FieldInfo) getter).GetValue(instnace);
return ((FieldInfo)getter).GetValue(instnace);
}
/// <summary>
/// This add the new work to the queue, so it can be processed in a batch fashion later
/// </summary>
public void AddToWorkQueue(object entity, object id, WorkType workType, List<LuceneWork> queue,
SearchFactory searchFactory)
#if NET_2_0
public void AddToWorkQueue(object entity, object id, WorkType workType, List<LuceneWork> queue, SearchFactory searchFactory)
#else
public void AddToWorkQueue(object entity, object id, WorkType workType, IList queue, SearchFactory searchFactory)
#endif
{
System.Type entityClass = NHibernateUtil.GetClass(entity);
foreach (LuceneWork luceneWork in queue)
......@@ -231,26 +277,33 @@ not supported
}
*/
private void ProcessContainedInValue(object value, List<LuceneWork> queue, System.Type valueClass,
DocumentBuilder builder, SearchFactory searchFactory)
{
object id = GetMemberValue(value, builder.idGetter);
builder.AddToWorkQueue(value, id, WorkType.Update, queue, searchFactory);
}
#if NET_2_0
private void ProcessContainedInValue(object value, List<LuceneWork> queue, System.Type valueClass, DocumentBuilder builder, SearchFactory searchFactory)
#else
private void ProcessContainedInValue(object value, IList queue, System.Type valueClass, DocumentBuilder builder, SearchFactory searchFactory)
#endif
{
object id = DocumentBuilder.GetMemberValue(value, builder.idGetter);
builder.AddToWorkQueue(value, id, WorkType.Update, queue, searchFactory);
}
public Document GetDocument(object instance, object id)
{
Document doc = new Document();
System.Type instanceClass = instance.GetType();
#if NET_2_0
if (rootPropertiesMetadata.boost != null)
{
doc.SetBoost(rootPropertiesMetadata.boost.Value);
}
// TODO: Check if that should be an else?
#else
if (rootPropertiesMetadata.boost != 0)
doc.SetBoost(rootPropertiesMetadata.boost);
#endif
// TODO: Check if that should be an else?
{
Field classField =
new Field(CLASS_FIELDNAME, instanceClass.AssemblyQualifiedName, Field.Store.YES,
Field.Index.UN_TOKENIZED);
new Field(CLASS_FIELDNAME, instanceClass.AssemblyQualifiedName, Field.Store.YES, Field.Index.UN_TOKENIZED);
doc.Add(classField);
idBridge.Set(idKeywordName, id, doc, Field.Store.YES, Field.Index.UN_TOKENIZED, idBoost);
}
......@@ -262,6 +315,7 @@ private static void BuildDocumentFields(Object instance, Document doc, Propertie
{
if (instance == null) return;
#if NET_2_0
for (int i = 0; i < propertiesMetadata.keywordNames.Count; i++)
{
MemberInfo member = propertiesMetadata.keywordGetters[i];
......@@ -306,6 +360,63 @@ private static void BuildDocumentFields(Object instance, Document doc, Propertie
//TODO handle boost at embedded level: already stored in propertiesMedatada.boost
BuildDocumentFields(value, doc, propertiesMetadata.embeddedPropertiesMetadata[i]);
}
#else
for (int i = 0; i < propertiesMetadata.keywordNames.Count; i++)
{
MemberInfo member = (MemberInfo) propertiesMetadata.keywordGetters[i];
IFieldBridge bridge = (IFieldBridge) propertiesMetadata.keywordBridges[i];
Object value = GetMemberValue(instance, member);
bridge.Set(
(string) propertiesMetadata.keywordNames[i], value, doc, Field.Store.YES,
Field.Index.UN_TOKENIZED, GetBoost(member)
);
}
for (int i = 0; i < propertiesMetadata.textNames.Count; i++)
{
MemberInfo member = (MemberInfo) propertiesMetadata.textGetters[i];
IFieldBridge bridge = (IFieldBridge) propertiesMetadata.textBridges[i];
Object value = GetMemberValue(instance, member);
bridge.Set(
(string) propertiesMetadata.textNames[i], value, doc, Field.Store.YES,
Field.Index.TOKENIZED, GetBoost(member)
);
}
for (int i = 0; i < propertiesMetadata.unstoredNames.Count; i++)
{
MemberInfo member = (MemberInfo) propertiesMetadata.unstoredGetters[i];
IFieldBridge bridge = (IFieldBridge) propertiesMetadata.unstoredBridges[i];
Object value = GetMemberValue(instance, member);
bridge.Set(
(string) propertiesMetadata.unstoredNames[i], value, doc, Field.Store.NO,
Field.Index.TOKENIZED, GetBoost(member)
);
}
for (int i = 0; i < propertiesMetadata.fieldNames.Count; i++)
{
MemberInfo member = (MemberInfo) propertiesMetadata.fieldGetters[i];
IFieldBridge bridge = (IFieldBridge) propertiesMetadata.fieldBridges[i];
Object value = GetMemberValue(instance, member);
bridge.Set(
(string) propertiesMetadata.fieldNames[i], value, doc, (Field.Store) propertiesMetadata.fieldStore[i],
(Field.Index) propertiesMetadata.fieldIndex[i], GetBoost(member)
);
}
for (int i = 0; i < propertiesMetadata.embeddedGetters.Count; i++)
{
MemberInfo member = (MemberInfo) propertiesMetadata.embeddedGetters[i];
PropertiesMetadata md = (PropertiesMetadata) propertiesMetadata.embeddedPropertiesMetadata[i];
Object value = GetMemberValue(instance, member);
//if ( ! Hibernate.isInitialized( value ) ) continue; //this sounds like a bad idea
//TODO handle boost at embedded level: already stored in propertiesMedatada.boost
BuildDocumentFields(value, doc, md);
}
#endif
}
public Term GetTerm(object id)
......@@ -355,7 +466,8 @@ public static object GetDocumentId(SearchFactory searchFactory, Document documen
return builder.IdBridge.Get(builder.getIdKeywordName(), document);
}
public void PostInitialize(ISet<System.Type> indexedClasses)
#if NET_2_0
public void PostInitialize(ISet<System.Type> indexedClasses)
{
//this method does not requires synchronization
System.Type plainClass = beanClass;
......@@ -374,7 +486,7 @@ public ISet<System.Type> MappedSubclasses
get { return mappedSubclasses; }
}
private class PropertiesMetadata
{
public float? boost = null;
......@@ -396,5 +508,52 @@ private class PropertiesMetadata
public readonly List<PropertiesMetadata> embeddedPropertiesMetadata = new List<PropertiesMetadata>();
public readonly List<MemberInfo> containedInGetters = new List<MemberInfo>();
}
#else
public void PostInitialize(ISet indexedClasses)
{
//this method does not requires synchronization
System.Type plainClass = beanClass;
#if NET_2_0
ISet tempMappedSubclasses = new HashedSet<System.Type>();
#else
ISet tempMappedSubclasses = new HashedSet();
#endif
//together with the caller this creates a o(2), but I think it's still faster than create the up hierarchy for each class
foreach (System.Type currentClass in indexedClasses)
{
if (plainClass.IsAssignableFrom(currentClass))
tempMappedSubclasses.Add(currentClass);
}
mappedSubclasses = tempMappedSubclasses;
}
public ISet MappedSubclasses
{
get { return mappedSubclasses; }
}
private class PropertiesMetadata
{
public float boost = 0;
public readonly IList keywordGetters = new ArrayList();
public readonly IList keywordNames = new ArrayList();
public readonly IList keywordBridges = new ArrayList();
public readonly IList unstoredGetters = new ArrayList();
public readonly IList unstoredNames = new ArrayList();
public readonly IList unstoredBridges = new ArrayList();
public readonly IList textGetters = new ArrayList();
public readonly IList textNames = new ArrayList();
public readonly IList textBridges = new ArrayList();
public readonly IList fieldNames = new ArrayList();
public readonly IList fieldGetters = new ArrayList();
public readonly IList fieldBridges = new ArrayList();
public readonly IList fieldStore = new ArrayList();
public readonly IList fieldIndex = new ArrayList();
public readonly IList embeddedGetters = new ArrayList();
public readonly IList embeddedPropertiesMetadata = new ArrayList();
public readonly IList containedInGetters = new ArrayList();
}
#endif
}
}
\ No newline at end of file
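DocumentBuilder turns an entity instance into a Lucene Document (GetDocument), produces the Term identifying its existing document (GetTerm) and enqueues add/delete work (AddToWorkQueue). A minimal sketch of the direct conversion path, assuming the builder is fetched from an initialized SearchFactory via its DocumentBuilders map.

using Lucene.Net.Documents;
using Lucene.Net.Index;
using NHibernate.Search;
using NHibernate.Search.Engine;

public static class DocumentBuilderExample
{
    public static Document ToDocument(SearchFactory searchFactory, object entity, object id)
    {
        // One builder is kept per indexed class; this lookup throws if the type is not mapped.
        DocumentBuilder builder = searchFactory.DocumentBuilders[entity.GetType()];

        Term idTerm = builder.GetTerm(id);              // identifies the old document for deletion
        Document doc = builder.GetDocument(entity, id); // fields built from the mapped members
        return doc;
    }
}

In practice the factory would come from SearchFactory.GetSearchFactory(session), as shown later in this diff.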
using System;
using System.Collections.Generic;
using System.Text;
namespace NHibernate.Search.Engine
{
/// <summary>
///
/// </summary>
public interface ISearchFactoryImplementor : ISearchFactory
{
}
}
using System;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using System.Collections;
using Iesi.Collections;
#endif
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Standard;
using NHibernate.Cfg;
......@@ -15,6 +20,10 @@
namespace NHibernate.Search
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class SearchFactory
{
private static readonly WeakHashtable sessionFactory2SearchFactory = new WeakHashtable();
......@@ -23,23 +32,28 @@ public class SearchFactory
/// <summary>
/// Note that we will lock on the values in this dictionary
/// </summary>
private readonly Dictionary<IDirectoryProvider, object> lockableDirectoryProviders =
new Dictionary<IDirectoryProvider, object>();
private readonly Dictionary<System.Type, DocumentBuilder> documentBuilders =
new Dictionary<System.Type, DocumentBuilder>();
#if NET_2_0
private readonly Dictionary<IDirectoryProvider, object> lockableDirectoryProviders = new Dictionary<IDirectoryProvider, object>();
private readonly Dictionary<System.Type, DocumentBuilder> documentBuilders = new Dictionary<System.Type, DocumentBuilder>();
#else
private readonly Hashtable lockableDirectoryProviders = new Hashtable();
private readonly Hashtable documentBuilders = new Hashtable();
#endif
private readonly IQueueingProcessor queueingProcessor;
private IBackendQueueProcessorFactory backendQueueProcessorFactory;
#if NET_2_0
public Dictionary<System.Type, DocumentBuilder> DocumentBuilders
#else
public Hashtable DocumentBuilders
#endif
{
get { return documentBuilders; }
}
public static SearchFactory GetSearchFactory(ISession session)
{
SearchFactory searchFactory = (SearchFactory) sessionFactory2SearchFactory[session.SessionFactory];
SearchFactory searchFactory = (SearchFactory)sessionFactory2SearchFactory[session.SessionFactory];
if (searchFactory == null)
{
throw new HibernateException(
......@@ -51,7 +65,7 @@ public static SearchFactory GetSearchFactory(ISession session)
public static SearchFactory GetSearchFactory(ISessionFactory sessionFactory)
{
return (SearchFactory) sessionFactory2SearchFactory[sessionFactory];
return (SearchFactory)sessionFactory2SearchFactory[sessionFactory];
}
public static void Initialize(Configuration cfg, ISessionFactory sessionFactory)
......@@ -80,8 +94,7 @@ private SearchFactory(Configuration cfg)
catch (Exception e)
{
throw new SearchException(
string.Format("Lucene analyzer class '{0}' defined in property '{1}' could not be found.",
analyzerClassName, Environment.AnalyzerClass),
string.Format("Lucene analyzer class '{0}' defined in property '{1}' could not be found.", analyzerClassName, Environment.AnalyzerClass),
e
);
}
......@@ -94,20 +107,19 @@ private SearchFactory(Configuration cfg)
Analyzer analyzer;
try
{
analyzer = (Analyzer) Activator.CreateInstance(analyzerClass);
analyzer = (Analyzer)Activator.CreateInstance(analyzerClass);
}
catch (InvalidCastException e)
{
throw new SearchException(
string.Format("Lucene analyzer does not implement {0}: {1}", typeof(Analyzer).FullName,
analyzerClassName)
string.Format("Lucene analyzer does not implement {0}: {1}", typeof(Analyzer).FullName, analyzerClassName)
);
}
catch (Exception e)
{
throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClassName);
}
queueingProcessor = new BatchedQueueingProcessor(this, cfg.Properties);
this.queueingProcessor = new BatchedQueueingProcessor(this, cfg.Properties);
DirectoryProviderFactory factory = new DirectoryProviderFactory();
......@@ -123,19 +135,27 @@ private SearchFactory(Configuration cfg)
documentBuilders.Add(mappedClass, documentBuilder);
}
}
#if NET_2_0
ISet<System.Type> classes = new HashedSet<System.Type>(documentBuilders.Keys);
#else
ISet classes = new HashedSet(documentBuilders.Keys);
#endif
foreach (DocumentBuilder documentBuilder in documentBuilders.Values)
{
documentBuilder.PostInitialize(classes);
}
}
#if NET_2_0
public void ExecuteQueue(List<LuceneWork> luceneWork, ISession session)
#else
public void ExecuteQueue(IList luceneWork, ISession session)
#endif
{
if (session.Transaction.IsActive)
{
ISessionImplementor si = (ISessionImplementor) session;
((SearchInterceptor) si.Interceptor).RegisterSyncronization(si.Transaction, luceneWork);
ISessionImplementor si = (ISessionImplementor)session;
((SearchInterceptor)si.Interceptor).RegisterSyncronization(si.Transaction, luceneWork);
}
else
{
......@@ -143,7 +163,11 @@ public void ExecuteQueue(List<LuceneWork> luceneWork, ISession session)
}
}
#if NET_2_0
public void ExecuteQueueImmediate(List<LuceneWork> luceneWork)
#else
public void ExecuteQueueImmediate(IList luceneWork)
#endif
{
queueingProcessor.PerformWork(luceneWork);
}
......@@ -156,9 +180,13 @@ public DocumentBuilder GetDocumentBuilder(object entity)
public DocumentBuilder GetDocumentBuilder(System.Type type)
{
#if NET_2_0
DocumentBuilder builder;
DocumentBuilders.TryGetValue(type, out builder);
return builder;
#else
return (DocumentBuilder) (DocumentBuilders.ContainsKey(type.Name) ? DocumentBuilders[type.Name] : null);
#endif
}
public IDirectoryProvider GetDirectoryProvider(System.Type entity)
......@@ -176,7 +204,11 @@ public void PerformWork(object entity, object id, ISession session, WorkType wor
DocumentBuilder documentBuilder = GetDocumentBuilder(entity);
if (documentBuilder == null)
return;
#if NET_2_0
List<LuceneWork> queue = new List<LuceneWork>();
#else
IList queue = new ArrayList();
#endif
documentBuilder.AddToWorkQueue(entity, id, workType, queue, this);
ExecuteQueue(queue, session);
}
......
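SearchFactory is resolved per session factory and exposes the per-type DocumentBuilder map used throughout this file. A minimal hedged fragment, assumed to run inside application code with the usual NHibernate.Search usings ("session" and "MyEntity" are illustrative names, not part of this commit):
// Hypothetical fragment: resolve the SearchFactory for an open ISession and
// look up the DocumentBuilder of an indexed type.
SearchFactory searchFactory = SearchFactory.GetSearchFactory(session);
DocumentBuilder builder = searchFactory.GetDocumentBuilder(typeof(MyEntity));
if (builder == null)
{
    // MyEntity is not an [Indexed] class known to this factory
}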
......@@ -3,11 +3,17 @@
namespace NHibernate.Search
{
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public interface IFullTextSession : ISession
{
#if NET_2_0
IQuery CreateFullTextQuery<TEntity>(string defaultField, string query);
IQuery CreateFullTextQuery<TEntity>(string query);
#endif
IQuery CreateFullTextQuery(Query luceneQuery, params System.Type[] entities);
......
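The generic CreateFullTextQuery&lt;TEntity&gt; overloads parse a Lucene query string and restrict the results to TEntity. A minimal sketch, assuming an IFullTextSession obtained elsewhere ("fullTextSession", "Book" and the "Title" field are illustrative names):
// Hypothetical fragment: issuing a full-text query through IFullTextSession.
IQuery query = fullTextSession.CreateFullTextQuery<Book>("Title", "lucene in action");
IList books = query.List();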
using NHibernate.Search.Reader;
using NHibernate.Search.Storage;
namespace NHibernate.Search
{
/// <summary>
/// Provide application wide operations as well as access to the underlying Lucene resources.
/// </summary>
public interface ISearchFactory
{
/// <summary>
/// Provide the configured readerProvider strategy,
/// hence access to a Lucene IndexReader
/// </summary>
IReaderProvider ReaderProvider { get; }
/// <summary>
/// Provide access to the DirectoryProviders (hence the Lucene Directories)
/// for a given entity.
/// In most cases, the returned value will be a one-element array,
/// but if the given entity is configured to use sharded indexes, multiple
/// elements will be returned. In that case all of them should be considered.
/// </summary>
/// <param name="entity"></param>
/// <returns></returns>
IDirectoryProvider[] GetDirectoryProviders(System.Type entity);
/// <summary>
/// Optimize all indexes
/// </summary>
void Optimize();
/// <summary>
/// Optimize the index holding <code>entityType</code>
/// </summary>
/// <param name="entityType"></param>
void Optimize(System.Type entityType);
}
}
\ No newline at end of file
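The interface above is the application-facing entry point: GetDirectoryProviders can return several providers when an entity uses sharded indexes, and Optimize comes in a global and a per-type flavour. A hedged fragment ("GetConfiguredSearchFactory" and "Book" are illustrative assumptions):
// Hypothetical fragment: per-type optimization and inspecting the directories
// behind an indexed entity.
ISearchFactory factory = GetConfiguredSearchFactory(); // assumed accessor
factory.Optimize(typeof(Book));                          // optimize only Book's index
IDirectoryProvider[] providers = factory.GetDirectoryProviders(typeof(Book));
// more than one element means Book is configured with sharded indexes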
using System;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using System.Data;
using System.Text;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
......@@ -9,349 +14,355 @@
namespace NHibernate.Search.Impl
{
public class FullTextSessionImpl : IFullTextSession
{
private readonly ISession session;
public FullTextSessionImpl(ISession session)
{
this.session = session;
}
#region Delegating to Inner Session
public void Flush()
{
session.Flush();
}
public IDbConnection Disconnect()
{
return session.Disconnect();
}
public void Reconnect()
{
session.Reconnect();
}
public void Reconnect(IDbConnection connection)
{
session.Reconnect(connection);
}
public IDbConnection Close()
{
return session.Close();
}
public void CancelQuery()
{
session.CancelQuery();
}
public bool IsDirty()
{
return session.IsDirty();
}
public object GetIdentifier(object obj)
{
return session.GetIdentifier(obj);
}
public bool Contains(object obj)
{
return session.Contains(obj);
}
public void Evict(object obj)
{
session.Evict(obj);
}
public object Load(System.Type theType, object id, LockMode lockMode)
{
return session.Load(theType, id, lockMode);
}
public object Load(System.Type theType, object id)
{
return session.Load(theType, id);
}
public T Load<T>(object id, LockMode lockMode)
{
return session.Load<T>(id, lockMode);
}
public T Load<T>(object id)
{
return session.Load<T>(id);
}
public void Load(object obj, object id)
{
session.Load(obj, id);
}
public void Replicate(object obj, ReplicationMode replicationMode)
{
session.Replicate(obj, replicationMode);
}
public object Save(object obj)
{
return session.Save(obj);
}
public void Save(object obj, object id)
{
session.Save(obj, id);
}
public void SaveOrUpdate(object obj)
{
session.SaveOrUpdate(obj);
}
public void Update(object obj)
{
session.Update(obj);
}
public void Update(object obj, object id)
{
session.Update(obj, id);
}
public object SaveOrUpdateCopy(object obj)
{
return session.SaveOrUpdateCopy(obj);
}
public object SaveOrUpdateCopy(object obj, object id)
{
return session.SaveOrUpdateCopy(obj, id);
}
public void Delete(object obj)
{
session.Delete(obj);
}
public IList Find(string query)
{
return session.Find(query);
}
public IList Find(string query, object value, IType type)
{
return session.Find(query, value, type);
}
public IList Find(string query, object[] values, IType[] types)
{
return session.Find(query, values, types);
}
public IEnumerable Enumerable(string query)
{
return session.Enumerable(query);
}
public IEnumerable Enumerable(string query, object value, IType type)
{
return session.Enumerable(query, value, type);
}
public IEnumerable Enumerable(string query, object[] values, IType[] types)
{
return session.Enumerable(query, values, types);
}
public ICollection Filter(object collection, string filter)
{
return session.Filter(collection, filter);
}
public ICollection Filter(object collection, string filter, object value, IType type)
{
return session.Filter(collection, filter, value, type);
}
public ICollection Filter(object collection, string filter, object[] values, IType[] types)
{
return session.Filter(collection, filter, values, types);
}
public int Delete(string query)
{
return session.Delete(query);
}
public int Delete(string query, object value, IType type)
{
return session.Delete(query, value, type);
}
public int Delete(string query, object[] values, IType[] types)
{
return session.Delete(query, values, types);
}
public void Lock(object obj, LockMode lockMode)
{
session.Lock(obj, lockMode);
}
public void Refresh(object obj)
{
session.Refresh(obj);
}
public void Refresh(object obj, LockMode lockMode)
{
session.Refresh(obj, lockMode);
}
public LockMode GetCurrentLockMode(object obj)
{
return session.GetCurrentLockMode(obj);
}
public ITransaction BeginTransaction()
{
return session.BeginTransaction();
}
public ITransaction BeginTransaction(IsolationLevel isolationLevel)
{
return session.BeginTransaction(isolationLevel);
}
public ICriteria CreateCriteria(System.Type persistentClass)
{
return session.CreateCriteria(persistentClass);
}
public ICriteria CreateCriteria(System.Type persistentClass, string alias)
{
return session.CreateCriteria(persistentClass, alias);
}
public IQuery CreateQuery(string queryString)
{
return session.CreateQuery(queryString);
}
public IQuery CreateFilter(object collection, string queryString)
{
return session.CreateFilter(collection, queryString);
}
public IQuery GetNamedQuery(string queryName)
{
return session.GetNamedQuery(queryName);
}
public IQuery CreateSQLQuery(string sql, string returnAlias, System.Type returnClass)
{
return session.CreateSQLQuery(sql, returnAlias, returnClass);
}
public IQuery CreateSQLQuery(string sql, string[] returnAliases, System.Type[] returnClasses)
{
return session.CreateSQLQuery(sql, returnAliases, returnClasses);
}
public ISQLQuery CreateSQLQuery(string queryString)
{
return session.CreateSQLQuery(queryString);
}
public void Clear()
{
session.Clear();
}
public object Get(System.Type clazz, object id)
{
return session.Get(clazz, id);
}
public object Get(System.Type clazz, object id, LockMode lockMode)
{
return session.Get(clazz, id, lockMode);
}
#if NET_2_0
public T Get<T>(object id)
{
return session.Get<T>(id);
}
public T Get<T>(object id, LockMode lockMode)
{
return session.Get<T>(id, lockMode);
}
#else
[CLSCompliant(false)]
#endif
public class FullTextSessionImpl : IFullTextSession
{
private readonly ISession session;
public FullTextSessionImpl(ISession session)
{
this.session = session;
}
#region Delegating to Inner Session
public void Flush()
{
session.Flush();
}
public IDbConnection Disconnect()
{
return session.Disconnect();
}
public void Reconnect()
{
session.Reconnect();
}
public void Reconnect(IDbConnection connection)
{
session.Reconnect(connection);
}
public IDbConnection Close()
{
return session.Close();
}
public void CancelQuery()
{
session.CancelQuery();
}
public bool IsDirty()
{
return session.IsDirty();
}
public object GetIdentifier(object obj)
{
return session.GetIdentifier(obj);
}
public bool Contains(object obj)
{
return session.Contains(obj);
}
public void Evict(object obj)
{
session.Evict(obj);
}
public object Load(System.Type theType, object id, LockMode lockMode)
{
return session.Load(theType, id, lockMode);
}
public object Load(System.Type theType, object id)
{
return session.Load(theType, id);
}
public IFilter EnableFilter(string filterName)
{
return session.EnableFilter(filterName);
}
public IFilter GetEnabledFilter(string filterName)
{
return session.GetEnabledFilter(filterName);
}
#if NET_2_0
public T Load<T>(object id, LockMode lockMode)
{
return session.Load<T>(id, lockMode);
}
public T Load<T>(object id)
{
return session.Load<T>(id);
}
#endif
public void DisableFilter(string filterName)
{
session.DisableFilter(filterName);
}
public void Load(object obj, object id)
{
session.Load(obj, id);
}
public void Replicate(object obj, ReplicationMode replicationMode)
{
session.Replicate(obj, replicationMode);
}
public object Save(object obj)
{
return session.Save(obj);
}
public void Save(object obj, object id)
{
session.Save(obj, id);
}
public void SaveOrUpdate(object obj)
{
session.SaveOrUpdate(obj);
}
public void Update(object obj)
{
session.Update(obj);
}
public void Update(object obj, object id)
{
session.Update(obj, id);
}
public object SaveOrUpdateCopy(object obj)
{
return session.SaveOrUpdateCopy(obj);
}
public object SaveOrUpdateCopy(object obj, object id)
{
return session.SaveOrUpdateCopy(obj, id);
}
public void Delete(object obj)
{
session.Delete(obj);
}
public IList Find(string query)
{
return session.Find(query);
}
public IList Find(string query, object value, IType type)
{
return session.Find(query, value, type);
}
public IList Find(string query, object[] values, IType[] types)
{
return session.Find(query, values, types);
}
public IEnumerable Enumerable(string query)
{
return session.Enumerable(query);
}
public IEnumerable Enumerable(string query, object value, IType type)
{
return session.Enumerable(query, value, type);
}
public IEnumerable Enumerable(string query, object[] values, IType[] types)
{
return session.Enumerable(query, values, types);
}
public ICollection Filter(object collection, string filter)
{
return session.Filter(collection, filter);
}
public ICollection Filter(object collection, string filter, object value, IType type)
{
return session.Filter(collection, filter, value, type);
}
public ICollection Filter(object collection, string filter, object[] values, IType[] types)
{
return session.Filter(collection, filter, values, types);
}
public int Delete(string query)
{
return session.Delete(query);
}
public int Delete(string query, object value, IType type)
{
return session.Delete(query, value, type);
}
public int Delete(string query, object[] values, IType[] types)
{
return session.Delete(query, values, types);
}
public void Lock(object obj, LockMode lockMode)
{
session.Lock(obj, lockMode);
}
public void Refresh(object obj)
{
session.Refresh(obj);
}
public void Refresh(object obj, LockMode lockMode)
{
session.Refresh(obj, lockMode);
}
public LockMode GetCurrentLockMode(object obj)
{
return session.GetCurrentLockMode(obj);
}
public ITransaction BeginTransaction()
{
return session.BeginTransaction();
}
public ITransaction BeginTransaction(IsolationLevel isolationLevel)
{
return session.BeginTransaction(isolationLevel);
}
public ICriteria CreateCriteria(System.Type persistentClass)
{
return session.CreateCriteria(persistentClass);
}
public ICriteria CreateCriteria(System.Type persistentClass, string alias)
{
return session.CreateCriteria(persistentClass, alias);
}
public IQuery CreateQuery(string queryString)
{
return session.CreateQuery(queryString);
}
public IQuery CreateFilter(object collection, string queryString)
{
return session.CreateFilter(collection, queryString);
}
public IQuery GetNamedQuery(string queryName)
{
return session.GetNamedQuery(queryName);
}
public IQuery CreateSQLQuery(string sql, string returnAlias, System.Type returnClass)
{
return session.CreateSQLQuery(sql, returnAlias, returnClass);
}
public IQuery CreateSQLQuery(string sql, string[] returnAliases, System.Type[] returnClasses)
{
return session.CreateSQLQuery(sql, returnAliases, returnClasses);
}
public ISQLQuery CreateSQLQuery(string queryString)
{
return session.CreateSQLQuery(queryString);
}
public void Clear()
{
session.Clear();
}
public object Get(System.Type clazz, object id)
{
return session.Get(clazz, id);
}
public object Get(System.Type clazz, object id, LockMode lockMode)
{
return session.Get(clazz, id, lockMode);
}
public IMultiQuery CreateMultiQuery()
{
return session.CreateMultiQuery();
}
#if NET_2_0
public T Get<T>(object id)
{
return session.Get<T>(id);
}
public T Get<T>(object id, LockMode lockMode)
{
return session.Get<T>(id, lockMode);
}
#endif
public IFilter EnableFilter(string filterName)
{
return session.EnableFilter(filterName);
}
public IFilter GetEnabledFilter(string filterName)
{
return session.GetEnabledFilter(filterName);
}
public void DisableFilter(string filterName)
{
session.DisableFilter(filterName);
}
public IMultiQuery CreateMultiQuery()
{
return session.CreateMultiQuery();
}
//public IMultiCriteria CreateMultiCriteria()
//{
// return session.CreateMultiCriteria();
//}
public FlushMode FlushMode
{
get { return session.FlushMode; }
set { session.FlushMode = value; }
}
public FlushMode FlushMode
{
get { return session.FlushMode; }
set { session.FlushMode = value; }
}
public ISessionFactory SessionFactory
{
get { return session.SessionFactory; }
}
public ISessionFactory SessionFactory
{
get { return session.SessionFactory; }
}
public IDbConnection Connection
{
get { return session.Connection; }
}
public IDbConnection Connection
{
get { return session.Connection; }
}
public bool IsOpen
{
get { return session.IsOpen; }
}
public bool IsOpen
{
get { return session.IsOpen; }
}
public bool IsConnected
{
get { return session.IsConnected; }
}
public bool IsConnected
{
get { return session.IsConnected; }
}
public ITransaction Transaction
{
get { return session.Transaction; }
}
public ITransaction Transaction
{
get { return session.Transaction; }
}
//public ISession SetBatchSize(int batchSize)
......@@ -359,13 +370,15 @@ public ITransaction Transaction
// return session.SetBatchSize(batchSize);
//}
public ISessionImplementor GetSessionImplementation()
{
return session.GetSessionImplementation();
}
public ISessionImplementor GetSessionImplementation()
{
return session.GetSessionImplementation();
}
#endregion
#endregion
#if NET_2_0
public IQuery CreateFullTextQuery<TEntity>(string defaultField, string queryString)
{
QueryParser queryParser = new QueryParser(defaultField, new StandardAnalyzer());
......@@ -379,10 +392,11 @@ public IQuery CreateFullTextQuery<TEntity>(string queryString)
Query query = queryParser.Parse(queryString);
return CreateFullTextQuery(query, typeof(TEntity));
}
#endif
public IQuery CreateFullTextQuery(Query luceneQuery, params System.Type[] entities)
{
return new FullTextQueryImpl(luceneQuery, entities, (ISessionImplementor) session);
return new FullTextQueryImpl(luceneQuery, entities, (ISessionImplementor)session);
}
public IFullTextSession Index(object entity)
......@@ -392,7 +406,7 @@ public IFullTextSession Index(object entity)
// TODO: Check that this entity type is indexed
object id = session.GetIdentifier(entity);
searchFactory.PerformWork(entity, id, session, WorkType.Update);
// TODO: Why do we return this?
return this;
}
......@@ -421,5 +435,5 @@ public void Dispose()
{
session.Dispose();
}
}
}
\ No newline at end of file
}
}
using System;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif
using NHibernate.Search.Backend;
using NHibernate.Type;
......@@ -8,8 +10,13 @@ namespace NHibernate.Search.Impl
{
public class SearchInterceptor : EmptyInterceptor
{
#if NET_2_0
private readonly Dictionary<ITransaction, List<LuceneWork>> syncronizations = new Dictionary<ITransaction, List<LuceneWork>>();
private readonly List<object> entitiesToAddOnPostFlush = new List<object>();
#else
private readonly Hashtable syncronizations = new Hashtable();
private readonly IList entitiesToAddOnPostFlush = new ArrayList();
#endif
private ISession session;
private SearchFactory searchFactory;
......@@ -89,19 +96,36 @@ private void RegisterIndexing(object entity, object id, WorkType workType)
searchFactory.PerformWork(entity, id, session, workType);
}
#if NET_2_0
public void RegisterSyncronization(ITransaction transaction, List<LuceneWork> work)
#else
public void RegisterSyncronization(ITransaction transaction, IList work)
#endif
{
#if NET_2_0
if (syncronizations.ContainsKey(transaction) == false)
syncronizations.Add(transaction, new List<LuceneWork>());
syncronizations[transaction].AddRange(work);
#else
if (syncronizations.ContainsKey(transaction) == false)
syncronizations.Add(transaction, new ArrayList());
((ArrayList) syncronizations[transaction]).AddRange(work);
#endif
}
public override void AfterTransactionCompletion(ITransaction tx)
{
base.AfterTransactionCompletion(tx);
#if NET_2_0
List<LuceneWork> queue;
if (syncronizations.TryGetValue(tx, out queue) == false)
return;
#else
IList queue = (IList) (syncronizations.ContainsKey(tx) ? syncronizations[tx] : null);
if (queue == null)
return;
#endif
if (tx.WasCommitted)
{
SearchFactory.GetSearchFactory(session)
......
namespace NHibernate.Search
{
public enum Index
{
NoNormalization,
No,
Tokenized,
UnTokenized
}
}
\ No newline at end of file
using System.Collections;
using System;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using System.Collections;
using Iesi.Collections;
#endif
using Lucene.Net.Search;
using NHibernate.Expression;
using NHibernate.Impl;
......@@ -10,53 +15,57 @@
namespace NHibernate.Search
{
public class LuceneQueryExpression : InExpression
{
private readonly Query luceneQuery;
public class LuceneQueryExpression : InExpression
{
private readonly Query luceneQuery;
public LuceneQueryExpression(Query luceneQuery)
: base("id", new object[0])
{
this.luceneQuery = luceneQuery;
}
public LuceneQueryExpression(Query luceneQuery)
: base("id", new object[0])
{
this.luceneQuery = luceneQuery;
}
public override SqlString ToSqlString(ICriteria criteria, ICriteriaQuery criteriaQuery,
IDictionary enabledFilters)
{
ISet<System.Type> types;
List<object> ids = new List<object>();
public override NHibernate.SqlCommand.SqlString ToSqlString(ICriteria criteria, ICriteriaQuery criteriaQuery, System.Collections.IDictionary enabledFilters)
{
#if NET_2_0
ISet<System.Type> types;
List<object> ids = new List<object>();
#else
ISet types;
ArrayList ids = new ArrayList();
#endif
System.Type type = GetCriteriaClass(criteria);
SearchFactory searchFactory = SearchFactory.GetSearchFactory(GetSession(criteria));
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out types, type);
if (searcher == null)
throw new SearchException("Could not find a searcher for class: " + type.FullName);
Query query = FullTextSearchHelper.FilterQueryByClasses(types, luceneQuery);
Hits hits = searcher.Search(query);
System.Type type = GetCriteriaClass(criteria);
SearchFactory searchFactory = SearchFactory.GetSearchFactory(GetSession(criteria));
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out types, type);
if (searcher == null)
throw new SearchException("Could not find a searcher for class: " + type.FullName);
Query query = FullTextSearchHelper.FilterQueryByClasses(types, luceneQuery);
Hits hits = searcher.Search(query);
for (int i = 0; i < hits.Length(); i++)
{
object id = DocumentBuilder.GetDocumentId(searchFactory, hits.Doc(i));
ids.Add(id);
}
base.Values = ids.ToArray();
return base.ToSqlString(criteria, criteriaQuery, enabledFilters);
}
for (int i = 0; i < hits.Length(); i++)
{
object id = DocumentBuilder.GetDocumentId(searchFactory,hits.Doc(i));
ids.Add(id);
}
base.Values = ids.ToArray();
return base.ToSqlString(criteria, criteriaQuery, enabledFilters);
}
private static System.Type GetCriteriaClass(ICriteria criteria)
{
CriteriaImpl impl = criteria as CriteriaImpl;
if (impl != null)
return impl.CriteriaClass;
return GetCriteriaClass(((CriteriaImpl.Subcriteria) criteria).Parent);
}
private System.Type GetCriteriaClass(ICriteria criteria)
{
CriteriaImpl impl = criteria as CriteriaImpl;
if (impl != null)
return impl.CriteriaClass;
return GetCriteriaClass(((CriteriaImpl.Subcriteria) criteria).Parent);
}
public ISession GetSession(ICriteria criteria)
{
CriteriaImpl impl = criteria as CriteriaImpl;
if (impl != null)
return impl.Session;
return GetSession(((CriteriaImpl.Subcriteria) criteria).Parent);
}
}
public ISession GetSession(ICriteria criteria)
{
CriteriaImpl impl = criteria as CriteriaImpl;
if (impl != null)
return impl.Session;
return GetSession(((CriteriaImpl.Subcriteria) criteria).Parent);
}
}
}
\ No newline at end of file
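LuceneQueryExpression runs the Lucene query up front, collects the matching document ids, and then degrades into an SQL IN restriction on "id", so it can be combined with ordinary Criteria restrictions. A hedged fragment ("session", "Book" and the "Title" field are illustrative):
// Hypothetical fragment: mixing a Lucene query into an NHibernate criteria
// query via LuceneQueryExpression.
Query luceneQuery = new TermQuery(new Term("Title", "nhibernate"));
IList results = session.CreateCriteria(typeof(Book))
    .Add(new LuceneQueryExpression(luceneQuery))
    .List();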
......@@ -45,7 +45,6 @@
<Compile Include="AssemblyInfo.cs" />
<Compile Include="Attributes\AnalyzerAttribute.cs" />
<Compile Include="Attributes\BoostAttribute.cs" />
<Compile Include="Attributes\ContainedInAttribute.cs" />
<Compile Include="Attributes\DateBridgeAttribute.cs" />
<Compile Include="Attributes\DocumentIdAttribute.cs" />
<Compile Include="Attributes\FieldAttribute.cs" />
......@@ -56,15 +55,11 @@
<Compile Include="AttributeUtil.cs" />
<Compile Include="Backend\Impl\Lucene\LuceneBackendQueueProcessor.cs" />
<Compile Include="Backend\Impl\Lucene\LuceneBackendQueueProcessorFactory.cs" />
<Compile Include="Backend\LuceneIndexingParameters.cs" />
<Compile Include="Backend\OptimizeLuceneWork.cs" />
<Compile Include="Backend\PurgeAllLuceneWork.cs" />
<Compile Include="Engine\ISearchFactoryImplementor.cs" />
<Compile Include="ISearchFactory.cs" />
<Compile Include="Query\FullTextSearchHelper.cs" />
<Compile Include="LuceneQueryExpression.cs" />
<Compile Include="Backend\Impl\Lucene\LuceneWorker.cs" />
<Compile Include="Reader\IReaderProvider.cs" />
<Compile Include="Store\DirectoryProviderFactory.cs" />
<Compile Include="DirectoryProviderHelper.cs" />
<Compile Include="Store\FileHelper.cs" />
......@@ -88,13 +83,13 @@
<Compile Include="Bridge\Builtin\StringBridge.cs" />
<Compile Include="Environment.cs" />
<Compile Include="Impl\FullTextSessionImpl.cs" />
<Compile Include="Attributes\Index.cs" />
<Compile Include="Index.cs" />
<Compile Include="Backend\LuceneWork.cs" />
<Compile Include="SearchException.cs" />
<Compile Include="Engine\SearchFactory.cs" />
<Compile Include="Impl\SearchInterceptor.cs" />
<Compile Include="Bridge\Builtin\SimpleBridge.cs" />
<Compile Include="Attributes\Store.cs" />
<Compile Include="Store.cs" />
<Compile Include="Bridge\TwoWayString2FieldBridgeAdaptor.cs" />
<Compile Include="Bridge\Builtin\ValueTypeBridge.cs" />
<Compile Include="Backend\Work.cs" />
......@@ -121,6 +116,7 @@
<Folder Include="Event\" />
<Folder Include="Filter\" />
<Folder Include="Properties\" />
<Folder Include="Reader\" />
<Folder Include="Util\" />
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
......
using System;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
using System.IO;
using Iesi.Collections.Generic;
#else
using Iesi.Collections;
#endif
using System.IO;
using log4net;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using NHibernate.Engine;
using NHibernate.Expression;
using NHibernate.Impl;
using NHibernate.Search.Engine;
using NHibernate.Search.Impl;
using Directory = Lucene.Net.Store.Directory;
namespace NHibernate.Search.Impl
{
public class FullTextQueryImpl : AbstractQueryImpl
{
private static readonly ILog log = LogManager.GetLogger(typeof(FullTextQueryImpl));
private readonly Query luceneQuery;
private readonly System.Type[] classes;
private ISet<System.Type> classesAndSubclasses;
private int resultSize;
private int batchSize = 1;
/// <summary>
/// classes must be immutable
/// </summary>
public FullTextQueryImpl(Query query, System.Type[] classes, ISessionImplementor session)
: base(query.ToString(), FlushMode.Unspecified, session)
{
luceneQuery = query;
this.classes = classes;
}
public override IEnumerable Enumerable()
{
return Enumerable<object>();
}
/// <summary>
/// Return an iterator over the results.
/// Objects are retrieved one by one (each is initialized during the next() operation).
/// </summary>
public override IEnumerable<T> Enumerable<T>()
{
//implement an iterator which keeps the id/class for each hit and gets the object on demand,
//because we can't keep the searcher, and hence the hits, open: there is no hook to know when the
//user stops using it
//scrollable is better in this area
SearchFactory searchFactory = SearchFactory.GetSearchFactory(Session);
//find the directories
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out classesAndSubclasses, classes);
if (searcher == null)
{
return new IteratorImpl<T>(new List<EntityInfo>(), Session).Iterate();
}
try
{
Query query = FullTextSearchHelper.FilterQueryByClasses(classesAndSubclasses, luceneQuery);
Hits hits = searcher.Search(query);
SetResultSize(hits);
int first = First();
int max = Max(first, hits);
IList<EntityInfo> entityInfos = new List<EntityInfo>(max - first + 1);
for (int index = first; index <= max; index++)
{
Document document = hits.Doc(index);
EntityInfo entityInfo = new EntityInfo();
entityInfo.clazz = DocumentBuilder.GetDocumentClass(document);
entityInfo.id = DocumentBuilder.GetDocumentId(searchFactory, document);
entityInfos.Add(entityInfo);
}
return new IteratorImpl<T>(entityInfos, Session).Iterate();
}
catch (IOException e)
{
throw new HibernateException("Unable to query Lucene index", e);
}
finally
{
if (searcher != null)
{
try
{
searcher.Close();
}
catch (IOException e)
{
log.Warn("Unable to properly close searcher during lucene query: " + QueryString, e);
}
}
}
}
private class IteratorImpl<T>
{
private readonly IList<EntityInfo> entityInfos;
private readonly ISession session;
public IteratorImpl(IList<EntityInfo> entityInfos, ISession session)
{
this.entityInfos = entityInfos;
this.session = session;
}
public IEnumerable<T> Iterate()
{
foreach (EntityInfo entityInfo in entityInfos)
{
yield return (T) session.Load(entityInfo.clazz, entityInfo.id);
}
}
}
public override IList<T> List<T>()
{
ArrayList arrayList = new ArrayList();
List(arrayList);
return (T[]) arrayList.ToArray(typeof(T));
}
public override IList List()
{
ArrayList arrayList = new ArrayList();
List(arrayList);
return arrayList;
}
public override void List(IList list)
{
SearchFactory searchFactory = SearchFactory.GetSearchFactory(Session);
//find the directories
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out classesAndSubclasses, classes);
if (searcher == null)
return;
try
{
Query query = FullTextSearchHelper.FilterQueryByClasses(classesAndSubclasses, luceneQuery);
Hits hits = searcher.Search(query);
SetResultSize(hits);
int first = First();
int max = Max(first, hits);
for (int index = first; index <= max; index++)
{
Document document = hits.Doc(index);
System.Type clazz = DocumentBuilder.GetDocumentClass(document);
object id = DocumentBuilder.GetDocumentId(searchFactory, document);
list.Add(Session.Load(clazz, id));
//use load to benefit from the batch-size
//we don't face proxy casting issues since the exact class is extracted from the index
}
//then initialize the objects
IList excludedObects = new ArrayList();
foreach (Object element in list)
{
try
{
NHibernateUtil.Initialize(element);
}
catch (ObjectNotFoundException e)
{
log.Debug("Object found in Search index but not in database: "
+ e.PersistentClass + " with id " + e.Identifier);
excludedObects.Add(element);
}
}
foreach (object excludedObect in excludedObects)
{
list.Remove(excludedObect);
}
}
catch (IOException e)
{
throw new HibernateException("Unable to query Lucene index", e);
}
finally
{
if (searcher != null)
{
try
{
searcher.Close();
}
catch (IOException e)
{
log.Warn("Unable to properly close searcher during lucene query: " + QueryString, e);
}
}
}
}
private int Max(int first, Hits hits)
{
if (Selection.MaxRows == RowSelection.NoValue)
return hits.Length() - 1;
else if (Selection.MaxRows + first < hits.Length())
return first + Selection.MaxRows - 1;
else return hits.Length() - 1;
}
private int First()
{
if (Selection.FirstRow != RowSelection.NoValue)
return Selection.FirstRow;
else
return 0;
}
//TODO classesAndSubclasses is changed by side effect, which is a mismatch with the Searcher return value; fix that.
private void SetResultSize(Hits hits)
{
resultSize = hits.Length();
}
public int ResultSize
{
get { return resultSize; }
}
private class EntityInfo
{
public System.Type clazz;
public object id;
}
}
#if NET_2_0
#else
[CLSCompliant(false)]
#endif
public class FullTextQueryImpl : AbstractQueryImpl
{
private static ILog log = LogManager.GetLogger(typeof (FullTextQueryImpl));
private Query luceneQuery;
private System.Type[] classes;
#if NET_2_0
private ISet<System.Type> classesAndSubclasses;
#else
private ISet classesAndSubclasses;
#endif
private int resultSize;
private int batchSize = 1;
/// <summary>
/// classes must be immutable
/// </summary>
public FullTextQueryImpl(Query query, System.Type[] classes, ISessionImplementor session)
: base(query.ToString(), FlushMode.Unspecified, session)
{
this.luceneQuery = query;
this.classes = classes;
}
#if NET_2_0
public override IEnumerable Enumerable()
{
return Enumerable<object>();
}
/// <summary>
/// Return an iterator over the results.
/// Objects are retrieved one by one (each is initialized during the next() operation).
/// </summary>
public override IEnumerable<T> Enumerable<T>()
{
//implement an iterator which keeps the id/class for each hit and gets the object on demand,
//because we can't keep the searcher, and hence the hits, open: there is no hook to know when the
//user stops using it
//scrollable is better in this area
SearchFactory searchFactory = SearchFactory.GetSearchFactory(Session);
//find the directories
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out classesAndSubclasses, classes);
if (searcher == null)
{
return new IteratorImpl<T>(new List<EntityInfo>(), Session).Iterate();
}
try
{
Query query = FullTextSearchHelper.FilterQueryByClasses(classesAndSubclasses, luceneQuery);
Hits hits = searcher.Search(query);
SetResultSize(hits);
int first = First();
int max = Max(first, hits);
IList<EntityInfo> entityInfos = new List<EntityInfo>(max - first + 1);
for (int index = first; index <= max; index++)
{
Document document = hits.Doc(index);
EntityInfo entityInfo = new EntityInfo();
entityInfo.clazz = DocumentBuilder.GetDocumentClass(document);
entityInfo.id = DocumentBuilder.GetDocumentId(searchFactory, document);
entityInfos.Add(entityInfo);
}
return new IteratorImpl<T>(entityInfos, Session).Iterate();
}
catch (IOException e)
{
throw new HibernateException("Unable to query Lucene index", e);
}
finally
{
if (searcher != null)
{
try
{
searcher.Close();
}
catch (IOException e)
{
log.Warn("Unable to properly close searcher during lucene query: " + QueryString, e);
}
}
}
}
private class IteratorImpl<T>
{
private IList<EntityInfo> entityInfos;
private ISession session;
public IteratorImpl(IList<EntityInfo> entityInfos, ISession session)
{
this.entityInfos = entityInfos;
this.session = session;
}
public IEnumerable<T> Iterate()
{
foreach (EntityInfo entityInfo in entityInfos)
{
yield return (T) session.Load(entityInfo.clazz, entityInfo.id);
}
}
}
public override IList<T> List<T>()
{
ArrayList arrayList = new ArrayList();
List(arrayList);
return (T[]) arrayList.ToArray(typeof (T));
}
#else
public override IEnumerable Enumerable()
{
throw new NotImplementedException("Enumerator not implemented");
}
#endif
public override IList List()
{
ArrayList arrayList = new ArrayList();
List(arrayList);
return arrayList;
}
public override void List(IList list)
{
SearchFactory searchFactory = SearchFactory.GetSearchFactory(Session);
//find the directories
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out classesAndSubclasses, classes);
if (searcher == null)
return;
try
{
Query query = FullTextSearchHelper.FilterQueryByClasses(classesAndSubclasses, luceneQuery);
Hits hits = searcher.Search(query);
SetResultSize(hits);
int first = First();
int max = Max(first, hits);
for (int index = first; index <= max; index++)
{
Document document = hits.Doc(index);
System.Type clazz = DocumentBuilder.GetDocumentClass(document);
object id = DocumentBuilder.GetDocumentId(searchFactory, document);
list.Add(this.Session.Load(clazz, id));
//use load to benefit from the batch-size
//we don't face proxy casting issues since the exact class is extracted from the index
}
//then initialize the objects
IList excludedObects = new ArrayList();
foreach (Object element in list)
{
try
{
NHibernateUtil.Initialize(element);
}
catch (ObjectNotFoundException e)
{
log.Debug("Object found in Search index but not in database: "
+ e.PersistentClass + " with id " + e.Identifier);
excludedObects.Add(element);
}
}
foreach (object excludedObect in excludedObects)
{
list.Remove(excludedObect);
}
}
catch (IOException e)
{
throw new HibernateException("Unable to query Lucene index", e);
}
finally
{
if (searcher != null)
{
try
{
searcher.Close();
}
catch (IOException e)
{
log.Warn("Unable to properly close searcher during lucene query: " + QueryString, e);
}
}
}
}
private int Max(int first, Hits hits)
{
if (Selection.MaxRows == RowSelection.NoValue)
return hits.Length() - 1;
else if (Selection.MaxRows + first < hits.Length())
return first + Selection.MaxRows - 1;
else return hits.Length() - 1;
}
private int First()
{
if (Selection.FirstRow != RowSelection.NoValue)
return Selection.FirstRow;
else
return 0;
}
//TODO classesAndSubclasses is changed by side effect, which is a mismatch with the Searcher return value; fix that.
private void SetResultSize(Hits hits)
{
resultSize = hits.Length();
}
public int ResultSize
{
get { return this.resultSize; }
}
private class EntityInfo
{
public System.Type clazz;
public object id;
}
}
}
\ No newline at end of file
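First() and Max() above translate the query's RowSelection into a window over the Lucene Hits, so paging is driven by the standard IQuery methods. A hedged fragment ("fullTextSession", "Book" and "Title" are illustrative names):
// Hypothetical fragment: paging a full-text query. SetFirstResult/SetMaxResults
// populate the RowSelection consulted by First() and Max() above.
IQuery query = fullTextSession.CreateFullTextQuery<Book>("Title", "lucene");
query.SetFirstResult(20).SetMaxResults(10);
IList page = query.List();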
using System.Collections.Generic;
using System.IO;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using System.Collections;
using Iesi.Collections;
#endif
using Lucene.Net.Index;
using Lucene.Net.Search;
using NHibernate.Search.Engine;
......@@ -8,95 +13,121 @@
namespace NHibernate.Search
{
public class FullTextSearchHelper
{
public static Query FilterQueryByClasses(ISet<System.Type> classesAndSubclasses, Query luceneQuery)
{
//A query filter is more practical than manual class filtering after the query (esp. on scrollable resultsets);
//it also probably minimises the memory footprint
if (classesAndSubclasses == null)
{
return luceneQuery;
}
else
{
BooleanQuery classFilter = new BooleanQuery();
//annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
classFilter.SetBoost(0);
foreach (System.Type clazz in classesAndSubclasses)
{
Term t = new Term(DocumentBuilder.CLASS_FIELDNAME, clazz.AssemblyQualifiedName);
TermQuery termQuery = new TermQuery(t);
classFilter.Add(termQuery, BooleanClause.Occur.SHOULD);
}
BooleanQuery filteredQuery = new BooleanQuery();
filteredQuery.Add(luceneQuery, BooleanClause.Occur.MUST);
filteredQuery.Add(classFilter, BooleanClause.Occur.MUST);
return filteredQuery;
}
}
public class FullTextSearchHelper
{
#if NET_2_0
public static Query FilterQueryByClasses(ISet<System.Type> classesAndSubclasses, Query luceneQuery)
#else
public static Query FilterQueryByClasses(ISet classesAndSubclasses, Query luceneQuery)
#endif
{
//A query filter is more practical than manual class filtering after the query (esp. on scrollable resultsets);
//it also probably minimises the memory footprint
if (classesAndSubclasses == null)
{
return luceneQuery;
}
else
{
BooleanQuery classFilter = new BooleanQuery();
//annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
classFilter.SetBoost(0);
foreach (System.Type clazz in classesAndSubclasses)
{
Term t = new Term(DocumentBuilder.CLASS_FIELDNAME, clazz.AssemblyQualifiedName);
TermQuery termQuery = new TermQuery(t);
classFilter.Add(termQuery, BooleanClause.Occur.SHOULD);
}
BooleanQuery filteredQuery = new BooleanQuery();
filteredQuery.Add(luceneQuery, BooleanClause.Occur.MUST);
filteredQuery.Add(classFilter, BooleanClause.Occur.MUST);
return filteredQuery;
}
}
public static Searcher BuildSearcher(SearchFactory searchFactory, out ISet<System.Type> classesAndSubclasses,
params System.Type[] classes)
{
Dictionary<System.Type, DocumentBuilder> builders = searchFactory.DocumentBuilders;
ISet<Directory> directories = new HashedSet<Directory>();
if (classes == null || classes.Length == 0)
{
//no class means all classes
foreach (DocumentBuilder builder in builders.Values)
{
directories.Add(builder.DirectoryProvider.Directory);
}
classesAndSubclasses = null;
}
else
{
ISet<System.Type> involvedClasses = new HashedSet<System.Type>();
involvedClasses.AddAll(classes);
foreach (System.Type clazz in classes)
{
DocumentBuilder builder;
builders.TryGetValue(clazz, out builder);
if (builder != null) involvedClasses.AddAll(builder.MappedSubclasses);
}
foreach (System.Type clazz in involvedClasses)
{
DocumentBuilder builder;
builders.TryGetValue(clazz, out builder);
//TODO should we rather choose a polymorphic path and allow non mapped entities
if (builder == null) throw new HibernateException("Not a mapped entity: " + clazz);
directories.Add(builder.DirectoryProvider.Directory);
}
classesAndSubclasses = involvedClasses;
}
#if NET_2_0
public static Searcher BuildSearcher(SearchFactory searchFactory, out ISet<System.Type> classesAndSubclasses, params System.Type[] classes)
{
Dictionary<System.Type, DocumentBuilder> builders = searchFactory.DocumentBuilders;
ISet<Directory> directories = new HashedSet<Directory>();
#else
public static Searcher BuildSearcher(SearchFactory searchFactory, out ISet classesAndSubclasses, params System.Type[] classes)
{
Hashtable builders = searchFactory.DocumentBuilders;
ISet directories = new HashedSet();
#endif
if (classes == null || classes.Length == 0)
{
//no class means all classes
foreach (DocumentBuilder builder in builders.Values)
{
directories.Add(builder.DirectoryProvider.Directory);
}
classesAndSubclasses = null;
}
else
{
#if NET_2_0
ISet<System.Type> involvedClasses = new HashedSet<System.Type>();
#else
ISet involvedClasses = new HashedSet();
#endif
involvedClasses.AddAll(classes);
foreach (System.Type clazz in classes)
{
DocumentBuilder builder;
#if NET_2_0
builders.TryGetValue(clazz, out builder);
#else
builder = (DocumentBuilder) (builders.ContainsKey(clazz.Name) ? builders[clazz.Name] : null);
#endif
if (builder != null) involvedClasses.AddAll(builder.MappedSubclasses);
}
foreach (System.Type clazz in involvedClasses)
{
DocumentBuilder builder;
#if NET_2_0
builders.TryGetValue(clazz, out builder);
#else
builder = (DocumentBuilder) (builders.ContainsKey(clazz.Name) ? builders[clazz.Name] : null);
#endif
//TODO should we rather choose a polymorphic path and allow non mapped entities
if (builder == null) throw new HibernateException("Not a mapped entity: " + clazz);
directories.Add(builder.DirectoryProvider.Directory);
}
classesAndSubclasses = involvedClasses;
}
return GetSearcher(directories);
}
return GetSearcher(directories);
}
public static Searcher GetSearcher(ISet<Directory> directories)
{
if (directories.Count == 0)
return null;
//set up the searcher
int dirNbr = directories.Count;
IndexSearcher[] searchers = new IndexSearcher[dirNbr];
try
{
int index = 0;
foreach (Directory directory in directories)
{
if (dirNbr == 1)
return new IndexSearcher(directory);
searchers[index] = new IndexSearcher(directory);
index += 1;
}
return new MultiSearcher(searchers);
}
catch (IOException e)
{
throw new HibernateException("Unable to read Lucene directory", e);
}
}
}
#if NET_2_0
public static Searcher GetSearcher(ISet<Directory> directories)
#else
public static Searcher GetSearcher(ISet directories)
#endif
{
if (directories.Count == 0)
return null;
//set up the searcher
int dirNbr = directories.Count;
IndexSearcher[] searchers = new IndexSearcher[dirNbr];
try
{
int index = 0;
foreach (Directory directory in directories)
{
if (dirNbr == 1)
return new IndexSearcher(directory);
searchers[index] = new IndexSearcher(directory);
index += 1;
}
return new MultiSearcher(searchers);
}
catch (IOException e)
{
throw new HibernateException("Unable to read Lucene directory", e);
}
}
}
}
\ No newline at end of file
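BuildSearcher resolves every directory involved for the requested classes (including mapped subclasses) and returns either a single IndexSearcher or a MultiSearcher, while FilterQueryByClasses wraps the user query so only documents of those classes match. A hedged fragment showing the two helpers used directly ("searchFactory", "userQuery" and "Book" are illustrative; the usual Iesi.Collections.Generic and Lucene.Net usings are assumed):
// Hypothetical fragment: calling the helpers directly. The caller owns the
// searcher and must close it.
ISet<System.Type> classesAndSubclasses;
Searcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out classesAndSubclasses, typeof(Book));
if (searcher != null)
{
    try
    {
        Query filtered = FullTextSearchHelper.FilterQueryByClasses(classesAndSubclasses, userQuery);
        Hits hits = searcher.Search(filtered);
        // hits.Length() documents match both the user query and the class filter
    }
    finally
    {
        searcher.Close();
    }
}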
using System;
using System.Collections.Generic;
using System.Text;
namespace NHibernate.Search.Reader
{
public interface IReaderProvider
{
}
}
namespace NHibernate.Search
{
public enum Store
{
Yes,
No,
Compress
}
}
\ No newline at end of file
using System;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using Iesi.Collections;
#endif
using NHibernate.Cfg;
using NHibernate.Mapping;
using NHibernate.Search.Attributes;
......@@ -8,117 +13,122 @@
namespace NHibernate.Search.Storage
{
public class DirectoryProviderFactory
{
private const String LUCENE_PREFIX = "hibernate.search.";
private const String LUCENE_DEFAULT = LUCENE_PREFIX + "default.";
public class DirectoryProviderFactory
{
private const String LUCENE_PREFIX = "hibernate.search.";
private const String LUCENE_DEFAULT = LUCENE_PREFIX + "default.";
private const string DEFAULT_DIRECTORY_PROVIDER = "NHibernate.Search.Storage.FSDirectoryProvider, NHibernate.Search";
private const string DEFAULT_DIRECTORY_PROVIDER =
"NHibernate.Search.Storage.FSDirectoryProvider, NHibernate.Search";
#if NET_2_0
public List<IDirectoryProvider> providers = new List<IDirectoryProvider>();
#else
public IList providers = new ArrayList();
#endif
public List<IDirectoryProvider> providers = new List<IDirectoryProvider>();
public IDirectoryProvider CreateDirectoryProvider(System.Type entity, Configuration cfg, SearchFactory searchFactory)
{
//get properties
String directoryProviderName = GetDirectoryProviderName(entity, cfg);
IDictionary indexProps = GetDirectoryProperties(cfg, directoryProviderName);
public IDirectoryProvider CreateDirectoryProvider(System.Type entity, Configuration cfg,
SearchFactory searchFactory)
{
//get properties
String directoryProviderName = GetDirectoryProviderName(entity, cfg);
IDictionary indexProps = GetDirectoryProperties(cfg, directoryProviderName);
//set up the directory
String className = (string)indexProps["directory_provider"];
if (StringHelper.IsEmpty(className))
{
className = DEFAULT_DIRECTORY_PROVIDER;
}
IDirectoryProvider provider = null;
try
{
System.Type directoryClass = ReflectHelper.ClassForName(className);
provider = (IDirectoryProvider)Activator.CreateInstance(directoryClass);
}
catch (Exception e)
{
throw new HibernateException("Unable to instanciate directory provider: " + className, e);
}
try
{
provider.Initialize(directoryProviderName, indexProps, searchFactory);
}
catch (Exception e)
{
throw new HibernateException("Unable to initialize: " + directoryProviderName, e);
}
int index = providers.IndexOf(provider);
if (index != -1)
{
//share the same Directory provider for the same underlying store
return (IDirectoryProvider) providers[index];
}
else
{
providers.Add(provider);
return provider;
}
}
//set up the directory
String className = (string) indexProps["directory_provider"];
if (StringHelper.IsEmpty(className))
{
className = DEFAULT_DIRECTORY_PROVIDER;
}
IDirectoryProvider provider;
try
{
System.Type directoryClass = ReflectHelper.ClassForName(className);
provider = (IDirectoryProvider) Activator.CreateInstance(directoryClass);
}
catch (Exception e)
{
throw new HibernateException("Unable to instanciate directory provider: " + className, e);
}
try
{
provider.Initialize(directoryProviderName, indexProps, searchFactory);
}
catch (Exception e)
{
throw new HibernateException("Unable to initialize: " + directoryProviderName, e);
}
int index = providers.IndexOf(provider);
if (index != -1)
{
//share the same Directory provider for the same underlying store
return (IDirectoryProvider) providers[index];
}
else
{
providers.Add(provider);
return provider;
}
}
private static IDictionary GetDirectoryProperties(Configuration cfg, String directoryProviderName)
{
IDictionary props = cfg.Properties;
String indexName = LUCENE_PREFIX + directoryProviderName;
IDictionary indexProps = new Hashtable();
IDictionary indexSpecificProps = new Hashtable();
foreach (DictionaryEntry entry in props)
{
String key = (String) entry.Key;
if (key.StartsWith(LUCENE_DEFAULT))
{
indexProps[key.Substring(LUCENE_DEFAULT.Length)] = entry.Value;
}
else if (key.StartsWith(indexName))
{
indexSpecificProps[key.Substring(indexName.Length)] = entry.Value;
}
}
foreach (DictionaryEntry indexSpecificProp in indexSpecificProps)
{
indexProps[indexSpecificProp.Key] = indexSpecificProp.Value;
}
return indexProps;
}
private static IDictionary GetDirectoryProperties(Configuration cfg, String directoryProviderName)
{
IDictionary props = cfg.Properties;
String indexName = LUCENE_PREFIX + directoryProviderName;
IDictionary indexProps = new Hashtable();
IDictionary indexSpecificProps = new Hashtable();
foreach (DictionaryEntry entry in props)
{
String key = (String) entry.Key;
if (key.StartsWith(LUCENE_DEFAULT))
{
indexProps[key.Substring(LUCENE_DEFAULT.Length)] = entry.Value;
}
else if (key.StartsWith(indexName))
{
indexSpecificProps[key.Substring(indexName.Length)] = entry.Value;
}
}
foreach (DictionaryEntry indexSpecificProp in indexSpecificProps)
{
indexProps[indexSpecificProp.Key] = indexSpecificProp.Value;
}
return indexProps;
}
private static String GetDirectoryProviderName(System.Type clazz, Configuration cfg)
{
//get the most specialized (ie subclass > superclass) non default index name
//if none extract the name from the most generic (superclass > subclass) [Indexed] class in the hierarchy
PersistentClass pc = cfg.GetClassMapping(clazz);
System.Type rootIndex = null;
do
{
IndexedAttribute indexAnn = AttributeUtil.GetIndexed(pc.MappedClass);
if (indexAnn != null)
{
if (string.IsNullOrEmpty(indexAnn.Index) == false)
{
return indexAnn.Index;
}
else
{
rootIndex = pc.MappedClass;
}
}
pc = pc.Superclass;
} while (pc != null);
//there is nobody out there with a non default [Indexed(Index = "fo")]
if (rootIndex != null)
{
return rootIndex.Name;
}
else
{
throw new HibernateException(
"Trying to extract the index name from a non @Indexed class: " + clazz);
}
}
}
private static String GetDirectoryProviderName(System.Type clazz, Configuration cfg)
{
//get the most specialized (ie subclass > superclass) non default index name
//if none extract the name from the most generic (superclass > subclass) [Indexed] class in the hierarchy
PersistentClass pc = cfg.GetClassMapping(clazz);
System.Type rootIndex = null;
do
{
IndexedAttribute indexAnn = AttributeUtil.GetIndexed(pc.MappedClass);
if (indexAnn != null)
{
#if NET_2_0
if (string.IsNullOrEmpty(indexAnn.Index)==false)
#else
if (indexAnn.Index != null && indexAnn.Index != string.Empty)
#endif
{
return indexAnn.Index;
}
else
{
rootIndex = pc.MappedClass;
}
}
pc = pc.Superclass;
} while (pc != null);
//there is nobody out there with a non default [Indexed(Index = "fo")]
if (rootIndex != null)
{
return rootIndex.Name;
}
else
{
throw new HibernateException(
"Trying to extract the index name from a non @Indexed class: " + clazz);
}
}
}
}
\ No newline at end of file
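GetDirectoryProperties merges every "hibernate.search.default.*" property with the more specific "hibernate.search.&lt;index name&gt;.*" properties (index-specific values winning), and CreateDirectoryProvider reads "directory_provider" from the merged set. A hedged configuration sketch; the concrete paths and the "Book" index name are assumptions for illustration, and "indexBase" follows the key mentioned in the FSMasterDirectoryProvider summary below:
// Hypothetical configuration fragment: property keys as consumed by
// GetDirectoryProperties above (LUCENE_PREFIX/LUCENE_DEFAULT constants);
// values are illustrative only.
Configuration cfg = new Configuration();
cfg.SetProperty("hibernate.search.default.directory_provider",
                "NHibernate.Search.Storage.FSDirectoryProvider, NHibernate.Search");
cfg.SetProperty("hibernate.search.default.indexBase", @"c:\indexes");
// a value scoped to one index overrides the "default." value for that index only
cfg.SetProperty("hibernate.search.Book.indexBase", @"c:\indexes\books");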
......@@ -5,59 +5,60 @@
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Directory=Lucene.Net.Store.Directory;
using NHibernate.Search.Impl;
using NHibernate.Util;
namespace NHibernate.Search.Storage
{
public class FSDirectoryProvider : IDirectoryProvider
{
private FSDirectory directory;
private static ILog log = LogManager.GetLogger(typeof(FSDirectoryProvider));
private String indexName;
public class FSDirectoryProvider : IDirectoryProvider
{
private FSDirectory directory;
private static ILog log = LogManager.GetLogger(typeof (FSDirectoryProvider));
private String indexName;
public void Initialize(String directoryProviderName, IDictionary properties, SearchFactory searchFactory)
{
DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, properties);
try
{
bool create = !indexDir.Exists;
indexName = indexDir.FullName;
directory = FSDirectory.GetDirectory(indexName, create);
if (create)
{
IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create);
iw.Close();
}
searchFactory.RegisterDirectoryProviderForLocks(this);
}
catch (IOException e)
{
throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
}
}
public void Initialize(String directoryProviderName, IDictionary properties, SearchFactory searchFactory)
{
DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, properties);
try
{
bool create = !indexDir.Exists;
indexName = indexDir.FullName;
directory = FSDirectory.GetDirectory(indexName, create);
if (create)
{
IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create);
iw.Close();
}
searchFactory.RegisterDirectoryProviderForLocks(this);
}
catch (IOException e)
{
throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
}
}
public Directory Directory
{
get { return directory; }
}
public Lucene.Net.Store.Directory Directory
{
get { return directory; }
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is FSDirectoryProvider)) return false;
return indexName.Equals(((FSDirectoryProvider) obj).indexName);
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is FSDirectoryProvider)) return false;
return indexName.Equals(((FSDirectoryProvider) obj).indexName);
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 11;
return 37*hash + indexName.GetHashCode();
}
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 11;
return 37*hash + indexName.GetHashCode();
}
}
}
\ No newline at end of file
......@@ -7,178 +7,188 @@
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Directory=Lucene.Net.Store.Directory;
using Directory = Lucene.Net.Store.Directory;
namespace NHibernate.Search.Storage
{
/// <summary>
/// File based DirectoryProvider that takes care of index copy
/// The base directory is represented by hibernate.search.<index>.indexBase
/// The index is created in <base directory>/<index name>
/// The source (aka copy) directory is built from <sourceBase>/<index name>
/// A copy is triggered every refresh seconds
/// </summary>
public class FSMasterDirectoryProvider : IDirectoryProvider
{
private static ILog log = LogManager.GetLogger(typeof(FSMasterDirectoryProvider));
private FSDirectory directory;
private int current;
private String indexName;
private Timer timer;
private SearchFactory searchFactory;
public void Initialize(String directoryProviderName, IDictionary properties, SearchFactory searchFactory)
{
//source guessing
String source =
DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source,
directoryProviderName, properties);
if (source == null)
throw new ArgumentException("FSMasterDirectoryProvider requires a viable source directory");
log.Debug("Source directory: " + source);
DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, properties);
log.Debug("Index directory: " + indexDir);
String refreshPeriod = (string) (properties[Environment.Refresh] ?? "3600");
long period = int.Parse(refreshPeriod);
log.Debug("Refresh period " + period + " seconds");
period *= 1000; // convert the refresh period from seconds to milliseconds
try
{
bool create = !File.Exists(Path.Combine(indexDir.FullName, "segments"));
indexName = indexDir.FullName;
if (create)
{
log.Debug("Index directory '" + indexName + "' will be initialized");
indexDir.Create();
}
directory = FSDirectory.GetDirectory(indexName, create);
public void Initialize(String directoryProviderName, IDictionary properties, SearchFactory searchFactory)
{
//source guessing
String source = DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source, directoryProviderName, properties);
if (source == null)
throw new ArgumentException("FSMasterDirectoryProvider requires a viable source directory");
log.Debug("Source directory: " + source);
DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, properties);
log.Debug("Index directory: " + indexDir);
#if NET_2_0
String refreshPeriod = (string)(properties[Environment.Refresh] ?? "3600");
#else
String refreshPeriod = (string) (properties[Environment.Refresh] != null ? properties[Environment.Refresh] : "3600");
#endif
long period = int.Parse(refreshPeriod);
log.Debug("Refresh period " + period + " seconds");
period *= 1000; // convert the refresh period from seconds to milliseconds
try
{
bool create = !File.Exists(Path.Combine(indexDir.FullName, "segments"));
indexName = indexDir.FullName;
if (create)
{
log.Debug("Index directory '" + indexName + "' will be initialized");
indexDir.Create();
}
directory = FSDirectory.GetDirectory(indexName, create);
if (create)
{
IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create);
iw.Close();
}
//copy to source
if (File.Exists(Path.Combine(source, "current1")))
{
current = 2;
}
else if (File.Exists(Path.Combine(source, "current2")))
{
current = 1;
}
else
{
log.Debug("Source directory for '" + indexName + "' will be initialized");
current = 1;
}
String currentString = current.ToString();
DirectoryInfo subDir = new DirectoryInfo(Path.Combine(source, currentString));
FileHelper.Synchronize(indexDir, subDir, true);
File.Delete(Path.Combine(source, "current1"));
File.Delete(Path.Combine(source, "current2"));
log.Debug("Current directory: " + current);
}
catch (IOException e)
{
throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
}
searchFactory.RegisterDirectoryProviderForLocks(this);
timer = new Timer(
new CopyDirectory(this, indexName, source).Run
);
timer.Change(period, period);
this.searchFactory = searchFactory;
}
//copy to source
if (File.Exists(Path.Combine(source, "current1")))
{
current = 2;
}
else if (File.Exists(Path.Combine(source, "current2")))
{
current = 1;
}
else
{
log.Debug("Source directory for '" + indexName + "' will be initialized");
current = 1;
}
String currentString = current.ToString();
DirectoryInfo subDir = new DirectoryInfo(Path.Combine(source, currentString));
FileHelper.Synchronize(indexDir, subDir, true);
File.Delete(Path.Combine(source, "current1"));
File.Delete(Path.Combine(source, "current2"));
log.Debug("Current directory: " + current);
}
catch (IOException e)
{
throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
}
searchFactory.RegisterDirectoryProviderForLocks(this);
#if NET_2_0
timer = new Timer(
new CopyDirectory(this, indexName, source).Run
);
timer.Change(period, period);
#else
timer = new Timer(new TimerCallback(new CopyDirectory(this, indexName, source).Run), null, period, period);
#endif
this.searchFactory = searchFactory;
}
public Directory Directory
{
get { return directory; }
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is FSMasterDirectoryProvider)) return false;
return indexName.Equals(((FSMasterDirectoryProvider) obj).indexName);
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is FSMasterDirectoryProvider)) return false;
return indexName.Equals(((FSMasterDirectoryProvider)obj).indexName);
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 11;
return 37*hash + indexName.GetHashCode();
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 11;
return 37 * hash + indexName.GetHashCode();
}
private class CopyDirectory
{
private readonly FSMasterDirectoryProvider parent;
private readonly string source;
private readonly string destination;
private object directoryProviderLock;
private class CopyDirectory
{
private readonly FSMasterDirectoryProvider parent;
private String source;
private String destination;
private object directoryProviderLock;
public CopyDirectory(FSMasterDirectoryProvider parent, string source, string destination)
{
this.parent = parent;
this.source = source;
this.destination = destination;
}
public CopyDirectory(FSMasterDirectoryProvider parent, String source, String destination)
{
this.parent = parent;
this.source = source;
this.destination = destination;
}
[MethodImpl(MethodImplOptions.Synchronized)]
public void Run(object ignored)
{
//TODO get rid of current and use the marker file instead?
DateTime start = DateTime.Now;
if (directoryProviderLock == null)
{
directoryProviderLock = parent.searchFactory.GetLockObjForDirectoryProvider(parent);
}
lock (directoryProviderLock)
{
int oldIndex = parent.current;
int index = parent.current == 1 ? 2 : 1;
DirectoryInfo sourceFile = new DirectoryInfo(source);
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.Synchronized)]
public void Run(object ignored)
{
//TODO get rid of current and use the marker file instead?
DateTime start = DateTime.Now;
if (directoryProviderLock == null)
{
directoryProviderLock = parent.searchFactory.GetLockObjForDirectoryProvider(parent);
}
lock (directoryProviderLock)
{
int oldIndex = parent.current;
int index = parent.current == 1 ? 2 : 1;
DirectoryInfo sourceFile = new DirectoryInfo(source);
DirectoryInfo destinationFile = new DirectoryInfo(Path.Combine(destination, index.ToString()));
//TODO make smart a parameter
try
{
log.Info("Copying " + sourceFile + " into " + destinationFile);
FileHelper.Synchronize(sourceFile, destinationFile, true);
parent.current = index;
}
catch (IOException e)
{
//don't change current
log.Error("Unable to synchronize source of " + parent.indexName, e);
return;
}
try
{
File.Delete(Path.Combine(destination, "current" + oldIndex));
}
catch (IOException e)
{
log.Warn("Unable to remove previous marker file from source of " + parent.indexName, e);
}
try
{
File.Create(Path.Combine(destination, "current" + index)).Dispose();
}
catch (IOException e)
{
log.Warn("Unable to create current marker in source of " + parent.indexName, e);
}
}
log.Info("Copy for " + parent.indexName + " took " + (DateTime.Now - start) + ".");
}
}
}
DirectoryInfo destinationFile = new DirectoryInfo(Path.Combine(destination, index.ToString()));
//TODO make smart a parameter
try
{
log.Info("Copying " + sourceFile + " into " + destinationFile);
FileHelper.Synchronize(sourceFile, destinationFile, true);
parent.current = index;
}
catch (IOException e)
{
//don't change current
log.Error("Unable to synchronize source of " + parent.indexName, e);
return;
}
try
{
File.Delete(Path.Combine(destination, "current" + oldIndex));
}
catch (IOException e)
{
log.Warn("Unable to remove previous marker file from source of " + parent.indexName, e);
}
try
{
#if NET_2_0
File.Create(Path.Combine(destination, "current" + index)).Dispose();
#else
File.Create(Path.Combine(destination, "current" + index));
#endif
}
catch (IOException e)
{
log.Warn("Unable to create current marker in source of " + parent.indexName, e);
}
}
log.Info("Copy for " + parent.indexName + " took " + (DateTime.Now- start) + ".");
}
}
}
}
\ No newline at end of file
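The summary on FSMasterDirectoryProvider describes its configuration, and CopyDirectory.Run implements the publishing protocol: the timer alternately copies the live index into the "1" and "2" subdirectories under the source directory and flips the current1/current2 marker file so that slave providers can tell which copy is complete. Below is a rough initialization sketch, assuming the literal keys "indexBase", "sourceBase" and "refresh" stand in for the Environment constants read in Initialize; the paths and the index name are placeholders.

using System.Collections;
using NHibernate.Search.Impl;
using NHibernate.Search.Storage;

public static class MasterProviderSketch
{
    public static IDirectoryProvider Create(SearchFactory searchFactory)
    {
        // Assumed property keys; the provider itself reads them through the
        // Environment constants (SourceBase, Source, Refresh) used above.
        IDictionary props = new Hashtable();
        props["indexBase"] = @"c:\lucene\master";        // where the live index is built
        props["sourceBase"] = @"\\share\lucene\source";  // where copies are published for slaves
        props["refresh"] = "300";                        // copy the index every 300 seconds

        IDirectoryProvider provider = new FSMasterDirectoryProvider();
        provider.Initialize("Product", props, searchFactory);
        return provider;
    }
}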
using System;
using System.IO;
#if NET_2_0
using System.Collections.Generic;
using Iesi.Collections.Generic;
#else
using System.Collections;
using Iesi.Collections;
#endif
namespace NHibernate.Search.Storage
{
public class FileHelper
{
private const int LastWriteTimePrecision = 2000;
public static void Synchronize(DirectoryInfo source, DirectoryInfo destination, bool smart)
{
if (!destination.Exists)
{
destination.Create();
}
FileInfo[] sources = source.GetFiles();
ISet<string> srcNames = new HashedSet<string>();
foreach (FileInfo fileInfo in sources)
{
srcNames.Add(fileInfo.Name);
}
FileInfo[] dests = destination.GetFiles();
public static void Synchronize(DirectoryInfo source, DirectoryInfo destination, bool smart)
{
if (!destination.Exists)
{
destination.Create();
}
FileInfo[] sources = source.GetFiles();
#if NET_2_0
ISet<string> srcNames = new HashedSet<string>();
#else
ISet srcNames = new HashedSet();
#endif
foreach (FileInfo fileInfo in sources)
{
srcNames.Add(fileInfo.Name);
}
FileInfo[] dests = destination.GetFiles();
//delete files not present in source
foreach (FileInfo file in dests)
{
if (!srcNames.Contains(file.Name))
{
file.Delete();
}
}
//copy each file from source
foreach (FileInfo sourceFile in sources)
{
FileInfo destinationFile = new FileInfo(Path.Combine(destination.FullName, sourceFile.Name));
long destinationChanged = destinationFile.LastWriteTime.Ticks/LastWriteTimePrecision;
long sourceChanged = sourceFile.LastWriteTime.Ticks/LastWriteTimePrecision;
if (!smart || destinationChanged != sourceChanged)
{
sourceFile.CopyTo(destinationFile.FullName, true);
}
}
//delete files not present in source
foreach (FileInfo file in dests)
{
if (!srcNames.Contains(file.Name))
{
file.Delete();
}
}
//copy each file from source
foreach (FileInfo sourceFile in sources)
{
FileInfo destinationFile = new FileInfo(Path.Combine(destination.FullName,sourceFile.Name));
long destinationChanged = destinationFile.LastWriteTime.Ticks/LastWriteTimePrecision;
long sourceChanged = sourceFile.LastWriteTime.Ticks/LastWriteTimePrecision;
if(!smart || destinationChanged != sourceChanged)
{
sourceFile.CopyTo(destinationFile.FullName, true);
}
}
foreach (DirectoryInfo directoryInfo in source.GetDirectories())
{
Synchronize(directoryInfo,
new DirectoryInfo(Path.Combine(destination.FullName, directoryInfo.Name)),
smart);
}
}
}
}
\ No newline at end of file
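A minimal usage sketch for FileHelper.Synchronize, with placeholder paths: when smart is true a file is skipped if its truncated last-write time already matches the destination's, when smart is false every file is copied unconditionally; files missing from the source are deleted from the destination and subdirectories are synchronized recursively.

using System.IO;
using NHibernate.Search.Storage;

public static class SynchronizeSketch
{
    public static void Main()
    {
        DirectoryInfo source = new DirectoryInfo(@"c:\indexes\master\Product");
        DirectoryInfo destination = new DirectoryInfo(@"c:\indexes\copy\Product");

        // Mirror the source index into the destination: delete files that no longer
        // exist in the source and copy only files whose timestamps differ.
        FileHelper.Synchronize(source, destination, true);
    }
}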
using System.Collections;
using Lucene.Net.Store;
using NHibernate.Search.Impl;
namespace NHibernate.Search.Storage
{
public interface IDirectoryProvider
{
void Initialize(string directoryProviderName, IDictionary indexProps, SearchFactory searchFactory);
Directory Directory { get; }
}
}
\ No newline at end of file
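The interface above is the whole contract a provider has to satisfy. A minimal sketch of a custom in-memory implementation, modeled on the providers in this commit; the class name InMemoryDirectoryProvider is invented for illustration.

using System.Collections;
using Lucene.Net.Store;
using NHibernate.Search.Impl;
using NHibernate.Search.Storage;

// Illustrative only: a provider that wraps an in-memory Lucene directory.
public class InMemoryDirectoryProvider : IDirectoryProvider
{
    private RAMDirectory directory;

    public void Initialize(string directoryProviderName, IDictionary indexProps, SearchFactory searchFactory)
    {
        directory = new RAMDirectory();
        // Real providers also create an empty index here and register for locking,
        // as RAMDirectoryProvider below does.
        searchFactory.RegisterDirectoryProviderForLocks(this);
    }

    public Directory Directory
    {
        get { return directory; }
    }
}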
......@@ -4,56 +4,58 @@
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Directory=Lucene.Net.Store.Directory;
using NHibernate.Search.Impl;
namespace NHibernate.Search.Storage
{
public class RAMDirectoryProvider : IDirectoryProvider
{
private RAMDirectory directory;
private string indexName;
public void Initialize(String directoryProviderName, IDictionary properties, SearchFactory searchFactory)
{
if (directoryProviderName == null)
throw new ArgumentNullException("directoryProviderName");
indexName = directoryProviderName;
directory = new RAMDirectory();
try
{
IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), true);
iw.Close();
searchFactory.RegisterDirectoryProviderForLocks(this);
}
catch (IOException e)
{
throw new HibernateException("Unable to initialize index: " + indexName, e);
}
}
public Directory Directory
{
get { return directory; }
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is RAMDirectoryProvider)) return false;
return indexName.Equals(((RAMDirectoryProvider) obj).indexName);
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 7;
return 29*hash + indexName.GetHashCode();
}
public Lucene.Net.Store.Directory Directory
{
get { return directory; }
}
public override bool Equals(Object obj)
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
if (obj == this) return true;
if (obj == null || !(obj is RAMDirectoryProvider)) return false;
return indexName.Equals(((RAMDirectoryProvider)obj).indexName);
}
public override int GetHashCode()
{
// this code is actually broken since the value changes after the Initialize call,
// but from a practical POV this is fine since we only call this method
// after the Initialize call
int hash = 7;
return 29 * hash + indexName.GetHashCode();
}
}
}
\ No newline at end of file
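RAMDirectoryProvider reads nothing from its property dictionary, so initializing it only takes a provider name, which also serves as the index name reported in errors. A short sketch for a test setup; the name and the wrapper method are placeholders.

using System.Collections;
using NHibernate.Search.Impl;
using NHibernate.Search.Storage;

public static class RamProviderSketch
{
    public static IDirectoryProvider CreateForTests(SearchFactory searchFactory)
    {
        // An empty Hashtable is enough because Initialize ignores the properties.
        IDirectoryProvider provider = new RAMDirectoryProvider();
        provider.Initialize("TestIndex", new Hashtable(), searchFactory);
        return provider;
    }
}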