提交 2cb98cb8 编写于 作者: M Marek Safar

[mdoc] Integrate api-doc-tools version

上级 b21c70ff
......@@ -20,9 +20,6 @@
[submodule "external/reference-assemblies"]
path = external/binary-reference-assemblies
url = git://github.com/mono/reference-assemblies.git
[submodule "external/Lucene.Net.Light"]
path = external/Lucene.Net.Light
url = git://github.com/mono/Lucene.Net.Light.git
[submodule "external/nunit-lite"]
path = external/nunit-lite
url = git://github.com/mono/NUnitLite.git
......@@ -58,3 +55,6 @@
[submodule "external/xunit-binaries"]
path = external/xunit-binaries
url = git://github.com/mono/xunit-binaries.git
[submodule "external/api-doc-tools"]
path = external/api-doc-tools
url = git://github.com/mono/api-doc-tools.git
Subproject commit 85978b7eb94738f516824341213d5e94060f5284
Subproject commit d03e819838c6241f92f90655cb448cc47c9e8791
......@@ -8,8 +8,6 @@ LIBRARY_PACKAGE = monodoc
# also activate legacy mode to compile old monodoc.dll api
LOCAL_MCS_FLAGS = /nowarn:618,612,672,809,414,649 /define:LEGACY_MODE
JAY_FLAGS = -ct
IMAGES = \
Resources/images/bc_bg.png \
Resources/images/bc_separator.png \
......@@ -53,10 +51,9 @@ IMAGES = \
Resources/images/reference.png \
Resources/images/treebg.png
IMAGE_RESOURCE_COMMAND = $(foreach file,$(IMAGES),/resource:$(file),$(notdir $(file)))
IMAGE_RESOURCE_COMMAND = $(foreach file,$(IMAGES),/resource:../../../external/api-doc-tools/monodoc/$(file),$(notdir $(file)))
RESOURCE_FILES = \
../../docs/monodoc.xml \
Resources/base.css \
Resources/ecmaspec-html-css.xsl \
Resources/ecmaspec-html.xsl \
......@@ -74,42 +71,20 @@ RESOURCE_FILES = \
Resources/mono-ecma-impl.xsl \
Resources/mono-ecma.css \
Resources/mono-ecma.xsl \
Resources/toc-html.xsl \
$(IMAGES)
Resources/toc-html.xsl
RESOURCE_COMMAND = $(foreach file,$(RESOURCE_FILES),/resource:../../../external/api-doc-tools/monodoc/$(file),$(notdir $(file)))
EXTRA_DISTFILES = \
jay.sh \
monodoc.dll.config.in \
$(RESOURCE_FILES) \
Monodoc.Ecma/EcmaUrlParser.jay \
Test/monodoc_test/monodoc.xml \
Test/monodoc_test/trees/tree-from-2-10.tree \
Test/monodoc_test/trees/tree-from-3-0-old.tree \
Test/monodoc_test/trees/tree-from-3-0.tree
LIB_REFS = ICSharpCode.SharpZipLib System System.Core System.Xml System.Xml.Linq System.Configuration
LIB_MCS_FLAGS = \
/unsafe \
/nowarn:169,164,162,168,219,618,612 \
/resource:../../docs/monodoc.xml,monodoc.xml \
/resource:Resources/base.css,base.css \
/resource:Resources/ecmaspec-html-css.xsl,ecmaspec-html-css.xsl \
/resource:Resources/ecmaspec-html.xsl,ecmaspec-html.xsl \
/resource:Resources/ecmaspec.css,ecmaspec.css \
/resource:Resources/helper.js,helper.js \
/resource:Resources/home.html,home.html \
/resource:Resources/Lminus.gif,Lminus.gif \
/resource:Resources/Lplus.gif,Lplus.gif \
/resource:Resources/creativecommons.png,creativecommons.png \
/resource:Resources/mdoc-html-format.xsl,mdoc-html-format.xsl \
/resource:Resources/mdoc-html-utils.xsl,mdoc-html-utils.xsl \
/resource:Resources/mdoc-sections-css.xsl,mdoc-sections-css.xsl \
/resource:Resources/mdoc-sections.xsl,mdoc-sections.xsl \
/resource:Resources/mono-ecma-css.xsl,mono-ecma-css.xsl \
/resource:Resources/mono-ecma-impl.xsl,mono-ecma-impl.xsl \
/resource:Resources/mono-ecma.css,mono-ecma.css \
/resource:Resources/mono-ecma.xsl,mono-ecma.xsl \
/resource:Resources/toc-html.xsl,toc-html.xsl \
$(RESOURCE_COMMAND) \
$(IMAGE_RESOURCE_COMMAND)
CLEAN_FILES += $(the_lib).config
......@@ -117,31 +92,10 @@ CLEAN_FILES += $(the_lib).config
TEST_MCS_FLAGS =
TEST_LIB_REFS = System System.Core System.Xml
DOC_SOURCE_DIRS = \
../../docs \
../../../docs
DOC_SOURCES = $(foreach dir,$(DOC_SOURCE_DIRS),$(wildcard $(dir)/*.source $(dir)/*.tree $(dir)/*.zip))
include ../../build/library.make
$(the_lib): Makefile $(RESOURCE_FILES)
all-local: $(the_lib).config
all-local: $(the_lib).config Monodoc.Ecma/EcmaUrlParser.cs
test-local: setup-doc-sources
dist-local: Monodoc.Ecma/EcmaUrlParser.cs
$(the_lib).config: Makefile monodoc.dll.config.in
$(the_lib).config: monodoc.dll.config.in
sed 's,@monodoc_refdir@,$(mono_libdir)/monodoc,g' monodoc.dll.config.in > $@
Monodoc.Ecma/EcmaUrlParser.cs: Monodoc.Ecma/EcmaUrlParser.jay $(topdir)/jay/skeleton.cs jay.sh
$(topdir)/$(thisdir)/jay.sh $(topdir) $< $@ $(JAY_FLAGS)
parser.exe: Monodoc.Ecma/EcmaUrlParser.cs Monodoc.Ecma/EcmaUrlTokenizer.cs Monodoc.Ecma/EcmaUrlParserDriver.cs Monodoc.Ecma/EcmaDesc.cs
mcs /out:$@ /debug $^
setup-doc-sources: $(DOC_SOURCES)
mkdir -p ./Test/monodoc_test/sources/
cp $(DOC_SOURCES) ./Test/monodoc_test/sources/
using System;
using System.IO;
using System.Reflection;
using System.Xml;
namespace Mono.Documentation {

	// XmlResolver that serves entities from the executing assembly's
	// embedded manifest resources (via a synthetic "x-resource" scheme)
	// before falling back to a set of directories and, finally, to the
	// standard XmlUrlResolver behavior.
	public class ManifestResourceResolver : XmlUrlResolver {
		private string[] dirs;

		// Clones the directory list so later mutation of the caller's
		// array cannot change resolution behavior.
		public ManifestResourceResolver (params string[] dirs)
		{
			this.dirs = (string[]) dirs.Clone ();
		}

		// Resolution order: embedded manifest resource, then the first
		// configured directory containing the file, then default rules.
		public override Uri ResolveUri (Uri baseUri, string relativeUri)
		{
			var resourceNames = Assembly.GetExecutingAssembly ().GetManifestResourceNames ();
			if (Array.IndexOf (resourceNames, relativeUri) >= 0)
				return new Uri ("x-resource:///" + relativeUri);
			foreach (var dir in dirs) {
				if (!File.Exists (Path.Combine (dir, relativeUri)))
					continue;
				var dirUri = new Uri ("file://" + new DirectoryInfo (dir).FullName + "/");
				return base.ResolveUri (dirUri, relativeUri);
			}
			return base.ResolveUri (baseUri, relativeUri);
		}

		// Only Stream entities are supported; a null ofObjectToReturn is
		// treated as a request for a Stream. "x-resource" URIs are read
		// straight out of the assembly's manifest resources.
		public override object GetEntity (Uri absoluteUri, string role, Type ofObjectToReturn)
		{
			var wanted = ofObjectToReturn ?? typeof (Stream);
			if (wanted != typeof (Stream))
				throw new XmlException ("This object type is not supported.");
			if (absoluteUri.Scheme != "x-resource")
				return base.GetEntity (absoluteUri, role, wanted);
			return Assembly.GetExecutingAssembly ().GetManifestResourceStream (
					absoluteUri.Segments [1]);
		}
	}
}
using System;
using System.Collections;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using System.Xml;
namespace Mono.Documentation {

// Loads the ECMA documentation XML for the type whose CLR-escaped name
// (e.g. "System.Collections.Generic.List`1") is given; returns null when
// no documentation is available for that type.
public delegate XmlDocument DocLoader (string escapedTypeName);

// Helpers for converting ECMA documentation XML type/member names into
// CSC-style escaped documentation IDs and for folding extension-method
// documentation into a type's XML document.
public static class XmlDocUtils
{
// <typeparam> docs declared on the member itself (relative to `member`).
public static XmlNodeList GetMemberGenericParameters (XmlNode member)
{
return member.SelectNodes ("Docs/typeparam");
}
// Generic parameters declared on the enclosing type.
// NOTE(review): the XPath is absolute, so `member` only supplies the
// owning document; any node of the same document yields the same result.
public static XmlNodeList GetTypeGenericParameters (XmlNode member)
{
return member.SelectNodes ("/Type/TypeParameters/TypeParameter");
}
// Convenience overload deriving both generic-parameter lists from `member`.
public static string ToTypeName (string type, XmlNode member)
{
return ToTypeName (type, GetTypeGenericParameters (member),
GetMemberGenericParameters (member));
}
// Rewrites a C#-style type name into documentation-ID form: '&' -> '@'
// (by-ref), '<'/'>' -> '{'/'}', and generic parameter names replaced by
// positional references (`i for type-level, ``i for member-level).
public static string ToTypeName (string type, XmlNodeList typeGenParams, XmlNodeList memberGenParams)
{
type = type.Replace ("&", "@").Replace ("<", "{").Replace (">", "}");
for (int i = 0; i < typeGenParams.Count; ++i) {
string name = typeGenParams [i].InnerText;
type = Regex.Replace (type, @"\b" + name + @"\b", "`" + i);
}
for (int i = 0; i < memberGenParams.Count; ++i) {
string name = memberGenParams [i].Attributes ["name"].Value;
type = Regex.Replace (type, @"\b" + name + @"\b", "``" + i);
}
return type;
}
// e.g. "List<T>" -> "List`1" (CLR arity-escaped type name).
public static string ToEscapedTypeName (string name)
{
return GetCountedName (name, "`");
}
// Replaces every generic argument list in `name` with `escape` followed
// by the argument count; nested-type '+' separators become '.'.
private static string GetCountedName (string name, string escape)
{
int lt = name.IndexOf ("<");
if (lt == -1)
return name;
StringBuilder type = new StringBuilder (name.Length);
int start = 0;
do {
type.Append (name.Substring (start, lt - start));
type.Append (escape);
type.Append (GetGenericCount (name, lt, out start));
} while ((lt = name.IndexOf ('<', start)) >= 0);
if (start < name.Length)
type.Append (name.Substring (start));
return type.ToString ().Replace ("+", ".");
}
// Counts the top-level generic arguments of the argument list whose '<'
// is at name[start]; `end` receives the index just past the matching '>'
// (the loop increments once more after clearing `r`).
private static int GetGenericCount (string name, int start, out int end)
{
int n = 1;
bool r = true;
int i = start;
int depth = 1;
for ( ++i; r && i < name.Length; ++i) {
switch (name [i]) {
case ',': if (depth == 1) ++n; break;
case '<': ++depth; break;
case '>': --depth; if (depth == 0) r = false; break;
}
}
end = i;
return n;
}
// Escapes a member name for documentation IDs: '.' -> '#', generic
// argument list -> ``count, remaining angle brackets -> braces.
public static string ToEscapedMemberName (string member)
{
// Explicitly implemented interface members contain '.'s in the member
// name, e.g. System.Collections.Generic.IEnumerable<A>.GetEnumerator.
// CSC does a s/\./#/g for these.
member = member.Replace (".", "#");
if (member [member.Length-1] == '>') {
int i = member.LastIndexOf ("<");
int ignore;
return member.Substring (0, i).Replace ("<", "{").Replace (">", "}") +
"``" + GetGenericCount (member, i, out ignore);
}
return member.Replace ("<", "{").Replace (">", "}");
}
// Appends to typexml's <Members> every extension method in `extensions`
// targeting the type itself, System.Object, a base type, or an
// implemented interface (resolved transitively through `loader`).
public static void AddExtensionMethods (XmlDocument typexml, ArrayList/*<XmlNode>*/ extensions, DocLoader loader)
{
// if no members (enum, delegate) don't add extensions
XmlNode m = typexml.SelectSingleNode ("/Type/Members");
if (m == null)
return;
// static classes can't be targets:
// NOTE(review): this throws NullReferenceException if the C#
// TypeSignature node is absent — confirm all inputs carry one.
if (typexml.SelectSingleNode (
"/Type/TypeSignature[@Language='C#']/@Value")
.Value.IndexOf (" static ") >= 0)
return;
foreach (string s in GetSupportedTypes (typexml, loader)) {
foreach (XmlNode extension in extensions) {
bool add = false;
foreach (XmlNode target in extension.SelectNodes ("Targets/Target")) {
if (target.Attributes ["Type"].Value == s) {
add = true;
break;
}
}
if (!add) {
continue;
}
foreach (XmlNode c in extension.SelectNodes ("Member")) {
XmlNode cm = typexml.ImportNode (c, true);
m.AppendChild (cm);
}
}
}
}
// Yields "T:"-prefixed names for System.Object, the type itself, its
// base-type chain and all transitively implemented interfaces.
private static IEnumerable GetSupportedTypes (XmlDocument type, DocLoader loader)
{
yield return "System.Object";
yield return GetEscapedPath (type, "Type/@FullName");
Hashtable h = new Hashtable ();
GetInterfaces (h, type, loader);
string s = GetEscapedPath (type, "Type/Base/BaseTypeName");
if (s != null) {
yield return s;
XmlDocument d;
string p = s;
// NOTE(review): `p` is never updated inside the loop, so this cycle
// guard only catches a base that refers directly back to the first
// base; also the trailing `yield return s` can yield null once the
// chain ends — callers appear to tolerate this, but verify.
while (s != null && (d = loader (s)) != null) {
GetInterfaces (h, d, loader);
s = GetEscapedPath (d, "Type/Base/BaseTypeName");
if (p == s)
break;
yield return s;
}
}
foreach (object o in h.Keys)
yield return o.ToString ();
}
// "T:" + escaped text of the node at `path`, or null when absent.
private static string GetEscapedPath (XmlDocument d, string path)
{
XmlNode n = d.SelectSingleNode (path);
if (n == null)
return null;
return "T:" + ToEscapedTypeName (n.InnerText);
}
// Collects into `ifaces` (keyed "T:Name", value null) every interface the
// documented type implements, recursing into each interface's own docs.
private static void GetInterfaces (Hashtable ifaces, XmlDocument doc, DocLoader loader)
{
foreach (XmlNode n in doc.SelectNodes ("Type/Interfaces/Interface/InterfaceName")) {
string t = ToEscapedTypeName (n.InnerText);
string tk = "T:" + t;
if (!ifaces.ContainsKey (tk)) {
ifaces.Add (tk, null);
try {
XmlDocument d = loader (t);
if (d != null)
GetInterfaces (ifaces, d, loader);
}
// the exception variable is unused (compiler warning); kept as-is here
catch (FileNotFoundException e) {
// ignore; interface documentation couldn't be found.
}
}
}
}
// Turns e.g. sources/netdocs into sources/cache/netdocs
public static string GetCacheDirectory (string assembledBase)
{
return Path.Combine (
Path.Combine (Path.GetDirectoryName (assembledBase), "cache"),
Path.GetFileName (assembledBase));
}
// Maps a documentation URL to a flat file name inside `cacheDir`
// ('/' -> '+', '*' -> "%2a", other characters URI-escaped).
public static string GetCachedFileName (string cacheDir, string url)
{
return Path.Combine (cacheDir,
Uri.EscapeUriString (url).Replace ('/', '+').Replace ("*", "%2a"));
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
namespace Mono.Utilities
{
	// Small least-recently-used cache: the most recently touched entries
	// sit at the head of a linked list; when capacity is exceeded the tail
	// entry is evicted and its node recycled for the next insertion.
	public class LRUCache<TKey, TValue>
	{
		[ThreadStatic]
		static LRUCache<TKey, TValue> deflt;

		// Per-thread default instance holding up to 5 entries.
		public static LRUCache<TKey, TValue> Default {
			get {
				return deflt != null ? deflt : (deflt = new LRUCache<TKey, TValue> (5));
			}
		}

		int capacity;
		LinkedList<ListValueEntry<TKey, TValue>> list;
		Dictionary<TKey, LinkedListNode<ListValueEntry<TKey, TValue>>> lookup;
		// Pre-allocated spare node, reused so eviction does not allocate.
		LinkedListNode<ListValueEntry<TKey, TValue>> openNode;

		public LRUCache (int capacity)
		{
			this.capacity = capacity;
			this.list = new LinkedList<ListValueEntry<TKey, TValue>>();
			this.lookup = new Dictionary<TKey, LinkedListNode<ListValueEntry<TKey, TValue>>> (capacity + 1);
			this.openNode = new LinkedListNode<ListValueEntry<TKey, TValue>>(new ListValueEntry<TKey, TValue> (default(TKey), default(TValue)));
		}

		// Inserts key/value if the key is not already cached, evicting the
		// least recently used entry when over capacity. An existing key is
		// left untouched (no value update), matching the historic contract.
		// FIX: the presence test used to be `Get(key) == null`, which is
		// always false when TValue is a value type (default(TValue) is not
		// null, so entries could never be added) and caused a duplicate-key
		// Add to throw when a null value had been cached. Test membership
		// in the lookup dictionary directly instead.
		public void Put (TKey key, TValue value)
		{
			if (!this.lookup.ContainsKey (key)) {
				this.openNode.Value.ItemKey = key;
				this.openNode.Value.ItemValue = value;
				this.list.AddFirst (this.openNode);
				this.lookup.Add (key, this.openNode);
				if (this.list.Count > this.capacity) {
					// last node is to be removed and saved for the next addition to the cache
					this.openNode = this.list.Last;
					// remove from list & dictionary
					this.list.RemoveLast();
					this.lookup.Remove(this.openNode.Value.ItemKey);
				} else {
					// still filling the cache, create a new open node for the next time
					this.openNode = new LinkedListNode<ListValueEntry<TKey, TValue>>(new ListValueEntry<TKey, TValue>(default(TKey), default(TValue)));
				}
			}
		}

		// Returns the cached value and promotes it to most-recently-used;
		// returns default(TValue) when the key is absent.
		public TValue Get (TKey key)
		{
			LinkedListNode<ListValueEntry<TKey, TValue>> node = null;
			if (!this.lookup.TryGetValue (key, out node))
				return default (TValue);
			this.list.Remove (node);
			this.list.AddFirst (node);
			return node.Value.ItemValue;
		}

		// Mutable key/value holder stored in the linked-list nodes.
		class ListValueEntry<K, V> where K : TKey
					   where V : TValue
		{
			internal V ItemValue;
			internal K ItemKey;

			internal ListValueEntry(K key, V value)
			{
				this.ItemKey = key;
				this.ItemValue = value;
			}
		}
	}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
namespace Mono.Utilities
{
	// Small least-recently-used cache: the most recently touched entries
	// sit at the head of a linked list; when capacity is exceeded the tail
	// entry is evicted and its node recycled for the next insertion.
	public class LRUCache<TKey, TValue>
	{
		[ThreadStatic]
		static LRUCache<TKey, TValue> deflt;

		// Per-thread default instance holding up to 5 entries.
		public static LRUCache<TKey, TValue> Default {
			get {
				return deflt != null ? deflt : (deflt = new LRUCache<TKey, TValue> (5));
			}
		}

		int capacity;
		LinkedList<ListValueEntry<TKey, TValue>> list;
		Dictionary<TKey, LinkedListNode<ListValueEntry<TKey, TValue>>> lookup;
		// Pre-allocated spare node, reused so eviction does not allocate.
		LinkedListNode<ListValueEntry<TKey, TValue>> openNode;

		public LRUCache (int capacity)
		{
			this.capacity = capacity;
			this.list = new LinkedList<ListValueEntry<TKey, TValue>>();
			this.lookup = new Dictionary<TKey, LinkedListNode<ListValueEntry<TKey, TValue>>> (capacity + 1);
			this.openNode = new LinkedListNode<ListValueEntry<TKey, TValue>>(new ListValueEntry<TKey, TValue> (default(TKey), default(TValue)));
		}

		// Inserts key/value if the key is not already cached, evicting the
		// least recently used entry when over capacity.
		// FIX: this copy did not compile — it referenced `Itemkey`/`Itemvalue`
		// (the fields are declared ItemKey/ItemValue) and `Tkey`/`Tvalue`
		// (the type parameters are TKey/TValue). Also replaced the broken
		// presence test `Get(key) == null` (always false for value-type
		// TValue; duplicate-key Add when a null value is cached) with a
		// direct dictionary membership check.
		public void Put (TKey key, TValue value)
		{
			if (!this.lookup.ContainsKey (key)) {
				this.openNode.Value.ItemKey = key;
				this.openNode.Value.ItemValue = value;
				this.list.AddFirst (this.openNode);
				this.lookup.Add (key, this.openNode);
				if (this.list.Count > this.capacity) {
					// last node is to be removed and saved for the next addition to the cache
					this.openNode = this.list.Last;
					// remove from list & dictionary
					this.list.RemoveLast();
					this.lookup.Remove(this.openNode.Value.ItemKey);
				} else {
					// still filling the cache, create a new open node for the next time
					this.openNode = new LinkedListNode<ListValueEntry<TKey, TValue>>(new ListValueEntry<TKey, TValue>(default(TKey), default(TValue)));
				}
			}
		}

		// Returns the cached value and promotes it to most-recently-used;
		// returns default(TValue) when the key is absent.
		public TValue Get (TKey key)
		{
			LinkedListNode<ListValueEntry<TKey, TValue>> node = null;
			if (!this.lookup.TryGetValue (key, out node))
				return default (TValue);
			this.list.Remove (node);
			this.list.AddFirst (node);
			return node.Value.ItemValue;
		}

		// Mutable key/value holder stored in the linked-list nodes.
		class ListValueEntry<K, V> where K : TKey
					   where V : TValue
		{
			internal V ItemValue;
			internal K ItemKey;

			internal ListValueEntry(K key, V value)
			{
				this.ItemKey = key;
				this.ItemValue = value;
			}
		}
	}
}
using System;
using System.Text.RegularExpressions;
using System.Collections;
namespace Mono.Utilities {
// Converts source-code snippets to colorized HTML using regex
// substitutions and <font color=...> tags (legacy HTML output).
public class Colorizer {
//
// Syntax coloring
//
// All C# keywords as one big alternation, each wrapped in \b word
// boundaries; applied after strings and comments are already wrapped
// in <font> tags so keywords inside them are not (re)colored.
static string keywords_cs =
"(\\babstract\\b|\\bevent\\b|\\bnew\\b|\\bstruct\\b|\\bas\\b|\\bexplicit\\b|\\bnull\\b|\\bswitch\\b|\\bbase\\b|\\bextern\\b|"
+
"\\bobject\\b|\\bthis\\b|\\bbool\\b|\\bfalse\\b|\\boperator\\b|\\bthrow\\b|\\bbreak\\b|\\bfinally\\b|\\bout\\b|\\btrue\\b|"
+
"\\bbyte\\b|\\bfixed\\b|\\boverride\\b|\\btry\\b|\\bcase\\b|\\bfloat\\b|\\bparams\\b|\\btypeof\\b|\\bcatch\\b|\\bfor\\b|"
+
"\\bprivate\\b|\\buint\\b|\\bchar\\b|\\bforeach\\b|\\bprotected\\b|\\bulong\\b|\\bchecked\\b|\\bgoto\\b|\\bpublic\\b|"
+
"\\bunchecked\\b|\\bclass\\b|\\bif\\b|\\breadonly\\b|\\bunsafe\\b|\\bconst\\b|\\bimplicit\\b|\\bref\\b|\\bushort\\b|"
+
"\\bcontinue\\b|\\bin\\b|\\breturn\\b|\\busing\\b|\\bdecimal\\b|\\bint\\b|\\bsbyte\\b|\\bvirtual\\b|\\bdefault\\b|"
+
"\\binterface\\b|\\bsealed\\b|\\bvolatile\\b|\\bdelegate\\b|\\binternal\\b|\\bshort\\b|\\bvoid\\b|\\bdo\\b|\\bis\\b|"
+
"\\bsizeof\\b|\\bwhile\\b|\\bdouble\\b|\\block\\b|\\bstackalloc\\b|\\belse\\b|\\blong\\b|\\bstatic\\b|\\benum\\b|"
+ "\\bnamespace\\b|\\bstring\\b)";
#if false
// currently not in use
static string keywords_vb =
"(\\bAddHandler\\b|\\bAddressOf\\b|\\bAlias\\b|\\bAnd\\b|\\bAndAlso\\b|\\bAnsi\\b|\\bAs\\b|\\bAssembly\\b|"
+
"\\bAuto\\b|\\bBoolean\\b|\\bByRef\\b|\\bByte\\b|\\bByVal\\b|\\bCall\\b|\\bCase\\b|\\bCatch\\b|"
+
"\\bCBool\\b|\\bCByte\\b|\\bCChar\\b|\\bCDate\\b|\\bCDec\\b|\\bCDbl\\b|\\bChar\\b|\\bCInt\\b|"
+
"\\bClass\\b|\\bCLng\\b|\\bCObj\\b|\\bConst\\b|\\bCShort\\b|\\bCSng\\b|\\bCStr\\b|\\bCType\\b|"
+
"\\bDate\\b|\\bDecimal\\b|\\bDeclare\\b|\\bDefault\\b|\\bDelegate\\b|\\bDim\\b|\\bDirectCast\\b|\\bDo\\b|"
+
"\\bDouble\\b|\\bEach\\b|\\bElse\\b|\\bElseIf\\b|\\bEnd\\b|\\bEnum\\b|\\bErase\\b|\\bError\\b|"
+
"\\bEvent\\b|\\bExit\\b|\\bFalse\\b|\\bFinally\\b|\\bFor\\b|\\bFriend\\b|\\bFunction\\b|\\bGet\\b|"
+
"\\bGetType\\b|\\bGoSub\\b|\\bGoTo\\b|\\bHandles\\b|\\bIf\\b|\\bImplements\\b|\\bImports\\b|\\bIn\\b|"
+
"\\bInherits\\b|\\bInteger\\b|\\bInterface\\b|\\bIs\\b|\\bLet\\b|\\bLib\\b|\\bLike\\b|\\bLong\\b|"
+
"\\bLoop\\b|\\bMe\\b|\\bMod\\b|\\bModule\\b|\\bMustInherit\\b|\\bMustOverride\\b|\\bMyBase\\b|\\bMyClass\\b|"
+
"\\bNamespace\\b|\\bNew\\b|\\bNext\\b|\\bNot\\b|\\bNothing\\b|\\bNotInheritable\\b|\\bNotOverridable\\b|\\bObject\\b|"
+
"\\bOn\\b|\\bOption\\b|\\bOptional\\b|\\bOr\\b|\\bOrElse\\b|\\bOverloads\\b|\\bOverridable\\b|\\bOverrides\\b|"
+
"\\bParamArray\\b|\\bPreserve\\b|\\bPrivate\\b|\\bProperty\\b|\\bProtected\\b|\\bPublic\\b|\\bRaiseEvent\\b|\\bReadOnly\\b|"
+
"\\bReDim\\b|\\bREM\\b|\\bRemoveHandler\\b|\\bResume\\b|\\bReturn\\b|\\bSelect\\b|\\bSet\\b|\\bShadows\\b|"
+
"\\bShared\\b|\\bShort\\b|\\bSingle\\b|\\bStatic\\b|\\bStep\\b|\\bStop\\b|\\bString\\b|\\bStructure\\b|"
+
"\\bSub\\b|\\bSyncLock\\b|\\bThen\\b|\\bThrow\\b|\\bTo\\b|\\bTrue\\b|\\bTry\\b|\\bTypeOf\\b|"
+
"\\bUnicode\\b|\\bUntil\\b|\\bVariant\\b|\\bWhen\\b|\\bWhile\\b|\\bWith\\b|\\bWithEvents\\b|\\bWriteOnly\\b|\\bXor\\b)";
#endif
// Dispatches on a (trimmed, case-insensitive) language tag; unknown
// languages are only HTML-escaped.
public static string Colorize(string text, string lang)
{
lang = lang.Trim().ToLower();
switch (lang) {
case "xml":
return ColorizeXml(text);
case "cs": case "c#": case "csharp":
return ColorizeCs(text);
case "vb":
return ColorizeVb(text);
}
return Escape (text);
}
// XML snippet -> colorized HTML. The substitutions below are strictly
// order-dependent; do not reorder them.
static string ColorizeXml(string text)
{
// Order is highly important.
// s/ /&nbsp;/g must be first, as later substitutions add required spaces
text = text.Replace(" ", "&nbsp;");
// Find & mark XML elements
Regex re = new Regex("<\\s*(\\/?)\\s*([\\s\\S]*?)\\s*(\\/?)\\s*>");
text = re.Replace(text, "{blue:&lt;$1}{maroon:$2}{blue:$3&gt;}");
// Colorize attribute strings; must be done before colorizing marked XML
// elements so that we don't clobber the colorized XML tags.
re = new Regex ("([\"'])(.*?)\\1");
text = re.Replace (text,
"$1<font color=\"purple\">$2</font>$1");
// Colorize marked XML elements
re = new Regex("\\{(\\w*):([\\s\\S]*?)\\}");
//text = re.Replace(text, "<span style='color:$1'>$2</span>");
text = re.Replace(text, "<font color=\"$1\">$2</font>");
// Standard Structure
text = text.Replace("\t", "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;");
re = new Regex("\r\n|\r|\n");
text = re.Replace(text, "<br/>");
return text;
}
// C# snippet -> colorized HTML: escape, then strings, then //-comments
// (careful not to recolor inside already-wrapped strings), then keywords.
static string ColorizeCs(string text)
{
text = text.Replace(" ", "&nbsp;");
text = text.Replace("<", "&lt;");
text = text.Replace(">", "&gt;");
Regex re = new Regex("\"((((?!\").)|\\\")*?)\"");
text =
re.Replace(text,
"<font color=\"purple\">\"$1\"</font>");
//"<span style='color:purple'>\"$1\"</span>");
re = new
Regex
("//(((.(?!\"</font>))|\"(((?!\").)*)\"</font>)*)(\r|\n|\r\n)");
//("//(((.(?!\"</span>))|\"(((?!\").)*)\"</span>)*)(\r|\n|\r\n)");
text =
re.Replace(text,
"<font color=\"green\">//$1</font><br/>");
// "<span style='color:green'>//$1</span><br/>");
re = new Regex(keywords_cs);
text = re.Replace(text, "<font color=\"blue\">$1</font>");
//text = re.Replace(text, "<span style='color:blue'>$1</span>");
text = text.Replace("\t", "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;");
text = text.Replace("\n", "<br/>");
return text;
}
// VB snippet: whitespace/line-break formatting only — the keyword and
// string coloring below was disabled (left commented out) upstream.
static string ColorizeVb(string text) {
text = text.Replace(" ", "&nbsp;");
/* Regex re = new Regex ("\"((((?!\").)|\\\")*?)\"");
text = re.Replace (text,"<span style='color:purple'>\"$1\"</span>");
re = new Regex ("'(((.(?!\"\\<\\/span\\>))|\"(((?!\").)*)\"\\<\\/span\\>)*)(\r|\n|\r\n)");
text = re.Replace (text,"<span style='color:green'>//$1</span><br/>");
re = new Regex (keywords_vb);
text = re.Replace (text,"<span style='color:blue'>$1</span>");
*/
text = text.Replace("\t", "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;");
text = text.Replace("\n", "<br/>");
return text;
}
// Minimal HTML escaping for snippets in unrecognized languages.
// NOTE(review): '&' must stay first or the later entity replacements
// would be double-escaped.
static string Escape(string text)
{
text = text.Replace("&", "&amp;");
text = text.Replace(" ", "&nbsp;");
text = text.Replace("<", "&lt;");
text = text.Replace(">", "&gt;");
text = text.Replace("\n", "<br/>");
return text;
}
}
}
using System;
using System.Linq;
using System.Text;
using System.Collections.Generic;
namespace Monodoc.Ecma
{
/* Some properties might not be filled/meaningful depending on kind
* like a namespace EcmaUrl won't have a valid TypeName
*/
public class EcmaDesc : IEquatable<EcmaDesc>
{
public enum Kind
{
Type,
Constructor,
Method,
Namespace,
Field,
Property,
Event,
Operator
}
public enum Mod
{
Normal,
Pointer,
Ref,
Out
}
public enum Format
{
WithArgs,
WithoutArgs
}
public Kind DescKind {
get;
set;
}
public Mod DescModifier {
get;
set;
}
public string Namespace {
get;
set;
}
public string TypeName {
get;
set;
}
public string MemberName {
get;
set;
}
public EcmaDesc NestedType {
get;
set;
}
/* A list of the array dimensions attached to this type.
* The list count corresponds to the number of recursive
* array definition (jagged arrays) the value of the
* corresponding list item is the number of dimension
* attached to that array definition instance
*/
public IList<int> ArrayDimensions {
get;
set;
}
/* Depending on the form of the url, we might not have the type
* of the argument but only how many the type/member has i.e.
* when such number is specified with a backtick
*/
public IList<EcmaDesc> GenericTypeArguments {
get;
set;
}
/* The GenericTypeArguments list may be null, in which case, this
* is an easier/safer way to check the count.
*/
public int GenericTypeArgumentsCount {
get { return GenericTypeArguments != null ? GenericTypeArguments.Count : 0; }
}
/* This property tells if the above collections only correct value
* is the number of item in it to represent generic arguments
*/
public bool GenericTypeArgumentsIsNumeric {
get {
return GenericTypeArguments != null && GenericTypeArguments.FirstOrDefault () == null;
}
}
public IList<EcmaDesc> GenericMemberArguments {
get;
set;
}
/* The GenericMemberArguments list may be null, in which case, this
* is an easier/safer way to check the count.
*/
public int GenericMemberArgumentsCount {
get { return GenericMemberArguments != null ? GenericMemberArguments.Count : 0; }
}
public bool GenericMemberArgumentsIsNumeric {
get {
return GenericMemberArguments != null && GenericMemberArguments.FirstOrDefault () == null;
}
}
public IList<EcmaDesc> MemberArguments {
get;
set;
}
/* The GenericTypeArguments list may be null, in which case, this
* is an easier/safer way to check the count.
*/
public int MemberArgumentsCount {
get { return MemberArguments != null ? MemberArguments.Count : 0; }
}
/* This indicates that we actually want an inner part of the ecmadesc
* i.e. in case of T: we could want the members (*), ctor (C), methods (M), ...
*/
public char Etc {
get;
set;
}
public bool IsEtc {
get {
return Etc != (char)0;
}
}
/* EtcFilter is only valid in some case of IsEtc when the inner part needs
* to be further filtered e.g. in case we want a listing of the type overloads
* Equals
*/
public string EtcFilter {
get;
set;
}
/* When a member is an explicit implementation of an interface member, we register
* the member EcmaDesc with its interface parent here
*/
public EcmaDesc ExplicitImplMember {
get;
set;
}
// Returns the TypeName and the generic/inner type information if existing
public string ToCompleteTypeName (char innerTypeSeparator = '.')
{
var result = TypeName;
if (GenericTypeArguments != null)
result += FormatGenericArgs (GenericTypeArguments);
if (NestedType != null)
result += innerTypeSeparator + NestedType.ToCompleteTypeName ();
if (ArrayDimensions != null && ArrayDimensions.Count > 0)
result += ArrayDimensions.Select (dim => "[" + new string (',', dim - 1) + "]").Aggregate (string.Concat);
return result;
}
// Returns the member name with its generic types if existing
public string ToCompleteMemberName (Format format)
{
/* We special process two cases:
* - Explicit member implementation which append a full type specification
* - Conversion operator which are exposed as normal method but have specific captioning in the end
*/
if (ExplicitImplMember != null) {
var impl = ExplicitImplMember;
return impl.FormattedNamespace + impl.ToCompleteTypeName () + "." + impl.ToCompleteMemberName (format);
} else if (format == Format.WithArgs && DescKind == Kind.Operator && MemberName.EndsWith ("Conversion")) {
var type1 = MemberArguments[0].FormattedNamespace + MemberArguments[0].ToCompleteTypeName () + ModToString (MemberArguments[0]);
var type2 = MemberArguments[1].FormattedNamespace + MemberArguments[1].ToCompleteTypeName () + ModToString (MemberArguments[1]);
return type1 + " to " + type2;
}
var result = IsEtc && !string.IsNullOrEmpty (EtcFilter) ? EtcFilter : MemberName;
// Temporary hack for monodoc produced inner type ctor
//if (DescKind == Kind.Constructor && NestedType != null)
//result = ToCompleteTypeName ();
if (GenericMemberArguments != null)
result += FormatGenericArgs (GenericMemberArguments);
if (format == Format.WithArgs) {
result += '(';
if (MemberArguments != null && MemberArguments.Count > 0) {
var args = MemberArguments.Select (a => FormatNamespace (a) + a.ToCompleteTypeName ('+') + ModToString (a));
result += string.Join (",", args);
}
result += ')';
}
return result;
}
public string ToEcmaCref ()
{
var sb = new StringBuilder ();
// Cref type
sb.Append (DescKind.ToString ()[0]);
sb.Append (":");
// Create the rest
ConstructCRef (sb);
return sb.ToString ();
}
void ConstructCRef (StringBuilder sb, bool skipLeadingDot = false)
{
if (string.IsNullOrEmpty (Namespace))
skipLeadingDot = true;
sb.Append (Namespace);
if (DescKind == Kind.Namespace)
return;
if (!skipLeadingDot)
sb.Append ('.');
sb.Append (TypeName);
AppendGenericArguments (sb, GenericTypeArguments, GenericTypeArgumentsIsNumeric, GenericTypeArgumentsCount);
if (NestedType != null) {
sb.Append ('+');
NestedType.ConstructCRef (sb, skipLeadingDot: true);
}
if (ArrayDimensions != null && ArrayDimensions.Count > 0) {
for (int i = 0; i < ArrayDimensions.Count; i++) {
sb.Append ('[');
sb.Append (new string (',', ArrayDimensions[i] - 1));
sb.Append (']');
}
}
if (DescKind == Kind.Type)
return;
if (ExplicitImplMember != null) {
sb.Append ('$');
ExplicitImplMember.DescKind = this.DescKind;
ExplicitImplMember.ConstructCRef (sb, skipLeadingDot: false);
return;
}
sb.Append (".");
sb.Append (MemberName);
AppendGenericArguments (sb, GenericMemberArguments, GenericMemberArgumentsIsNumeric, GenericMemberArgumentsCount);
if (MemberArguments != null && MemberArgumentsCount > 0) {
sb.Append ("(");
int i=0;
foreach (var a in MemberArguments) {
if (i > 0) {
sb.Append(",");
}
a.ConstructCRef (sb);
i++;
}
sb.Append (")");
}
}
void AppendGenericArguments (StringBuilder sb, IEnumerable<EcmaDesc> arguments, bool isNumeric, int argumentsCount)
{
if (arguments != null && isNumeric) {
sb.AppendFormat ("`{0}", argumentsCount);
} else if (arguments != null) {
sb.Append ('<');
int i=0;
foreach (var t in arguments) {
if (i > 0) {
sb.Append (",");
}
t.ConstructCRef (sb);
i++;
}
sb.Append ('>');
}
}
public override string ToString ()
{
return string.Format ("({8}) {0}::{1}{2}{3}{7} {4}{5}{6} {9} {10}",
Namespace,
TypeName,
FormatGenericArgsFull (GenericTypeArguments),
NestedType != null ? "+" + NestedType.ToString () : string.Empty,
MemberName ?? string.Empty,
FormatGenericArgsFull (GenericMemberArguments),
MemberArguments != null ? "(" + string.Join (",", MemberArguments.Select (m => m.ToString ())) + ")" : string.Empty,
ArrayDimensions != null && ArrayDimensions.Count > 0 ? ArrayDimensions.Select (dim => "[" + new string (',', dim - 1) + "]").Aggregate (string.Concat) : string.Empty,
DescKind.ToString ()[0],
Etc != 0 ? '(' + Etc.ToString () + ')' : string.Empty,
ExplicitImplMember != null ? "$" + ExplicitImplMember.ToString () : string.Empty);
}
public override bool Equals (object other)
{
var otherDesc = other as EcmaDesc;
return otherDesc != null && Equals (otherDesc);
}
public bool Equals (EcmaDesc other)
{
if (other == null)
return false;
if (NestedType == null ^ other.NestedType == null
|| ArrayDimensions == null ^ other.ArrayDimensions == null
|| GenericTypeArguments == null ^ other.GenericTypeArguments == null
|| GenericMemberArguments == null ^ other.GenericMemberArguments == null
|| MemberArguments == null ^ other.MemberArguments == null
|| ExplicitImplMember == null ^ other.ExplicitImplMember == null)
return false;
return other != null
&& DescKind == other.DescKind
&& TypeName == other.TypeName
&& Namespace == other.Namespace
&& MemberName == other.MemberName
&& (NestedType == null || NestedType.Equals (other.NestedType))
&& (ArrayDimensions == null || ArrayDimensions.SequenceEqual (other.ArrayDimensions))
&& (GenericTypeArguments == null || GenericTypeArguments.SequenceEqual (other.GenericTypeArguments))
&& (GenericMemberArguments == null || GenericMemberArguments.SequenceEqual (other.GenericMemberArguments))
&& (MemberArguments == null || MemberArguments.SequenceEqual (other.MemberArguments))
&& Etc == other.Etc
&& EtcFilter == other.EtcFilter
&& (ExplicitImplMember == null || ExplicitImplMember.Equals (other.ExplicitImplMember));
}
public override int GetHashCode ()
{
return DescKind.GetHashCode ()
^ TypeName.GetHashCode ()
^ Namespace.GetHashCode ()
^ MemberName.GetHashCode ();
}
bool What (bool input)
{
if (!input)
throw new Exception ("Not equal");
return input;
}
bool WhatT (bool input)
{
if (input)
throw new Exception ("Not equal");
return input;
}
string FormatNamespace (EcmaDesc desc)
{
return string.IsNullOrEmpty (desc.Namespace) ? string.Empty : desc.Namespace + ".";
}
// Renders a generic argument list as "<T1,T2>". When only the arity is
// known (the list holds null placeholders) the CLR-style "`N" form is used.
string FormatGenericArgs (IEnumerable<EcmaDesc> args)
{
	if (args == null || !args.Any ())
		return string.Empty;
	// If we only have the number of generic arguments, use ` notation
	if (args.First () == null)
		return "`" + args.Count ();
	var formatted = new List<string> ();
	foreach (var arg in args)
		formatted.Add (FormatNamespace (arg) + arg.ToCompleteTypeName ());
	return "<" + string.Join (",", formatted) + ">";
}
// Renders generic arguments via each argument's full ToString form,
// e.g. "<System.String,System.Int32>"; empty string when none.
string FormatGenericArgsFull (IEnumerable<EcmaDesc> genericArgs)
{
	if (genericArgs == null)
		return string.Empty;
	var parts = genericArgs.Select (t => t.ToString ());
	return "<" + string.Join (",", parts) + ">";
}
// Maps a parameter modifier to its ECMA url suffix character:
// pointer -> "*", by-ref -> "&", out -> "@", anything else -> "".
string ModToString (EcmaDesc desc)
{
	if (desc.DescModifier == Mod.Pointer)
		return "*";
	if (desc.DescModifier == Mod.Ref)
		return "&";
	if (desc.DescModifier == Mod.Out)
		return "@";
	return string.Empty;
}
// This instance's namespace followed by a trailing dot, or "" when unset.
// NOTE(review): mirrors FormatNamespace(EcmaDesc) applied to `this'.
string FormattedNamespace {
	get {
		return !string.IsNullOrEmpty (Namespace) ? Namespace + "." : string.Empty;
	}
}
}
}
%{
using System.Text;
using System.IO;
using System;
using System.Linq;
using System.Collections.Generic;
namespace Monodoc.Ecma
{
public class EcmaUrlParser
{
int yacc_verbose_flag = 0;
// Validates that `input' is a well-formed ECMA doc url by running a full
// parse; throws on malformed input, the parse result is discarded.
public void IsValid (string input)
{
	var lexer = new EcmaUrlTokenizer (input);
	this.yyparse (lexer);
}
// Parses an ECMA doc url (e.g. "T:System.String") into its EcmaDesc
// representation; throws on malformed input.
public EcmaDesc Parse (string input)
{
	var lexer = new EcmaUrlTokenizer (input);
	return (EcmaDesc)this.yyparse (lexer);
}
// Non-throwing variant of Parse: returns false (and a null desc) when the
// input is not a valid ECMA doc url.
public bool TryParse (string input, out EcmaDesc desc)
{
	try {
		desc = Parse (input);
		return true;
	} catch {
		desc = null;
		return false;
	}
}
// Tags the parser's intermediate result with the expression kind implied
// by the url prefix ('T:', 'N:', 'M:', ...).
// NOTE(review): `result as EcmaDesc' yields null for any non-EcmaDesc
// value, which would surface as a NullReferenceException on the next
// line — presumably the grammar guarantees an EcmaDesc here; confirm
// against the productions.
EcmaDesc SetEcmaDescType (object result, EcmaDesc.Kind kind)
{
	var desc = result as EcmaDesc;
	desc.DescKind = kind;
	return desc;
}
// In-place list reversal that tolerates a null list (returns null).
// Used to undo the back-to-front order in which the grammar accumulates
// argument lists.
List<T> SafeReverse<T> (List<T> input)
{
	if (input != null)
		input.Reverse ();
	return input;
}
%}
/* Terminal tokens produced by EcmaUrlTokenizer.xtoken */
%token ERROR                 /* tokenizer failure */
%token IDENTIFIER            /* identifier; semantic value is the string */
%token DIGIT                 /* single decimal digit; semantic value is the int */
%token DOT                   /* '.' */
%token COMMA                 /* ',' */
%token COLON                 /* ':' */
%token INNER_TYPE_SEPARATOR  /* '+' nested-type separator */
%token OP_GENERICS_LT        /* '<' (or its url-safe alias '{') */
%token OP_GENERICS_GT        /* '>' (or its url-safe alias '}') */
%token OP_GENERICS_BACKTICK  /* '`' generic arity prefix */
%token OP_OPEN_PAREN         /* '(' */
%token OP_CLOSE_PAREN        /* ')' */
%token OP_ARRAY_OPEN         /* '[' */
%token OP_ARRAY_CLOSE        /* ']' */
%token SLASH_SEPARATOR       /* '/' etc-filter separator */
%token STAR                  /* '*' pointer modifier / wildcard identifier */
%token REF_ARG               /* '&' by-ref parameter modifier */
%token OUT_ARG               /* '@' out parameter modifier */
%token EXPLICIT_IMPL_SEP     /* '$' explicit interface implementation */
%start expression
%%
expression
: 'T' COLON type_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Type); }
| 'N' COLON namespace_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Namespace); }
| 'M' COLON method_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Method); }
| 'F' COLON simple_member_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Field); }
| 'C' COLON constructor_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Constructor); }
| 'P' COLON property_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Property); }
| 'E' COLON simple_member_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Event); }
| 'O' COLON operator_expression { $$ = SetEcmaDescType ($3, EcmaDesc.Kind.Operator); }
/* i.e. id.id.id or id */
dot_expression
: IDENTIFIER { $$ = new List<string> { (string)$1 }; }
| IDENTIFIER DOT dot_expression { ((ICollection<string>)$3).Add ((string)$1); $$ = $3; }
namespace_expression
: dot_expression { $$ = new EcmaDesc { Namespace = string.Join (".", ((IEnumerable<string>)$1).Reverse ()) }; }
type_expression
: dot_expression type_expression_suffix {
var dotExpr = ((List<string>)$1);
dotExpr.Reverse ();
var desc = $2 as EcmaDesc;
desc.DescKind = EcmaDesc.Kind.Type;
desc.Namespace = string.Join (".", dotExpr.Take (dotExpr.Count - 1));
desc.TypeName = dotExpr.Last ();
$$ = desc;
}
/* To be used in types with no namespaces attached to them like an inner type*/
reduced_type_expression
: IDENTIFIER type_expression_suffix {
var desc = $2 as EcmaDesc;
desc.DescKind = EcmaDesc.Kind.Type;
desc.TypeName = $1 as string;
$$ = desc;
}
type_expression_suffix
: opt_generic_type_suffix opt_inner_type_description opt_array_definition opt_etc {
bool nestedDescHasEtc = $2 != null && ((EcmaDesc)$2).IsEtc;
EcmaDesc nestedType = (EcmaDesc)$2;
$$ = new EcmaDesc {
GenericTypeArguments = $1 as List<EcmaDesc>,
NestedType = nestedType,
ArrayDimensions = SafeReverse ($3 as List<int>),
Etc = $4 != null ? ((Tuple<char, string>)$4).Item1 : nestedDescHasEtc ? nestedType.Etc : (char)0,
EtcFilter = $4 != null ? ((Tuple<char, string>)$4).Item2 : nestedDescHasEtc ? nestedType.EtcFilter : null
};
if (nestedDescHasEtc) {
nestedType.Etc = (char)0;
nestedType.EtcFilter = null;
}
}
opt_inner_type_description
: /* empty */ { $$ = null; }
| INNER_TYPE_SEPARATOR reduced_type_expression { $$ = $2; }
opt_generic_type_suffix
: /* empty */ { $$ = null; }
| OP_GENERICS_BACKTICK DIGIT { $$ = Enumerable.Repeat<EcmaDesc> (null, (int)$2).ToList (); }
| OP_GENERICS_LT generic_type_arg_list OP_GENERICS_GT { $$ = $2; }
generic_type_arg_list
: type_expression { $$ = new List<EcmaDesc> () { (EcmaDesc)$1 }; }
| generic_type_arg_list COMMA type_expression { ((List<EcmaDesc>)$1).Add ((EcmaDesc)$3); $$ = $1; }
opt_array_definition
: /* empty */ { $$ = null; }
| OP_ARRAY_OPEN opt_array_definition_list OP_ARRAY_CLOSE opt_array_definition {
var dims = ((IList<int>)$4) ?? new List<int> (2);
dims.Add ((int)$2);
$$ = dims;
}
opt_array_definition_list
: /* empty */ { $$ = 1; }
| COMMA opt_array_definition_list { $$ = ((int)$2) + 1; }
opt_etc
: /* empty */ { $$ = null; }
| SLASH_SEPARATOR etc_identifier { $$ = Tuple.Create<char, string> (((string)$2)[0], null); }
| SLASH_SEPARATOR etc_identifier SLASH_SEPARATOR reduced_member_expression { $$ = Tuple.Create<char, string> (((string)$2)[0], (string)$4); }
/* | SLASH_SEPARATOR etc_identifier SLASH_SEPARATOR IDENTIFIER opt_generic_type_suffix { $$ = Tuple.Create<char, string> (((string)$2)[0], (string)$4 + ($5 == null ? string.Empty : "<" + string.Join (",", ((IEnumerable<EcmaDesc>)$5).Select (t => t.ToCompleteTypeName ())) + ">")); } */
etc_identifier
: STAR { $$ = "*"; }
| IDENTIFIER { $$ = $1; }
method_expression
: type_expression DOT IDENTIFIER opt_generic_type_suffix opt_arg_list_suffix {
var desc = $1 as EcmaDesc;
desc.MemberName = $3 as string;
desc.GenericMemberArguments = $4 as List<EcmaDesc>;
desc.MemberArguments = SafeReverse ($5 as List<EcmaDesc>);
$$ = desc;
}
| dot_expression opt_generic_type_suffix opt_arg_list_suffix {
var dotExpr = ((List<string>)$1);
$$ = new EcmaDesc {
Namespace = string.Join (".", dotExpr.Skip (2).DefaultIfEmpty (string.Empty).Reverse ()),
TypeName = dotExpr.Skip (1).First (),
MemberName = dotExpr.First (),
GenericMemberArguments = $2 as List<EcmaDesc>,
MemberArguments = SafeReverse ($3 as List<EcmaDesc>)
};
}
| type_expression EXPLICIT_IMPL_SEP method_expression {
var desc = $1 as EcmaDesc;
desc.ExplicitImplMember = $3 as EcmaDesc;
$$ = desc;
}
/* To be used with members that may have no type/namespace attached */
reduced_member_expression
: IDENTIFIER opt_generic_type_suffix { $$ = (string)$1 + ($2 == null ? string.Empty : "<" + string.Join (",", ((IEnumerable<EcmaDesc>)$2).Select (t => t.ToCompleteTypeName ())) + ">"); }
| IDENTIFIER opt_generic_type_suffix DOT reduced_member_expression {
var existing = $4 as string;
var expr = (string)$1 + ($2 == null ? string.Empty : "<" + string.Join (",", ((IEnumerable<EcmaDesc>)$2).Select (t => t.ToCompleteTypeName ())) + ">");
$$ = expr + "." + existing;
}
arg_type_expression
: type_expression opt_arg_type_suffix { var desc = (EcmaDesc)$1; desc.DescModifier = (EcmaDesc.Mod)$2; $$ = desc; }
opt_arg_type_suffix
: /* empty */ { $$ = EcmaDesc.Mod.Normal; }
| STAR { $$ = EcmaDesc.Mod.Pointer; }
| REF_ARG { $$ = EcmaDesc.Mod.Ref; }
| OUT_ARG { $$ = EcmaDesc.Mod.Out; }
type_expression_list
: /* empty */ { $$ = null; }
| arg_type_expression { $$ = new List<EcmaDesc> () { (EcmaDesc)$1 }; }
| arg_type_expression COMMA type_expression_list { ((List<EcmaDesc>)$3).Add ((EcmaDesc)$1); $$ = $3; }
simple_member_expression
: dot_expression {
var dotExpr = ((List<string>)$1);
dotExpr.Reverse ();
$$ = new EcmaDesc {
Namespace = dotExpr.Count > 2 ? string.Join (".", dotExpr.Take (dotExpr.Count - 2)) : string.Empty,
TypeName = dotExpr.Count > 1 ? dotExpr[dotExpr.Count - 2] : string.Empty,
MemberName = dotExpr[dotExpr.Count - 1]
};
}
| type_expression DOT IDENTIFIER {
var desc = $1 as EcmaDesc;
desc.MemberName = $3 as string;
$$ = desc;
}
| type_expression EXPLICIT_IMPL_SEP simple_member_expression {
var desc = $1 as EcmaDesc;
desc.ExplicitImplMember = $3 as EcmaDesc;
$$ = desc;
}
constructor_expression
: method_expression { $$ = $1; }
operator_expression
: method_expression { $$ = $1; }
property_expression
: simple_member_expression opt_property_indexer {
var desc = $1 as EcmaDesc;
(desc.ExplicitImplMember ?? desc).MemberArguments = SafeReverse ($2 as List<EcmaDesc>);
$$ = desc;
}
opt_property_indexer
: opt_arg_list_suffix { $$ = $1; }
/*simple_member_expression opt_arg_list_suffix { $$ = CopyFromEcmaDesc (new EcmaDesc {
MemberArguments = SafeReverse ($2 as List<EcmaDesc>)
}, (EcmaDesc)$1);
}*/
opt_arg_list_suffix
: /* empty */ { $$ = null; }
| OP_OPEN_PAREN type_expression_list OP_CLOSE_PAREN { $$ = $2; }
%%
}
\ No newline at end of file
using System;
using System.IO;
namespace Monodoc.Ecma
{
// Tiny command-line driver: parses the ECMA url given as the first
// argument and prints the resulting EcmaDesc.
public class EcmaUrlParserDriver
{
	public static void Main (string[] args)
	{
		// NOTE(review): EcmaUrlTokenizer's only visible constructor takes a
		// string, but a StringReader is passed here — verify this driver
		// still compiles against the tokenizer actually in the tree.
		var input = new StringReader (args[0]);
		var lexer = new EcmaUrlTokenizer (input);
		var parser = new EcmaUrlParser ();
		Console.WriteLine (parser.yyparse (lexer));
	}
}
}
using System;
using System.Text;
using System.Globalization;
namespace Monodoc.Ecma
{
public class EcmaUrlTokenizer : yyParser.yyInput
{
const char EndOfStream = (char)0;
string input;
object val;
int current_token;
int current_pos;
int real_current_pos;
int identCount = 0;
// Tokenizes an ECMA documentation url held entirely in memory.
public EcmaUrlTokenizer (string input)
{
	this.input = input;
}
// True for characters that may start an identifier: '_' or any letter
// (including non-ASCII, via Char.IsLetter). The previous explicit
// a-z / A-Z range checks were redundant: Char.IsLetter already accepts
// every ASCII letter.
static bool is_identifier_start_character (char c)
{
	return c == '_' || Char.IsLetter (c);
}
// True for characters valid inside an identifier: ASCII letters, digits,
// '_', and for non-ASCII input any Unicode letter or connector punctuation.
static bool is_identifier_part_character (char c)
{
	bool asciiWordChar = (c >= 'a' && c <= 'z')
		|| (c >= 'A' && c <= 'Z')
		|| (c >= '0' && c <= '9')
		|| c == '_';
	if (asciiWordChar)
		return true;
	// Every other ASCII character is rejected without the Unicode lookups
	if (c < 0x80)
		return false;
	return Char.IsLetter (c) || Char.GetUnicodeCategory (c) == UnicodeCategory.ConnectorPunctuation;
}
// yyParser.yyInput: true while more characters remain to tokenize.
public bool advance ()
{
	return Peek () != EndOfStream;
}

// Semantic value of the token last produced by token()/xtoken()
// (string for IDENTIFIER, int for DIGIT, null otherwise).
public Object Value {
	get {
		return val;
	}
}

// yyParser.yyInput counterpart of the Value property.
public Object value ()
{
	return val;
}
// yyParser.yyInput: fetches the next token, throwing on tokenizer errors.
public int token ()
{
	int token = xtoken ();
	//Console.WriteLine ("Current token {0} with value {1}", token, val == null ? "(none)" : val.ToString ());
	if (token == Token.ERROR) {
		// Fix: the original format string used {0} twice, so the position
		// was printed where the offending url should have appeared
		throw new Exception (string.Format ("Error at position {0} parsing url '{1}'", current_pos, input));
	}
	current_token = token;
	return token;
}
// Core scanner: consumes one character (skipping whitespace), bumps the
// logical position, and maps single-character tokens directly; anything
// else is delegated to TokenizeIdentifierOrNumber.
int xtoken ()
{
	char next = Read ();
	while (char.IsWhiteSpace (next))
		next = Read ();
	current_pos++;
	val = null;
	switch (next) {
	case ',':
		return Token.COMMA;
	case '.':
		return Token.DOT;
	// '{' and '}' are accepted as url-safe aliases for '<' and '>'
	case '{':
	case '<':
		return Token.OP_GENERICS_LT;
	case '}':
	case '>':
		return Token.OP_GENERICS_GT;
	case '`':
		return Token.OP_GENERICS_BACKTICK;
	case '(':
		return Token.OP_OPEN_PAREN;
	case ')':
		return Token.OP_CLOSE_PAREN;
	case '+':
		return Token.INNER_TYPE_SEPARATOR;
	case ':':
		return Token.COLON;
	case '/':
		return Token.SLASH_SEPARATOR;
	case '[':
		return Token.OP_ARRAY_OPEN;
	case ']':
		return Token.OP_ARRAY_CLOSE;
	case '*':
		return Token.STAR;
	case '&':
		return Token.REF_ARG;
	case '@':
		return Token.OUT_ARG;
	case '$':
		return Token.EXPLICIT_IMPL_SEP;
	default:
		return TokenizeIdentifierOrNumber (next);
	}
}
// Scans an identifier (or '*' wildcard) or a single digit starting at
// `current'. For the first two characters of the url the raw character is
// returned so the grammar can match the 'T'/'N'/'M'/... kind prefix.
//
// Fix: the previous implementation wrote into a fixed `stackalloc
// char[512]' buffer with no bound check on identCount, so an identifier
// longer than 512 characters corrupted the stack. A StringBuilder removes
// both the overflow and the need for unsafe code.
int TokenizeIdentifierOrNumber (char current)
{
	// We must first return the expression type which is a uppercase letter and a colon
	if (current_pos < 2) {
		val = null;
		return (int)current;
	}
	if (is_identifier_start_character (current) || current == '*') {
		var ident = new StringBuilder ();
		ident.Append (current);
		char peek;
		while ((peek = Peek ()) != EndOfStream && is_identifier_part_character (peek)) {
			ident.Append (Read ());
			++current_pos;
		}
		identCount = ident.Length;
		val = ident.ToString ();
		return Token.IDENTIFIER;
	} else if (char.IsDigit (current)) {
		val = current - '0';
		return Token.DIGIT;
	} else {
		val = null;
		return Token.ERROR;
	}
}
// Returns the next character and advances, or EndOfStream once the input
// is exhausted. Replaces the previous exception-swallowing control flow
// (catching IndexOutOfRangeException on every read past the end) with an
// explicit bounds check.
char Read ()
{
	if (real_current_pos >= input.Length)
		return EndOfStream;
	return input[real_current_pos++];
}
// Returns the next character without consuming it, or EndOfStream at the
// end of input. Explicit bounds check instead of catching the indexer's
// exception on every peek past the end.
char Peek ()
{
	if (real_current_pos >= input.Length)
		return EndOfStream;
	return input[real_current_pos];
}
}
}
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;
using Mono.Utilities;
using Lucene.Net.Index;
namespace Monodoc
{
public enum SortType {
Caption,
Element
}
//
// The HelpSource class keeps track of the archived data, and its
// tree
//
public
#if LEGACY_MODE
partial
#endif
class HelpSource
{
static int id;
//
// The unique ID for this HelpSource.
//
int source_id;
// The name of the HelpSource, used by all the file (.tree, .zip, ...) used by it
string name;
// The full directory path where the HelpSource files are located
string basePath;
// The tree of this help source
Tree tree;
string treeFilePath;
RootTree rootTree;
IDocCache cache;
IDocStorage storage;
public HelpSource (string base_filename, bool create)
{
this.name = Path.GetFileName (base_filename);
this.basePath = Path.GetDirectoryName (base_filename);
this.treeFilePath = base_filename + ".tree";
this.storage = new Monodoc.Storage.ZipStorage (base_filename + ".zip");
this.cache = DocCacheHelper.GetDefaultCache (Name);
tree = create ? new Tree (this, string.Empty, string.Empty) : new Tree (this, treeFilePath);
source_id = id++;
}
public HelpSource ()
{
tree = new Tree (this, "Blah", "Blah");
source_id = id++;
this.cache = new Caches.NullCache ();
}
public int SourceID {
get {
return source_id;
}
}
public string Name {
get {
return name;
}
}
/* This gives the full path of the source/ directory */
public string BaseFilePath {
get {
return basePath;
}
}
public TraceLevel TraceLevel {
get;
set;
}
public string BaseDir {
get {
return basePath;
}
}
public Tree Tree {
get {
return tree;
}
}
public RootTree RootTree {
get {
return rootTree;
}
set {
rootTree = value;
}
}
public IDocCache Cache {
get {
return cache;
}
}
public IDocStorage Storage {
get {
return storage;
}
protected set {
storage = value;
}
}
// A HelpSource may have a common prefix to its URL, give it here
protected virtual string UriPrefix {
get {
return "dummy:";
}
}
public virtual SortType SortType {
get {
return SortType.Caption;
}
}
/// <summary>
/// Returns a stream from the packaged help source archive
/// </summary>
public virtual Stream GetHelpStream (string id)
{
return storage.Retrieve (id);
}
/// <summary>
/// Cache-backed variant of GetHelpStream: the stream for `id' is stored
/// in the doc cache on first access and served from it afterwards; when
/// the cache cannot hold text entities the storage is hit directly.
/// </summary>
public virtual Stream GetCachedHelpStream (string id)
{
	if (string.IsNullOrEmpty (id))
		throw new ArgumentNullException ("id");
	if (cache.CanCache (DocEntity.Text)) {
		if (!cache.IsCached (id))
			cache.CacheText (id, GetHelpStream (id));
		return cache.GetCachedStream (id);
	}
	return GetHelpStream (id);
}
public XmlReader GetHelpXml (string id)
{
var url = "monodoc:///" + SourceID + "@" + Uri.EscapeDataString (id) + "@";
var stream = cache.IsCached (id) ? cache.GetCachedStream (id) : storage.Retrieve (id);
return stream == null ? null : new XmlTextReader (url, stream);
}
public virtual XmlDocument GetHelpXmlWithChanges (string id)
{
XmlDocument doc = new XmlDocument ();
if (!storage.SupportRevision) {
doc.Load (GetHelpXml (id));
} else {
var revManager = storage.RevisionManager;
doc.Load (revManager.RetrieveLatestRevision (id));
}
return doc;
}
public virtual string GetCachedText (string id)
{
if (!cache.CanCache (DocEntity.Text))
return GetText (id);
if (!cache.IsCached (id))
cache.CacheText (id, GetText (id));
return cache.GetCachedString (id);
}
public virtual string GetText (string id)
{
return new StreamReader (GetHelpStream (id)).ReadToEnd ();
}
// Tells if the result for the provided id is generated dynamically
// by the help source
public virtual bool IsGeneratedContent (string id)
{
return false;
}
// Tells if the content of the provided id is meant to be returned raw
public virtual bool IsRawContent (string id)
{
return false;
}
// Tells if provided id refers to a multi-content-type document if it's case
// tells the ids it's formed of
public virtual bool IsMultiPart (string id, out IEnumerable<string> parts)
{
parts = null;
return false;
}
/// <summary>
/// Saves the tree and the archive
/// </summary>
public void Save ()
{
tree.Save (treeFilePath);
storage.Dispose ();
}
public virtual void RenderPreviewDocs (XmlNode newNode, XmlWriter writer)
{
throw new NotImplementedException ();
}
public virtual string GetPublicUrl (Node node)
{
return node.GetInternalUrl ();
}
public virtual bool CanHandleUrl (string url)
{
return url.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase);
}
public virtual string GetInternalIdForUrl (string url, out Node node, out Dictionary<string, string> context)
{
context = null;
node = MatchNode (url);
return node == null ? null : url.Substring (UriPrefix.Length);
}
/// <summary>
/// Maps a documentation url to its tree Node: binary-searches the sorted
/// children at each level, caching hits in an LRU cache, and falling back
/// to SlowMatchNode when the ordering defeats the binary search (e.g.
/// ecmaspec numeric captions). Returns null when nothing matches.
/// </summary>
public virtual Node MatchNode (string url)
{
	Node current = null;
	var matchCache = LRUCache<string, Node>.Default;
	if ((current = matchCache.Get (url)) != null)
		return current;
	current = Tree.RootNode;
	var strippedUrl = url.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase) ? url.Substring (UriPrefix.Length) : url;
	var searchNode = new Node () { Element = strippedUrl };
	do {
		int index = current.ChildNodes.BinarySearch (searchNode, NodeElementComparer.Instance);
		if (index >= 0) {
			Node n = current.ChildNodes[index];
			matchCache.Put (url, n);
			return n;
		}
		// Not found: ~index is the insertion point, so the candidate
		// ancestor is the entry just before it
		index = ~index;
		if (index == current.ChildNodes.Count) {
			return SlowMatchNode (Tree.RootNode, matchCache, strippedUrl);
		}
		if (index == 0)
			return null;
		current = current.ChildNodes [index - 1];
	} while (true);
	// Fix: removed the unreachable `return null' that followed the
	// infinite do/while loop (compiler warning CS0162)
}
/* That slow path is mainly here to handle ecmaspec type of url which are composed of hard to sort numbers
* because they don't have the same amount of digit. We could use a regex to harmonise the various number
* parts but then it would be quite specific. Since in the case of ecmaspec the tree is well-formed enough
* the "Slow" match should still be fast enough
*/
// Linear descent fallback for MatchNode: at each level scan all children
// for an exact element match (cached on success), or descend into the
// first non-leaf child whose element is a dotted prefix of the url.
// Returns null when no child makes progress.
Node SlowMatchNode (Node current, LRUCache<string, Node> matchCache, string url)
{
	//Console.WriteLine ("Entering slow path for {0} starting from {1}", url, current.Element);
	while (current != null) {
		bool stop = true;
		foreach (Node n in current.ChildNodes) {
			// Compare against the element with the help-source prefix stripped
			var element = n.Element.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase) ? n.Element.Substring (UriPrefix.Length) : n.Element;
			if (url.Equals (element, StringComparison.Ordinal)) {
				matchCache.Put (url, n);
				return n;
			} else if (url.StartsWith (element + ".", StringComparison.OrdinalIgnoreCase) && !n.IsLeaf) {
				// Dotted-prefix match: keep descending under this child
				current = n;
				stop = false;
				break;
			}
		}
		// No child matched at all: give up
		if (stop)
			current = null;
	}
	return null;
}
// Ordinal comparer over node elements used by MatchNode's binary search.
// Numeric prefixes are zero-padded so e.g. "2" orders before "10".
class NodeElementComparer : IComparer<Node>
{
	public static NodeElementComparer Instance = new NodeElementComparer ();
	public int Compare (Node n1, Node n2)
	{
		return string.Compare (Cleanup (n1), Cleanup (n2), StringComparison.Ordinal);
	}
	// Strips the help-source uri prefix, then left-pads a leading digit run
	// to at least 3 digits so short numbers compare correctly.
	string Cleanup (Node n)
	{
		var prefix = n.Tree != null && n.Tree.HelpSource != null ? n.Tree.HelpSource.UriPrefix : string.Empty;
		var element = n.Element.StartsWith (prefix, StringComparison.OrdinalIgnoreCase) ? n.Element.Substring (prefix.Length) : n.Element;
		if (char.IsDigit (element, 0)) {
			var count = element.TakeWhile (char.IsDigit).Count ();
			element = element.PadLeft (Math.Max (0, 3 - count) + element.Length, '0');
		}
		//Console.WriteLine ("Cleaned up {0} to {1}", n.Element, element);
		return element;
	}
}
public virtual DocumentType GetDocumentTypeForId (string id)
{
return DocumentType.PlainText;
}
public virtual Stream GetImage (string url)
{
Stream result = null;
storage.TryRetrieve (url, out result);
return result;
}
//
// Populates the index.
//
public virtual void PopulateIndex (IndexMaker index_maker)
{
}
//
// Create different Documents for adding to Lucene search index
// The default action is do nothing. Subclasses should add the docs
//
public virtual void PopulateSearchableIndex (IndexWriter writer)
{
}
}
}
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;
using Mono.Utilities;
using Lucene.Net.Index;
#if LEGACY_MODE
namespace Monodoc
{
using Generators;
public partial class HelpSource
{
static HtmlGenerator htmlGenerator = new HtmlGenerator (null);
[Obsolete]
public static bool use_css;
[Obsolete]
public static bool FullHtml = true;
[Obsolete]
public static bool UseWebdocCache;
[Obsolete ("Use Monodoc.Providers.HtmlGenerator.InlineCss")]
public string InlineCss {
get { return Monodoc.Generators.HtmlGenerator.InlineCss; }
}
[Obsolete]
public string InlineJavaScript {
get { return null; }
}
[Obsolete ("Use RenderUrl")]
public string GetText (string url, out Node node)
{
return rootTree.RenderUrl (url, htmlGenerator, out node, this);
}
[Obsolete ("Use RenderUrl")]
public string RenderNamespaceLookup (string url, out Node node)
{
return rootTree.RenderUrl (url, htmlGenerator, out node, this);
}
}
}
#endif
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Collections;
using System.Collections.Generic;
namespace Monodoc
{
public
#if LEGACY_MODE
partial
#endif
class Node : IComparable<Node>, IComparable
{
readonly Tree parentTree;
string caption, element, pubUrl;
public bool Documented;
bool loaded;
Node parent;
List<Node> nodes;
#if LEGACY_MODE
ArrayList legacyNodes;
#endif
Dictionary<string, Node> childrenLookup;
bool elementSort;
/* Address has three types of value,
* _ 0 is for no on-disk representation
* _ >0 is a valid address that is loaded immediately
* _ <0 is a valid negated address to indicate lazy loading
*/
int address;
#if LEGACY_MODE
[Obsolete ("Tree inheriting Node is being phased out. Use the `Tree.RootNode' property instead")]
public Node (string caption, string element)
{
this.parentTree = (Tree) this;
this.caption = caption;
this.element = element;
parent = null;
}
#endif
public Node (Node parent, string caption, string element) : this (parent.Tree, caption, element)
{
this.parent = parent;
}
internal Node (Tree tree, string caption, string element)
{
this.parentTree = tree;
this.caption = caption;
this.element = element;
this.elementSort = parentTree.HelpSource != null && parentTree.HelpSource.SortType == SortType.Element;
}
/// <summary>
/// Creates a node from an on-disk representation
/// </summary>
internal Node (Node parent, int address) : this (parent.parentTree, address)
{
this.parent = parent;
}
internal Node (Tree tree, int address)
{
this.address = address;
this.parentTree = tree;
this.elementSort = parentTree.HelpSource != null && parentTree.HelpSource.SortType == SortType.Element;
if (address > 0)
LoadNode ();
}
/* This is solely used for MatchNode to check for equality */
internal Node ()
{
}
void LoadNode ()
{
parentTree.InflateNode (this);
if (parent != null)
parent.RegisterFullNode (this);
}
public void AddNode (Node n)
{
nodes.Add (n);
n.parent = this;
n.Documented = true;
RegisterFullNode (n);
}
public void DeleteNode (Node n)
{
nodes.Remove (n);
if (!string.IsNullOrEmpty (n.element))
childrenLookup.Remove (n.element);
}
// When a child node is inflated, it calls this method
// so that we can add it to our lookup for quick search
void RegisterFullNode (Node child)
{
if (childrenLookup == null)
childrenLookup = new Dictionary<string, Node> ();
if (!string.IsNullOrEmpty (child.element))
childrenLookup[child.element] = child;
}
[Obsolete ("Use ChildNodes")]
public ArrayList Nodes {
get {
if (legacyNodes == null)
legacyNodes = new ArrayList (ChildNodes as ICollection);
return legacyNodes;
}
}
public IList<Node> ChildNodes {
get {
EnsureLoaded ();
return nodes != null ? nodes : new List<Node> ();
}
}
public string Element {
get {
EnsureLoaded ();
return element;
}
set {
element = value;
}
}
public string Caption {
get {
EnsureLoaded ();
return caption;
}
internal set {
caption = value;
}
}
public Node Parent {
get {
return parent;
}
}
public Tree Tree {
get {
return parentTree;
}
}
internal int Address {
get {
return address;
}
#if LEGACY_MODE
set {
address = value;
}
#endif
}
/// <summary>
/// Creates a new node, in the locator entry point, and with
/// a user visible caption of @caption
/// </summary>
public Node CreateNode (string c_caption, string c_element)
{
EnsureNodes ();
if (string.IsNullOrEmpty (c_caption))
throw new ArgumentNullException ("c_caption");
if (string.IsNullOrEmpty (c_element))
throw new ArgumentNullException ("c_element");
Node t = new Node (this, c_caption, c_element);
nodes.Add (t);
childrenLookup[c_element] = t;
return t;
}
// Returns the child node with the given element, creating it (with the
// given caption) when absent.
public Node GetOrCreateNode (string c_caption, string c_element)
{
	if (nodes == null)
		return CreateNode (c_caption, c_element);
	// NOTE(review): the second half of this condition compares
	// childrenLookup.Count to nodes.Capacity, which looks like it was
	// meant to detect a stale lookup after deserialization — confirm the
	// intent; as written it rebuilds the lookup whenever the counts drift
	if (childrenLookup.Count != nodes.Count || (nodes.Count == 0 && childrenLookup.Count != nodes.Capacity))
		UpdateLookup ();
	Node result;
	if (!childrenLookup.TryGetValue (c_element, out result))
		result = CreateNode (c_caption, c_element);
	return result;
}
public void EnsureNodes ()
{
if (nodes == null) {
nodes = new List<Node> ();
childrenLookup = new Dictionary<string, Node> ();
}
}
public void EnsureLoaded ()
{
if (address < 0 && !loaded) {
LoadNode ();
loaded = true;
}
}
void UpdateLookup ()
{
foreach (var node in nodes)
childrenLookup[node.Element] = node;
}
public bool IsLeaf {
get {
return nodes == null || nodes.Count == 0;
}
}
// Writes `value' as a little-endian base-128 varint: 7 payload bits per
// byte, continuation (high) bit set on every byte except the last.
void EncodeInt (BinaryWriter writer, int value)
{
	do {
		// Mask after the shift to clear the bits a sign-propagating right
		// shift would duplicate for negative values
		int high = (value >> 7) & 0x01ffffff;
		byte b = (byte)(value & 0x7f);
		if (high != 0) {
			b = (byte)(b | 0x80);
		}
		writer.Write(b);
		value = high;
	} while(value != 0);
}
// Reads an integer previously written by EncodeInt (little-endian
// base-128 varint: low 7 bits of each byte, until a byte without the
// continuation bit).
int DecodeInt (BinaryReader reader)
{
	int ret = 0;
	int shift = 0;
	byte b;
	do {
		b = reader.ReadByte();
		// Each byte contributes its low 7 bits at the current position
		ret = ret | ((b & 0x7f) << shift);
		shift += 7;
	} while ((b & 0x80) == 0x80);
	return ret;
}
internal void Deserialize (BinaryReader reader)
{
int count = DecodeInt (reader);
element = reader.ReadString ();
caption = reader.ReadString ();
if (count == 0)
return;
nodes = new List<Node> (count);
for (int i = 0; i < count; i++) {
int child_address = DecodeInt (reader);
Node t = new Node (this, -child_address);
nodes.Add (t);
}
if (parentTree.ForceResort)
nodes.Sort ();
}
internal void Serialize (FileStream output, BinaryWriter writer)
{
if (nodes != null)
foreach (Node child in nodes)
child.Serialize (output, writer);
address = (int) output.Position;
EncodeInt (writer, nodes == null ? 0 : (int) nodes.Count);
writer.Write (element);
writer.Write (caption);
if (nodes != null)
foreach (Node child in nodes)
EncodeInt (writer, child.address);
}
public void Sort ()
{
if (nodes != null)
nodes.Sort ();
}
// Builds this node's url by walking up the parent chain; an element that
// already contains a scheme separator (':') is absolute and returned
// as-is. Fix: IndexOf(string) is culture-sensitive on .NET Framework —
// the char overload performs the intended ordinal search.
internal string GetInternalUrl ()
{
	EnsureLoaded ();
	if (element.IndexOf (':') != -1 || parent == null)
		return element;
	var parentUrl = parent.GetInternalUrl ();
	return parentUrl.EndsWith ("/") ? parentUrl + element : parentUrl + "/" + element;
}
public string PublicUrl {
get {
if (pubUrl != null)
return pubUrl;
return pubUrl = parentTree.HelpSource != null ? parentTree.HelpSource.GetPublicUrl (this) : GetInternalUrl ();
}
}
int IComparable.CompareTo (object obj)
{
Node other = obj as Node;
if (other == null)
return -1;
return CompareToInternal (other);
}
int IComparable<Node>.CompareTo (Node obj)
{
return CompareToInternal (obj);
}
// Ordering for sibling nodes: compares element or caption depending on
// the help source's SortType, zero-padding numeric prefixes (ecmaspec
// chapter numbers) to equal width so "2" sorts before "10".
int CompareToInternal (Node other)
{
	EnsureLoaded ();
	other.EnsureLoaded ();
	var cap1 = elementSort ? element : caption;
	var cap2 = elementSort ? other.element : other.caption;
	/* Some node (notably from ecmaspec) have number prepended to them
	 * which we need to sort better by padding them to the same number
	 * of digits
	 */
	// Fix: guard against empty strings — indexing cap[0] on an empty
	// caption/element threw IndexOutOfRangeException
	if (cap1.Length > 0 && cap2.Length > 0 && char.IsDigit (cap1[0]) && char.IsDigit (cap2[0])) {
		int c1 = cap1.TakeWhile (char.IsDigit).Count ();
		int c2 = cap2.TakeWhile (char.IsDigit).Count ();
		if (c1 != c2) {
			cap1 = cap1.PadLeft (cap1.Length + Math.Max (0, c2 - c1), '0');
			cap2 = cap2.PadLeft (cap2.Length + Math.Max (0, c1 - c2), '0');
		}
	}
	return string.Compare (cap1, cap2, StringComparison.Ordinal);
}
}
// Exposes List<T>.BinarySearch through the IList<T> interface used by the
// Node tree.
internal static class IListExtensions
{
	// TODO: if the backing store ever change from List<T>, we need to tune these methods to have a fallback mechanism
	public static int BinarySearch<T> (this IList<T> ilist, T item)
	{
		return AsList (ilist).BinarySearch (item);
	}

	public static int BinarySearch<T> (this IList<T> ilist, T item, IComparer<T> comparer)
	{
		return AsList (ilist).BinarySearch (item, comparer);
	}

	// Both entry points require the concrete backing type to be List<T>
	static List<T> AsList<T> (IList<T> ilist)
	{
		var list = ilist as List<T>;
		if (list == null)
			throw new NotSupportedException ();
		return list;
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Collections.Generic;
#if LEGACY_MODE
namespace Monodoc
{
public partial class Node
{
[Obsolete ("Use `Tree' instead of 'tree'")]
public Tree tree {
get {
return this.Tree;
}
}
[Obsolete ("Use TreeDumper")]
public static void PrintTree (Tree t)
{
TreeDumper.PrintTree (t.RootNode);
}
}
}
#endif
using System;
namespace Monodoc
{
/// <summary>
/// Base class for documentation providers: a provider populates a help
/// source Tree from some raw documentation format.
/// </summary>
public abstract class Provider
{
	//
	// This code is used to "tag" all the different sources
	//
	static short serial;

	// Tag assigned from the shared counter at construction time.
	public int Code { get; set; }

	public Provider ()
	{
		// NOTE(review): serial++ on a static field is not thread-safe;
		// presumably providers are constructed from a single thread — confirm
		Code = serial++;
	}

	// Fills the given tree with this provider's documentation nodes.
	public abstract void PopulateTree (Tree tree);

	//
	// Called at shutdown time after the tree has been populated to perform
	// any fixups or final tasks.
	//
	public abstract void CloseTree (HelpSource hs, Tree tree);
}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Xml;
using Monodoc.Providers;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
namespace Monodoc
{
public
#if LEGACY_MODE
partial
#endif
class RootTree : Tree
{
public const int MonodocVersion = 2;
const string RootNamespace = "root:/";
string basedir;
static List<string> uncompiledHelpSourcePaths = new List<string>();
HashSet<string> loadedSourceFiles = new HashSet<string>();
List<HelpSource> helpSources = new List<HelpSource>();
Dictionary<string, Node> nameToNode = new Dictionary<string, Node>();
Dictionary<string, HelpSource> nameToHelpSource = new Dictionary<string, HelpSource>();
public IList<HelpSource> HelpSources {
get {
return this.helpSources.AsReadOnly();
}
}
public DateTime LastHelpSourceTime {
get;
set;
}
// True on unix-like platforms: PlatformID 4 and 128 are Unix (the latter
// on early Mono/framework versions), 6 is MacOSX.
static bool IsUnix {
	get {
		int platform = (int)Environment.OSVersion.Platform;
		return platform == 4 || platform == 128 || platform == 6;
	}
}
RootTree () : base (null, "Mono Documentation", "root:")
{
base.RootNode.EnsureNodes();
this.LastHelpSourceTime = DateTime.Now;
}
public static void AddUncompiledSource (string path)
{
uncompiledHelpSourcePaths.Add (path);
}
public static RootTree LoadTree ()
{
return RootTree.LoadTree (RootTree.ProbeBaseDirectories ());
}
// Resolves the default documentation directory from the "docPath" config
// entry, falling back to the current directory when the entry is missing
// or the config system throws.
static string ProbeBaseDirectories ()
{
	try {
		var configured = Config.Get ("docPath");
		if (configured != null)
			return configured;
	} catch {}
	return ".";
}
/// <summary>
/// Loads the documentation tree rooted at `basedir': reads its
/// monodoc.xml and every *.source file under sources/, optionally adding
/// *.source files from well-known external locations.
/// </summary>
/// <exception cref="ArgumentNullException">basedir is null or empty.</exception>
/// <exception cref="ArgumentException">basedir does not exist.</exception>
public static RootTree LoadTree (string basedir, bool includeExternal = true)
{
	if (string.IsNullOrEmpty (basedir))
		throw new ArgumentNullException ("basedir");
	if (!Directory.Exists (basedir))
		// Fix: ArgumentException takes (message, paramName) — the two
		// arguments were swapped, putting the message in ParamName
		throw new ArgumentException (string.Format ("Base documentation directory at '{0}' doesn't exist", basedir), "basedir");
	XmlDocument xmlDocument = new XmlDocument ();
	string filename = Path.Combine (basedir, "monodoc.xml");
	xmlDocument.Load (filename);
	IEnumerable<string> sourceFiles = Directory.EnumerateFiles (Path.Combine (basedir, "sources"), "*.source");
	if (includeExternal)
		sourceFiles = sourceFiles.Concat (RootTree.ProbeExternalDirectorySources ());
	return RootTree.LoadTree (basedir, xmlDocument, sourceFiles);
}
// Collects *.source files from external locations: the "docExternalPath"
// config entry (ignored if missing or unreadable), the Mac framework
// external monodoc directory, and the per-user LocalApplicationData
// monodoc directory on Windows.
static IEnumerable<string> ProbeExternalDirectorySources ()
{
	IEnumerable<string> enumerable = Enumerable.Empty<string> ();
	try {
		string path = Config.Get ("docExternalPath");
		enumerable = enumerable.Concat (System.IO.Directory.EnumerateFiles (path, "*.source"));
	}
	catch {}
	if (Directory.Exists ("/Library/Frameworks/Mono.framework/External/monodoc"))
		enumerable = enumerable.Concat (Directory.EnumerateFiles ("/Library/Frameworks/Mono.framework/External/monodoc", "*.source"));
	var windowsPath = Path.Combine (Environment.GetFolderPath (Environment.SpecialFolder.LocalApplicationData), "monodoc");
	if (Directory.Exists (windowsPath))
		enumerable = enumerable.Concat (Directory.EnumerateFiles (windowsPath, "*.source"));
	return enumerable;
}
// Builds a RootTree from an XML documentation map plus a set of .source
// files. When docTree is null, the embedded monodoc.xml resource is used.
public static RootTree LoadTree (string indexDir, XmlDocument docTree, IEnumerable<string> sourceFiles)
{
	if (docTree == null) {
		docTree = new XmlDocument ();
		using (Stream manifestResourceStream = typeof (RootTree).Assembly.GetManifestResourceStream ("monodoc.xml")) {
			docTree.Load (manifestResourceStream);
		}
	}
	sourceFiles = (sourceFiles ?? new string[0]);
	RootTree rootTree = new RootTree ();
	rootTree.basedir = indexDir;
	XmlNodeList xml_node_list = docTree.SelectNodes ("/node/node");
	rootTree.nameToNode["root"] = rootTree.RootNode;
	rootTree.nameToNode["libraries"] = rootTree.RootNode;
	rootTree.Populate (rootTree.RootNode, xml_node_list);
	if (rootTree.LookupEntryPoint ("various") == null) {
		// Fix: removed an unused local (`Node rootNode = rootTree.RootNode;`)
		// that served no purpose in this diagnostic branch.
		Console.Error.WriteLine ("No 'various' doc node! Check monodoc.xml!");
	}
	foreach (string current in sourceFiles)
		rootTree.AddSourceFile (current);
	// Mount uncompiled sources registered via AddUncompiledSource.
	foreach (string path in uncompiledHelpSourcePaths) {
		var hs = new Providers.EcmaUncompiledHelpSource (path);
		hs.RootTree = rootTree;
		rootTree.helpSources.Add (hs);
		string epath = "extra-help-source-" + hs.Name;
		Node hsn = rootTree.RootNode.CreateNode (hs.Name, RootNamespace + epath);
		rootTree.nameToHelpSource [epath] = hs;
		hsn.EnsureNodes ();
		foreach (Node n in hs.Tree.RootNode.ChildNodes)
			hsn.AddNode (n);
	}
	// Prune undocumented leaves and present a sorted root.
	RootTree.PurgeNode (rootTree.RootNode);
	rootTree.RootNode.Sort ();
	return rootTree;
}
// Mounts every *.source file found in `sourcesDir`, reporting each file
// that fails to load on stderr.
public void AddSource (string sourcesDir)
{
	foreach (var sourceFile in Directory.EnumerateFiles (sourcesDir, "*.source")) {
		if (!this.AddSourceFile (sourceFile))
			Console.Error.WriteLine ("Error: Could not load source file {0}", sourceFile);
	}
}
// Mounts a single .source file: applies its optional extra /monodoc/node
// entries, then for every <source provider=... basefile=... path=...>
// element instantiates the matching HelpSource and attaches its tree
// under the entry point named by `path` (falling back to "various").
// Returns false when the file was already loaded or cannot be parsed.
public bool AddSourceFile (string sourceFile)
{
if (this.loadedSourceFiles.Contains (sourceFile))
return false;
// Default attach point when a source names no known entry point.
Node node = this.LookupEntryPoint ("various") ?? base.RootNode;
XmlDocument xmlDocument = new XmlDocument ();
try {
xmlDocument.Load (sourceFile);
} catch {
bool result = false;
return result;
}
// Extra tree nodes contributed by this source file, if any.
XmlNodeList extra_nodes = xmlDocument.SelectNodes ("/monodoc/node");
if (extra_nodes.Count > 0)
this.Populate (node, extra_nodes);
XmlNodeList sources = xmlDocument.SelectNodes ("/monodoc/source");
if (sources == null) {
Console.Error.WriteLine ("Error: No <source> section found in the {0} file", sourceFile);
return false;
}
loadedSourceFiles.Add (sourceFile);
foreach (XmlNode xmlNode in sources) {
// "provider", "basefile" and "path" are all mandatory; a missing
// attribute skips the entry with a diagnostic but keeps going.
XmlAttribute a = xmlNode.Attributes["provider"];
if (a == null) {
Console.Error.WriteLine ("Error: no provider in <source>");
continue;
}
string provider = a.InnerText;
a = xmlNode.Attributes["basefile"];
if (a == null) {
Console.Error.WriteLine ("Error: no basefile in <source>");
continue;
}
string basefile = a.InnerText;
a = xmlNode.Attributes["path"];
if (a == null) {
Console.Error.WriteLine ("Error: no path in <source>");
continue;
}
string path = a.InnerText;
// basefile is resolved relative to the .source file's directory.
string basefilepath = Path.Combine (Path.GetDirectoryName (sourceFile), basefile);
HelpSource helpSource = RootTree.GetHelpSource (provider, basefilepath);
if (helpSource != null) {
helpSource.RootTree = this;
this.helpSources.Add (helpSource);
this.nameToHelpSource[path] = helpSource;
Node node2 = this.LookupEntryPoint (path);
if (node2 == null) {
Console.Error.WriteLine ("node `{0}' is not defined on the documentation map", path);
node2 = node;
}
foreach (Node current in helpSource.Tree.RootNode.ChildNodes) {
node2.AddNode (current);
}
node2.Sort ();
}
}
return true;
}
// Recursively removes undocumented leaf subtrees. Returns true when
// `node` itself is purgeable, i.e. it is undocumented and every one of
// its children was purgeable.
static bool PurgeNode (Node node)
{
	// Documented nodes (and their ancestry) are always kept.
	if (node.Documented)
		return false;
	var prunable = new List<Node> ();
	foreach (Node child in node.ChildNodes)
		if (RootTree.PurgeNode (child))
			prunable.Add (child);
	// Compare counts before deleting: the node is purgeable only when
	// all of its children were (an undocumented leaf trivially is).
	bool purgeable = node.ChildNodes.Count == prunable.Count;
	foreach (Node child in prunable)
		node.DeleteNode (child);
	return purgeable;
}
// Provider identifiers understood by GetHelpSource / GetProvider.
public static string[] GetSupportedFormats ()
{
	return new[] { "ecma", "ecmaspec", "error", "man", "xhtml" };
}
// Instantiates the HelpSource matching a provider id over the given
// compiled base file. Returns null (with a diagnostic on stderr) for an
// unknown provider or when a backing file is missing.
public static HelpSource GetHelpSource (string provider, string basefilepath)
{
	try {
		switch (provider) {
		case "xhtml":
		case "hb":
			return new XhtmlHelpSource (basefilepath, false);
		case "man":
			return new ManHelpSource (basefilepath, false);
		case "error":
			return new ErrorHelpSource (basefilepath, false);
		case "ecmaspec":
			return new EcmaSpecHelpSource (basefilepath, false);
		case "ecma":
			return new EcmaHelpSource (basefilepath, false);
		default:
			Console.Error.WriteLine ("Error: Unknown provider specified: {0}", provider);
			return null;
		}
	} catch (FileNotFoundException) {
		Console.Error.WriteLine ("Error: did not find one of the files in sources/" + basefilepath);
		return null;
	}
}
// Maps a provider id (see GetSupportedFormats) to a documentation
// compiler Provider. Throws NotSupportedException for unknown ids.
public static Provider GetProvider (string provider, params string[] basefilepaths)
{
	switch (provider) {
	case "ecma":
		return new EcmaProvider (basefilepaths[0]);
	case "ecmaspec":
		return new EcmaSpecProvider (basefilepaths[0]);
	case "error":
		return new ErrorProvider (basefilepaths[0]);
	case "man":
		return new ManProvider (basefilepaths);
	case "xhtml": // Fix: "xhtml" — the id advertised by GetSupportedFormats and
	case "xhml":  // handled by GetHelpSource — was missing; the old misspelled
	case "hb":    // "xhml" is kept for backward compatibility.
		return new XhtmlProvider (basefilepaths[0]);
	}
	throw new NotSupportedException (provider);
}
// Recursively builds tree nodes from a monodoc.xml <node> list under
// `parent`. A "parent" attribute re-roots the node under a previously
// registered entry point; otherwise "label" is the caption and "name"
// the entry-point key registered in nameToNode.
void Populate (Node parent, XmlNodeList xml_node_list)
{
foreach (XmlNode xmlNode in xml_node_list) {
XmlAttribute e = xmlNode.Attributes["parent"];
Node parent2 = null;
if (e != null && this.nameToNode.TryGetValue (e.InnerText, out parent2)) {
// Reparent: strip the attribute, then re-process just this node
// under the resolved parent.
xmlNode.Attributes.Remove (e);
Populate (parent2, xmlNode.SelectNodes ("."));
continue;
}
e = xmlNode.Attributes["label"];
if (e == null) {
Console.Error.WriteLine ("`label' attribute missing in <node>");
continue;
}
string label = e.InnerText;
e = xmlNode.Attributes["name"];
if (e == null) {
Console.Error.WriteLine ("`name' attribute missing in <node>");
continue;
}
string name = e.InnerText;
Node orCreateNode = parent.GetOrCreateNode (label, RootNamespace + name);
orCreateNode.EnsureNodes ();
this.nameToNode[name] = orCreateNode;
// Recurse into nested <node> children.
XmlNodeList xmlNodeList = xmlNode.SelectNodes ("./node");
if (xmlNodeList != null) {
this.Populate (orCreateNode, xmlNodeList);
}
}
}
// Maps a documentation-map name (e.g. "root", "various") to its tree
// node, or null when no such entry point is registered.
public Node LookupEntryPoint (string name)
{
	Node node;
	return this.nameToNode.TryGetValue (name, out node) ? node : null;
}
// Convenience overload of RenderUrl that discards the resolved node.
public TOutput RenderUrl<TOutput> (string url, IDocGenerator<TOutput> generator, HelpSource hintSource = null)
{
Node dummy;
return RenderUrl<TOutput> (url, generator, out dummy, hintSource);
}
// Resolves `url` to a help source + internal id, then renders it with
// the supplied generator; `node` receives the matched tree node (or null).
public TOutput RenderUrl<TOutput> (string url, IDocGenerator<TOutput> generator, out Node node, HelpSource hintSource = null)
{
node = null;
string internalId = null;
Dictionary<string, string> context = null;
HelpSource hs = GetHelpSourceAndIdForUrl (url, hintSource, out internalId, out context, out node);
return generator.Generate (hs, internalId, context);
}
// Overload discarding the resolved node.
public HelpSource GetHelpSourceAndIdForUrl (string url, out string internalId, out Dictionary<string, string> context)
{
Node dummy;
return GetHelpSourceAndIdForUrl (url, out internalId, out context, out dummy);
}
// Overload without a hint help source.
public HelpSource GetHelpSourceAndIdForUrl (string url, out string internalId, out Dictionary<string, string> context, out Node node)
{
return GetHelpSourceAndIdForUrl (url, null, out internalId, out context, out node);
}
// Core url resolution: finds the help source able to render `url`, its
// internal id and any extra rendering context. "root:" and root-namespace
// urls are special-cased; otherwise hintSource is tried first, then every
// registered help source that claims it can handle the url.
public HelpSource GetHelpSourceAndIdForUrl (string url, HelpSource hintSource, out string internalId, out Dictionary<string, string> context, out Node node)
{
node = null;
internalId = null;
context = null;
if (url == "root:") {
context = new Dictionary<string, string> { {"specialpage", "master-root"} };
internalId = url;
node = null;
// We return the first help source available since the generator will simply fetch this RootTree instance through it
return helpSources.FirstOrDefault ();
}
if (url.StartsWith (RootNamespace, StringComparison.OrdinalIgnoreCase)) {
context = new Dictionary<string, string> { {"specialpage", "root"} };
return GetHelpSourceAndIdFromName (url.Substring (RootNamespace.Length), out internalId, out node);
}
HelpSource helpSource = hintSource;
// internalId is assigned inside the conditions below; the hint is
// rejected when it yields a null/empty internal id.
if (helpSource == null || string.IsNullOrEmpty (internalId = helpSource.GetInternalIdForUrl (url, out node, out context))) {
helpSource = null;
foreach (var hs in helpSources.Where (h => h.CanHandleUrl (url))) {
if (!string.IsNullOrEmpty (internalId = hs.GetInternalIdForUrl (url, out node, out context))) {
helpSource = hs;
break;
}
}
}
return helpSource;
}
// Resolves an entry-point name to the first help source backing one of
// its children; internalId is always the root page.
public HelpSource GetHelpSourceAndIdFromName (string name, out string internalId, out Node node)
{
	internalId = "root:";
	node = LookupEntryPoint (name);
	if (node == null)
		return null;
	foreach (Node child in node.ChildNodes) {
		var hs = child.Tree.HelpSource;
		if (hs != null)
			return hs;
	}
	return null;
}
// Looks up a help source by its positional id; out-of-range ids map to
// null rather than throwing.
public HelpSource GetHelpSourceFromId (int id)
{
	if (id < 0 || id >= this.helpSources.Count)
		return null;
	return this.helpSources[id];
}
// Resolves an image url: "source-id:N:rest" is delegated to help source
// number N; anything else is looked up as an embedded assembly resource.
public Stream GetImage (string url)
{
	if (url.StartsWith ("source-id:", StringComparison.OrdinalIgnoreCase)) {
		string rest = url.Substring (10);
		int colon = rest.IndexOf (":");
		// Fix: a url without a second ':' used to crash Substring below.
		if (colon < 0) {
			Console.Error.WriteLine ("Failed to parse source-id url: {0}", url);
			return null;
		}
		string idText = rest.Substring (0, colon);
		int id = 0;
		if (!int.TryParse (idText, out id)) {
			Console.Error.WriteLine ("Failed to parse source-id url: {0} `{1}'", url, idText);
			return null;
		}
		HelpSource helpSourceFromId = this.GetHelpSourceFromId (id);
		// Fix: GetHelpSourceFromId returns null for unknown ids; the old
		// code dereferenced it unconditionally (NullReferenceException).
		if (helpSourceFromId == null) {
			Console.Error.WriteLine ("Failed to find help source for source-id url: {0}", url);
			return null;
		}
		return helpSourceFromId.GetImage (rest.Substring (colon + 1));
	}
	Assembly assembly = Assembly.GetAssembly (typeof (RootTree));
	return assembly.GetManifestResourceStream (url);
}
// Opens the first "monodoc.index" found among the candidate directories,
// or returns null when none exists.
public IndexReader GetIndex ()
{
	foreach (var prefix in GetIndexesPathPrefixes ()) {
		var candidate = Path.Combine (prefix, "monodoc.index");
		if (File.Exists (candidate))
			return IndexReader.Load (candidate);
	}
	return null;
}
// Loads the default tree and (re)generates the flat monodoc.index.
public static void MakeIndex ()
{
RootTree rootTree = RootTree.LoadTree ();
rootTree.GenerateIndex ();
}
// Builds the flat "monodoc.index" from every mounted help source and
// tries to save it at each candidate location; fails only when no
// location was writable.
public bool GenerateIndex ()
{
IndexMaker indexMaker = new IndexMaker ();
foreach (HelpSource current in this.helpSources)
current.PopulateIndex (indexMaker);
var paths = GetIndexesPathPrefixes ().Select (bp => Path.Combine (bp, "monodoc.index"));
bool successful = false;
foreach (var path in paths) {
try {
indexMaker.Save (path);
successful = true;
if (RootTree.IsUnix)
RootTree.chmod (path, 420); // 420 == 0644 (rw-r--r--)
} catch (UnauthorizedAccessException) {
}
}
if (!successful) {
Console.WriteLine ("You don't have permissions to write on any of [" + string.Join (", ", paths) + "]");
return false;
}
Console.WriteLine ("Documentation index updated");
return true;
}
// Opens the first Lucene "search_index" directory found among the
// candidate locations, or returns null when none exists.
public SearchableIndex GetSearchIndex ()
{
	foreach (var prefix in GetIndexesPathPrefixes ()) {
		var candidate = Path.Combine (prefix, "search_index");
		if (Directory.Exists (candidate))
			return SearchableIndex.Load (candidate);
	}
	return null;
}
// Loads the default tree and (re)generates the Lucene search index.
public static void MakeSearchIndex ()
{
RootTree rootTree = RootTree.LoadTree ();
rootTree.GenerateSearchIndex ();
}
// Rebuilds the Lucene full-text index at the first writable candidate
// location, feeding it every mounted help source.
public bool GenerateSearchIndex ()
{
	Console.WriteLine ("Loading the monodoc tree...");
	IndexWriter indexWriter = null;
	var analyzer = new StandardAnalyzer (Lucene.Net.Util.Version.LUCENE_CURRENT);
	var paths = GetIndexesPathPrefixes ().Select (bp => Path.Combine (bp, "search_index"));
	foreach (var path in paths) {
		try {
			if (!Directory.Exists (path))
				Directory.CreateDirectory (path);
			var directory = Lucene.Net.Store.FSDirectory.Open (path);
			indexWriter = new IndexWriter (directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
			// Fix: stop at the first writable location. The old loop kept
			// going, creating one IndexWriter per writable path and leaking
			// every writer except the last (which alone received documents,
			// leaving empty/locked indexes behind at the other locations).
			break;
		} catch (UnauthorizedAccessException) {}
	}
	if (indexWriter == null) {
		Console.WriteLine ("You don't have permissions to write on any of [" + string.Join (", ", paths) + "]");
		return false;
	}
	Console.WriteLine ("Collecting and adding documents...");
	foreach (HelpSource current in this.helpSources) {
		current.PopulateSearchableIndex (indexWriter);
	}
	Console.WriteLine ("Closing...");
	indexWriter.Optimize ();
	indexWriter.Close ();
	return true;
}
// POSIX chmod(2) from libc; only invoked when IsUnix (see GenerateIndex).
[DllImport ("libc")]
static extern int chmod (string filename, int mode);
// Candidate directories under which monodoc.index / search_index may be
// read or written, in probe order.
IEnumerable<string> GetIndexesPathPrefixes ()
{
	yield return basedir;
	// Fix: Config.Get ("docPath") may legitimately be null (see
	// ProbeBaseDirectories, which guards the same key); yielding a null
	// prefix made Path.Combine throw in every consumer of this sequence.
	var docPath = Config.Get ("docPath");
	if (!string.IsNullOrEmpty (docPath))
		yield return docPath;
	var indexDirectory = Config.Get ("monodocIndexDirectory");
	if (!string.IsNullOrEmpty (indexDirectory))
		yield return indexDirectory;
	yield return Path.Combine (Environment.GetFolderPath (Environment.SpecialFolder.ApplicationData), "monodoc");
}
[Obsolete]
public string GetTitle (string url)
{
// Legacy API: always returns the fixed application title.
return "Mono Documentation";
}
}
}
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;
using Mono.Utilities;
using Lucene.Net.Index;
#if LEGACY_MODE
namespace Monodoc
{
using Generators;
// Legacy (LEGACY_MODE) compatibility surface of RootTree: forwards the
// pre-generator API onto the generator-based one.
public partial class RootTree
{
	static IDocGenerator<string> rawGenerator = new RawGenerator ();
	static HtmlGenerator htmlGenerator = new HtmlGenerator (null);

	// Returns the raw XML rendering of `id` as an XmlDocument, or null
	// when the url cannot be rendered.
	[Obsolete ("Use RawGenerator directly")]
	public XmlDocument GetHelpXml (string id)
	{
		var rendered = RenderUrl (id, rawGenerator);
		if (rendered == null)
			return null;
		var doc = new XmlDocument ();
		// Fix: reuse the text rendered above instead of calling
		// RenderUrl a second time for the same id (duplicate work).
		doc.LoadXml (rendered);
		return doc;
	}

	[Obsolete ("Use the RenderUrl variant accepting a generator")]
	public string RenderUrl (string url, out Node n)
	{
		return RenderUrl (url, htmlGenerator, out n);
	}

	[Obsolete ("Use GenerateIndex")]
	public static void MakeIndex (RootTree root)
	{
		root.GenerateIndex ();
	}

	[Obsolete ("Use GenerateSearchIndex")]
	public static void MakeSearchIndex (RootTree root)
	{
		root.GenerateSearchIndex ();
	}
}
}
#endif
//
//
// SearchableDocument.cs: Abstracts our model of document from the Lucene Document
//
// Author: Mario Sopena
//
using Lucene.Net.Documents;
namespace Monodoc
{
// Mutable record describing one searchable page; LuceneDoc converts it
// into the Lucene Document actually written to the index.
struct SearchableDocument
{
	public string Title {
		get; set;
	}
	public string Url {
		get; set;
	}
	public string FullTitle {
		get; set;
	}
	public string HotText {
		get; set;
	}
	public string Text {
		get; set;
	}
	public string Examples {
		get; set;
	}

	// Clears every field so the instance can be reused between pages.
	public SearchableDocument Reset ()
	{
		Title = Url = FullTitle = HotText = Text = Examples = null;
		return this;
	}

	public Document LuceneDoc {
		get {
			Document doc = new Document ();
			// Fix: Lucene's Field constructor rejects null values; only
			// "fulltitle" was guarded before, so a provider leaving any
			// other field unset crashed indexing. Guard them all.
			doc.Add (UnIndexed ("title", Title ?? string.Empty));
			doc.Add (UnIndexed ("url", Url ?? string.Empty));
			doc.Add (UnIndexed ("fulltitle", FullTitle ?? string.Empty));
			doc.Add (UnStored ("hottext", HotText ?? string.Empty));
			doc.Add (UnStored ("text", Text ?? string.Empty));
			doc.Add (UnStored ("examples", Examples ?? string.Empty));
			return doc;
		}
	}

	// Stored but not indexed: retrievable from results, not searchable.
	static Field UnIndexed(System.String name, System.String value_Renamed)
	{
		return new Field(name, value_Renamed, Field.Store.YES, Field.Index.NO);
	}

	// Indexed but not stored: searchable, not retrievable.
	static Field UnStored(System.String name, System.String value_Renamed)
	{
		return new Field(name, value_Renamed, Field.Store.NO, Field.Index.ANALYZED);
	}
}
}
//
//
// SearchableIndex.cs: Index that uses Lucene to search through the docs
//
// Author: Mario Sopena
//
using System;
using System.IO;
using System.Collections.Generic;
// Lucene imports
using Lucene.Net.Index;
using Lucene.Net.Documents;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Search;
using Lucene.Net.QueryParsers;
using Lucene.Net.Store;
namespace Monodoc
{
// Full-text search over the Lucene "search_index" directory built by
// RootTree.GenerateSearchIndex.
public class SearchableIndex
{
const int maxSearchCount = 30;
IndexSearcher searcher;
string dir;
public string Dir {
get {
if (dir == null)
dir = "search_index";
return dir;
}
set { dir = value; }
}
// Opens the index located at `dir`; returns null when it is missing or
// unreadable.
public static SearchableIndex Load (string dir)
{
SearchableIndex s = new SearchableIndex ();
s.dir = dir;
try {
//s.searcher = new IndexSearcher (dir);
// TODO: parametrize that depending if we run on the desktop (low footprint) or the server (use RAMDirectory for instance)
s.searcher = new IndexSearcher (FSDirectory.Open (dir));
} catch (IOException) {
Console.WriteLine ("Index nonexistent or in bad format");
return null;
}
return s;
}
public Result Search (string term)
{
return Search (term, maxSearchCount);
}
public Result Search (string term, int count)
{
return Search (term, count, 0);
}
// Ranked search: fuzzy, exact (boosted 10x) and prefix (boosted 10x)
// matches on "hottext" are max-combined with boosted term matches on
// "text" and "examples". Returns null on IO failure.
public Result Search (string term, int count, int start) {
try {
term = term.ToLower ();
Term htTerm = new Term ("hottext", term);
Query qq1 = new FuzzyQuery (htTerm);
Query qq2 = new TermQuery (htTerm);
qq2.Boost = 10f;
Query qq3 = new PrefixQuery (htTerm);
qq3.Boost = 10f;
DisjunctionMaxQuery q1 = new DisjunctionMaxQuery (0f);
q1.Add (qq1);
q1.Add (qq2);
q1.Add (qq3);
Query q2 = new TermQuery (new Term ("text", term));
q2.Boost = 3f;
Query q3 = new TermQuery (new Term ("examples", term));
q3.Boost = 3f;
DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
q.Add (q1);
q.Add (q2);
q.Add (q3);
TopDocs top = SearchInternal (q, count, start);
Result r = new Result (term, searcher, top.ScoreDocs);
return r;
} catch (IOException) {
Console.WriteLine ("No index in {0}", dir);
return null;
}
}
// Runs the query, using a paging collector only when an offset is asked for.
TopDocs SearchInternal (Query q, int count, int start)
{
// Easy path that doesn't involve creating a Collector ourselves
// watch for Lucene.NET improvement on that (like searcher.SearchAfter)
if (start == 0)
return searcher.Search (q, count);
var weight = searcher.CreateWeight (q); // TODO: reuse weight instead of query
var collector = TopScoreDocCollector.Create (start + count + 1, false);
searcher.Search (q, collector);
return collector.TopDocs (start, count);
}
// Lighter query (exact + prefix on "hottext" only) intended for quick
// lookups. Returns null on IO failure.
public Result FastSearch (string term, int number)
{
try {
term = term.ToLower ();
Query q1 = new TermQuery (new Term ("hottext", term));
Query q2 = new PrefixQuery (new Term ("hottext", term));
q2.Boost = 0.5f;
DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
q.Add (q1);
q.Add (q2);
TopDocs top = searcher.Search (q, number);
return new Result (term, searcher, top.ScoreDocs);
} catch (IOException) {
Console.WriteLine ("No index in {0}", dir);
return null;
}
}
}
//
// An object representing the search term with the results
//
public class Result {
	readonly string term;
	readonly Searcher searcher;
	readonly ScoreDoc[] docs;

	/// <summary>The query string these results were produced for.</summary>
	public string Term {
		get { return term; }
	}

	/// <summary>Number of hits returned by the search.</summary>
	public int Count {
		get { return docs.Length; }
	}

	/// <summary>Fetches the stored Lucene document for hit i.</summary>
	public Document this [int i] {
		get { return searcher.Doc (docs[i].Doc); }
	}

	public string GetTitle (int i)
	{
		var document = this[i];
		return document == null ? string.Empty : document.Get ("title");
	}

	public string GetUrl (int i)
	{
		var document = this[i];
		return document == null ? string.Empty : document.Get ("url");
	}

	public string GetFullTitle (int i)
	{
		var document = this[i];
		return document == null ? string.Empty : document.Get ("fulltitle");
	}

	public float Score (int i)
	{
		return docs[i].Score;
	}

	public Result (string Term, Searcher searcher, ScoreDoc[] docs)
	{
		this.term = Term;
		this.searcher = searcher;
		this.docs = docs;
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Collections.Generic;
namespace Monodoc
{
/// <summary>
/// This tree is populated by the documentation providers, or populated
/// from a binary encoding of the tree. The format of the tree is designed
/// to minimize the need to load it in full.
/// </summary>
/* Ideally this class should also be abstracted to let user have something
* else than a file as a backing store, a database for instance
*/
public class Tree
#if LEGACY_MODE
: Node
#endif
{
// Version written by the current serializer (appended at end of file).
public const long CurrentVersionNumber = 1;
// Sentinel marking a version record in the stream.
const int VersionNumberKey = -(int)'v';
public readonly HelpSource HelpSource;
FileStream InputStream;
BinaryReader InputReader;
#if !LEGACY_MODE
// This is the node which contains all the other node of the tree
Node rootNode;
#endif
/// <summary>
/// Load from file constructor
/// </summary>
public Tree (HelpSource hs, string filename)
#if LEGACY_MODE
: base (null, null)
#endif
{
HelpSource = hs;
// Strict UTF-8: throw on invalid bytes rather than silently replace.
Encoding utf8 = new UTF8Encoding (false, true);
if (!File.Exists (filename)){
throw new FileNotFoundException ();
}
InputStream = File.OpenRead (filename);
InputReader = new BinaryReader (InputStream, utf8);
byte [] sig = InputReader.ReadBytes (4);
if (!GoodSig (sig))
throw new Exception ("Invalid file format");
InputStream.Position = 4;
// Try to read old version information
if (InputReader.ReadInt32 () == VersionNumberKey)
VersionNumber = InputReader.ReadInt64 ();
else {
// We try to see if there is a version number at the end of the file
InputStream.Seek (-(4 + 8), SeekOrigin.End); // VersionNumberKey + long
try {
if (InputReader.ReadInt32 () == VersionNumberKey)
VersionNumber = InputReader.ReadInt64 ();
} catch {}
// We set the stream back at the beginning of the node definition list
InputStream.Position = 4;
}
var position = InputReader.ReadInt32 ();
#if !LEGACY_MODE
rootNode = new Node (this, position);
#else
Address = position;
#endif
InflateNode (RootNode);
}
/// <summary>
/// Tree creation and merged tree constructor
/// </summary>
public Tree (HelpSource hs, string caption, string url)
#if !LEGACY_MODE
: this (hs, null, caption, url)
{
}
#else
: base (caption, url)
{
HelpSource = hs;
}
#endif
public Tree (HelpSource hs, Node parent, string caption, string element)
#if LEGACY_MODE
: base (parent, caption, element)
#endif
{
HelpSource = hs;
#if !LEGACY_MODE
rootNode = parent == null ? new Node (this, caption, element) : new Node (parent, caption, element);
#endif
}
/// <summary>
/// Saves the tree into the specified file using the help file format.
/// </summary>
public void Save (string file)
{
Encoding utf8 = new UTF8Encoding (false, true);
using (FileStream output = File.OpenWrite (file)){
// Skip over the pointer to the first node.
output.Position = 4 + 4;
using (BinaryWriter writer = new BinaryWriter (output, utf8)) {
// Recursively dump
RootNode.Serialize (output, writer);
// We want to generate 2.10 compatible files so we write the version number at the end
writer.Write (VersionNumberKey);
writer.Write (CurrentVersionNumber);
output.Position = 0;
writer.Write (new byte [] { (byte) 'M', (byte) 'o', (byte) 'H', (byte) 'P' });
writer.Write (RootNode.Address);
}
}
}
// Root of the tree; in LEGACY_MODE the Tree itself is also the root Node.
public Node RootNode {
get {
#if LEGACY_MODE
return this;
#else
return rootNode;
#endif
}
}
// Format version read from the tree file; stays 0 for files carrying
// no version record (see ForceResort).
public long VersionNumber {
get;
private set;
}
// Checks the 4-byte "MoHP" magic header.
static bool GoodSig (byte [] sig)
{
if (sig.Length != 4)
return false;
return sig [0] == (byte) 'M'
&& sig [1] == (byte) 'o'
&& sig [2] == (byte) 'H'
&& sig [3] == (byte) 'P';
}
// Positions the input stream at the node's (absolute-valued) address and
// deserializes the node in place.
public void InflateNode (Node baseNode)
{
var address = baseNode.Address;
if (address < 0)
address = -address;
InputStream.Position = address;
baseNode.Deserialize (InputReader);
}
// Nodes use this value to know if they should manually re-sort their child
// if they come from an older generator version
internal bool ForceResort {
get {
return VersionNumber == 0;
}
}
}
public static class TreeDumper
{
	static int indent;

	// Writes two spaces per current indentation level.
	static void Indent ()
	{
		Console.Write (new string (' ', 2 * indent));
	}

	/// <summary>Recursively dumps a node tree to stdout for debugging.</summary>
	public static void PrintTree (Node node)
	{
		Indent ();
		Console.WriteLine ("{0},{1}\t[PublicUrl: {2}]", node.Element, node.Caption, node.PublicUrl);
		if (node.ChildNodes.Count == 0)
			return;
		indent++;
		foreach (Node child in node.ChildNodes)
			PrintTree (child);
		indent--;
	}

	/// <summary>Serializes the children of root into a small XML toc document.</summary>
	public static string ExportToTocXml (Node root, string title, string desc)
	{
		if (root == null)
			throw new ArgumentNullException ("root");
		// Return a toc index of sub-nodes
		var output = new StringBuilder ();
		var writer = XmlWriter.Create (output);
		writer.WriteStartElement ("toc");
		writer.WriteAttributeString ("title", title ?? string.Empty);
		writer.WriteElementString ("description", desc ?? string.Empty);
		writer.WriteStartElement ("list");
		foreach (Node child in root.ChildNodes) {
			writer.WriteStartElement ("item");
			writer.WriteAttributeString ("url", child.Element);
			writer.WriteValue (child.Caption);
			writer.WriteEndElement ();
		}
		writer.WriteEndElement ();
		writer.WriteEndElement ();
		writer.Flush ();
		writer.Close ();
		return output.ToString ();
	}
}
}
using System;
namespace Monodoc
{
public static class TypeUtils
{
	/// <summary>
	/// Splits a type url of the form "Name.Space.Type" into its namespace
	/// and type parts, ignoring dots nested inside generic argument
	/// markers ('&lt;'/'&gt;' or their '{'/'}' encoding). Returns false
	/// (with both outputs null) when the url has no top-level dot.
	/// </summary>
	public static bool GetNamespaceAndType (string url, out string ns, out string type)
	{
		int lastDot = -1;
		int depth = 0;
		for (int i = 0; i < url.Length; ++i) {
			switch (url [i]) {
			case '<':
			case '{':
				depth++;
				break;
			case '>':
			case '}':
				depth--;
				break;
			case '.':
				// Only dots outside generic brackets separate ns from type.
				if (depth == 0)
					lastDot = i;
				break;
			}
		}
		if (lastDot == -1) {
			ns = null;
			type = null;
			return false;
		}
		ns = url.Substring (0, lastDot);
		type = url.Substring (lastDot + 1);
		return true;
	}
}
}
using System;
using System.Linq;
using System.IO;
using System.Configuration;
using System.Collections.Specialized;
using Monodoc.Caches;
namespace Monodoc
{
// Kinds of payloads a documentation cache can store.
public enum DocEntity
{
Text,
Blob
}
// Abstraction over a rendered-documentation cache; implemented by
// Caches.FileCache (persistent files) and Caches.NullCache (no-op).
public interface IDocCache : IDisposable
{
bool IsCached (string id);
bool CanCache (DocEntity entity);
Stream GetCachedStream (string id);
string GetCachedString (string id);
void CacheText (string id, string content);
void CacheText (string id, Stream stream);
void CacheBlob (string id, byte[] data);
void CacheBlob (string id, Stream stream);
}
public static class DocCacheHelper
{
// Base directory for file caches, parsed once from the "cache" config
// key of the form "file,<directory>"; '~' expands to the user's home.
// Stays null when caching is unconfigured or the lookup fails.
static string cacheBaseDirectory;
static DocCacheHelper ()
{
try {
var cacheConfig = Config.Get ("cache");
if (cacheConfig == null) return;
var cacheValues = cacheConfig.Split (',');
if (cacheValues.Length == 2 && cacheValues[0].Equals ("file", StringComparison.Ordinal))
cacheBaseDirectory = cacheValues[1].Replace ("~", Environment.GetFolderPath (Environment.SpecialFolder.Personal));
} catch {}
}
// Use configuration option to query for cache directory, if it doesn't exist we instantiate a nullcache
public static IDocCache GetDefaultCache (string name)
{
if (cacheBaseDirectory == null)
return new NullCache ();
return new FileCache (Path.Combine (cacheBaseDirectory, name));
}
}
}
using System;
using System.IO;
namespace Monodoc.Caches
{
// IDocCache backed by flat files under a base directory; the whole
// directory is removed on Dispose.
public class FileCache : IDocCache
{
	string baseCacheDir;

	public FileCache (string baseCacheDir)
	{
		this.baseCacheDir = baseCacheDir;
		if (!Directory.Exists (baseCacheDir))
			Directory.CreateDirectory (baseCacheDir);
	}

	public bool IsCached (string id)
	{
		return File.Exists (MakePath (id));
	}

	// Files can hold both text and blobs, so every entity type is cacheable.
	public bool CanCache (DocEntity entity)
	{
		return true;
	}

	public Stream GetCachedStream (string id)
	{
		return File.OpenRead (MakePath (id));
	}

	public string GetCachedString (string id)
	{
		return File.ReadAllText (MakePath (id));
	}

	public void CacheText (string id, string content)
	{
		File.WriteAllText (MakePath (id), content);
	}

	public void CacheText (string id, Stream stream)
	{
		// Fix: File.OpenWrite opens with OpenOrCreate and does NOT truncate,
		// so re-caching shorter content left stale trailing bytes in the
		// entry; File.Create truncates any existing file.
		using (var file = File.Create (MakePath (id)))
			stream.CopyTo (file);
	}

	public void CacheBlob (string id, byte[] data)
	{
		File.WriteAllBytes (MakePath (id), data);
	}

	public void CacheBlob (string id, Stream stream)
	{
		// Fix: truncate instead of overwriting in place (see CacheText above).
		using (var file = File.Create (MakePath (id)))
			stream.CopyTo (file);
	}

	// Cache entries are flat files: path separators in ids are flattened.
	string MakePath (string id)
	{
		id = id.Replace (Path.DirectorySeparatorChar, '_');
		return Path.Combine (baseCacheDir, id);
	}

	// Best-effort removal of the cache directory; failures are ignored.
	public void Dispose ()
	{
		if (!Directory.Exists (baseCacheDir))
			return;
		try {
			Directory.Delete (baseCacheDir, true);
		} catch {}
	}
}
}
using System;
using System.IO;
namespace Monodoc.Caches
{
// This is basically a no-cache implementation
public class NullCache : IDocCache
{
	// Nothing is ever stored, so every lookup misses…
	public bool IsCached (string id) { return false; }

	// …and no entity type is accepted for caching.
	public bool CanCache (DocEntity entity) { return false; }

	public Stream GetCachedStream (string id) { return null; }

	public string GetCachedString (string id) { return null; }

	// All writes are deliberate no-ops.
	public void CacheText (string id, string content) { }

	public void CacheText (string id, Stream stream) { }

	public void CacheBlob (string id, byte[] data) { }

	public void CacheBlob (string id, Stream stream) { }

	// Nothing to release.
	public void Dispose () { }
}
}
using System;
using System.Collections.Generic;
namespace Monodoc
{
// All type of documents that a generator may find as input
public enum DocumentType {
EcmaXml, // Our main monodoc format
EcmaSpecXml, // ECMA specification XML (rendered by Ecmaspec2Html)
Man, // man pages (rendered by Man2Html)
AddinXml, // add-in documentation XML (rendered by Addin2Html)
MonoBook, // This is mostly XHTML already, just need a tiny bit of processing
Html, // passed through untouched
TocXml, // Used by help source displaying some kind of toc of the content they host
PlainText, // passed through untouched
ErrorXml // compiler error documentation (rendered by Error2Html)
}
/* This interface defines a set of transformation engine
* that convert multiple documentation source to a single output format
*/
public interface IDocGenerator<TOutput>
{
/* This method is responsible for finding out the documentation type
* for the given ID and using the right engine internally.
* The id can be accompanied by a context dictionary giving away extra
* information to the renderer.
*/
TOutput Generate (HelpSource hs, string internalId, Dictionary<string, string> context);
}
}
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Collections.Generic;
using Monodoc;
namespace Monodoc.Generators
{
using Html;
// Contract for the per-DocumentType converters used by HtmlGenerator:
// extra CSS to inject plus stream- and string-based export entry points.
interface IHtmlExporter
{
string CssCode { get; }
string Export (Stream input, Dictionary<string, string> extras);
string Export (string input, Dictionary<string, string> extras);
}
// IDocGenerator producing HTML: dispatches each document to the converter
// registered for its DocumentType and caches rendered pages.
public class HtmlGenerator : IDocGenerator<string>
{
const string cachePrefix = "htmlcached#";
static string css_code;
IDocCache defaultCache;
static Dictionary<DocumentType, IHtmlExporter> converters;
static HtmlGenerator ()
{
// One converter per input document type; Idem passes content through.
converters = new Dictionary<DocumentType, IHtmlExporter> {
{ DocumentType.EcmaXml, new Ecma2Html () },
{ DocumentType.Man, new Man2Html () },
{ DocumentType.TocXml, new Toc2Html () },
{ DocumentType.EcmaSpecXml, new Ecmaspec2Html () },
{ DocumentType.ErrorXml, new Error2Html () },
{ DocumentType.Html, new Idem () },
{ DocumentType.MonoBook, new MonoBook2Html () },
{ DocumentType.AddinXml, new Addin2Html () },
{ DocumentType.PlainText, new Idem () },
};
}
public HtmlGenerator (IDocCache defaultCache)
{
this.defaultCache = defaultCache;
}
// Renders `id` from `hs` to HTML. The cache is consulted twice: first
// under the context-free key (multi-part pages are stored there), then
// under the context-qualified key once the document type is known.
public string Generate (HelpSource hs, string id, Dictionary<string, string> context)
{
string specialPage = null;
if (context != null && context.TryGetValue ("specialpage", out specialPage) && specialPage == "master-root")
return GenerateMasterRootPage (hs != null ? hs.RootTree : null);
if (id == "root:" && hs == null)
return MakeEmptySummary ();
if (hs == null || string.IsNullOrEmpty (id))
return MakeHtmlError (string.Format ("Your request has found no candidate provider [hs=\"{0}\", id=\"{1}\"]",
hs == null ? "(null)" : hs.Name, id ?? "(null)"));
var cache = defaultCache ?? hs.Cache;
if (cache != null && cache.IsCached (MakeCacheKey (hs, id, null)))
return cache.GetCachedString (MakeCacheKey (hs, id, null));
IEnumerable<string> parts;
if (hs.IsMultiPart (id, out parts))
return GenerateMultiPart (hs, parts, id, context);
if (hs.IsRawContent (id))
return hs.GetText (id) ?? string.Empty;
DocumentType type = hs.GetDocumentTypeForId (id);
if (cache != null && context != null && cache.IsCached (MakeCacheKey (hs, id, context)))
return cache.GetCachedString (MakeCacheKey (hs, id, context));
IHtmlExporter exporter;
if (!converters.TryGetValue (type, out exporter))
return MakeHtmlError (string.Format ("Input type '{0}' not supported",
type.ToString ()));
var result = hs.IsGeneratedContent (id) ?
exporter.Export (hs.GetCachedText (id), context) :
exporter.Export (hs.GetCachedHelpStream (id), context);
if (cache != null)
cache.CacheText (MakeCacheKey (hs, id, context), result);
return result;
}
// Concatenates the rendering of every part and caches the aggregate
// under the context-free key of the original id.
string GenerateMultiPart (HelpSource hs, IEnumerable<string> ids, string originalId, Dictionary<string, string> context)
{
var sb = new StringBuilder ();
foreach (var id in ids)
sb.AppendLine (Generate (hs, id, context));
var cache = defaultCache ?? hs.Cache;
if (cache != null)
cache.CacheText (MakeCacheKey (hs, originalId, null), sb.ToString ());
return sb.ToString ();
}
// Renders the embedded home.html resource with one link per root child.
string GenerateMasterRootPage (RootTree rootTree)
{
if (rootTree == null)
return string.Empty;
var assembly = System.Reflection.Assembly.GetAssembly (typeof (HtmlGenerator));
var hpStream = assembly.GetManifestResourceStream ("home.html");
var home = new StreamReader (hpStream).ReadToEnd ();
var links = string.Join (Environment.NewLine,
rootTree.RootNode.ChildNodes.Select (n => string.Format ("<li><a href=\"{0}\">{1}</a></li>", n.Element, n.Caption)));
return home.Replace ("@@API_DOCS@@", links);
}
// Lazily built CSS: the embedded base.css plus every converter's extra CSS.
public static string InlineCss {
get {
if (css_code != null)
return css_code;
System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (HtmlGenerator));
Stream str_css = assembly.GetManifestResourceStream ("base.css");
StringBuilder sb = new StringBuilder ((new StreamReader (str_css)).ReadToEnd());
sb.Replace ("@@FONT_FAMILY@@", "Sans Serif");
sb.Replace ("@@FONT_SIZE@@", "100%");
css_code = sb.ToString () + converters.Values
.Select (c => c.CssCode)
.Where (css => !string.IsNullOrEmpty (css))
.DefaultIfEmpty (string.Empty)
.Aggregate (string.Concat);
return css_code;
}
set {
css_code = value;
}
}
string MakeHtmlError (string error)
{
return string.Format ("<html><head></head><body><p><em>Error:</em> {0}</p></body></html>", error);
}
string MakeEmptySummary ()
{
// Fix: the <em> element was never closed, producing malformed HTML.
return @"<html><head></head><body><p><em>This node doesn't have a summary available</em></p></body></html>";
}
// Cache key: prefix + source id + page, plus flattened context params.
string MakeCacheKey (HelpSource hs, string page, IDictionary<string,string> extraParams)
{
var key = cachePrefix + hs.SourceID + page;
if (extraParams != null && extraParams.Count > 0) {
var paramPart = string.Join ("-", extraParams.Select (kvp => kvp.Key + kvp.Value));
key += '_' + paramPart;
}
return key;
}
}
}
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Collections.Generic;
using Monodoc;
namespace Monodoc.Generators
{
/// <summary>
/// This generator returns the raw content of the HelpSource without any transformation
/// </summary>
public class RawGenerator : IDocGenerator<string>
{
	/// <summary>
	/// Resolve the raw text for a documentation id: multi-part documents
	/// are concatenated, raw content is returned verbatim, generated
	/// content comes from the cache, anything else is read straight from
	/// the cached help stream. Returns null for a null source or empty id.
	/// </summary>
	public string Generate (HelpSource hs, string id, Dictionary<string, string> context)
	{
		if (hs == null || string.IsNullOrEmpty (id))
			return null;

		IEnumerable<string> parts;
		if (hs.IsMultiPart (id, out parts))
			return GenerateMultiPart (hs, parts, id, context);

		if (hs.IsRawContent (id))
			return hs.GetText (id) ?? string.Empty;

		if (hs.IsGeneratedContent (id))
			return hs.GetCachedText (id);
		return new StreamReader (hs.GetCachedHelpStream (id)).ReadToEnd ();
	}

	// Concatenate the raw output of every sub-part, one part per line.
	string GenerateMultiPart (HelpSource hs, IEnumerable<string> ids, string originalId, Dictionary<string, string> context)
	{
		var buffer = new StringBuilder ();
		foreach (var partId in ids)
			buffer.AppendLine (Generate (hs, partId, context));
		return buffer.ToString ();
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections.Generic;
namespace Monodoc.Generators.Html
{
// Renders Mono.Addins extension-model XML (as produced by AddinsProvider)
// into simple HTML pages: one per add-in, extension point, or node type.
public class Addin2Html : IHtmlExporter
{
	// This exporter ships no stylesheet of its own.
	public string CssCode {
		get {
			return string.Empty;
		}
	}

	// Render the add-in documentation identified by the extra arguments
	// ("show" url type plus AddinID/FileID/NodeID) read from a stream.
	public string Export (Stream stream, Dictionary<string, string> extraArgs)
	{
		using (var reader = new StreamReader (stream))
			return Htmlize (GetAddin (reader, extraArgs["AddinID"]),
				extraArgs["show"],
				extraArgs["AddinID"],
				extraArgs["FileID"],
				extraArgs["NodeID"]);
	}

	// Same as the stream overload, but reading the model from a string.
	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return Htmlize (GetAddin (new StringReader (input), extraArgs["AddinID"]),
			extraArgs["show"],
			extraArgs["AddinID"],
			extraArgs["FileID"],
			extraArgs["NodeID"]);
	}

	// Locate the <Addin fullId='...'> element in the document read from
	// the reader; returns null when the add-in is not present.
	XmlElement GetAddin (TextReader reader, string addinId)
	{
		XmlDocument doc = new XmlDocument ();
		doc.Load (reader);
		XmlElement addin = (XmlElement) doc.SelectSingleNode ("Addins/Addin[@fullId='" + addinId + "']");
		// NOTE(review): this ternary is a no-op — both arms yield addin.
		return addin != null ? addin : null;
	}

	// Dispatch on the url scheme ("addin:", "extension-point:",
	// "extension-node:") to the matching renderer; null for unknown schemes.
	public string Htmlize (XmlElement addin, string urlType, string addinId, string fileId, string path)
	{
		if (urlType == Monodoc.Providers.AddinsHelpSource.AddinPrefix)
			return GetAddinTextFromUrl (addin, addinId, fileId);
		else if (urlType == Monodoc.Providers.AddinsHelpSource.ExtensionPrefix)
			return GetExtensionTextFromUrl (addin, addinId, fileId, path);
		else if (urlType == Monodoc.Providers.AddinsHelpSource.ExtensionNodePrefix)
			return GetExtensionNodeTextFromUrl (addin, addinId, fileId, path);
		return null;
	}

	// Overview page for one add-in: name, description, id/namespace/version
	// table and the list of extension points it declares (each linked with
	// an "extension-point:" url).
	protected string GetAddinTextFromUrl (XmlElement addin, string addinId, string fileId)
	{
		if (addin == null)
			return "<html>Add-in not found: " + addinId + "</html>";
		StringBuilder sb = new StringBuilder ("<html>");
		sb.Append ("<h1>").Append (addin.GetAttribute ("name")).Append ("</h1>");
		XmlElement docs = (XmlElement) addin.SelectSingleNode ("Description");
		if (docs != null)
			sb.Append (docs.InnerText);
		sb.Append ("<p><table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");
		sb.AppendFormat ("<tr><td><b>Id</b></td><td>{0}</td></tr>", addin.GetAttribute ("addinId"));
		sb.AppendFormat ("<tr><td><b>Namespace</b></td><td>{0}</td></tr>", addin.GetAttribute ("namespace"));
		sb.AppendFormat ("<tr><td><b>Version</b></td><td>{0}</td></tr>", addin.GetAttribute ("version"));
		sb.Append ("</table></p>");
		sb.Append ("<p><b>Extension Points</b>:</p>");
		sb.Append ("<ul>");
		foreach (XmlElement ep in addin.SelectNodes ("ExtensionPoint")) {
			sb.AppendFormat ("<li><a href=\"extension-point:{0}#{1}#{2}\">{3}</li>", fileId, addinId, ep.GetAttribute ("path"), ep.GetAttribute ("name"));
		}
		sb.Append ("</ul>");
		sb.Append ("</html>");
		return sb.ToString ();
	}

	// Page for one extension point: name, path, description, and a table
	// of the extension nodes it accepts (each linked with an
	// "extension-node:" url).
	protected string GetExtensionTextFromUrl (XmlElement addin, string addinId, string fileId, string path)
	{
		if (addin == null)
			return "<html>Add-in not found: " + addinId + "</html>";
		XmlElement ext = (XmlElement) addin.SelectSingleNode ("ExtensionPoint[@path='" + path + "']");
		if (ext == null)
			return "<html>Extension point not found: " + path + "</html>";
		StringBuilder sb = new StringBuilder ("<html>");
		sb.Append ("<h1>").Append (ext.GetAttribute ("name")).Append ("</h1>");
		// Visual separator between path segments in the rendered page.
		path = path.Replace ("/", " <b>/</b> ");
		sb.Append ("<p><b>Path</b>: ").Append (path).Append ("</p>");
		XmlElement desc = (XmlElement) ext.SelectSingleNode ("Description");
		if (desc != null)
			sb.Append (desc.InnerText);
		sb.Append ("<p><b>Extension Nodes</b>:</p>");
		sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");
		foreach (XmlElement en in ext.SelectNodes ("ExtensionNode")) {
			string nid = en.GetAttribute ("id");
			string nname = en.GetAttribute ("name");
			string sdesc = "";
			desc = (XmlElement) en.SelectSingleNode ("Description");
			if (desc != null)
				sdesc = desc.InnerText;
			sb.AppendFormat ("<tr><td><a href=\"extension-node:{0}#{1}#{2}\">{3}</td><td>{4}</td></tr>", fileId, addinId, nid, nname, sdesc);
		}
		sb.Append ("</table>");
		sb.Append ("</html>");
		return sb.ToString ();
	}

	// Page for one extension node type: name, description, the attribute
	// table (the implicit "id" attribute is always listed first) and the
	// child nodes it may contain.
	protected string GetExtensionNodeTextFromUrl (XmlElement addin, string addinId, string fileId, string nodeId)
	{
		if (addin == null)
			return "<html>Add-in not found: " + addinId + "</html>";
		XmlElement node = (XmlElement) addin.SelectSingleNode ("ExtensionNodeType[@id='" + nodeId + "']");
		if (node == null)
			return "<html>Extension point not found: " + nodeId + "</html>";
		StringBuilder sb = new StringBuilder ("<html>");
		sb.Append ("<h1>").Append (node.GetAttribute ("name")).Append ("</h1>");
		XmlElement desc = (XmlElement) node.SelectSingleNode ("Description");
		if (desc != null)
			sb.Append (desc.InnerText);
		sb.Append ("<p><b>Attributes</b>:</p>");
		sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\"><tr>");
		sb.Append ("<td><b>Name</b></td>");
		sb.Append ("<td><b>Type</b></td>");
		sb.Append ("<td><b>Required</b></td>");
		sb.Append ("<td><b>Localizable</b></td>");
		sb.Append ("<td><b>Description</b></td>");
		// Implicit row: every extension node supports an "id" attribute.
		sb.Append ("<tr>");
		sb.Append ("<td>id</td>");
		sb.Append ("<td>System.String</td>");
		sb.Append ("<td></td>");
		sb.Append ("<td></td>");
		sb.Append ("<td>Identifier of the node.</td>");
		sb.Append ("</tr>");
		foreach (XmlElement at in node.SelectNodes ("Attributes/Attribute")) {
			sb.Append ("<tr>");
			sb.AppendFormat ("<td>{0}</td>", at.GetAttribute ("name"));
			sb.AppendFormat ("<td>{0}</td>", at.GetAttribute ("type"));
			if (at.GetAttribute ("required") == "True")
				sb.Append ("<td>Yes</td>");
			else
				sb.Append ("<td></td>");
			if (at.GetAttribute ("localizable") == "True")
				sb.Append ("<td>Yes</td>");
			else
				sb.Append ("<td></td>");
			string sdesc = "";
			desc = (XmlElement) at.SelectSingleNode ("Description");
			if (desc != null)
				sdesc = desc.InnerText;
			sb.AppendFormat ("<td>{0}</td>", sdesc);
			sb.Append ("</tr>");
		}
		sb.Append ("</table>");
		XmlNodeList children = node.SelectNodes ("ChildNodes/ExtensionNode");
		if (children.Count > 0) {
			sb.Append ("<p><b>Child Nodes</b>:</p>");
			sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");
			foreach (XmlElement en in children) {
				string nid = en.GetAttribute ("id");
				string nname = en.GetAttribute ("name");
				string sdesc = "";
				desc = (XmlElement) en.SelectSingleNode ("Description");
				if (desc != null)
					sdesc = desc.InnerText;
				sb.AppendFormat ("<tr><td><a href=\"extension-node:{0}#{1}#{2}\">{3}</td><td>{4}</td></tr>", fileId, addinId, nid, nname, sdesc);
			}
			sb.Append ("</table>");
		}
		sb.Append ("</html>");
		return sb.ToString ();
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections.Generic;
using Mono.Documentation;
using BF = System.Reflection.BindingFlags;
namespace Monodoc.Generators.Html
{
// Turns ECMA-XML documentation into HTML by running the embedded
// mono-ecma-css.xsl stylesheet, exposing helper callbacks to the
// stylesheet through the "monodoc:///extensions" extension object.
public class Ecma2Html : IHtmlExporter
{
	// Cached content of the embedded mono-ecma.css resource.
	static string css_ecma;
	// Cached content of the embedded helper.js resource.
	static string js;
	// Lazily-compiled transform shared by every instance.
	static XslCompiledTransform ecma_transform;
	readonly ExtensionObject ExtObject = new ExtensionObject ();

	public Ecma2Html ()
	{
	}

	// Stylesheet accompanying the generated pages, loaded once and memoized.
	public string CssCode {
		get {
			if (css_ecma != null)
				return css_ecma;
			var assembly = typeof(Ecma2Html).Assembly;
			Stream str_css = assembly.GetManifestResourceStream ("mono-ecma.css");
			css_ecma = (new StreamReader (str_css)).ReadToEnd();
			return css_ecma;
		}
	}

	// Javascript accompanying the generated pages, loaded once and memoized.
	public string JsCode {
		get {
			if (js != null)
				return js;
			var assembly = typeof(Ecma2Html).Assembly;
			Stream str_js = assembly.GetManifestResourceStream ("helper.js");
			js = (new StreamReader (str_js)).ReadToEnd();
			return js;
		}
	}

	// Transform an ECMA-XML document, forwarding every extra argument as an
	// XSLT parameter; the "specialpage=root" marker is rewritten into the
	// stylesheet's "masteroverview" display mode.
	public string Htmlize (XmlReader ecma_xml, Dictionary<string, string> extraArgs)
	{
		var args = new XsltArgumentList ();
		args.AddExtensionObject("monodoc:///extensions", ExtObject);
		string specialPage;
		if (extraArgs.TryGetValue ("specialpage", out specialPage) && specialPage == "root") {
			extraArgs.Remove ("specialpage");
			extraArgs["show"] = "masteroverview";
		}
		foreach (var kvp in extraArgs)
			args.AddParam (kvp.Key, string.Empty, kvp.Value);
		return Htmlize (ecma_xml, args);
	}

	// Run the compiled transform. Any exception is reported by returning
	// its text in place of the page markup rather than propagating.
	public string Htmlize (XmlReader ecma_xml, XsltArgumentList args)
	{
		try{
			EnsureTransform ();
			var output = new StringBuilder ();
			ecma_transform.Transform (ecma_xml,
				args,
				XmlWriter.Create (output, ecma_transform.OutputSettings),
				CreateDocumentResolver ());
			return output.ToString ();
		}
		catch(Exception x)
		{
			var msg = x.ToString ();
			return msg;
		}
	}

	// Resolver used for document() calls during the transform.
	protected virtual XmlResolver CreateDocumentResolver ()
	{
		// results in using XmlUrlResolver
		return null;
	}

	public string Export (Stream stream, Dictionary<string, string> extraArgs)
	{
		return Htmlize (XmlReader.Create (new StreamReader(stream)), extraArgs);
	}

	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return Htmlize (XmlReader.Create (new StringReader(input)), extraArgs);
	}

	// Compile the embedded mono-ecma-css.xsl stylesheet on first use.
	// NOTE(review): not synchronized — concurrent first calls could compile
	// the transform twice; confirm callers are single-threaded.
	static void EnsureTransform ()
	{
		if (ecma_transform == null) {
			ecma_transform = new XslCompiledTransform ();
			var assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecma2Html));
			Stream stream = assembly.GetManifestResourceStream ("mono-ecma-css.xsl");
			XmlReader xml_reader = new XmlTextReader (stream);
			XmlResolver r = new ManifestResourceResolver (".");
			ecma_transform.Load (xml_reader, XsltSettings.TrustedXslt, r);
		}
	}

	// Callbacks invoked from the XSLT stylesheet via monodoc:///extensions.
	public class ExtensionObject
	{
		// When true, the MonoImpInfo helpers return empty strings and no
		// "not implemented / unfinished" annotations are produced.
		bool quiet = true;
		// Memoizes assemblies resolved by partial name across calls.
		Dictionary<string, System.Reflection.Assembly> assemblyCache = new Dictionary<string, System.Reflection.Assembly> ();

		// Syntax-highlight a code snippet for the given language.
		public string Colorize(string code, string lang)
		{
			return Mono.Utilities.Colorizer.Colorize(code,lang);
		}

		// Used by stylesheet to nicely reformat the <see cref=> tags.
		// Takes a doc-comment id ("T:...", "M:...", ...) and shortens type
		// names relative to the surrounding type's namespace.
		public string MakeNiceSignature(string sig, string contexttype)
		{
			if (sig.Length < 3)
				return sig;
			if (sig[1] != ':')
				return sig;

			char s = sig[0];
			sig = sig.Substring(2);

			switch (s) {
			case 'N': return sig;
			case 'T': return ShortTypeName (sig, contexttype);

			case 'C': case 'M': case 'P': case 'F': case 'E':
				string type, mem, arg;

				// Get arguments
				int paren;
				if (s == 'C' || s == 'M')
					paren = sig.IndexOf("(");
				else if (s == 'P')
					paren = sig.IndexOf("[");
				else
					paren = 0;

				if (paren > 0 && paren < sig.Length-1) {
					string[] args = sig.Substring(paren+1, sig.Length-paren-2).Split(',');
					for (int i = 0; i < args.Length; i++)
						args[i] = ShortTypeName(args[i], contexttype);
					arg = "(" + String.Join(", ", args) + ")";
					sig = sig.Substring(0, paren);
				} else {
					arg = string.Empty;
				}

				// Get type and member names
				int dot = sig.LastIndexOf(".");
				if (s == 'C' || dot <= 0 || dot == sig.Length-1) {
					mem = string.Empty;
					type = sig;
				} else {
					type = sig.Substring(0, dot);
					mem = sig.Substring(dot);
				}

				type = ShortTypeName(type, contexttype);
				return type + mem + arg;

			default:
				return sig;
			}
		}

		// Strip the context namespace (or the whole context type name) from
		// a fully-qualified type name; nested-type '+' becomes '.'.
		static string ShortTypeName(string name, string contexttype)
		{
			int dot = contexttype.LastIndexOf(".");
			if (dot < 0) return name;
			string contextns = contexttype.Substring(0, dot+1);

			if (name == contexttype)
				return name.Substring(dot+1);

			if (name.StartsWith(contextns))
				return name.Substring(contextns.Length);

			return name.Replace("+", ".");
		}

		// Overload taking a single argument-list string; no-op while quiet.
		string MonoImpInfo(string assemblyname, string typename, string membername, string arglist, bool strlong)
		{
			if (quiet)
				return string.Empty;

			var a = new List<string> ();
			if (!string.IsNullOrEmpty (arglist)) a.Add (arglist);
			return MonoImpInfo(assemblyname, typename, membername, a, strlong);
		}

		// Overload taking the argument list as an XPath iterator from the
		// stylesheet; no-op while quiet.
		string MonoImpInfo(string assemblyname, string typename, string membername, XPathNodeIterator itr, bool strlong)
		{
			if (quiet)
				return string.Empty;

			var rgs = itr.Cast<XPathNavigator> ().Select (nav => nav.Value).ToList ();

			return MonoImpInfo (assemblyname, typename, membername, rgs, strlong);
		}

		// Report whether a member is implemented in the named assembly by
		// reflecting over it. Failures are silently swallowed and yield an
		// empty string.
		string MonoImpInfo(string assemblyname, string typename, string membername, List<string> arglist, bool strlong)
		{
			try {
				System.Reflection.Assembly assembly = null;

				try {
					if (!assemblyCache.TryGetValue (assemblyname, out assembly)) {
						// NOTE(review): LoadWithPartialName is obsolete; kept
						// for behavior compatibility.
						assembly = System.Reflection.Assembly.LoadWithPartialName(assemblyname);
						if (assembly != null)
							assemblyCache[assemblyname] = assembly;
					}
				} catch (Exception) {
					// nothing.
				}

				if (assembly == null) {
					/*if (strlong) return "The assembly " + assemblyname + " is not available to MonoDoc.";
					  else return string.Empty;*/
					return string.Empty; // silently ignore
				}

				Type t = assembly.GetType(typename, false);
				if (t == null) {
					if (strlong)
						return typename + " has not been implemented.";
					else
						return "Not implemented.";
				}

				// The following code is flakey and fails to find existing members
				return string.Empty;
			} catch (Exception) {
				return string.Empty;
			}
		}

		// Reports whether a reflected member carries a MonoTODOAttribute.
		string MonoImpInfo(System.Reflection.MemberInfo mi, string itemtype, bool strlong)
		{
			if (quiet)
				return string.Empty;

			string s = string.Empty;

			object[] atts = mi.GetCustomAttributes(true);
			int todoctr = 0;
			foreach (object att in atts) if (att.GetType().Name == "MonoTODOAttribute") todoctr++;

			if (todoctr > 0) {
				if (strlong)
					s = "This " + itemtype + " is marked as being unfinished.<BR/>\n";
				else
					s = "Unfinished.";
			}

			return s;
		}

		// Type-level implementation status: whether the type exists in the
		// named assembly and, in long form, how many of its members are
		// marked unfinished.
		public string MonoImpInfo(string assemblyname, string typename, bool strlong)
		{
			if (quiet)
				return string.Empty;

			try {
				if (assemblyname == string.Empty)
					return string.Empty;

				System.Reflection.Assembly assembly;
				if (!assemblyCache.TryGetValue (assemblyname, out assembly)) {
					assembly = System.Reflection.Assembly.LoadWithPartialName(assemblyname);
					if (assembly != null)
						assemblyCache[assemblyname] = assembly;
				}
				if (assembly == null)
					return string.Empty;

				Type t = assembly.GetType(typename, false);
				if (t == null) {
					if (strlong)
						return typename + " has not been implemented.";
					else
						return "Not implemented.";
				}

				string s = MonoImpInfo(t, "type", strlong);

				if (strlong) {
					var mis = t.GetMembers (BF.Static | BF.Instance | BF.Public | BF.NonPublic);

					// Scan members for MonoTODO attributes
					int mctr = 0;
					foreach (var mi in mis) {
						string mii = MonoImpInfo(mi, null, false);
						if (mii != string.Empty) mctr++;
					}
					if (mctr > 0) {
						s += "This type has " + mctr + " members that are marked as unfinished.<BR/>";
					}
				}

				return s;

			} catch (Exception) {
				return string.Empty;
			}
		}

		// Editing support is disabled in this generator.
		public bool MonoEditing ()
		{
			return false;
		}

		// True when the documentation text is the mdoc "To be added" stub.
		public bool IsToBeAdded(string text)
		{
			return text.StartsWith ("To be added");
		}
	}
}
}
using System;
using System.IO;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections.Generic;
namespace Monodoc.Generators.Html
{
// Renders ECMA specification XML into HTML via the embedded
// ecmaspec-html-css.xsl stylesheet.
public class Ecmaspec2Html : IHtmlExporter
{
	// Cached content of the embedded ecmaspec.css resource.
	static string css_ecmaspec;
	// Lazily-loaded transform shared by every instance.
	static XslTransform ecma_transform;
	// Argument list shared by every transform run; the Colorize extension
	// object is registered on first use.
	static XsltArgumentList args = new XsltArgumentList();

	// Stylesheet accompanying the generated pages, loaded once and memoized.
	public string CssCode {
		get {
			if (css_ecmaspec != null)
				return css_ecmaspec;
			System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecmaspec2Html));
			Stream str_css = assembly.GetManifestResourceStream ("ecmaspec.css");
			css_ecmaspec = (new StreamReader (str_css)).ReadToEnd ();
			return css_ecmaspec;
		}
	}

	// Stylesheet callback object exposing syntax highlighting.
	class ExtObj
	{
		public string Colorize (string code, string lang)
		{
			return Mono.Utilities.Colorizer.Colorize (code, lang);
		}
	}

	public string Export (Stream stream, Dictionary<string, string> extraArgs)
	{
		return Htmlize (new XPathDocument (stream));
	}

	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return Htmlize (new XPathDocument (new StringReader (input)));
	}

	// Load the stylesheet on first use, then transform the document.
	// NOTE(review): the lazy init and the shared static args are not
	// synchronized — confirm callers are single-threaded.
	static string Htmlize (XPathDocument ecma_xml)
	{
		if (ecma_transform == null){
			ecma_transform = new XslTransform ();
			System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecmaspec2Html));
			Stream stream;
			stream = assembly.GetManifestResourceStream ("ecmaspec-html-css.xsl");

			XmlReader xml_reader = new XmlTextReader (stream);
			ecma_transform.Load (xml_reader, null, null);
			args.AddExtensionObject ("monodoc:///extensions", new ExtObj ());
		}
		if (ecma_xml == null) return "";

		StringWriter output = new StringWriter ();
		ecma_transform.Transform (ecma_xml, args, output, null);

		return output.ToString ();
	}
}
}
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Xml.XPath;
using System.Collections.Generic;
namespace Monodoc.Generators.Html
{
// Renders compiler-error reference documents (ErrorDocumentation XML)
// into an HTML fragment with summary, details, and colorized examples.
public class Error2Html : IHtmlExporter
{
	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return Htmlize (new XPathDocument (new StringReader (input)));
	}

	public string Export (Stream input, Dictionary<string, string> extraArgs)
	{
		return Htmlize (new XPathDocument (input));
	}

	// Inline stylesheet for the error pages (header box, summary/details
	// headings, and code-example framing).
	public string CssCode {
		get {
			return @"
#error_ref { 
    background: #debcb0; 
    border: 2px solid #782609; 
}
div.summary {
    font-size: 110%;
    font-weight: bolder;
}
div.details {
    font-size: 110%;
    font-weight: bolder;
}
div.code_example {
	background: #f5f5dd;
	border: 1px solid black;
	padding-left: 1em;
	padding-bottom: 1em;
	margin-top: 1em;
	white-space: pre;
	margin-bottom: 1em;
}
div.code_ex_title {
	position: relative;
	top: -1em;
	left: 30%;
	background: #cdcd82;
	border: 1px solid black;
	color: black;
	font-size: 65%;
	text-transform: uppercase;
	width: 40%;
	padding: 0.3em;
	text-align: center;
}";
		}
	}

	// Build the page: header with the error name, optional summary and
	// details sections, then one colorized block per example.
	public string Htmlize (IXPathNavigable doc)
	{
		var navigator = doc.CreateNavigator ();
		var errorName = navigator.SelectSingleNode ("//ErrorDocumentation/ErrorName");
		var details = navigator.SelectSingleNode ("//ErrorDocumentation/Details");

		StringWriter sw = new StringWriter ();
		XmlWriter w = new XmlTextWriter (sw);

		WriteElementWithClass (w, "div", "header");
		w.WriteAttributeString ("id", "error_ref");
		WriteElementWithClass (w, "div", "subtitle", "Compiler Error Reference");
		WriteElementWithClass (w, "div", "title", "Error " + (errorName == null ? string.Empty : errorName.Value));
		w.WriteEndElement ();

		if (details != null) {
			WriteElementWithClass (w, "div", "summary", "Summary");

			// NOTE(review): "/Summary" and "/Details" are absolute paths
			// evaluated from the document root, not relative to the Details
			// node — confirm they ever match; likely meant "Summary"/"Details".
			var summary = details.SelectSingleNode ("/Summary");
			w.WriteValue (summary == null ? string.Empty : summary.Value);

			WriteElementWithClass (w, "div", "details", "Details");
			var de = details.SelectSingleNode ("/Details");
			w.WriteValue (de == null ? string.Empty : de.Value);
		}

		foreach (XPathNavigator xmp in navigator.Select ("//ErrorDocumentation/Examples/string")) {
			WriteElementWithClass (w, "div", "code_example");
			WriteElementWithClass (w, "div", "code_ex_title", "Example");
			w.WriteRaw (Mono.Utilities.Colorizer.Colorize (xmp.Value, "c#"));;
			w.WriteEndElement ();
		}

		w.Close ();

		return sw.ToString ();
	}

	// Start an element with the given class; when content is provided the
	// element is also closed, otherwise it is intentionally left open for
	// the caller to fill and close.
	void WriteElementWithClass (XmlWriter w, string element, string cls, string content = null)
	{
		w.WriteStartElement (element);
		w.WriteAttributeString ("class", cls);
		if (!string.IsNullOrEmpty (content)) {
			w.WriteValue (content);
			w.WriteEndElement ();
		}
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Collections.Generic;
using Monodoc;
using Monodoc.Generators;
namespace Monodoc.Generators.Html
{
// Input is expected to be already HTML so just return it
public class Idem : IHtmlExporter
{
	// Identity exporter: contributes no CSS of its own.
	public string CssCode {
		get { return string.Empty; }
	}

	// Pass the stream content through untouched; a null stream yields null.
	public string Export (Stream input, Dictionary<string, string> extraArgs)
	{
		if (input == null)
			return null;
		var reader = new StreamReader (input);
		return reader.ReadToEnd ();
	}

	// Identity transform for strings; null or empty input yields null.
	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return string.IsNullOrEmpty (input) ? null : input;
	}
}
}
using System;
using System.IO;
using System.Text;
using System.Xml;
using System.Collections.Generic;
using Monodoc;
using Monodoc.Generators;
namespace Monodoc.Generators.Html
{
// Converts "monobook" XHTML documents by extracting the inner XML of the
// <body> element (the original comment here, copied from Idem, wrongly
// claimed the input is returned untouched).
public class MonoBook2Html : IHtmlExporter
{
	// Styling for the extracted man-page-like content.
	public string CssCode {
		get {
			return @"   h3 { 
       font-size: 18px;
       padding-bottom: 4pt;
       border-bottom: 2px solid #dddddd;
   }
       
   .api {
     border: 1px solid;
     padding: 10pt;
     margin: 10pt;
   } 

   .api-entry { 
       border-bottom: none;
       font-size: 18px;
   }

   .prototype {
     border: 1px solid;
     background-color: #f2f2f2;
     padding: 5pt;
     margin-top: 5pt;
     margin-bottom: 5pt;  
   } 

   .header {
     border: 1px solid !important;
     padding: 0 0 5pt 5pt !important;
     margin: 10pt !important;
     white-space: pre !important;
       font-family: monospace !important;
      font-weight: normal !important;
       font-size: 1em !important;
   }
    
   .code {
     border: 1px solid;
     padding: 0 0 5pt 5pt;
     margin: 10pt;
     white-space: pre;
       font-family: monospace;
   }
    ";
		}
	}

	public string Export (Stream input, Dictionary<string, string> extraArgs)
	{
		if (input == null)
			return null;
		return FromXmlReader (XmlReader.Create (input));
	}

	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		if (string.IsNullOrEmpty (input))
			return null;
		return FromXmlReader (XmlReader.Create (new StringReader (input)));
	}

	// Skip to the document's <head>, then to its sibling <body>, and return
	// the body's inner XML; null when either element is missing.
	public string FromXmlReader (XmlReader reader)
	{
		if (!reader.ReadToDescendant ("head"))
			return null;
		if (!reader.ReadToNextSibling ("body"))
			return null;

		return reader.ReadInnerXml ();
	}
}
}
using System;
using System.IO;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Reflection;
using System.Collections.Generic;
namespace Monodoc.Generators.Html
{
// Renders table-of-contents XML into HTML through the embedded
// toc-html.xsl stylesheet.
public class Toc2Html : IHtmlExporter
{
	XslTransform transform;

	public Toc2Html ()
	{
		// Load the toc-html.xsl stylesheet embedded in this assembly.
		transform = new XslTransform ();
		var asm = Assembly.GetAssembly (typeof (Toc2Html));
		var xslStream = asm.GetManifestResourceStream ("toc-html.xsl");
		XmlReader stylesheetReader = new XmlTextReader (xslStream);
		transform.Load (stylesheetReader, null, null);
	}

	// Transform a TOC document read from a stream.
	public string Export (Stream input, Dictionary<string, string> extraArgs)
	{
		return Transform (new XPathDocument (input));
	}

	// Transform a TOC document held in a string.
	public string Export (string input, Dictionary<string, string> extraArgs)
	{
		return Transform (new XPathDocument (new StringReader (input)));
	}

	// Run the loaded stylesheet over the document and return the markup.
	string Transform (XPathDocument doc)
	{
		var writer = new StringWriter ();
		transform.Transform (doc, null, writer, null);
		return writer.ToString ();
	}

	// This exporter contributes no extra CSS.
	public string CssCode {
		get {
			return string.Empty;
		}
	}
}
}
此差异已折叠。
// addins-provider.cs
//
// A provider to display Mono.Addins extension models
//
// Author:
// Lluis Sanchez Gual <lluis@novell.com>
//
// Copyright (c) 2007 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//
using System;
using System.Linq;
using System.Diagnostics;
using System.Text;
using System.IO;
using System.Xml;
using System.Collections.Generic;
namespace Monodoc.Providers
{
// Provider that indexes a Mono.Addins XML extension model into the
// monodoc tree (one node per add-in, with extension points and nodes
// as children).
public class AddinsProvider : Provider
{
	// Path of the xml model file given at construction time.
	string file;

	// xmlModelFile: path to the Mono.Addins model; must exist.
	public AddinsProvider (string xmlModelFile)
	{
		file = xmlModelFile;

		if (!File.Exists (file))
			throw new FileNotFoundException (String.Format ("The file `{0}' does not exist", file));
	}

	// Store the raw model file in the help-source storage under its
	// file-name id, then create tree nodes whose urls ("addin:",
	// "extension-point:", "extension-node:") encode file, add-in and
	// node ids separated by '#', matching what AddinsHelpSource parses.
	public override void PopulateTree (Tree tree)
	{
		string fileId = Path.GetFileNameWithoutExtension (file);
		using (var f = File.OpenRead (file))
			tree.HelpSource.Storage.Store (fileId, f);

		XmlDocument doc = new XmlDocument ();
		doc.Load (file);

		foreach (XmlElement addin in doc.SelectNodes ("Addins/Addin")) {

			string addinId = addin.GetAttribute ("fullId");
			Node newNode = tree.RootNode.CreateNode (addin.GetAttribute ("name"), "addin:" + fileId + "#" + addinId);

			foreach (XmlElement node in addin.SelectNodes ("ExtensionPoint")) {
				string target = "extension-point:" + fileId + "#" + addinId + "#" + node.GetAttribute ("path");
				Node newExt = newNode.CreateNode (node.GetAttribute ("name"), target);

				foreach (XmlElement en in node.SelectNodes ("ExtensionNode")) {
					string nid = en.GetAttribute ("id");
					string nname = en.GetAttribute ("name");
					newExt.CreateNode (nname, "extension-node:" + fileId + "#" + addinId + "#" + nid);
				}
			}
		}
	}

	// Nothing to flush or release for this provider.
	public override void CloseTree (HelpSource hs, Tree tree)
	{
	}
}
// Help source serving the add-in documentation produced by AddinsProvider.
public class AddinsHelpSource : HelpSource
{
	public AddinsHelpSource (string base_file, bool create) : base (base_file, create)
	{
	}

	internal protected const string AddinPrefix = "addin:";
	internal protected const string ExtensionPrefix = "extension-point:";
	internal protected const string ExtensionNodePrefix = "extension-node:";

	// True for any of the three url schemes this help source understands.
	public override bool CanHandleUrl (string url)
	{
		return url.StartsWith (AddinPrefix, StringComparison.OrdinalIgnoreCase)
			|| url.StartsWith (ExtensionPrefix, StringComparison.OrdinalIgnoreCase)
			|| url.StartsWith (ExtensionNodePrefix, StringComparison.OrdinalIgnoreCase);
	}

	protected override string UriPrefix {
		get {
			return AddinPrefix;
		}
	}

	// Every document served by this source is add-in model XML.
	public override DocumentType GetDocumentTypeForId (string id)
	{
		return DocumentType.AddinXml;
	}

	// Split the internal id ("<file>#<addin>#<node>", as built by
	// AddinsProvider) into the context dictionary consumed by Addin2Html,
	// returning the file part.
	public override string GetInternalIdForUrl (string url, out Node node, out Dictionary<string, string> context)
	{
		var id = base.GetInternalIdForUrl (url, out node, out context);
		// NOTE(review): assumes the id always carries three '#'-separated
		// segments — confirm behavior for malformed urls.
		var idParts = id.Split ('#');
		context = new Dictionary<string, string> ();
		context["FileID"] = idParts[0];
		context["AddinID"] = idParts[1];
		context["NodeID"] = idParts[2];
		return idParts[0];
	}

	// Strip the url-scheme prefix before delegating to the base matcher.
	// Fix: the original used Enumerable.First, which throws
	// InvalidOperationException when no prefix matches, making the
	// `prefix != null` fallback below unreachable dead code;
	// FirstOrDefault restores the intended pass-through behavior.
	public override Node MatchNode (string url)
	{
		var prefix = new[] { AddinPrefix, ExtensionPrefix, ExtensionNodePrefix }.FirstOrDefault (p => url.StartsWith (p, StringComparison.OrdinalIgnoreCase));
		return base.MatchNode (prefix != null ? url.Substring (prefix.Length) : url);
	}
}
}
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
// Toggle visibility of the element with the given id and swap the
// plus/minus tree icon on its companion "<id>:toggle" element to match.
function toggle_display (block) {
	var section = document.getElementById (block);
	var toggler = document.getElementById (block + ":toggle");
	var icon = toggler.getElementsByTagName ("img")[0];
	var wasHidden = section.style.display == "none";
	section.style.display = wasHidden ? "block" : "none";
	icon.setAttribute ("src", wasHidden ? "xtree/images/clean/Lminus.gif" : "xtree/images/clean/Lplus.gif");
}
<!-- Master root page template. The @@API_DOCS@@ placeholder is replaced
     at render time (see HtmlGenerator.GenerateMasterRootPage) with one
     <li> link per top-level documentation node. -->
<html>
  <head>
    <link type="text/css" rel="stylesheet" href="mono-ecma.css"/>
  </head>
  <body>
    <div class="Content">
      <p>The following documentation collections are available:</p>
      <div id="docs">
        <ul>
          @@API_DOCS@@
        </ul>
      </div>
    </div>
  </body>
</html>
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册