Unfortunately -- not in a language you likely would want to use. But I've
been working on doing this in MarcEdit 7, and to do it, I found that I got
a lot of mileage using the Levenshtein distance algorithm (which I
prefer). You can usually find these in a variety of languages. The
approach that I took, since performance is a consideration, is to create a
sorted list for the data that I wanted to work with. I then used a
list<int,object[]> where the object was a structure that stored an internal
record_id, a match label (this is normalized data), the length of the match
label (necessary for the distance algorithm), a display label, and a
cluster count. I then had a secondary List<int,string> that stored the int
from primary storage list, and the first element character. This allows
for a more efficient method of processing large data as you process data in
the sorted list, but then can jump backwards to check for inversion. It's
not perfect, but it allows for acceptable processing within MarcEdit of
recordsets around a million or more once you have the data to be
processed. I do that by breaking data down and preprocessing it in an
SQLite database -- that way I can access large batches of marc data quickly
and have some of the grouping and sorting done for me.
I'm not sure this helps at all (since it's commingled with the event code
that is generated with the results data) -- but here's the code I'm using
in C# for the data (I'll assume you have your own fuzzy matching
algorithms).
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Runtime.InteropServices;
namespace MarcEdit
{
public partial class frmClustering : Form
{
// Supported clustering algorithms. "dice" is declared but never selected by
// this form's switch statements; only composite and levenshtein are reachable.
private enum CLUSTERING_ALGORITHMS
{composite=0,dice=1,levenshtein=2 }
// Path to the temporary SQLite database built by the embedded cluster_api helper.
private string pInternal_Cluster_Database = "";
// Path to the MARC source being clustered: a temp snapshot of the caller's
// buffer when launched from the editor, otherwise the user-selected file.
private string pSourceFile = "";
// Editor window that opened this form; null when run stand-alone.
internal meEditor pCaller = null;
// Maps a GUID (carried on a placeholder "VIRT" child node) to the real child
// TreeNodes, which are swapped in lazily in tree_cluster_BeforeExpand.
private System.Collections.Hashtable virtualList = new
System.Collections.Hashtable();
// Running count of checked tree nodes; drives edit-control visibility.
private int icheckCount = 0;
// record id -> (find text -> replacement text); accumulated cluster edits,
// created on first use by Add2SavedList and consumed by ClusterReplace.
private Dictionary<int, Dictionary<string,string>> saved_changes =
null;
// MARC tags used for the most recent cluster generation.
private string[] ptag_list;
/// <summary>Default constructor; initializes designer-generated controls.</summary>
public frmClustering()
{
InitializeComponent();
}
// Designer-wired event stub; intentionally empty.
private void textBoxCClass4_TextChanged(object sender, EventArgs e)
{
}
// Designer-wired event stub; intentionally empty.
private void pictureBox2_Click(object sender, EventArgs e)
{
}
// Designer-wired event stub; intentionally empty.
private void groupBox2_Enter(object sender, EventArgs e)
{
}
/// <summary>
/// Indexes the source MARC file into a temporary SQLite database via the
/// embedded cluster_api assembly (loaded by reflection), then switches the
/// UI to the cluster-generation panel with default selections.
/// </summary>
private void cmdImport_Click(object sender, EventArgs e)
{
    pInternal_Cluster_Database = cglobal.mglobal.GenerateTempFile();

    // When launched from the editor, snapshot the caller's buffer to a temp
    // file; otherwise cluster the user-selected file directly.
    if (pCaller != null)
    {
        pSourceFile = cglobal.mglobal.GenerateTempFile();
        pCaller.SaveFile(pSourceFile, false, false);
    }
    else
    {
        pSourceFile = txtSource.Text;
    }

    string sSource = pSourceFile;
    string[] indexed_fields;
    if (rdControlData.Checked)
    {
        // Standard name/subject access-point tags.
        indexed_fields = new string[] { "100", "110", "111", "130",
            "600", "610", "611", "630", "650", "651", "653", "654", "655",
            "700", "710", "711", "730" };
    }
    else if (rdCustom.Checked)
    {
        indexed_fields = txtFields.Text.Split(",".ToCharArray());
    }
    else
    {
        indexed_fields = new string[] { "all" };
    }

    lbStatus.Text = "Indexing File...please wait.";
    Application.DoEvents();

    // The SQLite helper ships as an embedded resource; the mono build differs.
    string name = "MarcEdit.cluster_api.dll";
    if (cglobal.IsWindows == false)
    {
        name = "MarcEdit.cluster_api_mono.dll";
    }

    try
    {
        byte[] resource = null;
        using (System.IO.Stream stream =
            System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream(name))
        {
            if (stream != null)
            {
                // Stream.Read is not guaranteed to fill the buffer in one
                // call; copy through a MemoryStream so the assembly image is
                // always read completely.
                using (System.IO.MemoryStream ms = new System.IO.MemoryStream())
                {
                    stream.CopyTo(ms);
                    resource = ms.ToArray();
                }
            }
        }

        System.Reflection.Assembly local_api =
            System.Reflection.Assembly.Load(resource);
        Type clsqlite = local_api.GetType("cluster_api.clsqlite");
        object SqliteInstance = Activator.CreateInstance(clsqlite);
        bool found_dependency = false;

        // Configure the helper via reflection (no compile-time reference to
        // the embedded assembly exists).
        System.Reflection.PropertyInfo prop =
            clsqlite.GetProperty("global_connection_string");
        prop.SetValue(SqliteInstance, pInternal_Cluster_Database, null);
        System.Reflection.PropertyInfo pTitle =
            clsqlite.GetProperty("TitleTag");
        pTitle.SetValue(SqliteInstance, "245$a", null);

        clsqlite.InvokeMember("DoSQLite",
            System.Reflection.BindingFlags.InvokeMethod
            | System.Reflection.BindingFlags.Instance
            | System.Reflection.BindingFlags.Public,
            null, SqliteInstance, new object[] { sSource, indexed_fields });
        found_dependency = true;

        // The helper reports failures through its LastError property rather
        // than by throwing.
        object ErrorMessage =
            clsqlite.GetProperty("LastError").GetValue(SqliteInstance, null);
        if (((string)ErrorMessage).Length > 0)
        {
            System.Windows.Forms.MessageBox.Show((string)ErrorMessage);
            return;
        }

        // Seed defaults for the cluster-generation panel and show it.
        cmbClusterField.SelectedIndex = 0;
        cmbclusteringalgorithm.SelectedIndex = 1;
        cmbClusterSize.SelectedIndex = 1;
        cmbSortBy.SelectedIndex = 0;
        PGenerate.BringToFront();
        cmbClusterField.Focus();
        Application.DoEvents();

        if (found_dependency == false)
        {
            cglobal.mglobal.RaiseError("Unabled to load dependency files...stopping.");
            return;
        }
    }
    catch (System.Exception general_error)
    {
        cglobal.mglobal.RaiseError("Error Raised: \n\n" + general_error.ToString());
    }
}
/// <summary>
/// Queries the prebuilt SQLite index for the selected field group, clusters
/// the returned headings with the selected fuzzy-match algorithm, and
/// populates the results tree. Children of each cluster node are loaded
/// lazily via a "VIRT" placeholder resolved in tree_cluster_BeforeExpand.
/// </summary>
private void cmdGenerateCluster_Click(object sender, EventArgs e)
{
    string name = "MarcEdit.cluster_api.dll";
    if (cglobal.IsWindows == false)
    {
        name = "MarcEdit.cluster_api_mono.dll";
    }

    // Map the selected field group to the MARC tags to query.
    string[] search_fields;
    switch (cmbClusterField.Text.ToLower())
    {
        case "names (1xx, 7xx)":
            search_fields = new string[] { "100", "110", "111",
                "130", "700", "710", "711", "730" };
            break;
        case "subjects (6xx)":
            search_fields = new string[] { "600", "610", "611",
                "630", "650", "651", "653", "654", "655" };
            break;
        default:
            // A custom, comma-delimited tag list was entered.
            search_fields = cmbClusterField.Text.Split(",".ToCharArray());
            break;
    }
    ptag_list = search_fields;

    CLUSTERING_ALGORITHMS alg = CLUSTERING_ALGORITHMS.levenshtein;
    switch (cmbclusteringalgorithm.SelectedIndex)
    {
        case 0:
            alg = CLUSTERING_ALGORITHMS.composite;
            break;
        case 1:
            alg = CLUSTERING_ALGORITHMS.levenshtein;
            cmbclusteringalgorithm.SelectedIndex = 1;
            break;
    }
    // Keep the algorithm selector on the results panel in sync.
    cmbAlg.SelectedIndex = cmbclusteringalgorithm.SelectedIndex;

    lbStatus2.Text = "Extracting data...please wait.";
    Application.DoEvents();

    byte[] resource = null;
    using (System.IO.Stream stream =
        System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream(name))
    {
        if (stream != null)
        {
            // Stream.Read may return fewer bytes than requested; CopyTo
            // guarantees the full assembly image is read.
            using (System.IO.MemoryStream ms = new System.IO.MemoryStream())
            {
                stream.CopyTo(ms);
                resource = ms.ToArray();
            }
        }
    }

    System.Reflection.Assembly local_api =
        System.Reflection.Assembly.Load(resource);
    Type clsqlite = local_api.GetType("cluster_api.clsqlite");
    object SqliteInstance = Activator.CreateInstance(clsqlite);
    bool found_dependency = false;

    // Configure the reflection-loaded helper, then run the query. Each row
    // comes back tab-delimited: label \t count \t record ids.
    System.Reflection.PropertyInfo prop =
        clsqlite.GetProperty("global_connection_string");
    prop.SetValue(SqliteInstance, pInternal_Cluster_Database, null);
    System.Reflection.PropertyInfo pTitle =
        clsqlite.GetProperty("TitleTag");
    pTitle.SetValue(SqliteInstance, "245$a", null);

    string[] recs = (string[])clsqlite.InvokeMember("DoSQLiteQuery",
        System.Reflection.BindingFlags.InvokeMethod
        | System.Reflection.BindingFlags.Instance
        | System.Reflection.BindingFlags.Public,
        null, SqliteInstance, new object[] { search_fields });
    found_dependency = true;

    object ErrorMessage =
        clsqlite.GetProperty("LastError").GetValue(SqliteInstance, null);
    if (((string)ErrorMessage).Length > 0)
    {
        System.Windows.Forms.MessageBox.Show((string)ErrorMessage);
        return;
    }

    tree_cluster.Nodes.Clear();
    lbStatus2.Text = "Generating Clusters...Please Wait";
    Application.DoEvents();

    if (recs.Length == 0)
    {
        MessageBox.Show("none");
    }
    else
    {
        // master_list is a list of clusters; each cluster is an ArrayList of
        // cluster_data_structure entries whose first element is the seed.
        System.Collections.ArrayList master_list = new
            System.Collections.ArrayList();
        cluster_data_structure objCluster = new cluster_data_structure();
        // istart skips clusters already ruled out: input arrives sorted, so
        // once a row starts a new cluster, earlier clusters never match again.
        int istart = 0;
        int recs_count = recs.Count();
        for (int yy = 0; yy < recs_count; yy++)
        {
            string r = recs[yy];
            bool bfound = false;
            string[] parts = r.Split("\t".ToCharArray());
            string search_label = ExtractClusterString(parts[0],
                new char[] { 'a' });
            int master_list_count = master_list.Count;
            for (int x = istart; x < master_list_count; x++)
            {
                System.Collections.ArrayList t =
                    ((System.Collections.ArrayList)master_list[x]);
                if (t.Count > 0)
                {
                    cluster_data_structure tmp_s =
                        (cluster_data_structure)t[0];
                    // Allow up to ~10% of the seed's length in edit distance.
                    int fuzzy_coeficent = (tmp_s.StringLength -
                        (int)(tmp_s.StringLength * 0.90));
                    // Cheap length pre-filter before the expensive comparison.
                    if (tmp_s.StringLength - search_label.Length <=
                        fuzzy_coeficent)
                    {
                        if (alg == CLUSTERING_ALGORITHMS.levenshtein)
                        {
                            if (meedit60.LevenshteinDistanceExtensions.LevenshteinDistance(search_label,
                                tmp_s.MatchLabel, false) <= fuzzy_coeficent)
                            {
                                objCluster = new cluster_data_structure();
                                objCluster.StringLength = search_label.Length;
                                objCluster.Label = parts[0];
                                objCluster.MatchLabel = search_label;
                                objCluster.rec_ids = parts[2];
                                objCluster.ClusterCount = parts[1];
                                t.Add(objCluster);
                                master_list[x] = t;
                                istart = x;
                                bfound = true;
                            }
                            else
                            {
                                continue;
                            }
                        }
                        else if (alg == CLUSTERING_ALGORITHMS.composite)
                        {
                            if (meedit60.StringExtensions.FuzzyMatch(search_label, tmp_s.MatchLabel) >
                                0.97)
                            {
                                objCluster = new cluster_data_structure();
                                objCluster.StringLength = search_label.Length;
                                objCluster.Label = parts[0];
                                objCluster.MatchLabel = search_label;
                                objCluster.rec_ids = parts[2];
                                objCluster.ClusterCount = parts[1];
                                t.Add(objCluster);
                                master_list[x] = t;
                                istart = x;
                                bfound = true;
                            }
                            else
                            {
                                continue;
                            }
                        }
                    }
                }
                if (bfound == true)
                {
                    break;
                }
            }
            if (!bfound)
            {
                // No existing cluster matched: start a new one seeded with
                // this row, and move istart past all earlier clusters.
                istart = master_list_count;
                System.Collections.ArrayList tmp_list = new
                    System.Collections.ArrayList();
                objCluster = new cluster_data_structure();
                objCluster.StringLength = search_label.Length;
                objCluster.Label = parts[0];
                objCluster.MatchLabel = search_label;
                objCluster.rec_ids = parts[2];
                objCluster.ClusterCount = parts[1];
                tmp_list.Add(objCluster);
                master_list.Add(tmp_list);
            }
        }

        // Minimum number of members before a cluster is shown (-1 = show all).
        int cluster_size = 1;
        switch (cmbClusterSize.SelectedIndex)
        {
            case 0:
                cluster_size = -1;
                break;
            case 1:
                cluster_size = 1;
                break;
            case 2:
                cluster_size = 5;
                break;
            default:
                cluster_size = 1;
                break;
        }

        List<TreeNode> parent_cluster_nodes = new List<TreeNode>();
        tree_cluster.BeginUpdate();
        TreeNode tmp_node = new TreeNode();
        foreach (object l in master_list)
        {
            System.Collections.ArrayList t =
                (System.Collections.ArrayList)l;
            int node_index = 1;
            string guid = System.Guid.NewGuid().ToString();
            System.Windows.Forms.TreeNode parent_node = new TreeNode();
            List<TreeNode> clustered_nodes = new List<TreeNode>();
            if (t.Count > cluster_size)
            {
                foreach (cluster_data_structure tt in t)
                {
                    if (node_index == 1)
                    {
                        // First member labels the parent and gets a "VIRT"
                        // placeholder child whose Text is the virtualList key;
                        // real children load on BeforeExpand.
                        parent_node.Text = tt.Label;
                        parent_node.Tag = node_index;
                        TreeNode virt_node = new TreeNode();
                        virt_node.Text = guid;
                        virt_node.Name = "VIRT";
                        parent_node.Nodes.Add(virt_node);
                        tmp_node = new TreeNode(tt.ClusterCount + ": " + tt.Label);
                        tmp_node.Tag = tt.rec_ids;
                        clustered_nodes.Add(tmp_node);
                    }
                    else
                    {
                        tmp_node = new TreeNode(tt.ClusterCount + ": " + tt.Label);
                        tmp_node.Tag = tt.rec_ids;
                        clustered_nodes.Add(tmp_node);
                        // Tag ends up holding the member count for display.
                        parent_node.Tag = node_index;
                    }
                    node_index++;
                }
                if (cmbSortBy.SelectedIndex == 0)
                {
                    parent_node.Text = "(" +
                        parent_node.Tag.ToString() + ") -- " + parent_node.Text;
                }
                else
                {
                    parent_node.Text = parent_node.Text + " -- (" +
                        parent_node.Tag.ToString() + ")";
                }
                virtualList.Add(guid, clustered_nodes);
                parent_cluster_nodes.Add(parent_node);
            }
        }
        tree_cluster.Nodes.AddRange(parent_cluster_nodes.ToArray());
        tree_cluster.TreeViewNodeSorter = new AlphanumComparator();
        tree_cluster.EndUpdate();
        tree_cluster.Invalidate();
        Application.DoEvents();
    }
    PClusteredData.BringToFront();
    if (found_dependency == false)
    {
        cglobal.mglobal.RaiseError("Unabled to load dependency files...stopping.");
        return;
    }
}
/// <summary>
/// Form load: shows the start panel, sizes labels, and enables manual file
/// selection when no editor window launched this form.
/// </summary>
private void frmClustering_Load(object sender, EventArgs e)
{
    PStart.BringToFront();
    cglobal.mglobal.SizeLabels(this);

    // Stand-alone launch: there is no caller buffer to snapshot, so let the
    // user pick a source file directly.
    if (pCaller == null)
    {
        txtSource.Text = "";
        txtSource.Enabled = true;
    }
}
/// <summary>
/// Extracts the requested subfields from a "$"-delimited MARC field string
/// (the character immediately after each "$" is the subfield code) and
/// returns them re-joined with "$" separators, trimmed of trailing/leading
/// punctuation and whitespace.
/// </summary>
/// <param name="s">The raw field text, e.g. "650 \0$aHeading.$vForm".</param>
/// <param name="subfields">Subfield codes to keep, e.g. { 'a' }.</param>
private string ExtractClusterString(string s, char[] subfields)
{
    string[] parts = s.Split("$".ToCharArray());
    string tmp_string = "";
    int parts_count = parts.Count();
    // parts[0] is whatever precedes the first "$" (indicators etc.), so skip it.
    for (int x = 1; x < parts_count; x++)
    {
        // Guard: "$$" or a trailing "$" produces an empty part; indexing
        // parts[x][0] on it previously threw IndexOutOfRangeException.
        if (parts[x].Length == 0)
        {
            continue;
        }
        if (Array.IndexOf(subfields, parts[x][0]) > -1)
        {
            tmp_string += "$" + parts[x].Trim(" .-,?!".ToCharArray());
        }
    }
    return tmp_string;
}
/// <summary>
/// Shows the custom-tag textbox only while the "custom" radio button is
/// selected.
/// </summary>
private void rdCustom_CheckedChanged(object sender, EventArgs e)
{
    // Visibility mirrors the radio button's checked state directly.
    txtFields.Visible = rdCustom.Checked;
}
/// <summary>Releases the tree control and the form itself on close.</summary>
private void frmClustering_FormClosing(object sender,
FormClosingEventArgs e)
{
tree_cluster.Dispose();
// NOTE(review): calling Dispose(true) from inside FormClosing is unusual --
// the framework normally disposes the form itself after closing completes;
// confirm this explicit dispose is intentional.
this.Dispose(true);
Application.DoEvents();
}
/// <summary>
/// Lazy-load hook: when a parent node still holds its "VIRT" placeholder
/// child, swaps it out for the real cluster members cached in virtualList.
/// </summary>
private void tree_cluster_BeforeExpand(object sender,
TreeViewCancelEventArgs e)
{
    // Nodes without the placeholder have already been populated.
    if (!e.Node.Nodes.ContainsKey("VIRT"))
    {
        return;
    }
    try
    {
        // The placeholder's Text carries the GUID key into virtualList.
        string guid = e.Node.Nodes[0].Text;
        e.Node.Nodes.Clear();
        e.Node.Nodes.AddRange(((List<TreeNode>)virtualList[guid]).ToArray());
    }
    catch
    {
        // Reset to a known-empty state if the cached nodes can't be loaded.
        e.Node.Nodes.Clear();
        MessageBox.Show("error loading virtual nodes");
    }
}
/// <summary>Closes the clustering form without applying further changes.</summary>
private void lnkClose_LinkClicked(object sender,
LinkLabelLinkClickedEventArgs e)
{
this.Close();
}
/// <summary>
/// Applies all saved cluster edits to the source file via the meedit60
/// ClusterReplace engine, reports the number of changes, pushes the result
/// back to the calling editor (when present), and closes the form.
/// </summary>
private void lnkEditCluster_LinkClicked(object sender,
LinkLabelLinkClickedEventArgs e)
{
    meedit60.meedit60 objE = new meedit60.meedit60();
    // Fix: pCaller was dereferenced unconditionally here even though the
    // code below guards on pCaller != null -- a stand-alone launch would
    // have thrown a NullReferenceException.
    if (pCaller != null)
    {
        objE.FileEncoding = pCaller.pEncoding;
        objE.LogFileHandle = pCaller.logfilehandle;
    }
    int lCount = 0;
    lbStatus3.Text = "Processing Changes....Please wait";
    Application.DoEvents();
    string t = objE.ClusterReplace(pSourceFile, ptag_list,
        saved_changes, true, out lCount);
    cglobal.mglobal.RaiseMessage(lCount.ToString() + " changes have been made.");
    if (pCaller != null)
    {
        cglobal.mglobal.UpdateEditor(pCaller, pSourceFile, t, lCount);
    }
    this.Close();
}
/// <summary>
/// Propagates a parent node's check state to its children, keeps the
/// checked-node count, and toggles the edit controls' visibility.
/// NOTE(review): setting child.Checked here re-fires AfterCheck for each
/// child (standard WinForms behavior); the increments/decrements appear to
/// stay symmetric, but confirm the count is intended to include children.
/// </summary>
private void tree_cluster_AfterCheck(object sender,
TreeViewEventArgs e)
{
    if (e.Node.Checked)
    {
        icheckCount++;
        txtEditValue.Visible = true;
        lnkAddCriteria.Visible = true;
        if (e.Node.Nodes.Count > 0)
        {
            // Expanding first forces the VIRT placeholder to resolve.
            e.Node.Expand();
            foreach (TreeNode child in e.Node.Nodes)
            {
                child.Checked = true;
            }
        }
    }
    else
    {
        icheckCount--;
        // Hide the edit controls once nothing remains checked.
        if (icheckCount <= 0)
        {
            txtEditValue.Visible = false;
            lnkAddCriteria.Visible = false;
        }
        foreach (TreeNode child in e.Node.Nodes)
        {
            child.Checked = false;
        }
    }
}
/// <summary>
/// Seeds the edit box from the selected node: the subfield portion ("$"
/// onward) of the node text when present, otherwise the whole text.
/// </summary>
private void tree_cluster_AfterSelect(object sender,
TreeViewEventArgs e)
{
    if (!e.Node.IsSelected)
    {
        return;
    }
    string label = e.Node.Text;
    int dollar = label.IndexOf("$");
    if (dollar > -1)
    {
        label = label.Substring(dollar);
    }
    txtEditValue.Text = label;
}
/// <summary>
/// Saves a find/replace pair for every checked node (whole clusters when the
/// parent is checked, individual members otherwise) and removes the saved
/// nodes from the tree. Removal indexes are collected first and removed in
/// descending order so earlier removals don't shift later indexes.
/// </summary>
private void lnkAddCriteria_LinkClicked(object sender,
LinkLabelLinkClickedEventArgs e)
{
if (icheckCount >= 1)
{
int node_count = tree_cluster.Nodes.Count;
// Indexes of fully-checked parent nodes to remove afterwards.
System.Collections.ArrayList first_level = new
System.Collections.ArrayList();
// Indexes of individually-checked children within one parent.
System.Collections.ArrayList second_level = new
System.Collections.ArrayList();
for (int x=0; x < node_count; x++)
{
if (tree_cluster.Nodes[x].Checked)
{
// Parent checked: save every member of the cluster.
foreach (TreeNode t_lower in
tree_cluster.Nodes[x].Nodes)
{
Add2SavedList((string)t_lower.Tag,
t_lower.Text, txtEditValue.Text);
//tmp_string += (string)t_lower.Tag + "\n";
}
first_level.Add(x);
}
else if (tree_cluster.Nodes[x].IsExpanded)
{
// Parent unchecked but expanded: save only checked children.
second_level = new System.Collections.ArrayList();
for(int y=0; y<tree_cluster.Nodes[x].Nodes.Count;
y++)
{
if (tree_cluster.Nodes[x].Nodes[y].Checked)
{
Add2SavedList((string)tree_cluster.Nodes[x].Nodes[y].Tag,
tree_cluster.Nodes[x].Nodes[y].Text, txtEditValue.Text);
//tmp_string += (string)t_lower.Tag + ",";
second_level.Add(y);
}
}
// Remove saved children highest-index first.
for (int i = second_level.Count -1; i >= 0; i--)
{
tree_cluster.Nodes[x].Nodes.RemoveAt((int)second_level[i]);
}
}
//tmp_string = tmp_string.TrimEnd(", ".ToCharArray());
}
if (first_level.Count > 0)
{
// Remove saved parent clusters highest-index first.
for (int i = first_level.Count - 1; i >= 0; i--)
{
tree_cluster.Nodes.RemoveAt((int)first_level[i]);
}
}
txtEditValue.Text = "";
lnkEditCluster.Enabled = true;
}
else
{
cglobal.mglobal.RaiseError("No nodes have been selected.");
}
}
/// <summary>
/// Records a find/replace pair for every record id in the comma-delimited
/// sid list, building the saved_changes map consumed by ClusterReplace.
/// </summary>
/// <param name="sid">Comma-delimited record ids the change applies to.</param>
/// <param name="sfind">Node text; any leading "count: " prefix is stripped.</param>
/// <param name="sreplace">Replacement text entered by the user.</param>
private void Add2SavedList(string sid, string sfind, string sreplace)
{
    // Cluster nodes render as "<count>: <label>" -- keep only the label.
    if (sfind.IndexOf(": ") > -1)
    {
        sfind = sfind.Substring(sfind.IndexOf(":") + 1).TrimStart();
    }

    // Lazily create the map (hoisted out of the loop; it only needs checking once).
    if (saved_changes == null)
    {
        saved_changes = new Dictionary<int, Dictionary<string, string>>();
    }

    string[] arr_keys = sid.Split(",".ToCharArray());
    foreach (string a in arr_keys)
    {
        int key = System.Convert.ToInt32(a);
        Dictionary<string, string> changes;
        if (!saved_changes.TryGetValue(key, out changes))
        {
            changes = new Dictionary<string, string>();
            saved_changes.Add(key, changes);
        }
        // Fix: the previous Dictionary.Add threw ArgumentException when the
        // same find text was saved twice for a record id; the indexer keeps
        // the most recent replacement instead.
        changes[sfind] = sreplace;
    }
}
/// <summary>
/// Prompts for a MARC (*.mrk) source file, starting from the user's last
/// "open" directory, and records the selection in txtSource.
/// </summary>
private void cmdOpen_Click(object sender, EventArgs e)
{
string sPath = "";
// Recall the last-used open directory from the settings XML.
cglobal.cxmlini.GetSettings(cglobal.mglobal.XMLPath(),
"settings", "open", cglobal.mglobal.UserDataPath(), ref sPath);
string sfilename = cglobal.mglobal.OpenFile(sPath,
mglobal.M_FILTER, "*.mrk", cglobal.cxmlini.LoadDictionary(cglobal.objDoc,
"global", "tooltip/open", "Open File"));
// An empty result means the dialog was cancelled.
if (sfilename.Length != 0) { txtSource.Text = sfilename; }
}
/// <summary>
/// Regenerates the clusters with the newly selected algorithm when the user
/// changes it from the results panel.
/// </summary>
private void cmbAlg_SelectedIndexChanged(object sender, EventArgs e)
{
    // Only react while the clustered-results panel is frontmost.
    if (this.Controls[0] != PClusteredData)
    {
        return;
    }
    PGenerate.BringToFront();
    Application.DoEvents();
    // Mirror the choice back to the generation panel's selector, then rerun.
    cmbclusteringalgorithm.SelectedIndex = cmbAlg.SelectedIndex;
    cmdGenerateCluster_Click(this, new EventArgs());
}
}
/// <summary>
/// One cluster member: the raw field text (Label), its normalized match key
/// (MatchLabel) plus cached key length, the record ids it came from, and the
/// occurrence count reported by the SQLite index (kept as text for display).
/// </summary>
class cluster_data_structure
{
    private string _recordIds = "";
    private int _matchLength = 0;
    private string _matchLabel = "";
    private string _displayLabel = "";
    private string _clusterCount = "";

    // Comma-delimited internal record ids contributing to this entry.
    internal string rec_ids
    {
        get { return _recordIds; }
        set { _recordIds = value; }
    }

    // Cached MatchLabel length, used by the distance-threshold math.
    internal int StringLength
    {
        get { return _matchLength; }
        set { _matchLength = value; }
    }

    // Normalized string actually compared by the fuzzy algorithms.
    internal string MatchLabel
    {
        get { return _matchLabel; }
        set { _matchLabel = value; }
    }

    // Original, display-ready field text.
    internal string Label
    {
        get { return _displayLabel; }
        set { _displayLabel = value; }
    }

    // Occurrence count as text (comes straight from the query row).
    internal string ClusterCount
    {
        get { return _clusterCount; }
        set { _clusterCount = value; }
    }
}
/// <summary>
/// Natural-sort comparer for TreeNodes: splits each node's Text into
/// alternating digit and non-digit chunks and compares digit chunks by
/// numeric value, others ordinally.
/// </summary>
class AlphanumComparator : System.Collections.IComparer
{
    private enum ChunkType { Alphanumeric, Numeric };

    // True while ch belongs to the same chunk type (digit vs non-digit)
    // as the chunk's first character otherCh.
    private bool InChunk(char ch, char otherCh)
    {
        ChunkType type = ChunkType.Alphanumeric;
        if (char.IsDigit(otherCh))
        {
            type = ChunkType.Numeric;
        }
        if ((type == ChunkType.Alphanumeric && char.IsDigit(ch))
            || (type == ChunkType.Numeric && !char.IsDigit(ch)))
        {
            return false;
        }
        return true;
    }

    // Compares two all-digit strings by numeric value without parsing them,
    // so arbitrarily long digit runs cannot overflow (the previous
    // Convert.ToInt32 threw OverflowException beyond 10 digits). After
    // stripping leading zeros, the longer string is the larger number; equal
    // lengths reduce to an ordinal compare.
    private static int CompareNumericChunks(string a, string b)
    {
        a = a.TrimStart('0');
        b = b.TrimStart('0');
        if (a.Length != b.Length)
        {
            return a.Length < b.Length ? -1 : 1;
        }
        return string.CompareOrdinal(a, b);
    }

    public int Compare(object x, object y)
    {
        // NOTE(review): the operands are swapped relative to the usual
        // IComparer convention (s1 = y, s2 = x), which reverses the sort
        // order; preserved as-is -- confirm it is intentional.
        TreeNode s1 = y as TreeNode;
        TreeNode s2 = x as TreeNode;
        if (s1 == null || s2 == null)
        {
            return 0;
        }

        int thisMarker = 0;
        int thatMarker = 0;

        while ((thisMarker < s1.Text.Length) || (thatMarker < s2.Text.Length))
        {
            // A string that runs out first (a prefix of the other) sorts first.
            if (thisMarker >= s1.Text.Length)
            {
                return -1;
            }
            else if (thatMarker >= s2.Text.Length)
            {
                return 1;
            }

            char thisCh = s1.Text[thisMarker];
            char thatCh = s2.Text[thatMarker];
            StringBuilder thisChunk = new StringBuilder();
            StringBuilder thatChunk = new StringBuilder();

            // Accumulate a maximal run of same-type characters from each side.
            while ((thisMarker < s1.Text.Length)
                && (thisChunk.Length == 0 || InChunk(thisCh, thisChunk[0])))
            {
                thisChunk.Append(thisCh);
                thisMarker++;
                if (thisMarker < s1.Text.Length)
                {
                    thisCh = s1.Text[thisMarker];
                }
            }
            while ((thatMarker < s2.Text.Length)
                && (thatChunk.Length == 0 || InChunk(thatCh, thatChunk[0])))
            {
                thatChunk.Append(thatCh);
                thatMarker++;
                if (thatMarker < s2.Text.Length)
                {
                    thatCh = s2.Text[thatMarker];
                }
            }

            int result;
            // If both chunks are numeric, sort them numerically.
            if (char.IsDigit(thisChunk[0]) && char.IsDigit(thatChunk[0]))
            {
                result = CompareNumericChunks(thisChunk.ToString(), thatChunk.ToString());
            }
            else
            {
                result = thisChunk.ToString().CompareTo(thatChunk.ToString());
            }

            if (result != 0)
            {
                return result;
            }
        }
        return 0;
    }
}
}
On Wed, Oct 25, 2017 at 11:57 AM, Eric Lease Morgan <[log in to unmask]> wrote:
> Has anybody here played with any clustering techniques for normalizing
> bibliographic data?
>
> My bibliographic data is fraught with inconsistencies. For example, a
> publisher’s name may be recorded one way, another way, or a third way. The
> same goes for things like publisher place: South Bend; South Bend, IN;
> South Bend, Ind. And then there is the ISBD punctuation that is sometimes
> applied and sometimes not. All of these inconsistencies make indexing &
> faceted browsing more difficult than it needs to be.
>
> OpenRefine is a really good program for finding these inconsistencies and
> then normalizing them. OpenRefine calls this process “clustering”, and it
> points to a nice page describing the various clustering processes. [1] Some
> of the techniques included “fingerprinting” and calculating “nearest
> neighbors”. Unfortunately, OpenRefine is not really programable, and I’d
> like to automate much of this process.
>
> Does anybody here have any experience automating the process of normalizing
> bibliographic (MARC) data?
>
> [1] about clustering - http://bit.ly/2izQarE
>
> —
> Eric Morgan
>
|