mirror of
https://github.com/sbrl/Pepperminty-Wiki.git
synced 2024-11-22 04:23:01 +00:00
Refactor context highlighter out of context extraction
This commit is contained in:
parent
5a3a501c08
commit
77e4cdcc7d
3 changed files with 27 additions and 33 deletions
|
@ -1376,6 +1376,7 @@ register_module([
|
|||
$link = "?page=" . rawurlencode($result["pagename"]);
|
||||
$pagesource = file_get_contents($result["pagename"] . ".md");
|
||||
$context = search::extract_context($_GET["query"], $pagesource);
|
||||
$context = search::highlight_context($_GET["query"], $context);
|
||||
|
||||
$content .= "<div>\n";
|
||||
$content .= " <h2><a href='$link'>" . $result["pagename"] . "</a></h2>\n";
|
||||
|
@ -1681,21 +1682,6 @@ class search
|
|||
// be broken anyway.
|
||||
$context = self::strip_markup($context);
|
||||
|
||||
// Make the matching words bold.
|
||||
$extraoffset = 0;
|
||||
foreach($group as $match)
|
||||
{
|
||||
$start = $match[1] + $extraoffset;
|
||||
$length = strlen($match[0]);
|
||||
$end = $start + $length;
|
||||
|
||||
// Insert the end one first to make sure that we don't mess up
|
||||
// the offsets.
|
||||
$context = substr_replace($context, "</strong>", $end, 0);
|
||||
$context = substr_replace($context, "<strong>", $start, 0);
|
||||
// $extraoffset += strlen("<strong></strong>");
|
||||
}
|
||||
|
||||
$contexts[] = $context;
|
||||
|
||||
$basepos = $scanpos + 1;
|
||||
|
@ -1703,6 +1689,18 @@ class search
|
|||
|
||||
return implode(" ... ", $contexts);
|
||||
}
|
||||
|
||||
/**
 * Highlights the terms of the given search query in the given context
 * snippet by wrapping every case-insensitive match in <strong> tags.
 * @param  string $query   The raw search query whose terms should be highlighted.
 * @param  string $context The context snippet to highlight the terms in.
 * @return string          The context snippet with all query terms highlighted.
 */
public static function highlight_context($query, $context)
{
	// Split the query into individual terms (assumes tokenize() yields
	// plain-text terms — defined elsewhere in this class).
	$qterms = self::tokenize($query);
	
	foreach($qterms as $qterm)
	{
		// preg_quote() must be given the pattern delimiter ("/") as its
		// second argument so that a forward slash inside a search term
		// is escaped too — otherwise the pattern is malformed.
		$context = preg_replace("/" . preg_quote($qterm, "/") . "/i", "<strong>$0</strong>", $context);
	}
	
	return $context;
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -50,7 +50,7 @@
|
|||
"author": "Starbeamrainbowlabs",
|
||||
"description": "Adds proper search functionality to Pepperminty Wiki. Note that this module, at the moment, just contains test code while I figure out how best to write a search engine.",
|
||||
"id": "feature-search",
|
||||
"lastupdate": 1446117613,
|
||||
"lastupdate": 1446299530,
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
|
|
|
@ -52,6 +52,7 @@ register_module([
|
|||
$link = "?page=" . rawurlencode($result["pagename"]);
|
||||
$pagesource = file_get_contents($result["pagename"] . ".md");
|
||||
$context = search::extract_context($_GET["query"], $pagesource);
|
||||
$context = search::highlight_context($_GET["query"], $context);
|
||||
|
||||
$content .= "<div>\n";
|
||||
$content .= " <h2><a href='$link'>" . $result["pagename"] . "</a></h2>\n";
|
||||
|
@ -357,23 +358,6 @@ class search
|
|||
// be broken anyway.
|
||||
$context = self::strip_markup($context);
|
||||
|
||||
// Make the matching words bold.
|
||||
// Fixme Account for the fact that the offsets in $matches[] are relative to the beginning of the document, not the contextual snippet
|
||||
// Todo Figure out why $extraoffset throws everything off
|
||||
$extraoffset = 0;
|
||||
foreach($group as $match)
|
||||
{
|
||||
$start = $match[1] + $extraoffset;
|
||||
$length = strlen($match[0]);
|
||||
$end = $start + $length;
|
||||
|
||||
// Insert the end one first to make sure that we don't mess up
|
||||
// the offsets.
|
||||
$context = substr_replace($context, "</strong>", $end, 0);
|
||||
$context = substr_replace($context, "<strong>", $start, 0);
|
||||
// $extraoffset += strlen("<strong></strong>");
|
||||
}
|
||||
|
||||
$contexts[] = $context;
|
||||
|
||||
$basepos = $scanpos + 1;
|
||||
|
@ -381,6 +365,18 @@ class search
|
|||
|
||||
return implode(" ... ", $contexts);
|
||||
}
|
||||
|
||||
/**
 * Highlights the terms of the given search query in the given context
 * snippet by wrapping every case-insensitive match in <strong> tags.
 * @param  string $query   The raw search query whose terms should be highlighted.
 * @param  string $context The context snippet to highlight the terms in.
 * @return string          The context snippet with all query terms highlighted.
 */
public static function highlight_context($query, $context)
{
	// Split the query into individual terms (assumes tokenize() yields
	// plain-text terms — defined elsewhere in this class).
	$qterms = self::tokenize($query);
	
	foreach($qterms as $qterm)
	{
		// preg_quote() must be given the pattern delimiter ("/") as its
		// second argument so that a forward slash inside a search term
		// is escaped too — otherwise the pattern is malformed.
		$context = preg_replace("/" . preg_quote($qterm, "/") . "/i", "<strong>$0</strong>", $context);
	}
	
	return $context;
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
Loading…
Reference in a new issue