1
0
mirror of https://github.com/meineerde/redmine.git synced 2026-02-03 23:53:23 +00:00

Tokenize search parameter in order to allow multiple search terms in:

* the "contains" operator of text filters
* issue autocomplete

Patch by Jens Krämer.

git-svn-id: http://svn.redmine.org/redmine/trunk@21238 e93f8b46-1217-0410-a6f0-8f06a7374b81
This commit is contained in:
Marius Balteanu 2021-10-05 19:54:31 +00:00
parent 04e27aa161
commit 506fc9d74c
4 changed files with 64 additions and 16 deletions

View File

@ -100,9 +100,8 @@ class Issue < ActiveRecord::Base
ids.any? ? where(:assigned_to_id => ids) : none
end)
# Scopes issues whose subject matches the given free-text query.
# The query is tokenized (see Query.tokenized_like_conditions), so every
# whitespace-separated term must match the subject. Blank queries return
# the scope unfiltered.
scope :like, (lambda do |q|
  q = q.to_s
  if q.present?
    # Note: the single non-tokenized LOWER(...) LIKE condition was replaced
    # by the tokenized variant; only one where clause must remain here.
    where(*::Query.tokenized_like_conditions("#{table_name}.subject", q))
  end
end)

View File

@ -1440,11 +1440,25 @@ class Query < ActiveRecord::Base
prefix = suffix = nil
prefix = '%' if options[:ends_with]
suffix = '%' if options[:starts_with]
prefix = suffix = '%' if prefix.nil? && suffix.nil?
value = queried_class.sanitize_sql_like value
queried_class.sanitize_sql_for_conditions(
[Redmine::Database.like(db_field, '?', :match => options[:match]), "#{prefix}#{value}#{suffix}"]
)
if prefix || suffix
value = queried_class.sanitize_sql_like value
queried_class.sanitize_sql_for_conditions(
[Redmine::Database.like(db_field, '?', :match => options[:match]), "#{prefix}#{value}#{suffix}"]
)
else
queried_class.sanitize_sql_for_conditions(
::Query.tokenized_like_conditions(db_field, value, **options)
)
end
end
# Builds a SQL LIKE condition matching +db_field+ against every token of
# +value+ (tokens per Redmine::Search::Tokenizer). The per-token conditions
# are joined with AND, so all tokens must match. When tokenization yields
# nothing, +value+ itself is used as the single term.
# Returns an array [sql_fragment, bind_value, ...] suitable for splatting
# into #where or passing to sanitize_sql_for_conditions.
def self.tokenized_like_conditions(db_field, value, **options)
  terms = Redmine::Search::Tokenizer.new(value).tokens
  terms = [value] if terms.blank?
  fragments = []
  bindings = []
  terms.each do |term|
    fragments << Redmine::Database.like(db_field, '?', options)
    bindings << "%#{sanitize_sql_like term}%"
  end
  [fragments.join(" AND "), *bindings]
end
# Adds a filter for the given custom field

View File

@ -57,15 +57,7 @@ module Redmine
@projects = projects
@cache = options.delete(:cache)
@options = options
# extract tokens from the question
# eg. hello "bye bye" => ["hello", "bye bye"]
@tokens = @question.scan(%r{((\s|^)"[^"]+"(\s|$)|\S+)}).collect {|m| m.first.gsub(%r{(^\s*"\s*|\s*"\s*$)}, '')}
# tokens must be at least 2 characters long
# but for Chinese characters (Chinese HANZI/Japanese KANJI), tokens can be one character
@tokens = @tokens.uniq.select {|w| w.length > 1 || w =~ /\p{Han}/}
# no more than 5 tokens to search for
@tokens.slice! 5..-1
@tokens = Tokenizer.new(@question).tokens
end
# Returns the total result count
@ -135,6 +127,22 @@ module Redmine
end
end
# Splits a free-text search question into individual search terms.
class Tokenizer
  # +question+ may be nil; it is coerced to a string.
  def initialize(question)
    @question = question.to_s
  end

  # Returns the search terms extracted from the question.
  # Double-quoted phrases are kept as a single term,
  # eg. hello "bye bye" => ["hello", "bye bye"]
  # Terms are deduplicated, single-character terms are discarded (except
  # Chinese HANZI / Japanese KANJI, where one character is meaningful),
  # and at most five terms are returned.
  def tokens
    matches = @question.scan(%r{((\s|^)"[^"]+"(\s|$)|\S+)})
    terms = matches.map {|m| m.first.gsub(%r{(^\s*"\s*|\s*"\s*$)}, '')}
    terms.uniq.reject {|term| term.length <= 1 && term !~ /\p{Han}/}.take(5)
  end
end
module Controller
def self.included(base)
base.extend(ClassMethods)

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
# Redmine - project management software
# Copyright (C) 2006-2021 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
require File.expand_path('../../../../test_helper', __FILE__)
# NOTE(review): class name doesn't follow the usual *Test convention
# (TokenizerTest); minitest still runs it via the TestCase subclass.
class Redmine::Search::Tokenize < ActiveSupport::TestCase
  def test_tokenize
    value = "hello \"bye bye\""
    assert_equal ["hello", "bye bye"], Redmine::Search::Tokenizer.new(value).tokens
  end

  def test_tokenize_should_drop_single_character_tokens
    assert_equal ["ab"], Redmine::Search::Tokenizer.new("a ab").tokens
  end

  def test_tokenize_should_keep_single_han_characters
    assert_equal ["漢"], Redmine::Search::Tokenizer.new("漢").tokens
  end

  def test_tokenize_should_return_no_more_than_five_tokens
    assert_equal ["t1", "t2", "t3", "t4", "t5"],
                 Redmine::Search::Tokenizer.new("t1 t2 t3 t4 t5 t6").tokens
  end

  def test_tokenize_should_remove_duplicate_tokens
    assert_equal ["dup"], Redmine::Search::Tokenizer.new("dup dup").tokens
  end
end