Merge pull request #678 from DaveDavenport/issue665

Implement support for specifying negated match
Dave Davenport, 2017-10-01 10:53:59 +02:00 (committed by GitHub)
commit 24806f2292
22 changed files with 550 additions and 305 deletions

View File

@ -235,14 +235,13 @@ check_PROGRAMS+=\
helper_test\
helper_expand\
helper_pidfile\
helper_tokenize\
helper_config_cmdline_parser\
widget_test\
box_test\
scrollbar_test
if USE_CHECK
check_PROGRAMS+=mode_test theme_parser_test
check_PROGRAMS+=mode_test theme_parser_test helper_tokenize
endif
@ -316,22 +315,6 @@ helper_pidfile_SOURCES=\
source/xrmoptions.c\
test/helper-pidfile.c
helper_tokenize_CFLAGS=$(textbox_test_CFLAGS)
helper_tokenize_LDADD=$(textbox_test_LDADD)
helper_tokenize_SOURCES=\
config/config.c\
include/rofi.h\
include/mode.h\
include/mode-private.h\
source/helper.c\
source/rofi-types.c\
include/rofi-types.h\
include/helper.h\
include/helper-theme.h\
include/xrmoptions.h\
source/xrmoptions.c\
test/helper-tokenize.c
widget_test_LDADD=$(textbox_test_LDADD)
widget_test_CFLAGS=$(textbox_test_CFLAGS)
widget_test_SOURCES=\
@ -519,6 +502,22 @@ mode_test_SOURCES=\
source/keyb.c\
include/mode.h\
include/mode-private.h
helper_tokenize_CFLAGS=$(textbox_test_CFLAGS) $(check_CFLAGS)
helper_tokenize_LDADD=$(textbox_test_LDADD) $(check_LIBS)
helper_tokenize_SOURCES=\
config/config.c\
include/rofi.h\
include/mode.h\
include/mode-private.h\
source/helper.c\
source/rofi-types.c\
include/rofi-types.h\
include/helper.h\
include/helper-theme.h\
include/xrmoptions.h\
source/xrmoptions.c\
test/helper-tokenize.c
endif
TESTS+=\
@ -526,7 +525,6 @@ TESTS+=\
helper_test\
helper_expand\
helper_pidfile\
helper_tokenize\
helper_config_cmdline_parser\
textbox_test\
widget_test\
@ -535,6 +533,7 @@ TESTS+=\
if USE_CHECK
TESTS+=theme_parser_test\
helper_tokenize\
mode_test
endif

View File

@ -43,7 +43,7 @@
*
* @returns the updated retv list.
*/
PangoAttrList *helper_token_match_get_pango_attr ( RofiHighlightColorStyle th, GRegex **tokens, const char *input, PangoAttrList *retv );
PangoAttrList *helper_token_match_get_pango_attr ( RofiHighlightColorStyle th, rofi_int_matcher **tokens, const char *input, PangoAttrList *retv );
/**
* @param pfd Pango font description to validate.

View File

@ -59,16 +59,16 @@ int helper_parse_setup ( char * string, char ***output, int *length, ... );
*
* Tokenize the string on spaces.
*
* @returns a newly allocated array of regex objest
* @returns a newly allocated array of matching objects
*/
GRegex **tokenize ( const char *input, int case_sensitive );
rofi_int_matcher **helper_tokenize ( const char *input, int case_sensitive );
/**
* @param tokens Array of regex objects
*
* Frees the array of regex expressions.
* Frees the array of matching objects.
*/
void tokenize_free ( GRegex ** tokens );
void helper_tokenize_free ( rofi_int_matcher ** tokens );
/**
* @param key The key to search for
@ -136,7 +136,7 @@ int find_arg ( const char * const key );
*
* @returns TRUE when matches, FALSE otherwise
*/
int helper_token_match ( GRegex * const *tokens, const char *input );
int helper_token_match ( rofi_int_matcher * const *tokens, const char *input );
/**
* @param cmd The command to execute.
*

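A minimal usage sketch of the renamed API, assuming the global config from settings.h; the entry list and its length are hypothetical, not rofi symbols. A leading '-' on a token now marks it as negated:

/* Hypothetical filter loop: keep entries that contain "term" but not "emacs". */
rofi_int_matcher **tokens = helper_tokenize ( "term -emacs", config.case_sensitive );
for ( unsigned int i = 0; i < num_entries; i++ ) {
    if ( helper_token_match ( tokens, entries[i] ) ) {
        /* entries[i] passes every token, including the negated one */
    }
}
helper_tokenize_free ( tokens );
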
View File

@ -31,7 +31,7 @@
#include <gmodule.h>
/** ABI version to check if loaded plugin is compatible. */
#define ABI_VERSION 0x00000005
#define ABI_VERSION 0x00000006
/**
* @param data Pointer to #Mode object.
@ -84,7 +84,7 @@ typedef char * ( *_mode_get_completion )( const Mode *sw, unsigned int selected_
*
* @returns 1 when it matches, 0 if not.
*/
typedef int ( *_mode_token_match )( const Mode *data, GRegex **tokens, unsigned int index );
typedef int ( *_mode_token_match )( const Mode *data, rofi_int_matcher **tokens, unsigned int index );
/**
* @param sw The #Mode pointer

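For out-of-tree plugins the ABI bump means a rebuild against the new callback signature. A minimal sketch of a token-match callback under the new type, mirroring the in-tree dialogs below; MyModePrivateData and its entries field are hypothetical:

/* Hypothetical plugin callback using rofi_int_matcher instead of GRegex. */
static int my_mode_token_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
    MyModePrivateData *pd = (MyModePrivateData *) mode_get_private_data ( sw );
    return helper_token_match ( tokens, pd->entries[index] );
}
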
View File

@ -29,6 +29,7 @@
#define ROFI_MODE_H
#include <cairo.h>
#include <rofi-types.h>
/**
* @defgroup MODE Mode
*
@ -164,7 +165,7 @@ ModeMode mode_result ( Mode *mode, int menu_retv, char **input, unsigned int sel
*
* @returns TRUE if matches
*/
int mode_token_match ( const Mode *mode, GRegex **tokens, unsigned int selected_line );
int mode_token_match ( const Mode *mode, rofi_int_matcher **tokens, unsigned int selected_line );
/**
* @param mode The mode to query

View File

@ -225,4 +225,14 @@ typedef struct rofi_range_pair
unsigned int start;
unsigned int stop;
} rofi_range_pair;
/**
* Internal structure for matching.
*/
typedef struct rofi_int_matcher_t {
GRegex *regex;
gboolean invert;
} rofi_int_matcher;
#endif // INCLUDE_ROFI_TYPES_H

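A sketch of what one matcher holds, built by hand with plain GLib calls for illustration; in rofi the construction, including stripping the leading '-', is done by create_regex() in source/helper.c:

/* Illustrative: a negated, case-insensitive matcher for "emacs". */
rofi_int_matcher *m = g_malloc0 ( sizeof ( rofi_int_matcher ) );
m->invert = TRUE;                                  /* the token was written as "-emacs" */
m->regex  = g_regex_new ( "emacs", G_REGEX_CASELESS, 0, NULL );

gboolean keep = g_regex_match ( m->regex, "gnome-terminal", 0, NULL ) ^ m->invert;
/* keep == TRUE: the pattern is absent and the matcher is inverted */

g_regex_unref ( m->regex );
g_free ( m );
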
View File

@ -126,7 +126,7 @@ struct RofiViewState
} mouse;
/** Regexs used for matching */
GRegex **tokens;
rofi_int_matcher **tokens;
};
/** @} */
#endif

View File

@ -286,18 +286,6 @@ test('helper_pidfile test', executable('helper_pidfile.test', [
dependencies: deps,
))
test('helper_tokenize test', executable('helper_tokenize.test', [
'test/helper-tokenize.c',
],
objects: rofi.extract_objects([
'config/config.c',
'source/helper.c',
'source/xrmoptions.c',
'source/rofi-types.c',
]),
dependencies: deps,
))
test('widget test', executable('widget.test', [
'test/widget-test.c',
theme_parser,
@ -433,6 +421,18 @@ if check.found()
]),
dependencies: deps,
))
test('helper_tokenize test', executable('helper_tokenize.test', [
'test/helper-tokenize.c',
],
objects: rofi.extract_objects([
'config/config.c',
'source/helper.c',
'source/xrmoptions.c',
'source/rofi-types.c',
]),
dependencies: deps,
))
endif

View File

@ -187,7 +187,7 @@ static ModeMode combi_mode_result ( Mode *sw, int mretv, char **input, unsigned
}
return MODE_EXIT;
}
static int combi_mode_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int combi_mode_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
CombiModePrivateData *pd = mode_get_private_data ( sw );
for ( unsigned i = 0; i < pd->num_switchers; i++ ) {

View File

@ -371,7 +371,7 @@ static int dmenu_mode_init ( Mode *sw )
return TRUE;
}
static int dmenu_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int dmenu_token_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
DmenuModePrivateData *rmpd = (DmenuModePrivateData *) mode_get_private_data ( sw );
return helper_token_match ( tokens, rmpd->cmd_list[index] );
@ -612,7 +612,7 @@ int dmenu_switcher_dialog ( void )
char *select = NULL;
find_arg_str ( "-select", &select );
if ( select != NULL ) {
GRegex **tokens = tokenize ( select, config.case_sensitive );
rofi_int_matcher **tokens = helper_tokenize ( select, config.case_sensitive );
unsigned int i = 0;
for ( i = 0; i < cmd_list_length; i++ ) {
if ( helper_token_match ( tokens, cmd_list[i] ) ) {
@ -620,17 +620,17 @@ int dmenu_switcher_dialog ( void )
break;
}
}
tokenize_free ( tokens );
helper_tokenize_free ( tokens );
}
if ( find_arg ( "-dump" ) >= 0 ) {
GRegex **tokens = tokenize ( config.filter ? config.filter : "", config.case_sensitive );
rofi_int_matcher **tokens = helper_tokenize ( config.filter ? config.filter : "", config.case_sensitive );
unsigned int i = 0;
for ( i = 0; i < cmd_list_length; i++ ) {
if ( tokens == NULL || helper_token_match ( tokens, cmd_list[i] ) ) {
rofi_output_formatted_line ( pd->format, cmd_list[i], i, config.filter );
}
}
tokenize_free ( tokens );
helper_tokenize_free ( tokens );
dmenu_mode_free ( &dmenu_mode );
g_free ( input );
return TRUE;

View File

@ -667,32 +667,30 @@ static char *drun_get_completion ( const Mode *sw, unsigned int index )
}
}
static int drun_token_match ( const Mode *data, GRegex **tokens, unsigned int index )
static int drun_token_match ( const Mode *data, rofi_int_matcher **tokens, unsigned int index )
{
DRunModePrivateData *rmpd = (DRunModePrivateData *) mode_get_private_data ( data );
int match = 1;
if ( tokens ) {
for ( int j = 0; match && tokens != NULL && tokens[j] != NULL; j++ ) {
int test = 0;
GRegex *ftokens[2] = { tokens[j], NULL };
rofi_int_matcher *ftokens[2] = { tokens[j], NULL };
// Match name
if ( rmpd->entry_list[index].name &&
helper_token_match ( ftokens, rmpd->entry_list[index].name ) ) {
test = 1;
if ( rmpd->entry_list[index].name ) {
test = helper_token_match ( ftokens, rmpd->entry_list[index].name );
}
// Match generic name
if ( !test && rmpd->entry_list[index].generic_name &&
helper_token_match ( ftokens, rmpd->entry_list[index].generic_name ) ) {
test = 1;
if ( test == tokens[j]->invert && rmpd->entry_list[index].generic_name) {
test = helper_token_match ( ftokens, rmpd->entry_list[index].generic_name );
}
// Match executable name.
if ( !test && helper_token_match ( ftokens, rmpd->entry_list[index].exec ) ) {
test = 1;
if ( test == tokens[j]->invert ) {
test = helper_token_match ( ftokens, rmpd->entry_list[index].exec );
}
// Match against category.
if ( !test ) {
if ( test == tokens[j]->invert ) {
gchar **list = rmpd->entry_list[index].categories;
for ( int iter = 0; !test && list && list[iter]; iter++ ) {
for ( int iter = 0; test == tokens[j]->invert && list && list[iter]; iter++ ) {
test = helper_token_match ( ftokens, list[iter] );
}
}

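The `test == tokens[j]->invert` guard replaces the old `!test` check: a plain token keeps probing further fields until one matches, while a negated token keeps probing until some field contains the forbidden pattern, which then rejects the entry. The same pattern in isolation, as a sketch over a hypothetical field list with at least one entry:

/* Sketch: match one matcher against several fields of an entry. */
static int match_fields ( rofi_int_matcher *m, const char * const *fields )
{
    rofi_int_matcher *ftokens[2] = { m, NULL };
    int test = helper_token_match ( ftokens, fields[0] );
    /* Continue only while the result still equals the invert flag. */
    for ( int i = 1; test == m->invert && fields[i] != NULL; i++ ) {
        test = helper_token_match ( ftokens, fields[i] );
    }
    return test;
}
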
View File

@ -104,7 +104,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, in
return g_strdup ( pd->messages[selected_line] );
}
static int help_keys_token_match ( const Mode *data,
GRegex **tokens,
rofi_int_matcher **tokens,
unsigned int index
)
{

View File

@ -400,7 +400,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
const RunModePrivateData *rmpd = (const RunModePrivateData *) sw->private_data;
return get_entry ? g_strdup ( rmpd->cmd_list[selected_line] ) : NULL;
}
static int run_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int run_token_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
const RunModePrivateData *rmpd = (const RunModePrivateData *) sw->private_data;
return helper_token_match ( tokens, rmpd->cmd_list[index] );

View File

@ -254,7 +254,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
return get_entry ? g_strdup ( pd->cmd_list[selected_line] ) : NULL;
}
static int script_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int script_token_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
ScriptModePrivateData *rmpd = sw->private_data;
return helper_token_match ( tokens, rmpd->cmd_list[index] );

View File

@ -516,7 +516,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
*
* @returns TRUE if matches
*/
static int ssh_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int ssh_token_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
SSHModePrivateData *rmpd = (SSHModePrivateData *) mode_get_private_data ( sw );
return helper_token_match ( tokens, rmpd->hosts_list[index] );

View File

@ -326,7 +326,7 @@ static client* window_client ( ModeModePrivateData *pd, xcb_window_t win )
g_free ( attr );
return c;
}
static int window_match ( const Mode *sw, GRegex **tokens, unsigned int index )
static int window_match ( const Mode *sw, rofi_int_matcher **tokens, unsigned int index )
{
ModeModePrivateData *rmpd = (ModeModePrivateData *) mode_get_private_data ( sw );
int match = 1;
@ -343,23 +343,23 @@ static int window_match ( const Mode *sw, GRegex **tokens, unsigned int index )
// Now we want it to match only one item at the time.
// If hack not in place it would not match queries spanning multiple fields.
// e.g. when searching 'title element' and 'class element'
GRegex *ftokens[2] = { tokens[j], NULL };
rofi_int_matcher *ftokens[2] = { tokens[j], NULL };
if ( c->title != NULL && c->title[0] != '\0' ) {
test = helper_token_match ( ftokens, c->title );
}
if ( !test && c->class != NULL && c->class[0] != '\0' ) {
if ( test == tokens[j]->invert && c->class != NULL && c->class[0] != '\0' ) {
test = helper_token_match ( ftokens, c->class );
}
if ( !test && c->role != NULL && c->role[0] != '\0' ) {
if ( test == tokens[j]->invert && c->role != NULL && c->role[0] != '\0' ) {
test = helper_token_match ( ftokens, c->role );
}
if ( !test && c->name != NULL && c->name[0] != '\0' ) {
if ( test == tokens[j]->invert && c->name != NULL && c->name[0] != '\0' ) {
test = helper_token_match ( ftokens, c->name );
}
if ( !test && c->wmdesktopstr != NULL && c->wmdesktopstr[0] != '\0' ) {
if ( test == tokens[j]->invert && c->wmdesktopstr != NULL && c->wmdesktopstr[0] != '\0' ) {
test = helper_token_match ( ftokens, c->wmdesktopstr );
}

View File

@ -55,6 +55,7 @@
#include "rofi.h"
#include "view.h"
/**
* Textual description of positioning rofi.
*/
@ -152,10 +153,12 @@ int helper_parse_setup ( char * string, char ***output, int *length, ... )
return FALSE;
}
void tokenize_free ( GRegex ** tokens )
void helper_tokenize_free ( rofi_int_matcher ** tokens )
{
for ( size_t i = 0; tokens && tokens[i]; i++ ) {
g_regex_unref ( (GRegex *) tokens[i] );
g_regex_unref ( (GRegex *) tokens[i]->regex );
g_free ( tokens[i] );
}
g_free ( tokens );
}
@ -214,10 +217,15 @@ static inline GRegex * R ( const char *s, int case_sensitive )
return g_regex_new ( s, G_REGEX_OPTIMIZE | ( ( case_sensitive ) ? 0 : G_REGEX_CASELESS ), 0, NULL );
}
static GRegex * create_regex ( const char *input, int case_sensitive )
static rofi_int_matcher * create_regex ( const char *input, int case_sensitive )
{
GRegex * retv = NULL;
gchar *r;
rofi_int_matcher *rv = g_malloc0(sizeof(rofi_int_matcher));
if ( input && input[0] == '-') {
rv->invert = 1;
input++;
}
switch ( config.matching_method )
{
case MM_GLOB:
@ -244,9 +252,10 @@ static GRegex * create_regex ( const char *input, int case_sensitive )
g_free ( r );
break;
}
return retv;
rv->regex = retv;
return rv;
}
GRegex **tokenize ( const char *input, int case_sensitive )
rofi_int_matcher **helper_tokenize ( const char *input, int case_sensitive )
{
if ( input == NULL ) {
return NULL;
@ -257,10 +266,10 @@ GRegex **tokenize ( const char *input, int case_sensitive )
}
char *saveptr = NULL, *token;
GRegex **retv = NULL;
rofi_int_matcher **retv = NULL;
if ( !config.tokenize ) {
retv = g_malloc0 ( sizeof ( GRegex* ) * 2 );
retv[0] = (GRegex *) create_regex ( input, case_sensitive );
retv = g_malloc0 ( sizeof ( rofi_int_matcher* ) * 2 );
retv[0] = create_regex ( input, case_sensitive );
return retv;
}
@ -274,8 +283,8 @@ GRegex **tokenize ( const char *input, int case_sensitive )
// strtok should still be valid for utf8.
const char * const sep = " ";
for ( token = strtok_r ( str, sep, &saveptr ); token != NULL; token = strtok_r ( NULL, sep, &saveptr ) ) {
retv = g_realloc ( retv, sizeof ( GRegex* ) * ( num_tokens + 2 ) );
retv[num_tokens] = (GRegex *) create_regex ( token, case_sensitive );
retv = g_realloc ( retv, sizeof ( rofi_int_matcher* ) * ( num_tokens + 2 ) );
retv[num_tokens] = create_regex ( token, case_sensitive );
retv[num_tokens + 1] = NULL;
num_tokens++;
}
@ -400,13 +409,14 @@ int find_arg_char ( const char * const key, char *val )
return FALSE;
}
PangoAttrList *helper_token_match_get_pango_attr ( RofiHighlightColorStyle th, GRegex **tokens, const char *input, PangoAttrList *retv )
PangoAttrList *helper_token_match_get_pango_attr ( RofiHighlightColorStyle th, rofi_int_matcher**tokens, const char *input, PangoAttrList *retv )
{
// Do a tokenized match.
if ( tokens ) {
for ( int j = 0; tokens[j]; j++ ) {
GMatchInfo *gmi = NULL;
g_regex_match ( (GRegex *) tokens[j], input, G_REGEX_MATCH_PARTIAL, &gmi );
if ( tokens[j]->invert ) continue;
g_regex_match ( tokens[j]->regex, input, G_REGEX_MATCH_PARTIAL, &gmi );
while ( g_match_info_matches ( gmi ) ) {
int count = g_match_info_get_match_count ( gmi );
for ( int index = ( count > 1 ) ? 1 : 0; index < count; index++ ) {
@ -460,13 +470,14 @@ PangoAttrList *helper_token_match_get_pango_attr ( RofiHighlightColorStyle th, G
return retv;
}
int helper_token_match ( GRegex * const *tokens, const char *input )
int helper_token_match ( rofi_int_matcher* const *tokens, const char *input )
{
int match = TRUE;
// Do a tokenized match.
if ( tokens ) {
for ( int j = 0; match && tokens[j]; j++ ) {
match = g_regex_match ( (const GRegex *) tokens[j], input, 0, NULL );
match = g_regex_match ( tokens[j]->regex, input, 0, NULL );
match ^= tokens[j]->invert;
}
}
return match;

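Taken together, the '-' prefix consumed in create_regex and the `match ^= tokens[j]->invert` above give negation its semantics; a hedged end-to-end check mirroring the new unit tests:

/* regex hit | invert | token result
 *   TRUE    |   0    |  TRUE   (plain token found)
 *   FALSE   |   0    |  FALSE  (plain token missing)
 *   TRUE    |   1    |  FALSE  (negated token found: reject)
 *   FALSE   |   1    |  TRUE   (negated token absent: keep)    */
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = helper_tokenize ( "aap -noot", FALSE );
g_assert ( helper_token_match ( tokens, "aap mies" )      == TRUE  );
g_assert ( helper_token_match ( tokens, "aap noot mies" ) == FALSE );
helper_tokenize_free ( tokens );
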
View File

@ -80,6 +80,7 @@ static void __history_write_element_list ( FILE *fd, _element **list, unsigned i
static _element ** __history_get_element_list ( FILE *fd, unsigned int *length )
{
unsigned int real_length = 0;
_element **retv = NULL;
if ( length == NULL ) {
@ -108,8 +109,12 @@ static _element ** __history_get_element_list ( FILE *fd, unsigned int *length )
if ( ( l - ( start - buffer ) ) < 2 ) {
continue;
}
// Resize and check.
retv = g_realloc ( retv, ( *length + 2 ) * sizeof ( _element* ) );
if ( real_length < (*length+2) )
{
real_length += 15;
// Resize and check.
retv = g_realloc ( retv, ( real_length ) * sizeof ( _element* ) );
}
retv[( *length )] = g_malloc ( sizeof ( _element ) );

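An incidental change in history.c: instead of a g_realloc per parsed line, the element array now grows in chunks of 15 entries. The same amortized-growth pattern in isolation, as a sketch with hypothetical names:

/* Hypothetical helper: append `item`, growing the array only when the
 * used count (plus a terminating slot) would exceed the real capacity. */
static char **append_item ( char **items, unsigned int *used, unsigned int *cap, char *item )
{
    if ( *cap < ( *used + 2 ) ) {
        *cap += 15;
        items = g_realloc ( items, ( *cap ) * sizeof ( char* ) );
    }
    items[( *used )++] = item;
    items[*used]       = NULL;
    return items;
}
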
View File

@ -102,7 +102,7 @@ ModeMode mode_result ( Mode *mode, int menu_retv, char **input, unsigned int sel
return mode->_result ( mode, menu_retv, input, selected_line );
}
int mode_token_match ( const Mode *mode, GRegex **tokens, unsigned int selected_line )
int mode_token_match ( const Mode *mode, rofi_int_matcher **tokens, unsigned int selected_line )
{
g_assert ( mode != NULL );
g_assert ( mode->_token_match != NULL );

View File

@ -497,7 +497,7 @@ void rofi_view_set_selected_line ( RofiViewState *state, unsigned int selected_l
void rofi_view_free ( RofiViewState *state )
{
if ( state->tokens ) {
tokenize_free ( state->tokens );
helper_tokenize_free ( state->tokens );
state->tokens = NULL;
}
// Do this here?
@ -1019,14 +1019,14 @@ static void rofi_view_refilter ( RofiViewState *state )
state->reload = FALSE;
}
if ( state->tokens ) {
tokenize_free ( state->tokens );
helper_tokenize_free ( state->tokens );
state->tokens = NULL;
}
if ( state->text && strlen ( state->text->text ) > 0 ) {
unsigned int j = 0;
gchar *pattern = mode_preprocess_input ( state->sw, state->text->text );
glong plen = pattern ? g_utf8_strlen ( pattern, -1 ) : 0;
state->tokens = tokenize ( pattern, config.case_sensitive );
state->tokens = helper_tokenize ( pattern, config.case_sensitive );
/**
* On long lists it can be beneficial to parallelize.
* If number of threads is 1, no thread is spawn.

View File

@ -37,13 +37,10 @@
#include "xcb-internal.h"
#include "rofi.h"
#include "settings.h"
#include "rofi-types.h"
static int test = 0;
#include <check.h>
#define TASSERT( a ) { \
assert ( a ); \
printf ( "Test %i passed (%s)\n", ++test, # a ); \
}
void rofi_add_error_message ( G_GNUC_UNUSED GString *msg )
{
}
@ -60,6 +57,427 @@ int monitor_active ( G_GNUC_UNUSED workarea *mon )
void display_startup_notification ( G_GNUC_UNUSED RofiHelperExecuteContext *context, G_GNUC_UNUSED GSpawnChildSetupFunc *child_setup, G_GNUC_UNUSED gpointer *user_data )
{
}
START_TEST(test_tokenizer_free )
{
helper_tokenize_free ( NULL );
}
END_TEST
START_TEST ( test_tokenizer_match_normal_single_ci )
{
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = helper_tokenize ( "noot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_normal_single_cs )
{
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_normal_multiple_ci )
{
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "no ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_normal_single_ci_negate )
{
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "-noot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_normal_multiple_ci_negate )
{
config.matching_method = MM_NORMAL;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "-noot aap", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_single_ci )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_single_cs )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_multiple_ci )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "no ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_single_ci_question )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "n?ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , FALSE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_single_ci_star )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "n*ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_glob_multiple_ci_star )
{
config.matching_method = MM_GLOB;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "n* ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE);
ck_assert_int_eq ( helper_token_match ( tokens, "ot nap mies") , TRUE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_fuzzy_single_ci )
{
config.matching_method = MM_FUZZY;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_fuzzy_single_cs )
{
config.matching_method = MM_FUZZY;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_fuzzy_multiple_ci )
{
config.matching_method = MM_FUZZY;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "no ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE );
helper_tokenize_free ( tokens );
tokens = helper_tokenize ( "n ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_fuzzy_single_ci_split )
{
config.matching_method = MM_FUZZY;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "ont", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , FALSE);
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap nmiest") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_fuzzy_multiple_ci_split )
{
config.matching_method = MM_FUZZY;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "o n t", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE);
ck_assert_int_eq ( helper_token_match ( tokens, "ot nap mies") , TRUE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_single_ci )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_single_cs )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "noot", TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap Noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "Nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noOTap mies") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_multiple_ci )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "no ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE );
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_single_ci_dq )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "n.?ot", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , FALSE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_single_two_char )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "n[oa]{2}t", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , TRUE );
ck_assert_int_eq ( helper_token_match ( tokens, "noat miesot") , TRUE);
ck_assert_int_eq ( helper_token_match ( tokens, "noaat miesot") , FALSE);
helper_tokenize_free ( tokens );
}
END_TEST
START_TEST ( test_tokenizer_match_regex_single_two_word_till_end )
{
config.matching_method = MM_REGEX;
rofi_int_matcher **tokens = NULL;
tokens = helper_tokenize ( "^(aap|noap)\\sMie.*", FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "aap mies") , TRUE);
ck_assert_int_eq ( helper_token_match ( tokens, "nooaap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "nootap mies") , FALSE );
ck_assert_int_eq ( helper_token_match ( tokens, "noap miesot") , TRUE);
ck_assert_int_eq ( helper_token_match ( tokens, "ot nap mies") , FALSE );
helper_tokenize_free ( tokens );
}
END_TEST
static Suite * helper_tokenizer_suite (void)
{
Suite *s;
s = suite_create("Tokenizer");
/* Core test case */
{
TCase *tc_core;
tc_core = tcase_create("Core");
tcase_add_test(tc_core, test_tokenizer_free);
suite_add_tcase(s, tc_core);
}
{
TCase *tc_normal = tcase_create ("Normal");
tcase_add_test(tc_normal, test_tokenizer_match_normal_single_ci );
tcase_add_test(tc_normal, test_tokenizer_match_normal_single_cs );
tcase_add_test(tc_normal, test_tokenizer_match_normal_multiple_ci );
tcase_add_test(tc_normal, test_tokenizer_match_normal_single_ci_negate );
tcase_add_test(tc_normal, test_tokenizer_match_normal_multiple_ci_negate);
suite_add_tcase(s, tc_normal);
}
{
TCase *tc_glob = tcase_create ("Glob");
tcase_add_test(tc_glob, test_tokenizer_match_glob_single_ci);
tcase_add_test(tc_glob, test_tokenizer_match_glob_single_cs);
tcase_add_test(tc_glob, test_tokenizer_match_glob_multiple_ci);
tcase_add_test(tc_glob, test_tokenizer_match_glob_single_ci_question);
tcase_add_test(tc_glob, test_tokenizer_match_glob_single_ci_star);
tcase_add_test(tc_glob, test_tokenizer_match_glob_multiple_ci_star);
suite_add_tcase(s, tc_glob);
}
{
TCase *tc_fuzzy = tcase_create ("Fuzzy");
tcase_add_test(tc_fuzzy, test_tokenizer_match_fuzzy_single_ci);
tcase_add_test(tc_fuzzy, test_tokenizer_match_fuzzy_single_cs);
tcase_add_test(tc_fuzzy, test_tokenizer_match_fuzzy_single_ci_split);
tcase_add_test(tc_fuzzy, test_tokenizer_match_fuzzy_multiple_ci);
tcase_add_test(tc_fuzzy, test_tokenizer_match_fuzzy_multiple_ci_split);
suite_add_tcase(s, tc_fuzzy);
}
{
TCase *tc_regex = tcase_create ("Regex");
tcase_add_test(tc_regex, test_tokenizer_match_regex_single_ci);
tcase_add_test(tc_regex, test_tokenizer_match_regex_single_cs);
tcase_add_test(tc_regex, test_tokenizer_match_regex_single_ci_dq);
tcase_add_test(tc_regex, test_tokenizer_match_regex_single_two_char);
tcase_add_test(tc_regex, test_tokenizer_match_regex_single_two_word_till_end);
tcase_add_test(tc_regex, test_tokenizer_match_regex_multiple_ci);
suite_add_tcase(s, tc_regex);
}
return s;
}
int main ( G_GNUC_UNUSED int argc, G_GNUC_UNUSED char ** argv )
{
@ -67,214 +485,17 @@ int main ( G_GNUC_UNUSED int argc, G_GNUC_UNUSED char ** argv )
fprintf ( stderr, "Failed to set locale.\n" );
return EXIT_FAILURE;
}
// Pid test.
// Tests basic functionality of writing it, locking, seeing if I can write same again
// And close/reopen it again.
{
tokenize_free ( NULL );
}
{
config.matching_method = MM_NORMAL;
GRegex **tokens = tokenize ( "noot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
int number_failed = 0;
Suite *s;
SRunner *sr;
tokenize_free ( tokens );
s = helper_tokenizer_suite();
sr = srunner_create(s);
tokens = tokenize ( "noot", TRUE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
}
{
config.matching_method = MM_GLOB;
GRegex **tokens = tokenize ( "noot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n?ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "n*ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
tokenize_free ( tokens );
tokens = tokenize ( "n* ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
TASSERT ( helper_token_match ( tokens, "ot nap mies") == TRUE);
tokenize_free ( tokens );
}
{
config.matching_method = MM_FUZZY;
GRegex **tokens = tokenize ( "noot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
tokenize_free ( tokens );
tokens = tokenize ( "ont", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == FALSE);
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap nmiest") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "o n t", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
TASSERT ( helper_token_match ( tokens, "ot nap mies") == TRUE);
tokenize_free ( tokens );
}
{
config.matching_method = MM_REGEX;
GRegex **tokens = tokenize ( "noot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n.?ot", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "n[oa]{2}t", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
TASSERT ( helper_token_match ( tokens, "noat miesot") == TRUE);
TASSERT ( helper_token_match ( tokens, "noaat miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "^(aap|noap)\\sMie.*", FALSE );
TASSERT ( helper_token_match ( tokens, "aap noot mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "aap mies") == TRUE);
TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "nootap mies") == FALSE );
TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
TASSERT ( helper_token_match ( tokens, "ot nap mies") == FALSE );
tokenize_free ( tokens );
}
srunner_run_all(sr, CK_NORMAL);
number_failed = srunner_ntests_failed(sr);
srunner_free(sr);
return (number_failed == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
}

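The negation cases above only cover MM_NORMAL; a test for another matching method would take the same shape. An illustrative sketch, not part of this commit, which would be registered with tcase_add_test in the existing Glob TCase:

START_TEST ( test_tokenizer_match_glob_single_ci_negate )
{
    config.matching_method = MM_GLOB;
    rofi_int_matcher **tokens = helper_tokenize ( "-noot", FALSE );
    ck_assert_int_eq ( helper_token_match ( tokens, "aap noot mies" ), FALSE );
    ck_assert_int_eq ( helper_token_match ( tokens, "aap mies" ), TRUE );
    helper_tokenize_free ( tokens );
}
END_TEST
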
View File

@ -146,19 +146,19 @@ END_TEST
START_TEST(test_mode_match_entry)
{
GRegex **t = tokenize( "primary-paste", FALSE );
rofi_int_matcher **t = helper_tokenize( "primary-paste", FALSE );
ck_assert_ptr_nonnull ( t );
ck_assert_int_eq ( mode_token_match ( &help_keys_mode, t, 0), TRUE );
ck_assert_int_eq ( mode_token_match ( &help_keys_mode, t, 1), FALSE );
tokenize_free ( t );
t = tokenize( "-paste", FALSE );
helper_tokenize_free ( t );
t = helper_tokenize( "y-paste", FALSE );
ck_assert_ptr_nonnull ( t );
ck_assert_int_eq ( mode_token_match ( &help_keys_mode, t, 0), TRUE );
ck_assert_int_eq ( mode_token_match ( &help_keys_mode, t, 1), TRUE );
ck_assert_int_eq ( mode_token_match ( &help_keys_mode, t, 2), FALSE );
tokenize_free ( t );
helper_tokenize_free ( t );
}
END_TEST
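Note the query change from "-paste" to "y-paste": with this commit a leading '-' flips the token to a negated match, so the old query no longer selects entries by their literal text. A hypothetical check of the new behaviour:

rofi_int_matcher **neg = helper_tokenize ( "-paste", FALSE );
/* Entries containing "paste" are now rejected rather than matched. */
ck_assert_int_eq ( helper_token_match ( neg, "primary-paste" ), FALSE );
helper_tokenize_free ( neg );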