2016-05-07 21:17:52 +00:00
|
|
|
<?php
|
|
|
|
|
2016-11-20 21:53:17 +00:00
|
|
|
declare(strict_types=1);
|
2016-05-07 21:17:52 +00:00
|
|
|
|
2017-09-02 19:39:59 +00:00
|
|
|
namespace tests\Phpml\Tokenization;
|
2016-05-07 21:17:52 +00:00
|
|
|
|
|
|
|
use Phpml\Tokenization\WordTokenizer;
|
2017-02-03 11:58:25 +00:00
|
|
|
use PHPUnit\Framework\TestCase;
|
2016-05-07 21:17:52 +00:00
|
|
|
|
2017-02-03 11:58:25 +00:00
|
|
|
class WordTokenizerTest extends TestCase
{
    /**
     * Mixed ASCII punctuation (hyphen, slash, comma, semicolon, period) must all
     * act as token separators, and pure-punctuation runs must yield no tokens.
     */
    public function testTokenizationOnAscii(): void
    {
        $text = 'Lorem ipsum-dolor sit amet, consectetur/adipiscing elit.
            Cras consectetur, dui et lobortis;auctor.
            Nulla vitae ,.,/ congue lorem.';

        $expected = [
            'Lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur', 'adipiscing', 'elit',
            'Cras', 'consectetur', 'dui', 'et', 'lobortis', 'auctor',
            'Nulla', 'vitae', 'congue', 'lorem',
        ];

        self::assertEquals($expected, (new WordTokenizer())->tokenize($text));
    }

    /**
     * Multibyte (CJK) input: words are split on whitespace and punctuation.
     * NOTE(review): single-character words (e.g. 鳼, 蒮, 蒛) are absent from the
     * expected list — presumably the tokenizer keeps only tokens of length >= 2;
     * confirm against WordTokenizer's pattern.
     */
    public function testTokenizationOnUtf8(): void
    {
        $text = '鋍鞎 鳼 鞮鞢騉 袟袘觕, 炟砏 蒮 謺貙蹖 偢偣唲 蒛 箷箯緷 鑴鱱爧 覮轀,
            剆坲 煘煓瑐 鬐鶤鶐 飹勫嫢 銪 餀 枲柊氠 鍎鞚韕 焲犈,
            殍涾烰 齞齝囃 蹅輶 鄜, 孻憵 擙樲橚 藒襓謥 岯岪弨 蒮 廞徲 孻憵懥 趡趛踠 槏';

        $expected = [
            '鋍鞎', '鞮鞢騉', '袟袘觕', '炟砏', '謺貙蹖', '偢偣唲', '箷箯緷', '鑴鱱爧', '覮轀',
            '剆坲', '煘煓瑐', '鬐鶤鶐', '飹勫嫢', '枲柊氠', '鍎鞚韕', '焲犈',
            '殍涾烰', '齞齝囃', '蹅輶', '孻憵', '擙樲橚', '藒襓謥', '岯岪弨', '廞徲', '孻憵懥', '趡趛踠',
        ];

        self::assertEquals($expected, (new WordTokenizer())->tokenize($text));
    }
}
|