Add plural forms parser that nearly works

This commit is contained in:
Mikko Ahlroth 2024-05-19 15:36:13 +03:00
parent 68e0be1741
commit 4298176d72
11 changed files with 532 additions and 1274 deletions

File diff suppressed because it is too large Load diff

View file

@ -15,6 +15,7 @@ target = "javascript"
[dependencies]
gleam_stdlib = ">= 0.34.0 and < 2.0.0"
nibble = ">= 1.1.1 and < 2.0.0"
[dev-dependencies]
gleeunit = ">= 1.0.0 and < 2.0.0"

View file

@ -5,10 +5,12 @@ packages = [
{ name = "filepath", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "EFB6FF65C98B2A16378ABC3EE2B14124168C0CE5201553DE652E2644DCFDB594" },
{ name = "gleam_stdlib", version = "0.37.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "5398BD6C2ABA17338F676F42F404B9B7BABE1C8DC7380031ACB05BBE1BCF3742" },
{ name = "gleeunit", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "72CDC3D3F719478F26C4E2C5FED3E657AC81EC14A47D2D2DEBB8693CA3220C3B" },
{ name = "nibble", version = "1.1.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "nibble", source = "hex", outer_checksum = "67C6BEBC1AB6D771AB893B4A7B3E66C92668C6E7774C335FEFCD545B06435FE5" },
{ name = "simplifile", version = "1.7.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "1D5DFA3A2F9319EC85825F6ED88B8E449F381B0D55A62F5E61424E748E7DDEB0" },
]
[requirements]
gleam_stdlib = { version = ">= 0.34.0 and < 2.0.0" }
gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
nibble = { version = ">= 1.1.1 and < 2.0.0"}
simplifile = { version = ">= 1.7.0 and < 2.0.0" }

View file

@ -1,8 +1,25 @@
import gleam/io
import gleam/result
import kielet/database.{type Database}
import kielet/plurals/parser
import kielet/plurals/tokenizer
import nibble
/// Demo entry point: tokenizes and parses two sample plural-forms
/// headers and prints the parser output with io.debug.
pub fn main() {
io.println("Hello from kielet!")
// Simple two-form rule (English-style).
let _ =
"nplurals=2; plural=n != 1;"
|> tokenizer.tokenize()
|> result.unwrap([])
|> nibble.run(parser.main())
|> io.debug()
// Six-form rule (Arabic), spread over several source lines.
"nplurals=6;
plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3
: n%100>=11 ? 4 : 5;"
|> tokenizer.tokenize()
|> result.unwrap([])
|> nibble.run(parser.main())
|> io.debug()
}
pub fn gettext(db: Database, msgid: String, language_code: String) -> String {

27
src/kielet/plurals.gleam Normal file
View file

@ -0,0 +1,27 @@
import gleam/result
import kielet/plurals/ast
import kielet/plurals/parser
import kielet/plurals/syntax_error
import kielet/plurals/tokenizer
import nibble
/// Errors that can occur while parsing a plural-forms string.
pub type ParseError {
/// The raw input could not be tokenized.
TokenizerError(err: syntax_error.SyntaxError)
/// The token stream did not match the plural-forms grammar.
ParserError(err: List(nibble.DeadEnd(tokenizer.Token, Nil)))
}
/// A parsed plural-forms header: the number of plural forms (`total`)
/// and the expression (`algorithm`) used to pick one of them.
pub type Plurals {
Plurals(total: Int, algorithm: ast.Ast)
}
/// Parse a gettext plural-forms string such as
/// `"nplurals=2; plural=n != 1;"` into a `Plurals` value.
///
/// Returns a `ParseError` when tokenizing or parsing fails.
pub fn parse(input: String) {
use tokens <- result.try(
tokenizer.tokenize(input)
|> result.map_error(TokenizerError),
)
use #(total, ast) <- result.try(
parser.parse(tokens)
|> result.map_error(ParserError),
)
Ok(Plurals(total: total, algorithm: ast))
}

View file

@ -0,0 +1,19 @@
/// Binary operators supported in plural-forms expressions.
pub type BinOp {
Equal
NotEqual
GreaterThan
GreaterThanOrEqual
LowerThan
LowerThanOrEqual
Remainder
And
Or
}
/// Abstract syntax tree of a plural-forms expression.
pub type Ast {
/// The input variable `n` (the number being pluralized).
N
/// An integer literal.
Integer(Int)
/// A binary operation such as `n % 100` or `a && b`.
BinaryOperation(operator: BinOp, lvalue: Ast, rvalue: Ast)
/// A C-style ternary `condition ? truthy : falsy`.
If(condition: Ast, truthy: Ast, falsy: Ast)
/// A parenthesized sub-expression.
Paren(Ast)
}

View file

@ -0,0 +1,38 @@
import kielet/plurals/ast
pub fn eval(ast: ast.Ast, input: Int) {
case ast {
ast.N -> input
ast.Integer(i) -> i
ast.If(condition, truthy, falsy) -> {
let ast = case eval(condition, input) {
1 -> truthy
_ -> falsy
}
eval(ast, input)
}
ast.Paren(content) -> eval(content, input)
ast.BinaryOperation(operator, lvalue, rvalue) -> {
let lvalue = eval(lvalue, input)
let rvalue = eval(rvalue, input)
case operator {
ast.Equal -> bool_to_int(lvalue == rvalue)
ast.NotEqual -> bool_to_int(lvalue != rvalue)
ast.GreaterThan -> bool_to_int(lvalue > rvalue)
ast.GreaterThanOrEqual -> bool_to_int(lvalue >= rvalue)
ast.LowerThan -> bool_to_int(lvalue < rvalue)
ast.LowerThanOrEqual -> bool_to_int(lvalue <= rvalue)
ast.Remainder -> lvalue % rvalue
ast.And -> bool_to_int(lvalue == 1 && rvalue == 1)
ast.Or -> bool_to_int(lvalue == 1 || rvalue == 1)
}
}
}
}
/// Convert a Bool to the 0/1 integer convention used by the evaluator.
fn bool_to_int(bool: Bool) {
  case bool {
    False -> 0
    True -> 1
  }
}

View file

@ -0,0 +1,128 @@
import gleam/option
import kielet/plurals/ast
import kielet/plurals/tokenizer
import nibble
import nibble/lexer
import nibble/pratt
/// Run the plural-forms parser over a pre-tokenized input stream,
/// returning `#(nplurals, ast)` on success.
pub fn parse(input: List(lexer.Token(tokenizer.Token))) {
nibble.run(input, main())
}
/// Parser for a full plural-forms header:
/// `nplurals=<int>; plural=<expression>` with an optional trailing `;`.
/// Produces `#(nplurals, ast)`.
pub fn main() {
use _ <- nibble.do(nibble.token(tokenizer.NPlurals))
use _ <- nibble.do(nibble.token(tokenizer.Assignment))
use nplurals <- nibble.do(int_parser())
use _ <- nibble.do(nibble.token(tokenizer.Semicolon))
use _ <- nibble.do(nibble.token(tokenizer.Plural))
use _ <- nibble.do(nibble.token(tokenizer.Assignment))
use ast <- nibble.do(plurals_parser())
// The final semicolon is optional in real-world headers.
use _ <- nibble.do(nibble.optional(nibble.token(tokenizer.Semicolon)))
use _ <- nibble.do(nibble.token(tokenizer.End))
use _ <- nibble.do(nibble.eof())
// int_parser only ever yields ast.Integer, so this assert cannot fail.
let assert ast.Integer(nplurals) = nplurals
nibble.return(#(nplurals, ast))
}
// Parses one expression, then tries to extend it into a ternary; when no
// `?` follows, the plain expression is returned unchanged.
fn plurals_parser() {
use maybe_cond <- nibble.do(expr_parser())
nibble.one_of([rest_of_ternary_parser(maybe_cond), nibble.return(maybe_cond)])
}
/// Pratt parser for the non-ternary part of plural expressions.
///
/// Binding powers follow C operator precedence, loosest to tightest:
/// `||` (200) < `&&` (250) < `==`/`!=` (300) < `<`/`>`/`<=`/`>=` (350)
/// < `%` (400). All of these are left-associative, as in C.
fn expr_parser() {
  pratt.expression(
    one_of: [
      fn(_) { int_parser() },
      fn(_) { n_parser() },
      fn(c) { paren_parser(c) },
    ],
    and_then: [
      // In C, `&&` binds tighter than `||`; equal binding power would
      // mis-parse `a || b && c` as `(a || b) && c`.
      pratt.infix_left(200, nibble.token(tokenizer.Or), fn(l, r) {
        ast.BinaryOperation(ast.Or, l, r)
      }),
      pratt.infix_left(250, nibble.token(tokenizer.And), fn(l, r) {
        ast.BinaryOperation(ast.And, l, r)
      }),
      pratt.infix_left(300, nibble.token(tokenizer.Equals), fn(l, r) {
        ast.BinaryOperation(ast.Equal, l, r)
      }),
      pratt.infix_left(300, nibble.token(tokenizer.NotEquals), fn(l, r) {
        ast.BinaryOperation(ast.NotEqual, l, r)
      }),
      // Relational operators bind tighter than equality, as in C.
      pratt.infix_left(350, nibble.token(tokenizer.GreaterThan), fn(l, r) {
        ast.BinaryOperation(ast.GreaterThan, l, r)
      }),
      pratt.infix_left(350, nibble.token(tokenizer.LowerThan), fn(l, r) {
        ast.BinaryOperation(ast.LowerThan, l, r)
      }),
      pratt.infix_left(
        350,
        nibble.token(tokenizer.GreaterThanOrEquals),
        fn(l, r) { ast.BinaryOperation(ast.GreaterThanOrEqual, l, r) },
      ),
      pratt.infix_left(350, nibble.token(tokenizer.LowerThanOrEquals), fn(l, r) {
        ast.BinaryOperation(ast.LowerThanOrEqual, l, r)
      }),
      // `%` is left-associative in C; infix_right would parse
      // `a % b % c` as `a % (b % c)`.
      pratt.infix_left(400, nibble.token(tokenizer.Remainder), fn(l, r) {
        ast.BinaryOperation(ast.Remainder, l, r)
      }),
    ],
    dropping: nibble.return(Nil),
  )
}
/// Parse a parenthesized sub-expression, preserving the grouping in the
/// AST as an `ast.Paren` node.
fn paren_parser(config) {
  use _ <- nibble.do(lparen_parser())
  use inner <- nibble.do(pratt.sub_expression(config, 0))
  use _ <- nibble.do(rparen_parser())
  nibble.return(ast.Paren(inner))
}
// Parses the `? truthy : falsy` tail of a ternary whose condition has
// already been parsed. Each branch may itself contain a ternary, which
// makes `?:` effectively right-associative, as in C.
fn rest_of_ternary_parser(cond: ast.Ast) {
use _ <- nibble.do(nibble.token(tokenizer.Ternary))
use if_true <- nibble.do(plurals_parser())
use _ <- nibble.do(nibble.token(tokenizer.TernaryElse))
use if_false <- nibble.do(plurals_parser())
nibble.return(ast.If(cond, if_true, if_false))
}
/// Consume one integer token and wrap it as an AST integer literal.
fn int_parser() {
  use token <- nibble.take_map("An integer")
  case token {
    tokenizer.Int(value) -> option.Some(ast.Integer(value))
    _ -> option.None
  }
}
/// Consume the variable token `n` and produce the `ast.N` node.
fn n_parser() {
  use token <- nibble.take_map("n")
  case token {
    tokenizer.N -> option.Some(ast.N)
    _ -> option.None
  }
}
/// Consume a `(` token; produces no value.
fn lparen_parser() {
  use token <- nibble.take_map("Left parenthesis")
  case token {
    tokenizer.LParen -> option.Some(Nil)
    _ -> option.None
  }
}
/// Consume a `)` token; produces no value.
fn rparen_parser() {
  use token <- nibble.take_map("Right parenthesis")
  case token {
    tokenizer.RParen -> option.Some(Nil)
    _ -> option.None
  }
}

View file

@ -1,148 +0,0 @@
import gleam/result
import kielet/plurals/syntax_error
import kielet/plurals/tokenizer
// NOTE(review): this is the hand-written parser file being deleted in this
// commit, replaced by the nibble-based parser above.
/// Errors produced by the (removed) hand-written parser.
pub type ParseError {
SyntaxError(err: syntax_error.SyntaxError)
}
/// Parsed plural-forms header: form count plus the selection expression.
pub type PluralForms {
PluralForms(nplurals: Int, ast: Ast)
}
// Binary operators of a plural-forms expression.
type BinOp {
Equal
NotEqual
GreaterThan
GreaterThanOrEqual
LowerThan
LowerThanOrEqual
Remainder
And
Or
}
// Expression tree; opaque so values can only be built inside this module.
pub opaque type Ast {
N
Integer(Int)
BinaryOperation(operator: BinOp, lvalue: Ast, rvalue: Ast)
If(condition: Ast, truthy: Ast, falsy: Ast)
Paren(Ast)
}
/// Tokenize and parse a plural-forms rule string.
pub fn parse(rules: String) {
use tokens <- result.try(
tokenizer.tokenize(rules)
|> result.map_error(SyntaxError),
)
do_parse(tokens)
}
// Evaluate an expression for the given value of `n` (`input`).
// NOTE(review): truthiness is compared to exactly 1 here; C semantics
// treat any nonzero value as true — verify before reusing this logic.
fn eval_ast(ast: Ast, input: Int) {
case ast {
N -> input
Integer(i) -> i
If(condition, truthy, falsy) -> {
let ast = case eval_ast(condition, input) {
1 -> truthy
_ -> falsy
}
eval_ast(ast, input)
}
Paren(content) -> eval_ast(content, input)
BinaryOperation(operator, lvalue, rvalue) -> {
let lvalue = eval_ast(lvalue, input)
let rvalue = eval_ast(rvalue, input)
case operator {
Equal -> bool_to_int(lvalue == rvalue)
NotEqual -> bool_to_int(lvalue != rvalue)
GreaterThan -> bool_to_int(lvalue > rvalue)
GreaterThanOrEqual -> bool_to_int(lvalue >= rvalue)
LowerThan -> bool_to_int(lvalue < rvalue)
LowerThanOrEqual -> bool_to_int(lvalue <= rvalue)
Remainder -> lvalue % rvalue
And -> bool_to_int(lvalue == 1 && rvalue == 1)
Or -> bool_to_int(lvalue == 1 || rvalue == 1)
}
}
}
}
// 1 for True, 0 for False, mirroring C comparison results.
fn bool_to_int(bool: Bool) {
case bool {
True -> 1
False -> 0
}
}
// Source location carried through the parser loop.
type Location {
NoLocation
Line(Int)
}
// Entry point of the LR parser loop: start in state 0 with empty stacks.
fn do_parse(tokens: List(tokenizer.Token)) {
yeccpars0(tokens, NoLocation, 0, [], [])
}
// Presumably a port of yecc's yeccpars0; simply delegates to yeccpars1.
fn yeccpars0(
tokens: List(tokenizer.Token),
location: Location,
state: Int,
states: List(Int),
vstack: List(tokenizer.Token),
) {
yeccpars1(tokens, location, state, states, vstack)
}
// Feed the next token to the state machine, synthesizing an End token
// (with line 999_999 when no location is known) once input is exhausted.
//
// NOTE(review): `yeccpars2` is not visible in this file view — presumably
// the generated state-dispatch function. The End arms push a
// `#(token, line)` tuple where the token-only arm pushes a bare token;
// that inconsistency is left as-is.
fn yeccpars1(
  tokens: List(tokenizer.Token),
  location: Location,
  state: Int,
  states: List(Int),
  vstack: List(tokenizer.Token),
) {
  case tokens {
    [token, ..rest] ->
      yeccpars2(state, token, states, vstack, token, rest, location)
    [] ->
      case location {
        NoLocation ->
          yeccpars2(
            state,
            tokenizer.End,
            states,
            vstack,
            #(tokenizer.End, 999_999),
            [],
            999_999,
          )
        Line(line) ->
          yeccpars2(
            state,
            tokenizer.End,
            states,
            // Fixed: was `vstac`, an undefined variable.
            vstack,
            #(tokenizer.End, line),
            [],
            line,
          )
      }
  }
}
// Shift the current token and state onto the stacks and continue with the
// next token.
// NOTE(review): the empty-input arm calls `yeccpars2()` with no arguments,
// which cannot typecheck — this hand-written parser was left unfinished
// and is deleted in this commit.
fn yeccpars1_2(state1, state, states, vstack, token0, tokens, location) {
case tokens {
[token, ..rest] ->
yeccpars2(
state,
token,
[state1, ..states],
[token0, ..vstack],
token,
rest,
location,
)
[] -> yeccpars2()
}
}

View file

@ -3,13 +3,9 @@ import gleam/list
import gleam/option
import gleam/string
import kielet/plurals/syntax_error.{SyntaxError}
import nibble/lexer
pub type Token {
Token(type_: TokenType, line: Int)
Int(value: Int, line: Int)
}
pub type TokenType {
N
NPlurals
Plural
@ -29,58 +25,137 @@ pub type TokenType {
LParen
RParen
End
Int(value: Int)
}
pub type TokenType
/// Tokenize a plural-forms string, starting at line 1, column 1.
pub fn tokenize(str: String) {
do_tokenize(string.to_graphemes(str), [], 1, 1)
}
fn do_tokenize(str: List(String), acc: List(Token), line: Int, col: Int) {
fn do_tokenize(
str: List(String),
acc: List(lexer.Token(Token)),
line: Int,
col: Int,
) {
case str {
[] -> Ok(list.reverse([Token(End, line), ..acc]))
[] -> Ok(list.reverse([to_nibble(End, "", line, col), ..acc]))
["n", "p", "l", "u", "r", "a", "l", "s", ..rest] ->
do_tokenize(rest, [Token(NPlurals, line), ..acc], line, col + 8)
do_tokenize(
rest,
[to_nibble(NPlurals, "nplurals", line, col), ..acc],
line,
col + 8,
)
["p", "l", "u", "r", "a", "l", ..rest] ->
do_tokenize(rest, [Token(Plural, line), ..acc], line, col + 6)
["n", ..rest] -> do_tokenize(rest, [Token(N, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(Plural, "plural", line, col), ..acc],
line,
col + 6,
)
["n", ..rest] ->
do_tokenize(rest, [to_nibble(N, "n", line, col), ..acc], line, col + 1)
["\\", "\n", ..rest] -> do_tokenize(rest, acc, line + 1, 1)
["\n", ..rest] -> do_tokenize(rest, acc, line + 1, 1)
[" ", ..rest] -> do_tokenize(rest, acc, line, col + 1)
["=", "=", ..rest] ->
do_tokenize(rest, [Token(Equals, line), ..acc], line, col + 2)
do_tokenize(
rest,
[to_nibble(Equals, "==", line, col), ..acc],
line,
col + 2,
)
["!", "=", ..rest] ->
do_tokenize(rest, [Token(NotEquals, line), ..acc], line, col + 2)
do_tokenize(
rest,
[to_nibble(NotEquals, "!=", line, col), ..acc],
line,
col + 2,
)
[">", "=", ..rest] ->
do_tokenize(
rest,
[Token(GreaterThanOrEquals, line), ..acc],
[to_nibble(GreaterThanOrEquals, ">=", line, col), ..acc],
line,
col + 2,
)
["<", "=", ..rest] ->
do_tokenize(rest, [Token(LowerThanOrEquals, line), ..acc], line, col + 2)
do_tokenize(
rest,
[to_nibble(LowerThanOrEquals, "<=", line, col), ..acc],
line,
col + 2,
)
[">", ..rest] ->
do_tokenize(rest, [Token(GreaterThan, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(GreaterThan, ">", line, col), ..acc],
line,
col + 1,
)
["<", ..rest] ->
do_tokenize(rest, [Token(LowerThan, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(LowerThan, "<", line, col), ..acc],
line,
col + 1,
)
["=", ..rest] ->
do_tokenize(rest, [Token(Assignment, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(Assignment, "=", line, col), ..acc],
line,
col + 1,
)
["?", ..rest] ->
do_tokenize(rest, [Token(Ternary, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(Ternary, "?", line, col), ..acc],
line,
col + 1,
)
[":", ..rest] ->
do_tokenize(rest, [Token(TernaryElse, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(TernaryElse, ":", line, col), ..acc],
line,
col + 1,
)
["%", ..rest] ->
do_tokenize(rest, [Token(Remainder, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(Remainder, "%", line, col), ..acc],
line,
col + 1,
)
["|", "|", ..rest] ->
do_tokenize(rest, [Token(Or, line), ..acc], line, col + 2)
do_tokenize(rest, [to_nibble(Or, "||", line, col), ..acc], line, col + 2)
["&", "&", ..rest] ->
do_tokenize(rest, [Token(And, line), ..acc], line, col + 2)
do_tokenize(rest, [to_nibble(And, "&&", line, col), ..acc], line, col + 2)
[";", ..rest] ->
do_tokenize(rest, [Token(Semicolon, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(Semicolon, ";", line, col), ..acc],
line,
col + 1,
)
[")", ..rest] ->
do_tokenize(rest, [Token(RParen, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(RParen, ")", line, col), ..acc],
line,
col + 1,
)
["(", ..rest] ->
do_tokenize(rest, [Token(LParen, line), ..acc], line, col + 1)
do_tokenize(
rest,
[to_nibble(LParen, "(", line, col), ..acc],
line,
col + 1,
)
[digit, ..rest] if digit == "0"
|| digit == "1"
|| digit == "2"
@ -102,7 +177,7 @@ fn do_tokenize(str: List(String), acc: List(Token), line: Int, col: Int) {
fn read_digits(
str: List(String),
acc: List(Token),
acc: List(lexer.Token(Token)),
line: Int,
col: Int,
digit_acc: String,
@ -125,7 +200,7 @@ fn read_digits(
Ok(int) ->
do_tokenize(
other,
[Int(int, line), ..acc],
[to_nibble(Int(int), digit_acc, line, col), ..acc],
line,
col + string.length(digit_acc),
)
@ -138,3 +213,16 @@ fn read_digits(
}
}
}
/// Wrap a plain token into a nibble lexer token, attaching a single-line
/// source span whose width is the lexeme's length.
fn to_nibble(token: Token, lexeme: String, line: Int, col: Int) {
  let width = string.length(lexeme)
  lexer.Token(
    span: lexer.Span(
      row_start: line,
      row_end: line,
      col_start: col,
      col_end: col + width,
    ),
    lexeme: lexeme,
    value: token,
  )
}

184
test/plurals_test.gleam Normal file
View file

@ -0,0 +1,184 @@
import gleeunit/should
import kielet/plurals
import kielet/plurals/evaluator
/// Parentheses around the whole expression must parse.
pub fn parenthesized_test() {
let input = "nplurals=1; plural=(n==1);"
should.be_ok(plurals.parse(input))
}
/// A full ternary inside parentheses must parse.
pub fn parenthesized_with_ternary_test() {
let input = "nplurals=1; plural=(n==1 ? 1 : 0);"
should.be_ok(plurals.parse(input))
}
/// The trailing semicolon is optional.
pub fn no_ending_semicolon_test() {
let input = "nplurals=1; plural=0"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 1)
}
/// Plain newlines inside the header are allowed.
pub fn linefeed_test() {
let input =
"nplurals=1;
plural=0;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 1)
}
/// Backslash-escaped newlines are allowed too.
pub fn linefeed_with_backslash_test() {
let input =
"nplurals=1; \\
plural=0;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 1)
}
/// Single plural form: every count maps to index 0.
pub fn one_test() {
let input = "nplurals=1; plural=0;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 1)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 0), 0)
should.equal(evaluator.eval(plurals.algorithm, 8), 0)
}
/// Two forms, plural for everything except exactly one (English-style).
pub fn two_test() {
let input = "nplurals=2; plural=n != 1;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 2)
should.equal(evaluator.eval(plurals.algorithm, 0), 1)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 2), 1)
}
/// Two forms, singular for both 0 and 1 (French-style).
pub fn two_french_test() {
let input = "nplurals=2; plural=n>1;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 2)
should.equal(evaluator.eval(plurals.algorithm, 0), 0)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 2), 1)
}
/// Latvian: mixes `%`, `&&`, and a chained ternary.
pub fn latvian_test() {
let input = "nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 0), 2)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 2), 1)
should.equal(evaluator.eval(plurals.algorithm, 111), 1)
should.equal(evaluator.eval(plurals.algorithm, 112), 1)
should.equal(evaluator.eval(plurals.algorithm, 31), 0)
should.equal(evaluator.eval(plurals.algorithm, 9), 1)
}
/// Irish: three forms via nested ternaries.
pub fn gaeilge_test() {
let input = "nplurals=3; plural=n==1 ? 0 : n==2 ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 2), 1)
should.equal(evaluator.eval(plurals.algorithm, 0), 2)
should.equal(evaluator.eval(plurals.algorithm, 10), 2)
}
/// Irish alternate: five forms with `>=`/`<=` range checks.
pub fn gaeilge_alternate_test() {
let input =
"nplurals=5; plural=n == 1 ? 0 : n == 2 ? 1 : n >= 3 && n <= 6 ? 2 : n >= 7 && n <= 10 ? 3 : 4;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 5)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 2), 1)
should.equal(evaluator.eval(plurals.algorithm, 4), 2)
should.equal(evaluator.eval(plurals.algorithm, 10), 3)
should.equal(evaluator.eval(plurals.algorithm, 133), 4)
}
/// Romanian: nested parentheses plus a backslash line continuation.
pub fn romanian_test() {
let input =
"nplurals=3; \\
plural=n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 0), 1)
should.equal(evaluator.eval(plurals.algorithm, 119), 1)
should.equal(evaluator.eval(plurals.algorithm, 121), 2)
should.equal(evaluator.eval(plurals.algorithm, 19), 1)
should.equal(evaluator.eval(plurals.algorithm, 80), 2)
}
/// Lithuanian: three forms keyed on n%10 and n%100.
pub fn lithuanian_test() {
  let input =
    "nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2;"
  // `let assert Ok` already fails the test on a parse error; the previous
  // extra `should.be_ok(plurals.parse(input))` parsed the same input twice.
  let assert Ok(plurals) = plurals.parse(input)
  should.equal(plurals.total, 3)
  should.equal(evaluator.eval(plurals.algorithm, 81), 0)
  should.equal(evaluator.eval(plurals.algorithm, 872), 1)
  should.equal(evaluator.eval(plurals.algorithm, 112), 2)
}
/// Ukrainian: Slavic three-form rule spread over multiple lines.
pub fn ukrainian_test() {
let input =
"nplurals=3;
plural=n%10==1 && n%100!=11 ? 0 :
n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 21), 0)
should.equal(evaluator.eval(plurals.algorithm, 42), 1)
should.equal(evaluator.eval(plurals.algorithm, 11), 2)
}
/// Czech: parenthesized conditions in a chained ternary.
pub fn czech_test() {
let input =
"nplurals=3;
plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 3), 1)
should.equal(evaluator.eval(plurals.algorithm, 12), 2)
}
/// Polish: Slavic three-form rule with a plain `n==1` singular test.
pub fn polish_test() {
let input =
"nplurals=3;
plural=n==1 ? 0 :
n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 3)
should.equal(evaluator.eval(plurals.algorithm, 1), 0)
should.equal(evaluator.eval(plurals.algorithm, 102), 1)
should.equal(evaluator.eval(plurals.algorithm, 713), 2)
}
/// Slovenian: four forms keyed on n%100.
pub fn slovenian_test() {
let input =
"nplurals=4;
plural=n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 4)
should.equal(evaluator.eval(plurals.algorithm, 320), 3)
should.equal(evaluator.eval(plurals.algorithm, 101), 0)
should.equal(evaluator.eval(plurals.algorithm, 202), 1)
should.equal(evaluator.eval(plurals.algorithm, 303), 2)
}
/// Arabic: six forms, the longest rule exercised here.
pub fn arabic_test() {
let input =
"nplurals=6;
plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3
: n%100>=11 ? 4 : 5;"
let assert Ok(plurals) = plurals.parse(input)
should.equal(plurals.total, 6)
should.equal(evaluator.eval(plurals.algorithm, 0), 0)
should.equal(evaluator.eval(plurals.algorithm, 1), 1)
should.equal(evaluator.eval(plurals.algorithm, 2), 2)
should.equal(evaluator.eval(plurals.algorithm, 505), 3)
should.equal(evaluator.eval(plurals.algorithm, 733), 4)
should.equal(evaluator.eval(plurals.algorithm, 101), 5)
}