/*
* call-seq:
* analyzer.token_stream(field_name, input) -> token_stream
*
* Create a new TokenStream to tokenize +input+. The TokenStream created may
* also depend on +field_name+, although this parameter is typically
* ignored.
*
* field_name:: name of the field to be tokenized
* input:: data from the field to be tokenized
*/
static VALUE
frt_re_analyzer_token_stream(VALUE self, VALUE rfield, VALUE rtext)
{
    Analyzer *analyzer;
    TokenStream *stream;
    TokenStream *pinned;

    GET_A(analyzer, self);
    StringValue(rtext);             /* coerce +rtext+ to a Ruby String */
    stream = a_get_ts(analyzer, frt_field(rfield), rs2s(rtext));

    /* Associate the Ruby string with the stream's text pointer in the
     * extension's object table so the two stay linked. */
    object_set(&stream->text, rtext);

    /* Pick the stream that actually holds the text: a Ruby-implemented
     * stream (next == rets_next) holds it directly; otherwise the stream
     * is a TokenFilter and the text lives on its wrapped sub_ts.
     * NOTE(review): keying object_space with ((VALUE)ptr)|1 presumably
     * keeps +rtext+ alive for the lifetime of that stream — confirm. */
    pinned = (stream->next == &rets_next)
                 ? stream
                 : ((TokenFilter *)stream)->sub_ts;
    RETS(pinned)->rtext = rtext;
    rb_hash_aset(object_space, ((VALUE)pinned) | 1, rtext);

    return get_rb_token_stream(stream);
}