Fixed up Keywords

Updated and reorganized the keyword definitions;
commented out constants that apparently don't exist.
Fixed Strings not highlighting by borrowing missing code from Phoenix.

Also, this fixes 1020 being treated as UnknownAltitude when not on SL grid
and cleans up the horrible tabbing/spacing mud in llfloateravatarlist.cpp
This commit is contained in:
Lirusaito
2012-06-24 01:52:56 -04:00
parent 48a2d0497b
commit 26810ff16a
5 changed files with 596 additions and 308 deletions

View File

@@ -42,12 +42,38 @@
const U32 KEYWORD_FILE_CURRENT_VERSION = 2;
inline BOOL LLKeywordToken::isHead(const llwchar* s) const
inline BOOL LLKeywordToken::isHead(const llwchar* s, bool search_end_c_comment) const
{
	// Returns TRUE when the text at 's' begins with this token's string (mToken).
	//
	// s                     : wide-character text to test; must have at least
	//                         mToken.size() readable characters.
	// search_end_c_comment  : when true and this token is the two-character
	//                         "/*" start delimiter, match the REVERSED "*/"
	//                         end delimiter instead — used while scanning
	//                         forward for the close of a C-style block comment.
	//
	// Character-by-character compare; per the original note, this is much
	// faster than constructing strings and using string compare.
	const llwchar* t = mToken.c_str();
	S32 len = mToken.size();
	if (search_end_c_comment && len == 2 && t[0] == '/' && t[1] == '*')
	{
		// Special case for the C-like "*/" end-comment token.
		if (s[0] == '*' && s[1] == '/')
		{
			return TRUE;
		}
		else
		{
			return FALSE;
		}
	}
	for (S32 i = 0; i < len; i++)
	{
		if (s[i] != t[i])
		{
			return FALSE;
		}
	}
	return TRUE;
}
inline BOOL LLKeywordToken::isTail(const llwchar* s) const
{
BOOL res = TRUE;
const llwchar* t = mDelimiter.c_str();
S32 len = mDelimiter.size();
for (S32 i=0; i<len; i++)
{
if (s[i] != t[i])
@@ -112,6 +138,7 @@ BOOL LLKeywords::loadFromFile( const std::string& filename )
std::string SOL_LINE("[line ");
std::string SOL_ONE_SIDED_DELIMITER("[one_sided_delimiter ");
std::string SOL_TWO_SIDED_DELIMITER("[two_sided_delimiter ");
std::string SOL_TWO_SIDED_DELIMITER_ESC("[two_sided_delimiter_esc ");
LLColor3 cur_color( 1, 0, 0 );
LLKeywordToken::TOKEN_TYPE cur_type = LLKeywordToken::WORD;
@@ -143,6 +170,12 @@ BOOL LLKeywords::loadFromFile( const std::string& filename )
cur_type = LLKeywordToken::TWO_SIDED_DELIMITER;
continue;
}
else if( line.find(SOL_TWO_SIDED_DELIMITER_ESC) == 0 )
{
cur_color = readColor( line.substr(SOL_TWO_SIDED_DELIMITER_ESC.size()) );
cur_type = LLKeywordToken::TWO_SIDED_DELIMITER_ESC;
continue;
}
else if( line.find(SOL_ONE_SIDED_DELIMITER) == 0 )
{
cur_color = readColor( line.substr(SOL_ONE_SIDED_DELIMITER.size()) );
@@ -163,6 +196,17 @@ BOOL LLKeywords::loadFromFile( const std::string& filename )
// first word is keyword
std::string keyword = (*token_word_iter);
LLStringUtil::trim(keyword);
// second word may be right delimiter
std::string delimiter;
if (cur_type == LLKeywordToken::TWO_SIDED_DELIMITER || cur_type == LLKeywordToken::TWO_SIDED_DELIMITER_ESC)
{
while (delimiter.length() == 0 && ++token_word_iter != word_tokens.end())
{
delimiter = *token_word_iter;
LLStringUtil::trim(delimiter);
}
}
// following words are tooltip
std::string tool_tip;
@@ -176,11 +220,11 @@ BOOL LLKeywords::loadFromFile( const std::string& filename )
{
// Replace : with \n for multi-line tool tips.
LLStringUtil::replaceChar( tool_tip, ':', '\n' );
addToken(cur_type, keyword, cur_color, tool_tip );
addToken(cur_type, keyword, cur_color, tool_tip, delimiter );
}
else
{
addToken(cur_type, keyword, cur_color, LLStringUtil::null );
addToken(cur_type, keyword, cur_color, LLStringUtil::null, delimiter );
}
}
}
@@ -195,23 +239,26 @@ BOOL LLKeywords::loadFromFile( const std::string& filename )
void LLKeywords::addToken(LLKeywordToken::TOKEN_TYPE type,
const std::string& key_in,
const LLColor3& color,
const std::string& tool_tip_in )
const std::string& tool_tip_in,
const std::string& delimiter_in)
{
LLWString key = utf8str_to_wstring(key_in);
LLWString tool_tip = utf8str_to_wstring(tool_tip_in);
LLWString delimiter = utf8str_to_wstring(delimiter_in);
switch(type)
{
case LLKeywordToken::WORD:
mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip);
mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null);
break;
case LLKeywordToken::LINE:
mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null));
break;
case LLKeywordToken::TWO_SIDED_DELIMITER:
case LLKeywordToken::TWO_SIDED_DELIMITER_ESC:
case LLKeywordToken::ONE_SIDED_DELIMITER:
mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, delimiter));
break;
default:
@@ -249,7 +296,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
const llwchar* base = wtext.c_str();
const llwchar* cur = base;
const llwchar* line = NULL;
//const llwchar* line = NULL;
while( *cur )
{
@@ -265,7 +312,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
}
// Start of a new line
line = cur;
//line = cur;
// Skip white space
while( *cur && isspace(*cur) && (*cur != '\n') )
@@ -342,15 +389,15 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
seg_start = cur - base;
cur += cur_delimiter->getLength();
if( cur_delimiter->getType() == LLKeywordToken::TWO_SIDED_DELIMITER )
//if( cur_delimiter->getType() == LLKeywordToken::TWO_SIDED_DELIMITER )
LLKeywordToken::TOKEN_TYPE type = cur_delimiter->getType();
if( type == LLKeywordToken::TWO_SIDED_DELIMITER || type == LLKeywordToken::TWO_SIDED_DELIMITER_ESC )
{
LLWString str = cur_delimiter->getToken();
std::reverse(str.begin(),str.end()); //Flip the delim around (/* changes to */)
LLKeywordToken reverse_delimiter(cur_delimiter->getType(),cur_delimiter->getColor(),str,cur_delimiter->getToolTip());
while( *cur && !reverse_delimiter.isHead(cur))
//llassert( cur_delimiter->getDelimiter() != NULL );
while( *cur && !cur_delimiter->isTail(cur) )
{
// Check for an escape sequence.
if (*cur == '\\')
if (type == LLKeywordToken::TWO_SIDED_DELIMITER_ESC && *cur == '\\')
{
// Count the number of backslashes.
S32 num_backslashes = 0;
@@ -361,7 +408,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
cur++;
}
// Is the next character the end delimiter?
if (reverse_delimiter.isHead(cur))
if (cur_delimiter->isTail(cur))
{
// If there was an odd number of backslashes, then this delimiter
// does not end the sequence.
@@ -387,7 +434,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
if( *cur )
{
cur += cur_delimiter->getLength();
seg_end = seg_start + between_delimiters + 2 * cur_delimiter->getLength();
seg_end = seg_start + between_delimiters + cur_delimiter->getLength() + cur_delimiter->getLength2();
}
else
{
@@ -420,10 +467,10 @@ void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWS
// check against words
llwchar prev = cur > base ? *(cur-1) : 0;
if( !isalnum( prev ) && (prev != '_') )
if( !isalnum( prev ) && (prev != '_') && (prev != '#'))
{
const llwchar* p = cur;
while( isalnum( *p ) || (*p == '_') )
while( isalnum( *p ) || (*p == '_') || (*p == '#') )
{
p++;
}