@@ -267,7 +267,7 @@ defineSymbol(math, ams, textord, "\u29eb", "\\blacklozenge");
 defineSymbol(math, ams, textord, "\u2605", "\\bigstar");
 defineSymbol(math, ams, textord, "\u2222", "\\sphericalangle", true);
 defineSymbol(math, ams, textord, "\u2201", "\\complement", true);
-// unicode-math maps U+F0 to \matheth. We map to AMS function \eth
+// unicode-math maps U+F0 (ð) to \matheth. We map to AMS function \eth
 defineSymbol(math, ams, textord, "\u00f0", "\\eth", true);
 defineSymbol(math, ams, textord, "\u2571", "\\diagup");
 defineSymbol(math, ams, textord, "\u2572", "\\diagdown");
@@ -739,17 +739,13 @@ for (let i = 0; i < letters.length; i++) {
 // but they are not actually in the font, nor are they supported by the
 // Unicode accent mechanism, so they fall back to Times font and look ugly.
 // TODO(edemaine): Fix this.
-const extraLatin = "ÇÐÞçðþ";
+export const extraLatin = "ÇÐÞçþ";
 for (let i = 0; i < extraLatin.length; i++) {
     const ch = extraLatin.charAt(i);
     defineSymbol(math, main, mathord, ch, ch);
     defineSymbol(text, main, textord, ch, ch);
 }
-const extraLatinMath = "Åå";
-for (let i = 0; i < extraLatinMath.length; i++) {
-    const ch = extraLatinMath.charAt(i);
-    defineSymbol(math, main, mathord, ch, ch);
-}
+defineSymbol(text, main, textord, "ð", "ð");
 
 // Unicode versions of existing characters
 defineSymbol(text, main, textord, "\u2013", "–");
0 commit comments