1047 OBJ myClass; |
1047 OBJ myClass; |
1048 |
1048 |
1049 myClass = __qClass(self); |
1049 myClass = __qClass(self); |
1050 if ((__ClassInstPtr(myClass)->c_ninstvars) == __MKSMALLINT(0)) { |
1050 if ((__ClassInstPtr(myClass)->c_ninstvars) == __MKSMALLINT(0)) { |
1051 |
1051 |
1052 actualSize = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1052 actualSize = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1053 index = 0; |
1053 index = 0; |
1054 |
1054 |
1055 if (index < actualSize) { |
1055 if (index < actualSize) { |
1056 if (__isBlockLike(aBlock) |
1056 if (__isBlockLike(aBlock) |
1057 && (__BlockInstPtr(aBlock)->b_nargs == __MKSMALLINT(2))) { |
1057 && (__BlockInstPtr(aBlock)->b_nargs == __MKSMALLINT(2))) { |
1058 { |
1058 { |
1059 /* |
1059 /* |
1060 * the most common case: a static compiled block, with home on the stack ... |
1060 * the most common case: a static compiled block, with home on the stack ... |
1061 */ |
1061 */ |
1062 REGISTER OBJFUNC codeVal; |
1062 REGISTER OBJFUNC codeVal; |
1063 |
1063 |
1064 if (((codeVal = __BlockInstPtr(aBlock)->b_code) != (OBJFUNC)nil) |
1064 if (((codeVal = __BlockInstPtr(aBlock)->b_code) != (OBJFUNC)nil) |
1065 #ifdef PARANOIA |
1065 #ifdef PARANOIA |
1066 && (! ((INT)(__BlockInstPtr(aBlock)->b_flags) & __MASKSMALLINT(F_DYNAMIC))) |
1066 && (! ((INT)(__BlockInstPtr(aBlock)->b_flags) & __MASKSMALLINT(F_DYNAMIC))) |
1067 #endif |
1067 #endif |
1068 ) { |
1068 ) { |
1069 |
1069 |
1070 #ifdef NEW_BLOCK_CALL |
1070 #ifdef NEW_BLOCK_CALL |
1071 # define BLOCK_ARG aBlock |
1071 # define BLOCK_ARG aBlock |
1072 #else |
1072 #else |
1073 # define BLOCK_ARG rHome |
1073 # define BLOCK_ARG rHome |
1074 REGISTER OBJ rHome; |
1074 REGISTER OBJ rHome; |
1075 |
1075 |
1076 rHome = __BlockInstPtr(aBlock)->b_home; |
1076 rHome = __BlockInstPtr(aBlock)->b_home; |
1077 if ((rHome == nil) || (__qSpace(rHome) >= STACKSPACE)) |
1077 if ((rHome == nil) || (__qSpace(rHome) >= STACKSPACE)) |
1078 #endif |
1078 #endif |
1079 { |
1079 { |
1080 OBJ el; |
1080 OBJ el; |
1081 |
1081 |
1082 while (index < actualSize) { |
1082 while (index < actualSize) { |
1083 |
1083 |
1084 el = __InstPtr(self)->i_instvars[index]; |
1084 el = __InstPtr(self)->i_instvars[index]; |
1085 if (InterruptPending != nil) goto interruptX; |
1085 if (InterruptPending != nil) goto interruptX; |
1086 continueX: |
1086 continueX: |
1087 index++; |
1087 index++; |
1088 (*codeVal)(BLOCK_ARG, __MKSMALLINT(index), el); |
1088 (*codeVal)(BLOCK_ARG, __MKSMALLINT(index), el); |
1089 } |
1089 } |
1090 RETURN (self); |
1090 RETURN (self); |
1091 |
1091 |
1092 interruptX: |
1092 interruptX: |
1093 __interruptL(@line); |
1093 __interruptL(@line); |
1094 el = __InstPtr(self)->i_instvars[index]; |
1094 el = __InstPtr(self)->i_instvars[index]; |
1095 goto continueX; |
1095 goto continueX; |
1096 } |
1096 } |
1097 } |
1097 } |
1098 } |
1098 } |
1099 |
1099 |
1100 /* |
1100 /* |
1101 * sorry, must check code-pointer in the loop |
1101 * sorry, must check code-pointer in the loop |
1102 * it could be recompiled or flushed |
1102 * it could be recompiled or flushed |
1103 */ |
1103 */ |
1104 # undef BLOCK_ARG |
1104 # undef BLOCK_ARG |
1105 #ifdef NEW_BLOCK_CALL |
1105 #ifdef NEW_BLOCK_CALL |
1106 # define BLOCK_ARG aBlock |
1106 # define BLOCK_ARG aBlock |
1107 # define IBLOCK_ARG nil |
1107 # define IBLOCK_ARG nil |
1108 #else |
1108 #else |
1109 # define BLOCK_ARG (__BlockInstPtr(aBlock)->b_home) |
1109 # define BLOCK_ARG (__BlockInstPtr(aBlock)->b_home) |
1110 # define IBLOCK_ARG (__BlockInstPtr(aBlock)->b_home) |
1110 # define IBLOCK_ARG (__BlockInstPtr(aBlock)->b_home) |
1111 #endif |
1111 #endif |
1112 |
1112 |
1113 while (index < actualSize) { |
1113 while (index < actualSize) { |
1114 REGISTER OBJFUNC codeVal; |
1114 REGISTER OBJFUNC codeVal; |
1115 OBJ el; |
1115 OBJ el; |
1116 |
1116 |
1117 if (InterruptPending != nil) __interruptL(@line); |
1117 if (InterruptPending != nil) __interruptL(@line); |
1118 |
1118 |
1119 el = __InstPtr(self)->i_instvars[index]; |
1119 el = __InstPtr(self)->i_instvars[index]; |
1120 index++; |
1120 index++; |
1121 if ((codeVal = __BlockInstPtr(aBlock)->b_code) != (OBJFUNC)nil) { |
1121 if ((codeVal = __BlockInstPtr(aBlock)->b_code) != (OBJFUNC)nil) { |
1122 (*codeVal)(BLOCK_ARG, __MKSMALLINT(index), el); |
1122 (*codeVal)(BLOCK_ARG, __MKSMALLINT(index), el); |
1123 } else { |
1123 } else { |
1124 if (__BlockInstPtr(aBlock)->b_bytecodes != nil) { |
1124 if (__BlockInstPtr(aBlock)->b_bytecodes != nil) { |
1125 /* |
1125 /* |
1126 * arg is a compiled block with bytecode - |
1126 * arg is a compiled block with bytecode - |
1127 * directly call interpreter without going through Block>>value |
1127 * directly call interpreter without going through Block>>value |
1128 */ |
1128 */ |
1129 #ifdef PASS_ARG_POINTER |
1129 #ifdef PASS_ARG_POINTER |
1130 { |
1130 { |
1131 OBJ t[2]; |
1131 OBJ t[2]; |
1132 |
1132 |
1133 t[0] = __MKSMALLINT(index); |
1133 t[0] = __MKSMALLINT(index); |
1134 t[1] = el; |
1134 t[1] = el; |
1135 |
1135 |
1136 __interpret(aBlock, 2, nil, IBLOCK_ARG, nil, nil, t); |
1136 __interpret(aBlock, 2, nil, IBLOCK_ARG, nil, nil, t); |
1137 } |
1137 } |
1138 #else |
1138 #else |
1139 __interpret(aBlock, 2, nil, IBLOCK_ARG, nil, nil, __MKSMALLINT(index), el); |
1139 __interpret(aBlock, 2, nil, IBLOCK_ARG, nil, nil, __MKSMALLINT(index), el); |
1140 #endif |
1140 #endif |
1141 } else { |
1141 } else { |
1142 (*val2.ilc_func)(aBlock, |
1142 (*val2.ilc_func)(aBlock, |
1143 @symbol(value:value:), |
1143 @symbol(value:value:), |
1144 nil, &val2, |
1144 nil, &val2, |
1145 __MKSMALLINT(index), |
1145 __MKSMALLINT(index), |
1146 el); |
1146 el); |
1147 } |
1147 } |
1148 } |
1148 } |
1149 } |
1149 } |
1150 |
1150 |
1151 # undef BLOCK_ARG |
1151 # undef BLOCK_ARG |
1152 # undef IBLOCK_ARG |
1152 # undef IBLOCK_ARG |
1153 |
1153 |
1154 RETURN (self ); |
1154 RETURN (self ); |
1155 } |
1155 } |
1156 |
1156 |
1157 /* |
1157 /* |
1158 * not a block - send it #value: |
1158 * not a block - send it #value: |
1159 */ |
1159 */ |
1160 while (index < actualSize) { |
1160 while (index < actualSize) { |
1161 OBJ el; |
1161 OBJ el; |
1162 |
1162 |
1163 if (InterruptPending != nil) __interruptL(@line); |
1163 if (InterruptPending != nil) __interruptL(@line); |
1164 |
1164 |
1165 el = __InstPtr(self)->i_instvars[index]; |
1165 el = __InstPtr(self)->i_instvars[index]; |
1166 index++; |
1166 index++; |
1167 (*val2.ilc_func)(aBlock, |
1167 (*val2.ilc_func)(aBlock, |
1168 @symbol(value:value:), |
1168 @symbol(value:value:), |
1169 nil, &val2, |
1169 nil, &val2, |
1170 __MKSMALLINT(index), |
1170 __MKSMALLINT(index), |
1171 el); |
1171 el); |
1172 } |
1172 } |
1173 RETURN ( self ); |
1173 RETURN ( self ); |
1174 } |
1174 } |
1175 } |
1175 } |
1176 %}. |
1176 %}. |
1177 ^ super keysAndValuesDo:aBlock |
1177 ^ super keysAndValuesDo:aBlock |
1178 ! |
1178 ! |
1179 |
1179 |
!Array methodsFor:'printing & storing'!

displayString
    "return a printed representation of the receiver for displaying.
     For literal arrays, build the '#( ... )' form element-by-element;
     output is capped (writeLimit:5000) so huge arrays do not produce
     absurdly long strings - on overrun, the string is ended with ' ...'.
     Non-literal arrays are handled by the inherited implementation."

    |s|

    (self isLiteral) ifTrue:[
        s := WriteStream on:String new.
        "limit the amount of output generated for very large arrays"
        s writeLimit:5000.

        WriteStream writeErrorSignal handle:[:ex |
            "write limit reached - remove the limit and append an ellipsis"
            s writeLimit:nil.
            s nextPutAll:' ...'
        ] do:[
            s nextPutAll:'#('.
            self
                do:[:each | s nextPutAll:each displayString.]
                separatedBy:[s space]
        ].
        s writeLimit:nil.
        s nextPutAll:')'.
        ^ s contents
    ].
    ^ super displayString

    "
     #(1 2 3 4) displayString
     #(1 2 3 4) printString
     (Array new:10000) displayString
    "

    "Modified: 12.9.1997 / 22:03:18 / cg"
!
1559 |
1557 |
1560 storeOn:aStream |
1558 storeOn:aStream |
1561 "append a printed representation of the receiver to aStream, |
1559 "append a printed representation of the receiver to aStream, |
1562 which allows reconstructing it via readFrom:. |
1560 which allows reconstructing it via readFrom:. |
1692 REGISTER OBJ *op; |
1690 REGISTER OBJ *op; |
1693 REGISTER unsigned int nIndex; |
1691 REGISTER unsigned int nIndex; |
1694 int nInsts; |
1692 int nInsts; |
1695 |
1693 |
1696 if (__isSmallInteger(start)) { |
1694 if (__isSmallInteger(start)) { |
1697 index = __intVal(start) - 1; |
1695 index = __intVal(start) - 1; |
1698 if (index >= 0) { |
1696 if (index >= 0) { |
1699 nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars); |
1697 nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars); |
1700 index += nInsts; |
1698 index += nInsts; |
1701 nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1699 nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1702 el = anElement; |
1700 el = anElement; |
1703 op = & (__InstPtr(self)->i_instvars[index]); |
1701 op = & (__InstPtr(self)->i_instvars[index]); |
1704 |
1702 |
1705 #if defined(memsrch4) |
1703 #if defined(memsrch4) |
1706 if (index < nIndex) { |
1704 if (index < nIndex) { |
1707 OBJ *p; |
1705 OBJ *p; |
1708 |
1706 |
1709 p = memsrch4(op, (INT)el, (nIndex - index)); |
1707 p = memsrch4(op, (INT)el, (nIndex - index)); |
1710 if (p) { |
1708 if (p) { |
1711 index += (p - op + 1); |
1709 index += (p - op + 1); |
1712 RETURN ( __MKSMALLINT(index) ); |
1710 RETURN ( __MKSMALLINT(index) ); |
1713 } |
1711 } |
1714 } |
1712 } |
1715 #else |
1713 #else |
1716 |
1714 |
1717 # if defined(UNROLL_LOOPS) |
1715 # if defined(UNROLL_LOOPS) |
1718 { |
1716 { |
1719 /* |
1717 /* |
1720 * dont argue about those gotos below - they speed up that thing by 30%; |
1718 * dont argue about those gotos below - they speed up that thing by 30%; |
1721 * its better to exit the loops below with a goto, |
1719 * its better to exit the loops below with a goto, |
1722 * since the generated code will then be: |
1720 * since the generated code will then be: |
1723 * compare |
1721 * compare |
1724 * branch-on-equal found |
1722 * branch-on-equal found |
1725 * |
1723 * |
1726 * otherwise (with return as if-statement), we get: |
1724 * otherwise (with return as if-statement), we get: |
1727 * compare |
1725 * compare |
1728 * branch-on-not-equal skipLabel |
1726 * branch-on-not-equal skipLabel |
1729 * move-to-return-register true |
1727 * move-to-return-register true |
1730 * goto return-label |
1728 * goto return-label |
1731 * skipLabel |
1729 * skipLabel |
1732 * |
1730 * |
1733 * therefore, WITH the so-much-blamed goto, we only branch |
1731 * therefore, WITH the so-much-blamed goto, we only branch |
1734 * when found; without the goto, we branch always. |
1732 * when found; without the goto, we branch always. |
1735 * Pipelined CPUs do usually not like taken branches. |
1733 * Pipelined CPUs do usually not like taken branches. |
1736 */ |
1734 */ |
1737 |
1735 |
1738 unsigned int i8; |
1736 unsigned int i8; |
1739 |
1737 |
1740 while ((i8 = index + 8) < nIndex) { |
1738 while ((i8 = index + 8) < nIndex) { |
1741 if (op[0] == el) goto found1; |
1739 if (op[0] == el) goto found1; |
1742 if (op[1] == el) goto found2; |
1740 if (op[1] == el) goto found2; |
1743 if (op[2] == el) goto found3; |
1741 if (op[2] == el) goto found3; |
1744 if (op[3] == el) goto found4; |
1742 if (op[3] == el) goto found4; |
1745 if (op[4] == el) goto found5; |
1743 if (op[4] == el) goto found5; |
1746 if (op[5] == el) goto found6; |
1744 if (op[5] == el) goto found6; |
1747 if (op[6] == el) goto found7; |
1745 if (op[6] == el) goto found7; |
1748 if (op[7] == el) goto found8; |
1746 if (op[7] == el) goto found8; |
1749 index = i8; |
1747 index = i8; |
1750 op += 8; |
1748 op += 8; |
1751 } |
1749 } |
1752 if (0) { |
1750 if (0) { |
1753 found1: |
1751 found1: |
1754 RETURN ( __MKSMALLINT(index + 1 - nInsts) ); |
1752 RETURN ( __MKSMALLINT(index + 1 - nInsts) ); |
1755 found2: |
1753 found2: |
1756 RETURN ( __MKSMALLINT(index + 2 - nInsts) ); |
1754 RETURN ( __MKSMALLINT(index + 2 - nInsts) ); |
1757 found3: |
1755 found3: |
1758 RETURN ( __MKSMALLINT(index + 3 - nInsts) ); |
1756 RETURN ( __MKSMALLINT(index + 3 - nInsts) ); |
1759 found4: |
1757 found4: |
1760 RETURN ( __MKSMALLINT(index + 4 - nInsts) ); |
1758 RETURN ( __MKSMALLINT(index + 4 - nInsts) ); |
1761 found5: |
1759 found5: |
1762 RETURN ( __MKSMALLINT(index + 5 - nInsts) ); |
1760 RETURN ( __MKSMALLINT(index + 5 - nInsts) ); |
1763 found6: |
1761 found6: |
1764 RETURN ( __MKSMALLINT(index + 6 - nInsts) ); |
1762 RETURN ( __MKSMALLINT(index + 6 - nInsts) ); |
1765 found7: |
1763 found7: |
1766 RETURN ( __MKSMALLINT(index + 7 - nInsts) ); |
1764 RETURN ( __MKSMALLINT(index + 7 - nInsts) ); |
1767 found8: |
1765 found8: |
1768 RETURN ( __MKSMALLINT(index + 8 - nInsts) ); |
1766 RETURN ( __MKSMALLINT(index + 8 - nInsts) ); |
1769 } |
1767 } |
1770 } |
1768 } |
1771 # endif /* UNROLLED_LOOPS */ |
1769 # endif /* UNROLLED_LOOPS */ |
1772 |
1770 |
1773 while (index++ < nIndex) { |
1771 while (index++ < nIndex) { |
1774 if (*op++ == el) goto found0; |
1772 if (*op++ == el) goto found0; |
1775 } |
1773 } |
1776 |
1774 |
1777 if (0) { |
1775 if (0) { |
1778 found0: |
1776 found0: |
1779 RETURN ( __MKSMALLINT(index - nInsts) ); |
1777 RETURN ( __MKSMALLINT(index - nInsts) ); |
1780 } |
1778 } |
1781 #endif /* no memsrch */ |
1779 #endif /* no memsrch */ |
1782 } |
1780 } |
1783 RETURN ( __MKSMALLINT(0) ); |
1781 RETURN ( __MKSMALLINT(0) ); |
1784 } |
1782 } |
1785 %}. |
1783 %}. |
1786 ^ self indexNotInteger |
1784 ^ self indexNotInteger |
1787 ! |
1785 ! |
1788 |
1786 |
1799 REGISTER unsigned int lastIndex; |
1797 REGISTER unsigned int lastIndex; |
1800 unsigned int nIndex; |
1798 unsigned int nIndex; |
1801 int nInsts; |
1799 int nInsts; |
1802 |
1800 |
1803 if (__bothSmallInteger(start, stop)) { |
1801 if (__bothSmallInteger(start, stop)) { |
1804 index = __intVal(start) - 1; |
1802 index = __intVal(start) - 1; |
1805 if (index >= 0) { |
1803 if (index >= 0) { |
1806 nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars); |
1804 nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars); |
1807 index += nInsts; |
1805 index += nInsts; |
1808 lastIndex = nInsts + __intVal(stop); |
1806 lastIndex = nInsts + __intVal(stop); |
1809 nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1807 nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE); |
1810 if (nIndex < lastIndex) { |
1808 if (nIndex < lastIndex) { |
1811 lastIndex = nIndex; |
1809 lastIndex = nIndex; |
1812 } |
1810 } |
1813 el = anElement; |
1811 el = anElement; |
1814 op = & (__InstPtr(self)->i_instvars[index]); |
1812 op = & (__InstPtr(self)->i_instvars[index]); |
1815 |
1813 |
1816 #if defined(memsrch4) |
1814 #if defined(memsrch4) |
1817 if (index < lastIndex) { |
1815 if (index < lastIndex) { |
1818 OBJ *p; |
1816 OBJ *p; |
1819 |
1817 |
1820 p = memsrch4(op, (INT)el, (lastIndex - index)); |
1818 p = memsrch4(op, (INT)el, (lastIndex - index)); |
1821 if (p) { |
1819 if (p) { |
1822 index += (p - op + 1); |
1820 index += (p - op + 1); |
1823 RETURN ( __MKSMALLINT(index) ); |
1821 RETURN ( __MKSMALLINT(index) ); |
1824 } |
1822 } |
1825 } |
1823 } |
1826 #else |
1824 #else |
1827 |
1825 |
1828 # if defined(UNROLL_LOOPS) |
1826 # if defined(UNROLL_LOOPS) |
1829 { |
1827 { |
1830 unsigned int i8; |
1828 unsigned int i8; |
1831 |
1829 |
1832 while ((i8 = index + 8) < lastIndex) { |
1830 while ((i8 = index + 8) < lastIndex) { |
1833 if (op[0] == el) goto found1; |
1831 if (op[0] == el) goto found1; |
1834 if (op[1] == el) goto found2; |
1832 if (op[1] == el) goto found2; |
1835 if (op[2] == el) goto found3; |
1833 if (op[2] == el) goto found3; |
1836 if (op[3] == el) goto found4; |
1834 if (op[3] == el) goto found4; |
1837 if (op[4] == el) goto found5; |
1835 if (op[4] == el) goto found5; |
1838 if (op[5] == el) goto found6; |
1836 if (op[5] == el) goto found6; |
1839 if (op[6] == el) goto found7; |
1837 if (op[6] == el) goto found7; |
1840 if (op[7] == el) goto found8; |
1838 if (op[7] == el) goto found8; |
1841 index = i8; |
1839 index = i8; |
1842 op += 8; |
1840 op += 8; |
1843 } |
1841 } |
1844 |
1842 |
1845 if (0) { |
1843 if (0) { |
1846 found1: |
1844 found1: |
1847 RETURN ( __MKSMALLINT(index + 1 - nInsts) ); |
1845 RETURN ( __MKSMALLINT(index + 1 - nInsts) ); |
1848 found2: |
1846 found2: |
1849 RETURN ( __MKSMALLINT(index + 2 - nInsts) ); |
1847 RETURN ( __MKSMALLINT(index + 2 - nInsts) ); |
1850 found3: |
1848 found3: |
1851 RETURN ( __MKSMALLINT(index + 3 - nInsts) ); |
1849 RETURN ( __MKSMALLINT(index + 3 - nInsts) ); |
1852 found4: |
1850 found4: |
1853 RETURN ( __MKSMALLINT(index + 4 - nInsts) ); |
1851 RETURN ( __MKSMALLINT(index + 4 - nInsts) ); |
1854 found5: |
1852 found5: |
1855 RETURN ( __MKSMALLINT(index + 5 - nInsts) ); |
1853 RETURN ( __MKSMALLINT(index + 5 - nInsts) ); |
1856 found6: |
1854 found6: |
1857 RETURN ( __MKSMALLINT(index + 6 - nInsts) ); |
1855 RETURN ( __MKSMALLINT(index + 6 - nInsts) ); |
1858 found7: |
1856 found7: |
1859 RETURN ( __MKSMALLINT(index + 7 - nInsts) ); |
1857 RETURN ( __MKSMALLINT(index + 7 - nInsts) ); |
1860 found8: |
1858 found8: |
1861 RETURN ( __MKSMALLINT(index + 8 - nInsts) ); |
1859 RETURN ( __MKSMALLINT(index + 8 - nInsts) ); |
1862 } |
1860 } |
1863 } |
1861 } |
1864 # endif /* UNROLL_LOOPS */ |
1862 # endif /* UNROLL_LOOPS */ |
1865 |
1863 |
1866 while (index++ < lastIndex) { |
1864 while (index++ < lastIndex) { |
1867 if (*op++ == el) goto found0; |
1865 if (*op++ == el) goto found0; |
1868 } |
1866 } |
1869 |
1867 |
1870 if (0) { |
1868 if (0) { |
1871 found0: |
1869 found0: |
1872 RETURN ( __MKSMALLINT(index - nInsts) ); |
1870 RETURN ( __MKSMALLINT(index - nInsts) ); |
1873 } |
1871 } |
1874 #endif |
1872 #endif |
1875 } |
1873 } |
1876 RETURN ( __MKSMALLINT(0) ); |
1874 RETURN ( __MKSMALLINT(0) ); |
1877 } |
1875 } |
1878 %}. |
1876 %}. |
1879 ^ self indexNotInteger |
1877 ^ self indexNotInteger |
1880 |
1878 |
1881 ! |
1879 ! |