Array.st
branch jv
changeset 18017 7fef9e17913f
parent 18011 deb0c3355881
parent 14687 e718f7219911
child 18043 03660093fe98
diff -r 956be83322ff -r 7fef9e17913f Array.st
--- a/Array.st  (18016:956be83322ff)
+++ b/Array.st  (18017:7fef9e17913f)
@@ -98,11 +98,12 @@
      it seems worthwhile to have a specially tuned version here."

 %{  /* NOCONTEXT */

     OBJ newobj;
-    unsigned INT instsize, nInstVars;
+    unsigned INT nInstVars;
+    unsigned INT instsize;
     INT nindexedinstvars;
     REGISTER OBJ *op;

     if (__isSmallInteger(anInteger)) {
        nindexedinstvars = __intVal(anInteger);
@@ -157,11 +158,11 @@
 #   if (POINTER_SIZE == 4) && defined(FAST_ARRAY_MEMSET_LONGLONG_UNROLLED)
 #    ifdef INT64
 #     define LONGLONG INT64
 #    else
 #     define LONGLONG long long
-#   endif
+#    endif

                op = __InstPtr(newobj)->i_instvars;
                if (nInstVars > 8) {
                    *op++ = nil;    /* for alignment */
                    nInstVars--;
@@ -188,10 +189,18 @@
                    *((INT64 *)op) = 0;
                    nInstVars -= 2;
                    op += 2;
                }
 #     endif
+               while (nInstVars >= 8) {
+                   nInstVars -= 8;
+                   op[0] = nil; op[1] = nil;
+                   op[2] = nil; op[3] = nil;
+                   op[4] = nil; op[5] = nil;
+                   op[6] = nil; op[7] = nil;
+                   op += 8;
+               }
                while (nInstVars--)
                    *op++ = nil;
 #    endif
 #   endif
 #  endif
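
The inserted loop above fills eight slots per iteration before falling back to the one-at-a-time tail, and the INT64 path clears two 4-byte slots per store. A minimal standalone sketch of the same zero-fill idea, assuming 4-byte pointers (the POINTER_SIZE == 4 guard) and that nil is the all-zero bit pattern, which the existing *((INT64 *)op) = 0 store already relies on; the helper name is hypothetical:

    #include <stdint.h>
    #include <stddef.h>

    /* hypothetical helper, not ST/X API; assumes sizeof(void *) == 4
       and that a null pointer is all-zero bits */
    static void fill_nil(void **slots, size_t n)
    {
        if (((uintptr_t)slots & 7) && n) {  /* align to 8 bytes first,  */
            *slots++ = NULL;                /* as the primitive does    */
            n--;                            /* with one nil store       */
        }
        while (n >= 2) {                    /* one 64-bit store clears  */
            *(uint64_t *)slots = 0;         /* two 4-byte slots         */
            slots += 2;
            n -= 2;
        }
        while (n--)                         /* at most one slot left */
            *slots++ = NULL;
    }

The 8-way unrolling in the diff additionally spreads the loop-counter overhead over eight stores per iteration.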
@@ -208,11 +217,11 @@
                 * have to protect all context stuff
                 * (especially for self, but also for backtrace in case of
                 *  allocation failure)
                 */
                __PROTECT_CONTEXT__
-               newobj = __STX___new(instsize);
+               newobj = __STX___new((INT)instsize);
                __UNPROTECT_CONTEXT__
                if (newobj != nil) {
                    goto ok;
                }
            }
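
Since __STX___new can trigger a garbage collection, the __PROTECT_CONTEXT__ / __UNPROTECT_CONTEXT__ pair around it keeps the current context (and thus self and any backtrace) registered with the collector across the call, as the comment says. A rough sketch of that general pattern, with entirely hypothetical names standing in for the ST/X macros:

    /* hypothetical GC interface, for illustration only */
    extern void gc_push_roots(void **refs, int n);
    extern void gc_pop_roots(void);
    extern void *gc_alloc(long nBytes);      /* may collect and move objects */

    static void *alloc_protected(void **frame_refs, int n, long nBytes)
    {
        void *obj;

        gc_push_roots(frame_refs, n);        /* like __PROTECT_CONTEXT__ */
        obj = gc_alloc(nBytes);
        gc_pop_roots();                      /* like __UNPROTECT_CONTEXT__ */
        return obj;
    }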
@@ -280,13 +289,13 @@
       redefined in a subclass).
      This method is the same as basicAt:."

 %{  /* NOCONTEXT */

-    REGISTER int indx;
+    REGISTER INT indx;
     REGISTER OBJ slf;
-    REGISTER unsigned int nIndex;
+    REGISTER unsigned INT nIndex;
     REGISTER OBJ cls;

     if (__isSmallInteger(index)) {
        indx = __intVal(index) - 1;
        slf = self;
@@ -294,11 +303,11 @@
        nIndex = __BYTES2OBJS__(__qSize(slf) - OHDR_SIZE);
        if ((cls = __qClass(slf)) != Array) {
            if (indx < 0) goto badIndex;
            indx += __intVal(__ClassInstPtr(cls)->c_ninstvars);
        }
-       if ((unsigned)indx < (unsigned)nIndex) {
+       if ((unsigned INT)indx < (unsigned INT)nIndex) {
            RETURN ( __InstPtr(slf)->i_instvars[indx] );
        }
     }
 badIndex: ;
 %}.
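
The change from (unsigned) to (unsigned INT) in the range check matters on 64-bit builds: C's unsigned int stays 32 bits while ST/X's INT is pointer-sized, so the old cast could truncate before comparing. The comparison itself is the classic trick of folding indx >= 0 and indx < nIndex into a single unsigned compare, because a negative index wraps around to a huge unsigned value. A self-contained illustration:

    #include <stdio.h>
    #include <stdint.h>

    /* one compare checks both i >= 0 and i < n */
    static int in_bounds(intptr_t i, uintptr_t n)
    {
        return (uintptr_t)i < n;
    }

    int main(void)
    {
        printf("%d\n", in_bounds(-1, 100));   /* 0: -1 wraps to a huge value */
        printf("%d\n", in_bounds(5, 100));    /* 1 */
        printf("%d\n", in_bounds(100, 100));  /* 0 */
        return 0;
    }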
@@ -314,13 +323,13 @@
       redefined in a subclass).
      This method is the same as basicAt:put:."

 %{  /* NOCONTEXT */

-    REGISTER int indx;
+    REGISTER INT indx;
     REGISTER OBJ slf;
-    REGISTER unsigned int nIndex;
+    REGISTER unsigned INT nIndex;
     REGISTER OBJ cls;

     if (__isSmallInteger(index)) {
        indx = __intVal(index) - 1;
        slf = self;
@@ -328,11 +337,11 @@
        nIndex = __BYTES2OBJS__(__qSize(slf) - OHDR_SIZE);
        if ((cls = __qClass(slf)) != Array) {
            if (indx < 0) goto badIndex;
            indx += __intVal(__ClassInstPtr(cls)->c_ninstvars);
        }
-       if ((unsigned)indx < (unsigned)nIndex) {
+       if ((unsigned INT)indx < (unsigned INT)nIndex) {
            __InstPtr(slf)->i_instvars[indx] = anObject;
            __STORE(slf, anObject);
            RETURN ( anObject );
        }
     }
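
The __STORE(slf, anObject) following the raw slot assignment is apparently the store check of a generational collector: a reference from an older container to a younger object must be recorded so that a minor collection, which scans only the young generation, still finds it. A sketch of that idea with hypothetical names (the exact semantics of __STORE are ST/X-internal):

    typedef struct obj obj_t;

    /* hypothetical GC predicates and remembered set, for illustration */
    extern int  is_old(obj_t *o);
    extern int  is_young(obj_t *o);
    extern void remember(obj_t *container);

    static void store_slot(obj_t **slot, obj_t *container, obj_t *value)
    {
        *slot = value;                       /* the raw store */
        if (is_old(container) && is_young(value))
            remember(container);             /* like __STORE(slf, anObject) */
    }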
@@ -348,13 +357,13 @@
     "return the indexed instance variable with index, anInteger
      - added here for speed"

 %{  /* NOCONTEXT */

-    REGISTER int indx;
+    REGISTER INT indx;
     REGISTER OBJ slf;
-    REGISTER unsigned int nIndex;
+    REGISTER unsigned INT nIndex;
     REGISTER OBJ cls;

     if (__isSmallInteger(index)) {
        indx = __intVal(index) - 1;
        slf = self;
@@ -362,11 +371,11 @@
        nIndex = __BYTES2OBJS__(__qSize(slf) - OHDR_SIZE);
        if ((cls = __qClass(slf)) != Array) {
            if (indx < 0) goto badIndex;
            indx += __intVal(__ClassInstPtr(cls)->c_ninstvars);
        }
-       if ((unsigned)indx < (unsigned)nIndex) {
+       if ((unsigned INT)indx < (unsigned INT)nIndex) {
            RETURN ( __InstPtr(slf)->i_instvars[indx] );
        }
     }
 badIndex: ;
 %}.
@@ -378,13 +387,13 @@
      Returns anObject (sigh).
      - added here for speed"

 %{  /* NOCONTEXT */

-    REGISTER int indx;
+    REGISTER INT indx;
     REGISTER OBJ slf;
-    REGISTER unsigned int nIndex;
+    REGISTER unsigned INT nIndex;
     REGISTER OBJ cls;

     if (__isSmallInteger(index)) {
        indx = __intVal(index) - 1;
        slf = self;
@@ -392,11 +401,11 @@
        nIndex = __BYTES2OBJS__(__qSize(slf) - OHDR_SIZE);
        if ((cls = __qClass(slf)) != Array) {
            if (indx < 0) goto badIndex;
            indx += __intVal(__ClassInstPtr(cls)->c_ninstvars);
        }
-       if ((unsigned)indx < (unsigned)nIndex) {
+       if ((unsigned INT)indx < (unsigned INT)nIndex) {
            __InstPtr(slf)->i_instvars[indx] = anObject;
            __STORE(slf, anObject);
            RETURN ( anObject );
        }
     }
@@ -450,12 +459,12 @@
      (since the inherited copyWith uses replaceFromTo:, which is also
       tuned, it is questionable whether we need this)"

 %{  /* NOCONTEXT */
     OBJ nObj;
-    unsigned int sz;
-    unsigned int nIndex;
+    unsigned INT sz;
+    unsigned INT nIndex;
     REGISTER OBJ *srcP, *dstP;
     REGISTER int spc;

     if (__qClass(self) == Array) {
        sz = __qSize(self) + sizeof(OBJ);
@@ -562,15 +571,15 @@
     | sz "{ Class: SmallInteger }"|

     sz := self size.
 %{
     REGISTER OBJFUNC codeVal;
-    REGISTER int index;
-    unsigned int nIndex;
+    REGISTER INT index;
+    unsigned INT nIndex;
     static struct inlineCache val = _ILC1;
     REGISTER OBJ rHome;
-    int actualSize;
+    INT actualSize;

     {
        OBJ mySelf = self;

        index = __intVal(__ClassInstPtr(__qClass(mySelf))->c_ninstvars);
@@ -779,16 +788,16 @@
      - reimplemented for speed, since this is used by many higher
        level collections"

 %{
     REGISTER OBJFUNC codeVal;
-    REGISTER int index;
+    REGISTER INT index;
     REGISTER OBJ rHome;
     OBJ slf;
-    int nIndex, nInsts;
+    INT nIndex, nInsts;
     static struct inlineCache val = _ILC1;
-    int indexHigh;
+    INT indexHigh;
     OBJ myClass;

     slf = self;
     myClass = __qClass(slf);

@@ -801,11 +810,11 @@
            index += nInsts;
            indexHigh += nInsts;
        }
        if (indexHigh <= nIndex) {
            OBJ __aBlock = aBlock;
-           int n;
+           INT n;

            index--;                            /* 0-based */
            n = indexHigh - index;

            if (__isBlockLike(__aBlock)
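
The __isBlockLike test, together with the OBJFUNC codeVal and the static inline cache declared above, is the usual fast path in these iteration primitives: a plain compiled block is invoked through its code pointer as a direct C call, everything else gets a full value: message send. A sketch of the dispatch, hypothetical names throughout:

    typedef void *obj_t;
    typedef obj_t (*blockfn_t)(obj_t block, obj_t arg);

    /* hypothetical runtime hooks, for illustration */
    extern int       is_plain_block(obj_t b);
    extern blockfn_t block_code(obj_t b);
    extern obj_t     send_value(obj_t recv, obj_t arg);   /* cached send */

    static void each(obj_t *elems, long n, obj_t aBlock)
    {
        long i;

        if (is_plain_block(aBlock)) {
            blockfn_t code = block_code(aBlock);  /* fetch once, outside loop */
            for (i = 0; i < n; i++)
                code(aBlock, elems[i]);           /* direct C call per element */
        } else {
            for (i = 0; i < n; i++)
                send_value(aBlock, elems[i]);     /* full message send */
        }
    }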
@@ -1059,15 +1068,15 @@
      up to (and including) stop in the collection. Step in reverse order.
      - reimplemented for speed"

 %{
     REGISTER OBJFUNC codeVal;
-    REGISTER int index;
+    REGISTER INT index;
     REGISTER OBJ rHome;
-    int nIndex;
+    INT nIndex;
     static struct inlineCache val = _ILC1;
-    int indexLow, indexHigh;
+    INT indexLow, indexHigh;

     if (__bothSmallInteger(start, stop)
      && (__qClass(self) == @global(Array))
      && ((indexLow = __intVal(start)) > 0)) {
        indexHigh = __intVal(stop);
@@ -1146,11 +1155,11 @@
 %{
     REGISTER OBJFUNC codeVal;
     REGISTER INT index;
     static struct inlineCache val2 = _ILC2;
     REGISTER OBJ rHome;
-    int actualSize;
+    INT actualSize;
     OBJ myClass;

     myClass = __qClass(self);
     if ((__ClassInstPtr(myClass)->c_ninstvars) == __mkSmallInteger(0)) {

@@ -1321,15 +1330,15 @@
     |home sz "{ Class: SmallInteger }" |

     sz := self size.
 %{
     REGISTER OBJFUNC codeVal;
-    REGISTER int index;
-    unsigned int nIndex;
-    int endIndex;
+    REGISTER INT index;
+    unsigned INT nIndex;
+    INT endIndex;
     static struct inlineCache val = _ILC1;
-    int actualSize;
+    INT actualSize;
     OBJ myClass;

     myClass = __qClass(self);
     {
        endIndex = __intVal(__ClassInstPtr(myClass)->c_ninstvars);
@@ -1437,13 +1446,13 @@
 from:index1 to:index2 put:anObject
     "reimplemented for speed if receiver is an Array"

 %{  /* NOCONTEXT */

-    REGISTER int index;
-    unsigned int nIndex;
-    unsigned int endIndex;
+    REGISTER INT index;
+    unsigned INT nIndex;
+    unsigned INT endIndex;
     REGISTER OBJ *dst;

     if ((__qClass(self) == Array)
      && __bothSmallInteger(index1, index2)) {
        index = __intVal(index1) - 1;
@@ -1466,11 +1475,11 @@
                } else
 # endif
                {
 # ifdef __UNROLL_LOOPS__
                    {
-                       int i8;
+                       INT i8;

                        while ((i8 = index + 8) <= endIndex) {
                            dst[3] = dst[2] = dst[1] = dst[0] = anObject;
                            dst[7] = dst[6] = dst[5] = dst[4] = anObject;
                            dst += 8;
@@ -1498,19 +1507,19 @@
      Return the receiver.
      Reimplemented for speed if both receiver and aCollection are Arrays"

 %{  /* NOCONTEXT */

-    unsigned int nIndex;
-    unsigned int repNIndex;
-    int startIndex, stopIndex;
+    unsigned INT nIndex;
+    unsigned INT repNIndex;
+    INT startIndex, stopIndex;
     REGISTER OBJ *src;
     REGISTER OBJ *dst;
-    int repStopIndex;
-    REGISTER int repStartIndex;
+    INT repStopIndex;
+    REGISTER INT repStartIndex;
     REGISTER OBJ t;
-    REGISTER int count;
+    REGISTER INT count;
     OBJ myClass;

     if (
        (__ClassInstPtr((myClass = __qClass(self)))->c_ninstvars == __mkSmallInteger(0))
      && __isNonNilObject(aCollection)
@@ -1848,11 +1857,11 @@
     REGISTER INT index;
     REGISTER OBJ o, el1, el2;
     REGISTER OBJ *op;
     REGISTER unsigned INT nIndex;
     INT altIndex = 0;
-    int nInsts;
+    INT nInsts;

     index = 0;
     nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars);
     index += nInsts;
     nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE);
@@ -1892,11 +1901,11 @@

     REGISTER INT index;
     REGISTER OBJ el;
     REGISTER OBJ *op;
     REGISTER unsigned INT nIndex;
-    int nInsts;
+    INT nInsts;

     if (__isSmallInteger(start)) {
        index = __intVal(start) - 1;
        if (index >= 0) {
            nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars);
@@ -1936,11 +1945,11 @@
                 * therefore, WITH the so-much-blamed goto, we only branch
                 * when found; without the goto, we branch always.
                 * Pipelined CPUs usually do not like taken branches.
                 */

-               unsigned int i8;
+               unsigned INT i8;

                while ((i8 = index + 8) < nIndex) {
                    if (op[0] == el) goto found1;
                    if (op[1] == el) goto found2;
                    if (op[2] == el) goto found3;
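
The comment's point, restated: with one goto target per unrolled slot, the hot loop consists only of forward conditional branches that fall through in the common not-found case, and the index arithmetic happens once, after a hit. A standalone 4-way version of the same shape (hypothetical helper; returns a 1-based index like indexOf:, 0 when absent):

    static long find_identical(void **op, long n, void *el)
    {
        long index = 0, i4;

        while ((i4 = index + 4) <= n) {
            if (op[0] == el) goto found1;
            if (op[1] == el) goto found2;
            if (op[2] == el) goto found3;
            if (op[3] == el) goto found4;
            op += 4;
            index = i4;
        }
        while (index < n) {                  /* leftover slots */
            if (*op == el) return index + 1;
            op++; index++;
        }
        return 0;

    found4: return index + 4;
    found3: return index + 3;
    found2: return index + 2;
    found1: return index + 1;
    }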
@@ -1997,13 +2006,13 @@
 %{  /* NOCONTEXT */

     REGISTER INT index;
     REGISTER OBJ el;
     REGISTER OBJ *op;
-    REGISTER unsigned int lastIndex;
+    REGISTER unsigned INT lastIndex;
     unsigned INT nIndex;
-    int nInsts;
+    INT nInsts;

     if (__bothSmallInteger(start, stop)) {
        index = __intVal(start) - 1;
        if (index >= 0) {
            nInsts = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars);
@@ -2028,11 +2037,11 @@
            }
 #else

 # ifdef __UNROLL_LOOPS__
            {
-               unsigned int i8;
+               unsigned INT i8;

                while ((i8 = index + 8) < lastIndex) {
                    if (op[0] == el) goto found1;
                    if (op[1] == el) goto found2;
                    if (op[2] == el) goto found3;
@@ -2089,11 +2098,11 @@

     |element|
 %{
     REGISTER INT index;
     unsigned INT nIndex;
-    unsigned int nInsts;
+    unsigned INT nInsts;
     static struct inlineCache eq = _ILC1;
     OBJ myClass, e;

     myClass = __qClass(self);
     if ( __isSmallInteger(start) ) {
@@ -2185,11 +2194,11 @@
                /*
                 * search for nil - do an identity-search
                 */
 #ifdef __UNROLL_LOOPS__
                {
-                   unsigned int i8;
+                   unsigned INT i8;

                    while ((i8 = index + 8) < nIndex) {
                        if (__InstPtr(slf)->i_instvars[index] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 1) ); }
                        if (__InstPtr(slf)->i_instvars[index+1] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 2) ); }
                        if (__InstPtr(slf)->i_instvars[index+2] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 3) ); }
@@ -2223,11 +2232,11 @@

     |element|
 %{
     REGISTER INT index;
     unsigned INT lastIndex, nIndex;
-    unsigned int nInsts;
+    unsigned INT nInsts;
     static struct inlineCache eq = _ILC1;
     OBJ myClass, e;

     myClass = __qClass(self);
     if ( __bothSmallInteger(start, stop) ) {
@@ -2317,11 +2326,11 @@
                /*
                 * search for nil - do an identity-search
                 */
 #ifdef __UNROLL_LOOPS__
                {
-                   unsigned int i8;
+                   unsigned INT i8;

                    while ((i8 = index + 8) < lastIndex) {
                        if (__InstPtr(slf)->i_instvars[index] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 1) ); }
                        if (__InstPtr(slf)->i_instvars[index+1] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 2) ); }
                        if (__InstPtr(slf)->i_instvars[index+2] == nil) { RETURN ( __mkSmallInteger(index - nInsts + 3) ); }
@@ -2359,13 +2368,13 @@
     /*
      * first, do a quick check using ==
      * this does not need a context or message send.
      * In many cases this will already find a match.
      */
-    REGISTER int index;
+    REGISTER INT index;
     REGISTER OBJ o;
-    unsigned int nIndex;
+    unsigned INT nIndex;

     nIndex = __BYTES2OBJS__(__qSize(self) - OHDR_SIZE);
     index = __intVal(__ClassInstPtr(__qClass(self))->c_ninstvars);

     o = anObject;
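
As the comment explains, membership is tested in two passes: a cheap identity scan first (no context, no message sends), and only when that misses, a second pass that sends = to each element (done below through the eq inline cache). Schematically, with hypothetical names:

    typedef void *obj_t;

    extern int send_equals(obj_t a, obj_t b);   /* hypothetical: an "=" send */

    static int includes(obj_t *elems, long n, obj_t x)
    {
        long i;

        for (i = 0; i < n; i++)                 /* pass 1: identity only */
            if (elems[i] == x) return 1;
        for (i = 0; i < n; i++)                 /* pass 2: full equality sends */
            if (send_equals(elems[i], x)) return 1;
        return 0;
    }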
@@ -2412,11 +2421,11 @@
      * also, all branches are forward, which are usually predicted
      * as not taken.
      */
 #  ifdef __UNROLL_LOOPS__
     {
-       unsigned int i8;
+       unsigned INT i8;
        REGISTER OBJ slf = self;

        while ((i8 = index + 8) < nIndex) {
            if (__InstPtr(slf)->i_instvars[index] == o) goto found;
            if (__InstPtr(slf)->i_instvars[index+1] == o) goto found;
@@ -2445,13 +2454,13 @@
        RETURN ( false );
     }
 %}.

 %{
-    REGISTER int index;
+    REGISTER INT index;
     REGISTER OBJ o;
-    unsigned int nIndex;
+    unsigned INT nIndex;
     static struct inlineCache eq = _ILC1;

     /*
      * then do a slow(er) check using =
      */
@@ -2557,11 +2566,11 @@
 ! !

 !Array class methodsFor:'documentation'!

 version
-    ^ '$Header: /cvs/stx/stx/libbasic/Array.st,v 1.155 2013-01-16 10:30:10 cg Exp $'
+    ^ '$Header: /cvs/stx/stx/libbasic/Array.st,v 1.156 2013-01-23 17:57:32 cg Exp $'
 !

 version_CVS
-    ^ '$Header: /cvs/stx/stx/libbasic/Array.st,v 1.155 2013-01-16 10:30:10 cg Exp $'
+    ^ '$Header: /cvs/stx/stx/libbasic/Array.st,v 1.156 2013-01-23 17:57:32 cg Exp $'
 ! !