/*
 * Copyright (c) 1992 OMRON Corporation.
 * Copyright (c) 1990, 1993
 *	The Regents of the University of California.  All rights reserved.
 *
 * This code is derived from software contributed to Berkeley by
 * OMRON Corporation.
 *
 * %sccs.include.redist.c%
 *
 *	@(#)locore.s	8.1 (Berkeley) 06/10/93
 */

#define	T_BUSERR	0
#define	T_ADDRERR	1
#define	T_ILLINST	2
#define	T_ZERODIV	3
#define	T_CHKINST	4
#define	T_TRAPVINST	5
#define	T_PRIVINST	6
#define	T_MMUFLT	8
#define	T_FMTERR	10
#define	T_FPERR		11
#define	T_COPERR	12

#define	PSL_LOWIPL	8192
#define	PSL_HIGHIPL	9984

#define	SPL1		8448
#define	SPL2		8704
#define	SPL3		8960
#define	SPL4		9216
#define	SPL5		9472
#define	SPL6		9728

#define	CLOCK_REG	1660944384
#define	CLK_CLR		1

#define	ILLGINST	16
#define	NMIVEC		124
#define	EVTRAPF		188
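/*
 * For reference, the decimal constants above are the usual hexadecimal
 * values written out: PSL_LOWIPL = 0x2000 (supervisor, IPL 0),
 * PSL_HIGHIPL = 0x2700 (supervisor, IPL 7), SPL1-SPL6 = 0x2100-0x2600,
 * CLOCK_REG = 0x63000000, and ILLGINST/NMIVEC/EVTRAPF = 0x10/0x7c/0xbc,
 * the byte offsets of vectors 4, 31 and 47 in the vector table.
 */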

	.text

	.globl	Reset
	.globl	_buserr,_addrerr
	.globl	_illinst,_zerodiv,_chkinst,_trapvinst,_privinst
	.globl	_lev6intr,_lev5intr,_lev3intr,_lev2intr,_badtrap

Reset:
	jmp start		/* 0: NOT USED (reset PC) */
	.word	0		/* 1: NOT USED (reset PC) */
	.long	_buserr		/* 2: bus error */
	.long	_addrerr	/* 3: address error */
	.long	_illinst	/* 4: illegal instruction */
	.long	_zerodiv	/* 5: zero divide */
	.long	_chkinst	/* 6: CHK instruction */
	.long	_trapvinst	/* 7: TRAPV instruction */
	.long	_privinst	/* 8: privilege violation */
	.long	_badtrap	/* 9: trace */
	.long	_illinst	/* 10: line 1010 emulator */
	.long	_illinst	/* 11: line 1111 emulator */
	.long	_badtrap	/* 12: unassigned, reserved */
	.long	_coperr		/* 13: coprocessor protocol violation */
	.long	_fmterr		/* 14: format error */
	.long	_badtrap	/* 15: uninitialized interrupt vector */
	.long	_badtrap	/* 16: unassigned, reserved */
	.long	_badtrap	/* 17: unassigned, reserved */
	.long	_badtrap	/* 18: unassigned, reserved */
	.long	_badtrap	/* 19: unassigned, reserved */
	.long	_badtrap	/* 20: unassigned, reserved */
	.long	_badtrap	/* 21: unassigned, reserved */
	.long	_badtrap	/* 22: unassigned, reserved */
	.long	_badtrap	/* 23: unassigned, reserved */
	.long	_badtrap	/* 24: spurious interrupt */
	.long	_badtrap	/* 25: level 1 interrupt autovector */
	.long	_lev2intr	/* 26: level 2 interrupt autovector */
	.long	_lev3intr	/* 27: level 3 interrupt autovector */
	.long	_badtrap	/* 28: level 4 interrupt autovector */
	.long	_lev5intr	/* 29: level 5 interrupt autovector */
	.long	_lev6intr	/* 30: level 6 interrupt autovector */
	.long	_badtrap	/* 31: level 7 interrupt autovector */
	.long	_illinst	/* 32: syscalls */
	.long	_illinst	/* 33: sigreturn syscall or breakpoint */
	.long	_illinst	/* 34: breakpoint or sigreturn syscall */
	.long	_illinst	/* 35: TRAP instruction vector */
	.long	_illinst	/* 36: TRAP instruction vector */
	.long	_illinst	/* 37: TRAP instruction vector */
	.long	_illinst	/* 38: TRAP instruction vector */
	.long	_illinst	/* 39: TRAP instruction vector */
	.long	_illinst	/* 40: TRAP instruction vector */
	.long	_illinst	/* 41: TRAP instruction vector */
	.long	_illinst	/* 42: TRAP instruction vector */
	.long	_illinst	/* 43: TRAP instruction vector */
	.long	_illinst	/* 44: TRAP instruction vector */
	.long	_illinst	/* 45: TRAP instruction vector */
	.long	_illinst	/* 46: TRAP instruction vector */
	.long	_illinst	/* 47: TRAP instruction vector */
	.long	_fptrap		/* 48: FPCP branch/set on unordered cond */
	.long	_fptrap		/* 49: FPCP inexact result */
	.long	_fptrap		/* 50: FPCP divide by zero */
	.long	_fptrap		/* 51: FPCP underflow */
	.long	_fptrap		/* 52: FPCP operand error */
	.long	_fptrap		/* 53: FPCP overflow */
	.long	_fptrap		/* 54: FPCP signalling NAN */

	.long	_badtrap	/* 55: unassigned, reserved */
	.long	_badtrap	/* 56: unassigned, reserved */
	.long	_badtrap	/* 57: unassigned, reserved */
	.long	_badtrap	/* 58: unassigned, reserved */
	.long	_badtrap	/* 59: unassigned, reserved */
	.long	_badtrap	/* 60: unassigned, reserved */
	.long	_badtrap	/* 61: unassigned, reserved */
	.long	_badtrap	/* 62: unassigned, reserved */
	.long	_badtrap	/* 63: unassigned, reserved */
#define BADTRAP16	.long	_badtrap,_badtrap,_badtrap,_badtrap,\
			_badtrap,_badtrap,_badtrap,_badtrap,\
			_badtrap,_badtrap,_badtrap,_badtrap,\
			_badtrap,_badtrap,_badtrap,_badtrap
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */
	BADTRAP16		/* 64-255: user interrupt vectors */


	.globl  start
	.globl  _main
	.globl	_etext,_edata,_end

	START = 0x700000
	STACK = 0x800000
	DIPSW = 0x49000000

start:
        movw    #PSL_HIGHIPL,sr         | no interrupts
	movl	#STACK,sp		| set SP

	movl	#_prgcore, a2		| save program address
	movl	#Reset, a2@+		| save start of core
	movl	#_end,  a2@+		| save end of core
	movl	#STACK, a2@		| save initial stack addr

/* clear BSS area */
	movl	#_edata,a2		| start of BSS
	movl	#_end,a3		| end
Lbssclr:
	clrb	a2@+			| clear BSS
	cmpl	a2,a3			| done?
	bne	Lbssclr			| no, keep going

/* save address for returning to the ROM monitor */
	movec	vbr,a0			| ROM vbr to a0
	movl	a0@(NMIVEC),d0		| fetch ROM NMI vector
	movl	#_gotoROM,a0		| save to _gotoROM
	movl	d0,a0@			|
	movl	#Reset,a0		| BP vbr to a0
	movl	#_exit,a0@(NMIVEC)	| save address


/* switch vector table */
	movec	vbr,a0
	movl	a0@(ILLGINST),sp@-	| save ILLINST vector for BrkPtr
	movl	a0@(EVTRAPF),sp@-

	movl	#Reset,a0
	movl	sp@+,a0@(EVTRAPF)
	movl	sp@+,a0@(ILLGINST)	| restore ILLINST vector
	movec	a0,vbr

	movl	#DIPSW,a0
	movw	a0@,d0
	lsrl	#8,d0
	andl	#0xFF,d0
	movl	d0,_dipsw1
	movw	a0@,d0
	andl	#0xFF,d0
	movl	d0,_dipsw2

/* final setup for C code */
        movw    #PSL_LOWIPL,sr		| drop to IPL 0, enable interrupts
	jsr     _main			| let's go
	jsr     start

/*
 * exit to ROM monitor
 */

	ROM_VBR = 0

	.globl	_exit

_exit:
        movw    #PSL_HIGHIPL,sr         | no interrupts
	movl	#ROM_VBR,a0
	movec	a0,vbr
	movl	#_gotoROM,a0
	movl	a0@,a1
	jmp	a1@

/*
 * Trap/interrupt vector routines
 */

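/*
 * The handlers below build an exception frame on the stack and call the
 * C-level trap() routine.  The glue assumes a handler along the lines of
 *
 *	trap(type, code, v)
 *
 * with the saved registers and the hardware frame sitting above the three
 * value arguments; the exact prototype lives in the companion C sources,
 * so treat this as a sketch of the convention rather than a definition.
 */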
	.globl	_trap,_nofault,_longjmp
_buserr:
	tstl	_nofault		| device probe?
	jeq	_addrerr		| no, handle as usual
	movl	_nofault,sp@-		| yes,
	jbsr	_longjmp		|  longjmp(nofault)
_addrerr:
	clrw	sp@-			| pad SR to longword
	moveml	#0xFFFF,sp@-		| save user registers
	movl	usp,a0			| save the user SP
	movl	a0,sp@(60)		|   in the savearea
	lea	sp@(64),a1		| grab base of HW berr frame
	movw	a1@(12),d0		| grab SSW for fault processing
	btst	#12,d0			| RB set?
	jeq	LbeX0			| no, test RC
	bset	#14,d0			| yes, must set FB
	movw	d0,a1@(12)		| for hardware too
LbeX0:
	btst	#13,d0			| RC set?
	jeq	LbeX1			| no, skip
	bset	#15,d0			| yes, must set FC
	movw	d0,a1@(12)		| for hardware too
LbeX1:
	btst	#8,d0			| data fault?
	jeq	Lbe0			| no, check for hard cases
	movl	a1@(18),d1		| fault address is as given in frame
	jra	Lbe10			| that's it
Lbe0:
	btst	#4,a1@(8)		| long (type B) stack frame?
	jne	Lbe4			| yes, go handle
	movl	a1@(4),d1		| no, can use save PC
	btst	#14,d0			| FB set?
	jeq	Lbe3			| no, try FC
	addql	#4,d1			| yes, adjust address
	jra	Lbe10			| done
Lbe3:
	btst	#15,d0			| FC set?
	jeq	Lbe10			| no, done
	addql	#2,d1			| yes, adjust address
	jra	Lbe10			| done
Lbe4:
	movl	a1@(38),d1		| long format, use stage B address
	btst	#15,d0			| FC set?
	jeq	Lbe10			| no, all done
	subql	#2,d1			| yes, adjust address
Lbe10:
	movl	d1,sp@-			| push fault VA
	movw	d0,sp@-			| and SSW
	clrw	sp@-			|   padded to longword
	movw	a1@(8),d0		| get frame format/vector offset
	andw	#0x0FFF,d0		| clear out frame format
	cmpw	#12,d0			| address error vector?
	jeq	Lisaerr			| yes, go to it
#if 0
	movl	d1,a0			| fault address
	.long	0xf0109e11		| ptestr #1,a0@,#7
	.long	0xf0176200		| pmove psr,sp@
	btst	#7,sp@			| bus error bit set?
	jeq	Lismerr			| no, must be MMU fault
	clrw	sp@			| yes, re-clear pad word
#endif
	jra	Lisberr			| and process as normal bus error
Lismerr:
	movl	#T_MMUFLT,sp@-		| show that we are an MMU fault
	jra	Lbexit			| and deal with it
Lisaerr:
	movl	#T_ADDRERR,sp@-		| mark address error
	jra	Lbexit			| and deal with it
Lisberr:
	movl	#T_BUSERR,sp@-		| mark bus error
Lbexit:
	jbsr	_trap			| handle the error
	lea	sp@(12),sp		| pop value args
	movl	sp@(60),a0		| restore user SP
	movl	a0,usp			|   from save area
	moveml	sp@+,#0x7FFF		| restore most user regs
	addql	#4,sp			| toss SSP
	tstw	sp@+			| do we need to clean up stack?
	jeq	rei			| no, just continue
	btst	#7,sp@(6)		| type 9/10/11 frame?
	jeq	rei			| no, nothing to do
	btst	#5,sp@(6)		| type 9?
	jne	Lbex1			| no, skip
	movw	sp@,sp@(12)		| yes, push down SR
	movl	sp@(2),sp@(14)		| and PC
	clrw	sp@(18)			| and mark as type 0 frame
	lea	sp@(12),sp		| clean the excess
	jra	rei			| all done
Lbex1:
	btst	#4,sp@(6)		| type 10?
	jne	Lbex2			| no, skip
	movw	sp@,sp@(24)		| yes, push down SR
	movl	sp@(2),sp@(26)		| and PC
	clrw	sp@(30)			| and mark as type 0 frame
	lea	sp@(24),sp		| clean the excess
	jra	rei			| all done
Lbex2:
	movw	sp@,sp@(84)		| type 11, push down SR
	movl	sp@(2),sp@(86)		| and PC
	clrw	sp@(90)			| and mark as type 0 frame
	lea	sp@(84),sp		| clean the excess
	jra	rei			| all done

_illinst:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_ILLINST,d0
	jra	_fault

_zerodiv:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_ZERODIV,d0
	jra	_fault

_chkinst:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_CHKINST,d0
	jra	_fault

_trapvinst:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_TRAPVINST,d0
	jra	_fault

_privinst:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_PRIVINST,d0
	jra	_fault

_coperr:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_COPERR,d0
	jra	_fault

_fmterr:
	clrw	sp@-
	moveml	#0xFFFF,sp@-
	moveq	#T_FMTERR,d0
	jra	_fault

_fptrap:
#ifdef FPCOPROC
	clrw	sp@-		| pad SR to longword
	moveml	#0xFFFF,sp@-	| save user registers
	movl	usp,a0		| and save
	movl	a0,sp@(60)	|   the user stack pointer
	clrl	sp@-		| no VA arg
#if 0
	lea	_u+PCB_FPCTX,a0	| address of FP savearea
	.word	0xf310		| fsave a0@
	tstb	a0@		| null state frame?
	jeq	Lfptnull	| yes, safe
	clrw	d0		| no, need to tweak BIU
	movb	a0@(1),d0	| get frame size
	bset	#3,a0@(0,d0:w)	| set exc_pend bit of BIU
Lfptnull:
	.word	0xf227,0xa800	| fmovem fpsr,sp@- (code arg)
	.word	0xf350		| frestore a0@
#else
	clrl	sp@-		| push dummy FPSR
#endif
	movl	#T_FPERR,sp@-	| push type arg
	jbsr	_trap		| call trap
	lea	sp@(12),sp	| pop value args
	movl	sp@(60),a0	| restore
	movl	a0,usp		|   user SP
	moveml	sp@+,#0x7FFF	| and remaining user registers
	addql	#6,sp		| pop SSP and align word
	jra	rei		| all done
#else
	jra	_badtrap	| treat as an unexpected trap
#endif

	.globl	_fault
_fault:
	movl	usp,a0		| get and save
	movl	a0,sp@(60)	|   the user stack pointer
	clrl	sp@-		| no VA arg
	clrl	sp@-		| or code arg
	movl	d0,sp@-		| push trap type
	jbsr	_trap		| handle trap
	lea	sp@(12),sp	| pop value args
	movl	sp@(60),a0	| restore
	movl	a0,usp		|   user SP
	moveml	sp@+,#0x7FFF	| restore most user regs
	addql	#6,sp		| pop SP and pad word
	jra	rei		| all done

	.globl	_straytrap
_badtrap:
	clrw	sp@-
	moveml	#0xC0C0,sp@-
	movw	sp@(24),sp@-
	clrw	sp@-
	jbsr	_straytrap
	addql	#4,sp
	moveml	sp@+,#0x0303
	addql	#2,sp
	jra	rei

/*
 * Interrupt handlers.
 * All device interrupts are auto-vectored.  Most can be configured
 * to interrupt in the range IPL2 to IPL6.  Here are our assignments:
 *
 *	Level 0:
 *	Level 1:
 *	Level 2:	SCSI SPC
 *	Level 3:
 *	Level 4:
 *	Level 5:	System Clock
 *	Level 6:	Internal SIO (uPD7201A)
 *	Level 7:	Non-maskable: abort key (vectored to the ROM monitor)
 */
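/*
 * Each autovectored handler below follows the same pattern: push a pad
 * word, save the scratch registers with moveml #0xC0C0, call the C-level
 * service routine, restore, and fall into rei.  The level 5 handler also
 * acknowledges the interrupt by writing CLK_CLR to CLOCK_REG and hands
 * the interrupted PS and PC to hardclock.
 */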
	.globl	_scintr, __siointr, _hardclock

_lev2intr:
	clrw	sp@-
	moveml	#0xC0C0,sp@-
	jbsr	_scintr
	moveml	sp@+,#0x0303
	addql	#2,sp
	jra	rei

_lev3intr:
	clrw	sp@-
	moveml	#0xC0C0,sp@-
	moveml	sp@+,#0x0303
	addql	#2,sp
	jra	rei

_lev5intr:
	clrw	sp@-			| push pad word
	moveml	#0xC0C0,sp@-		| save scratch regs
	movl	#CLOCK_REG,a0		| get clock CR addr
	movb	#CLK_CLR,a0@		| reset system clock
	lea	sp@(16),a1		| get pointer to PS
	movl	a1@,sp@-		| push padded PS
	movl	a1@(4),sp@-		| push PC
	jbsr	_hardclock		| call generic clock int routine
	addql	#8,sp			| pop params
	moveml	sp@+,#0x0303		| restore scratch regs
	addql	#2,sp			| pop pad word
	jra	rei			| all done

_lev6intr:
	clrw	sp@-
	moveml	#0xC0C0,sp@-
	jbsr	__siointr
	moveml	sp@+,#0x0303
	addql	#2,sp
	jra	rei


/*
 * Emulation of VAX REI instruction.
 *
 * This code deals with checking for and servicing ASTs
 * (profiling, scheduling) and software interrupts (network, softclock).
 * We check for ASTs first, just like the VAX.  To avoid excess overhead
 * the T_ASTFLT handling code will also check for software interrupts so we
 * do not have to do it here.
 *
 * This code is complicated by the fact that sendsig may have been called
 * necessitating a stack cleanup.  A cleanup should only be needed at this
 * point for coprocessor mid-instruction frames (type 9), but we also test
 * for bus error frames (type 10 and 11).
 */
#if 0
	.comm	_ssir,1
rei:
#ifdef DEBUG
	tstl	_panicstr		| have we paniced?
	jne	Ldorte			| yes, do not make matters worse
#endif
	btst	#PCB_ASTB,_u+PCB_FLAGS+1| AST pending?
	jeq	Lchksir			| no, go check for SIR
	btst	#5,sp@			| yes, are we returning to user mode?
	jne	Lchksir			| no, go check for SIR
	clrw	sp@-			| pad SR to longword
	moveml	#0xFFFF,sp@-		| save all registers
	movl	usp,a1			| including
	movl	a1,sp@(60)		|    the users SP
	clrl	sp@-			| VA == none
	clrl	sp@-			| code == none
	movl	#T_ASTFLT,sp@-		| type == async system trap
	jbsr	_trap			| go handle it
	lea	sp@(12),sp		| pop value args
	movl	sp@(60),a0		| restore
	movl	a0,usp			|   user SP
	moveml	sp@+,#0x7FFF		| and all remaining registers
	addql	#4,sp			| toss SSP
	tstw	sp@+			| do we need to clean up stack?
	jeq	Ldorte			| no, just continue
	btst	#7,sp@(6)		| type 9/10/11 frame?
	jeq	Ldorte			| no, nothing to do
	btst	#5,sp@(6)		| type 9?
	jne	Last1			| no, skip
	movw	sp@,sp@(12)		| yes, push down SR
	movl	sp@(2),sp@(14)		| and PC
	clrw	sp@(18)			| and mark as type 0 frame
	lea	sp@(12),sp		| clean the excess
	jra	Ldorte			| all done
Last1:
	btst	#4,sp@(6)		| type 10?
	jne	Last2			| no, skip
	movw	sp@,sp@(24)		| yes, push down SR
	movl	sp@(2),sp@(26)		| and PC
	clrw	sp@(30)			| and mark as type 0 frame
	lea	sp@(24),sp		| clean the excess
	jra	Ldorte			| all done
Last2:
	movw	sp@,sp@(84)		| type 11, push down SR
	movl	sp@(2),sp@(86)		| and PC
	clrw	sp@(90)			| and mark as type 0 frame
	lea	sp@(84),sp		| clean the excess
	jra	Ldorte			| all done
Lchksir:
	tstb	_ssir			| SIR pending?
	jeq	Ldorte			| no, all done
	movl	d0,sp@-			| need a scratch register
	movw	sp@(4),d0		| get SR
	andw	#PSL_IPL7,d0		| mask all but IPL
	jne	Lnosir			| came from interrupt, no can do
	movl	sp@+,d0			| restore scratch register
Lgotsir:
	movw	#SPL1,sr		| prevent others from servicing int
	tstb	_ssir			| too late?
	jeq	Ldorte			| yes, oh well...
	clrw	sp@-			| pad SR to longword
	moveml	#0xFFFF,sp@-		| save all registers
	movl	usp,a1			| including
	movl	a1,sp@(60)		|    the users SP
	clrl	sp@-			| VA == none
	clrl	sp@-			| code == none
	movl	#T_SSIR,sp@-		| type == software interrupt
	jbsr	_trap			| go handle it
	lea	sp@(12),sp		| pop value args
	movl	sp@(60),a0		| restore
	movl	a0,usp			|   user SP
	moveml	sp@+,#0x7FFF		| and all remaining registers
	addql	#6,sp			| pop SSP and align word
	rte
Lnosir:
	movl	sp@+,d0			| restore scratch register
Ldorte:
#else
rei:					| dummy entry for rei
#endif
	rte				| real return


/*
 * Primitives
 */

#ifdef GPROF
#ifdef __GNUC__
#define	ENTRY(name) \
	.globl _/**/name; _/**/name: link a6,#0; jbsr mcount; unlk a6
#define ALTENTRY(name, rname) \
	ENTRY(name); jra rname+12
#else
#define	ENTRY(name) \
	.globl _/**/name; _/**/name: jbsr mcount
#define ALTENTRY(name, rname) \
	ENTRY(name); jra rname+6
#endif
#else
#define	ENTRY(name) \
	.globl _/**/name; _/**/name:
#define ALTENTRY(name, rname) \
	.globl _/**/name; _/**/name:
#endif
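
/*
 * For example, in a non-GPROF build
 *
 *	ENTRY(bzero)
 *
 * expands to ".globl _bzero; _bzero:", and ALTENTRY simply plants an
 * additional global label at the same spot so the routine is reachable
 * under both names.
 */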

/*
 * non-local gotos
 */
ALTENTRY(savectx, _setjmp)
ENTRY(setjmp)
	movl	sp@(4),a0	| savearea pointer
	moveml	#0xFCFC,a0@	| save d2-d7/a2-a7
	movl	sp@,a0@(48)	| and return address
	moveq	#0,d0		| return 0
	rts

ENTRY(qsetjmp)
	movl	sp@(4),a0	| savearea pointer
	lea	a0@(40),a0	| skip regs we do not save
	movl	a6,a0@+		| save FP
	movl	sp,a0@+		| save SP
	movl	sp@,a0@		| and return address
	moveq	#0,d0		| return 0
	rts

ENTRY(longjmp)
	movl	sp@(4),a0
	moveml	a0@+,#0xFCFC
	movl	a0@,sp@
	moveq	#1,d0
	rts
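
/*
 * Savearea layout implied by the code above: d2-d7 and a2-a7 at offsets
 * 0-44 (the twelve longwords selected by the 0xFCFC moveml mask) and the
 * caller's return address at offset 48.  qsetjmp records only the frame
 * pointer, stack pointer and return address at offsets 40-48.  A minimal
 * usage sketch from C, assuming a savearea of at least 13 longwords:
 *
 *	long jbuf[13];
 *
 *	if (setjmp(jbuf) == 0) {
 *		normal path; someone later calls longjmp(jbuf)
 *	} else {
 *		resumed here, with setjmp appearing to return 1
 *	}
 */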

	.globl	_getsfc, _getdfc
_getsfc:
	movc	sfc,d0
	rts
_getdfc:
	movc	dfc,d0
	rts

/*
 * Set processor priority level calls.  Most could (should) be replaced
 * by inline asm expansions.  However, SPL0 and SPLX require special
 * handling.  If we are returning to the base processor priority (SPL0)
 * we need to check for our emulated software interrupts.
 */
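/*
 * Typical usage from C, as a sketch: raise the priority around a critical
 * section and put the old level back afterwards, e.g.
 *
 *	int s;
 *
 *	s = splhigh();
 *	touch data shared with interrupt handlers;
 *	splx(s);
 *
 * Each spl routine returns the previous SR in d0 so that splx() can
 * restore it; the software-interrupt checks are commented out below.
 */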

ENTRY(spl0)
	moveq	#0,d0
	movw	sr,d0			| get old SR for return
	movw	#PSL_LOWIPL,sr		| restore new SR
|	jra	Lsplsir
	rts

ENTRY(splx)
	moveq	#0,d0
	movw	sr,d0			| get current SR for return
	movw	sp@(6),d1		| get new value
	movw	d1,sr			| restore new SR
|	andw	#PSL_IPL7,d1		| mask all but PSL_IPL
|	jne	Lspldone		| non-zero, all done
|Lsplsir:
|	tstb	_ssir			| software interrupt pending?
|	jeq	Lspldone		| no, all done
|	subql	#4,sp			| make room for RTE frame
|	movl	sp@(4),sp@(2)		| position return address
|	clrw	sp@(6)			| set frame type 0
|	movw	#PSL_LOWIPL,sp@		| and new SR
|	jra	Lgotsir			| go handle it
|Lspldone:
	rts

ENTRY(spl1)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL1,sr
	rts

ALTENTRY(splscsi, _spl2)
ENTRY(spl2)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL2,sr
	rts

ENTRY(spl3)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL3,sr
	rts

ENTRY(spl4)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL4,sr
	rts

ENTRY(spl5)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL5,sr
	rts

ENTRY(spl6)
	moveq	#0,d0
	movw	sr,d0
	movw	#SPL6,sr
	rts

ALTENTRY(splhigh, _spl7)
ENTRY(spl7)
	moveq	#0,d0
	movw	sr,d0
	movw	#PSL_HIGHIPL,sr
	rts


ENTRY(_insque)
	movw	sr,d0
	movw	#PSL_HIGHIPL,sr		| atomic
	movl	sp@(8),a0		| where to insert (after)
	movl	sp@(4),a1		| element to insert (e)
	movl	a0@,a1@			| e->next = after->next
	movl	a0,a1@(4)		| e->prev = after
	movl	a1,a0@			| after->next = e
	movl	a1@,a0
	movl	a1,a0@(4)		| e->next->prev = e
	movw	d0,sr
	rts

ENTRY(_remque)
	movw	sr,d0
	movw	#PSL_HIGHIPL,sr		| atomic
	movl	sp@(4),a0		| element to remove (e)
	movl	a0@,a1
	movl	a0@(4),a0
	movl	a0,a1@(4)		| e->next->prev = e->prev
	movl	a1,a0@			| e->prev->next = e->next
	movw	d0,sr
	rts
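
/*
 * C-level view of the queue primitives above, as an illustrative sketch.
 * Elements are assumed to begin with a next/prev pair of pointers (the
 * field names here are made up), and both routines run with interrupts
 * blocked:
 *
 *	struct qelem { struct qelem *q_next, *q_prev; };
 *
 *	_insque(e, after):		link e in just after "after"
 *		e->q_next = after->q_next;
 *		e->q_prev = after;
 *		after->q_next = e;
 *		e->q_next->q_prev = e;
 *
 *	_remque(e):			unlink e from its queue
 *		e->q_next->q_prev = e->q_prev;
 *		e->q_prev->q_next = e->q_next;
 */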

ALTENTRY(blkclr, _bzero)
ENTRY(bzero)
	movl	sp@(4),a0
	movl	sp@(8),d0
	jeq	Lbzero1
	movl	a0,d1
	btst	#0,d1
	jeq	Lbzero2
	clrb	a0@+
	subql	#1,d0
	jeq	Lbzero1
Lbzero2:
	movl	d0,d1
	andl	#31,d0
	lsrl	#5,d1
	jeq	Lbzero3
Lbzero4:
	clrl	a0@+; clrl	a0@+; clrl	a0@+; clrl	a0@+;
	clrl	a0@+; clrl	a0@+; clrl	a0@+; clrl	a0@+;
	subql	#1,d1
	jne	Lbzero4
	tstl	d0
	jeq	Lbzero1
Lbzero3:
	clrb	a0@+
	subql	#1,d0
	jne	Lbzero3
Lbzero1:
	rts

/*
 * strlen(str)
 */
ENTRY(strlen)
	moveq	#-1,d0
	movl	sp@(4),a0	| string
Lslloop:
	addql	#1,d0		| increment count
	tstb	a0@+		| null?
	jne	Lslloop		| no, keep going
	rts

/*
 * bcmp(s1, s2, len)
 *
 * WARNING!  This guy only works with counts up to 64K; the length is
 * taken as a 16-bit word.
 */
ENTRY(bcmp)
	movl	sp@(4),a0		| string 1
	movl	sp@(8),a1		| string 2
	moveq	#0,d0
	movw	sp@(14),d0		| length
	jeq	Lcmpdone		| if zero, nothing to do
	subqw	#1,d0			| set up for DBcc loop
Lcmploop:
	cmpmb	a0@+,a1@+		| equal?
	dbne	d0,Lcmploop		| yes, keep going
	addqw	#1,d0			| +1 gives zero on match
Lcmpdone:
	rts

/*
 * {ov}bcopy(from, to, len)
 *
 * Works for counts up to 128K.
 */
ALTENTRY(ovbcopy, _bcopy)
ENTRY(bcopy)
	movl	sp@(12),d0		| get count
	jeq	Lcpyexit		| if zero, return
	movl	sp@(4),a0		| src address
	movl	sp@(8),a1		| dest address
	cmpl	a1,a0			| src before dest?
	jlt	Lcpyback		| yes, copy backwards (avoids overlap)
	movl	a0,d1
	btst	#0,d1			| src address odd?
	jeq	Lcfeven			| no, go check dest
	movb	a0@+,a1@+		| yes, copy a byte
	subql	#1,d0			| update count
	jeq	Lcpyexit		| exit if done
Lcfeven:
	movl	a1,d1
	btst	#0,d1			| dest address odd?
	jne	Lcfbyte			| yes, must copy by bytes
	movl	d0,d1			| no, get count
	lsrl	#2,d1			| convert to longwords
	jeq	Lcfbyte			| no longwords, copy bytes
	subql	#1,d1			| set up for dbf
Lcflloop:
	movl	a0@+,a1@+		| copy longwords
	dbf	d1,Lcflloop		| til done
	andl	#3,d0			| get remaining count
	jeq	Lcpyexit		| done if none
Lcfbyte:
	subql	#1,d0			| set up for dbf
Lcfbloop:
	movb	a0@+,a1@+		| copy bytes
	dbf	d0,Lcfbloop		| til done
Lcpyexit:
	rts
Lcpyback:
	addl	d0,a0			| add count to src
	addl	d0,a1			| add count to dest
	movl	a0,d1
	btst	#0,d1			| src address odd?
	jeq	Lcbeven			| no, go check dest
	movb	a0@-,a1@-		| yes, copy a byte
	subql	#1,d0			| update count
	jeq	Lcpyexit		| exit if done
Lcbeven:
	movl	a1,d1
	btst	#0,d1			| dest address odd?
	jne	Lcbbyte			| yes, must copy by bytes
	movl	d0,d1			| no, get count
	lsrl	#2,d1			| convert to longwords
	jeq	Lcbbyte			| no longwords, copy bytes
	subql	#1,d1			| set up for dbf
Lcblloop:
	movl	a0@-,a1@-		| copy longwords
	dbf	d1,Lcblloop		| til done
	andl	#3,d0			| get remaining count
	jeq	Lcpyexit		| done if none
Lcbbyte:
	subql	#1,d0			| set up for dbf
Lcbbloop:
	movb	a0@-,a1@-		| copy bytes
	dbf	d0,Lcbbloop		| til done
	rts
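
/*
 * Sketch of the overlap handling in bcopy/ovbcopy above: when the source
 * lies below the destination a forward copy could clobber bytes it has
 * yet to read, so the routine copies backwards from the ends instead.
 * In rough C terms (illustrative only):
 *
 *	if (from < to)
 *		copy backwards, starting at from + len and to + len;
 *	else
 *		copy forwards, aligning to even addresses and moving
 *		longwords where possible, bytes for the remainder;
 */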


	.data

/*
 * Memory information field for the secondary booter's memory allocator
 */
	.globl  _prgcore
	.globl	_dipsw1,_dipsw2

_prgcore:
	.long	0
	.long	0
	.long	0

_gotoROM:
	.long	0

_dipsw1:
	.long	0

_dipsw2:
	.long	0
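
/*
 * A sketch of how the C side is expected to see the exported data above
 * (the leading underscores are the usual a.out C symbol prefix; the
 * actual declarations live in the companion C sources):
 *
 *	extern long prgcore[3];		start, end and initial stack
 *	extern long dipsw1, dipsw2;	DIP switch settings
 *
 * _gotoROM is not exported; it only holds the ROM monitor NMI entry that
 * _exit jumps through.
 */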