1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
|
/*
* Creation Date: <2001/06/16 21:30:18 samuel>
* Time-stamp: <2003/04/04 16:32:06 samuel>
*
* <init.S>
*
* Asm glue for ELF images
*
* Copyright (C) 2001, 2002, 2003 Samuel Rydh (samuel@ibrium.se)
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation
*
*/
#include "autoconf.h"
#include "asm/asmdefs.h"
#include "asm/processor.h"
/************************************************************************/
/* Macros */
/************************************************************************/
/* ILLEGAL_VECTOR(v) places a "bl trap_error" stub at offset v inside the
 * __vectors image, so taking that exception reports an unexpected trap
 * (trap_error receives the vector's address via lr).
 * VECTOR(v, dummystr) starts a real handler at offset v; dummystr is a
 * human-readable name only and is not emitted. */
#define ILLEGAL_VECTOR( v ) .org __vectors + v ; vector__##v: bl trap_error ;
#define VECTOR( v, dummystr ) .org __vectors + v ; vector__##v
#ifdef CONFIG_PPC_64BITSUPPORT
/* We're trying to use the same code for the ppc32 and ppc64 handlers here.
 * On ppc32 we only save/restore the registers, C considers volatile.
 *
 * On ppc64 on the other hand, we have to save/restore all registers, because
 * all OF code is 32 bits, which only saves/restores the low 32 bits of the
 * registers it clobbers.
 */
/* Build an exception frame on the exception stack (base kept in sprg0)
 * and save CPU state into it.  Frame layout, in ULONG_SIZE slots:
 *   0: r0   1: interrupted r1   2-12: r2-r12
 *   13: lr  14: cr  15: ctr  16: xer
 *   17-35: r13-r31 (64-bit instantiation only)
 * sprg1/sprg2 are scratch for the interrupted r1 and cr.  On a 64-bit
 * CPU (detected because the high 32 bits of r1+r1 are non-zero) MSR_SF
 * is cleared first so the handler executes as 32-bit code.  stl and
 * ULONG_SIZE are bound when the template is instantiated below. */
#define EXCEPTION_PREAMBLE_TEMPLATE \
mtsprg1 r1 ; /* scratch */ \
mfcr r1 ; \
mtsprg2 r1 ; /* scratch */ \
lis r1, 0x8000 ; /* r1=0x80000000 */ \
add. r1,r1,r1 ; /* r1=r1+r1 (high 32bit !0) */ \
beq 1f; \
\
mfmsr r1 ; /* unset MSR_SF */ \
clrldi r1,r1,1 ; \
mtmsrd r1 ; \
1: \
mfsprg0 r1 ; /* exception stack in sprg0 */ \
.ifc ULONG_SIZE, 8 ; \
addi r1,r1,-(40 * ULONG_SIZE) ; /* push exception frame */ \
.else ; \
addi r1,r1,-(20 * ULONG_SIZE) ; /* push exception frame */ \
.endif ; \
\
stl r0,(0 * ULONG_SIZE)(r1) ; /* save r0 */ \
mfsprg1 r0 ; \
stl r0,(1 * ULONG_SIZE)(r1) ; /* save r1 */ \
stl r2,(2 * ULONG_SIZE)(r1) ; /* save r2 */ \
stl r3,(3 * ULONG_SIZE)(r1) ; /* save r3 */ \
stl r4,(4 * ULONG_SIZE)(r1) ; \
stl r5,(5 * ULONG_SIZE)(r1) ; \
stl r6,(6 * ULONG_SIZE)(r1) ; \
stl r7,(7 * ULONG_SIZE)(r1) ; \
stl r8,(8 * ULONG_SIZE)(r1) ; \
stl r9,(9 * ULONG_SIZE)(r1) ; \
stl r10,(10 * ULONG_SIZE)(r1) ; \
stl r11,(11 * ULONG_SIZE)(r1) ; \
stl r12,(12 * ULONG_SIZE)(r1) ; \
.ifc ULONG_SIZE, 8 ; \
stl r13,(17 * ULONG_SIZE)(r1) ; \
stl r14,(18 * ULONG_SIZE)(r1) ; \
stl r15,(19 * ULONG_SIZE)(r1) ; \
stl r16,(20 * ULONG_SIZE)(r1) ; \
stl r17,(21 * ULONG_SIZE)(r1) ; \
stl r18,(22 * ULONG_SIZE)(r1) ; \
stl r19,(23 * ULONG_SIZE)(r1) ; \
stl r20,(24 * ULONG_SIZE)(r1) ; \
stl r21,(25 * ULONG_SIZE)(r1) ; \
stl r22,(26 * ULONG_SIZE)(r1) ; \
stl r23,(27 * ULONG_SIZE)(r1) ; \
stl r24,(28 * ULONG_SIZE)(r1) ; \
stl r25,(29 * ULONG_SIZE)(r1) ; \
stl r26,(30 * ULONG_SIZE)(r1) ; \
stl r27,(31 * ULONG_SIZE)(r1) ; \
stl r28,(32 * ULONG_SIZE)(r1) ; \
stl r29,(33 * ULONG_SIZE)(r1) ; \
stl r30,(34 * ULONG_SIZE)(r1) ; \
stl r31,(35 * ULONG_SIZE)(r1) ; \
.endif ; \
\
mflr r0 ; \
stl r0,(13 * ULONG_SIZE)(r1) ; \
mfsprg2 r0 ; \
stl r0,(14 * ULONG_SIZE)(r1) ; \
mfctr r0 ; \
stl r0,(15 * ULONG_SIZE)(r1) ; \
mfxer r0 ; \
stl r0,(16 * ULONG_SIZE)(r1) ; \
\
/* 76(r1) unused */ \
addi r1,r1,-16 ; /* C ABI uses 0(r1) and 4(r1)... */
/* Undo EXCEPTION_PREAMBLE_TEMPLATE: pop the small ABI frame, reload
 * lr/cr/ctr/xer and the saved GPRs from the exception frame (r13-r31
 * only in the 64-bit instantiation), restore the interrupted r1 last
 * (it is the frame pointer until then), and return with rfi. */
#define EXCEPTION_EPILOGUE_TEMPLATE \
addi r1,r1,16 ; /* pop ABI frame */ \
\
ll r0,(13 * ULONG_SIZE)(r1) ; \
mtlr r0 ; \
ll r0,(14 * ULONG_SIZE)(r1) ; \
mtcr r0 ; \
ll r0,(15 * ULONG_SIZE)(r1) ; \
mtctr r0 ; \
ll r0,(16 * ULONG_SIZE)(r1) ; \
mtxer r0 ; \
\
ll r0,(0 * ULONG_SIZE)(r1) ; \
ll r2,(2 * ULONG_SIZE)(r1) ; \
ll r3,(3 * ULONG_SIZE)(r1) ; \
ll r4,(4 * ULONG_SIZE)(r1) ; \
ll r5,(5 * ULONG_SIZE)(r1) ; \
ll r6,(6 * ULONG_SIZE)(r1) ; \
ll r7,(7 * ULONG_SIZE)(r1) ; \
ll r8,(8 * ULONG_SIZE)(r1) ; \
ll r9,(9 * ULONG_SIZE)(r1) ; \
ll r10,(10 * ULONG_SIZE)(r1) ; \
ll r11,(11 * ULONG_SIZE)(r1) ; \
ll r12,(12 * ULONG_SIZE)(r1) ; \
.ifc ULONG_SIZE, 8 ; \
ll r13,(17 * ULONG_SIZE)(r1) ; \
ll r14,(18 * ULONG_SIZE)(r1) ; \
ll r15,(19 * ULONG_SIZE)(r1) ; \
ll r16,(20 * ULONG_SIZE)(r1) ; \
ll r17,(21 * ULONG_SIZE)(r1) ; \
ll r18,(22 * ULONG_SIZE)(r1) ; \
ll r19,(23 * ULONG_SIZE)(r1) ; \
ll r20,(24 * ULONG_SIZE)(r1) ; \
ll r21,(25 * ULONG_SIZE)(r1) ; \
ll r22,(26 * ULONG_SIZE)(r1) ; \
ll r23,(27 * ULONG_SIZE)(r1) ; \
ll r24,(28 * ULONG_SIZE)(r1) ; \
ll r25,(29 * ULONG_SIZE)(r1) ; \
ll r26,(30 * ULONG_SIZE)(r1) ; \
ll r27,(31 * ULONG_SIZE)(r1) ; \
ll r28,(32 * ULONG_SIZE)(r1) ; \
ll r29,(33 * ULONG_SIZE)(r1) ; \
ll r30,(34 * ULONG_SIZE)(r1) ; \
ll r31,(35 * ULONG_SIZE)(r1) ; \
.endif ; \
ll r1,(1 * ULONG_SIZE)(r1) ; /* restore stack at last */ \
rfi
// PPC32
/* Instantiate the templates with 32-bit loads/stores: stl/ll expand to
 * stw/lwz and the frame uses 4-byte slots. */
#define ULONG_SIZE 4
#define stl stw
#define ll lwz
.macro EXCEPTION_PREAMBLE
EXCEPTION_PREAMBLE_TEMPLATE
.endm
.macro EXCEPTION_EPILOGUE
EXCEPTION_EPILOGUE_TEMPLATE
.endm
#undef ULONG_SIZE
#undef stl
#undef ll
// PPC64
/* Same templates instantiated with 64-bit loads/stores (std/ld) and
 * 8-byte slots; this flavour also saves/restores r13-r31. */
#define ULONG_SIZE 8
#define stl std
#define ll ld
.macro EXCEPTION_PREAMBLE_64
EXCEPTION_PREAMBLE_TEMPLATE
.endm
.macro EXCEPTION_EPILOGUE_64
EXCEPTION_EPILOGUE_TEMPLATE
.endm
#undef ULONG_SIZE
#undef stl
#undef ll
/* Defaults used by the rest of this file when 64-bit support is on. */
#define ULONG_SIZE 4
#define STACKFRAME_MINSIZE 16
#else /* !CONFIG_PPC_64BITSUPPORT */
#ifdef __powerpc64__
/* Native 64-bit build: 8-byte slots, 48-byte minimum stack frame. */
#define ULONG_SIZE 8
#define STACKFRAME_MINSIZE 48
#define stl std
#define ll ld
#else
/* 32-bit build: 4-byte slots, 16-byte minimum stack frame. */
#define ULONG_SIZE 4
#define STACKFRAME_MINSIZE 16
#define stl stw
#define ll lwz
#endif
/* Save the C-volatile registers into a 20-slot exception frame pushed on
 * the exception stack (base in sprg0).  Slot layout:
 *   0: r0   1: interrupted r1   2-12: r2-r12
 *   13: lr  14: cr  15: ctr  16: xer   (17-19 unused)
 * sprg1 holds the interrupted r1 while the frame is built. */
.macro EXCEPTION_PREAMBLE
mtsprg1 r1 /* scratch */
mfsprg0 r1 /* exception stack in sprg0 */
addi r1, r1, -(20 * ULONG_SIZE) /* push exception frame */
stl r0, ( 0 * ULONG_SIZE)(r1) /* save r0 */
mfsprg1 r0
stl r0, ( 1 * ULONG_SIZE)(r1) /* save r1 */
stl r2, ( 2 * ULONG_SIZE)(r1) /* save r2 */
stl r3, ( 3 * ULONG_SIZE)(r1) /* save r3 */
stl r4, ( 4 * ULONG_SIZE)(r1)
stl r5, ( 5 * ULONG_SIZE)(r1)
stl r6, ( 6 * ULONG_SIZE)(r1)
stl r7, ( 7 * ULONG_SIZE)(r1)
stl r8, ( 8 * ULONG_SIZE)(r1)
stl r9, ( 9 * ULONG_SIZE)(r1)
stl r10, (10 * ULONG_SIZE)(r1)
stl r11, (11 * ULONG_SIZE)(r1)
stl r12, (12 * ULONG_SIZE)(r1)
mflr r0
stl r0, (13 * ULONG_SIZE)(r1)
mfcr r0
stl r0, (14 * ULONG_SIZE)(r1)
mfctr r0
stl r0, (15 * ULONG_SIZE)(r1)
mfxer r0
stl r0, (16 * ULONG_SIZE)(r1)
addi r1, r1, -STACKFRAME_MINSIZE /* C ABI saves LR and SP */
.endm
/* Undo EXCEPTION_PREAMBLE: reload lr/cr/ctr/xer and r0-r12 from the
 * exception frame, restore the interrupted r1 last (it is the frame
 * pointer until then), and return from the interrupt. */
.macro EXCEPTION_EPILOGUE
addi r1, r1, STACKFRAME_MINSIZE /* pop ABI frame */
ll r0, (13 * ULONG_SIZE)(r1)
mtlr r0
ll r0, (14 * ULONG_SIZE)(r1)
mtcr r0
ll r0, (15 * ULONG_SIZE)(r1)
mtctr r0
ll r0, (16 * ULONG_SIZE)(r1)
mtxer r0
ll r0, ( 0 * ULONG_SIZE)(r1)
ll r2, ( 2 * ULONG_SIZE)(r1)
ll r3, ( 3 * ULONG_SIZE)(r1)
ll r4, ( 4 * ULONG_SIZE)(r1)
ll r5, ( 5 * ULONG_SIZE)(r1)
ll r6, ( 6 * ULONG_SIZE)(r1)
ll r7, ( 7 * ULONG_SIZE)(r1)
ll r8, ( 8 * ULONG_SIZE)(r1)
ll r9, ( 9 * ULONG_SIZE)(r1)
ll r10, (10 * ULONG_SIZE)(r1)
ll r11, (11 * ULONG_SIZE)(r1)
ll r12, (12 * ULONG_SIZE)(r1)
ll r1, ( 1 * ULONG_SIZE)(r1) /* restore stack at last */
RFI
.endm
#endif /* !CONFIG_PPC_64BITSUPPORT */
/************************************************************************/
/* vectors */
/************************************************************************/
.section .text.vectors, "ax"
/* Exception vector image; _entry below copies it down to address 0.
 * Offset 0 catches calls through NULL function pointers: the nops fall
 * into a tight spin loop instead of executing garbage. */
GLOBL(__vectors):
nop // NULL-jmp trap
1: nop //
b 1b
/* 0x100: system reset exception - (re)enter the firmware */
VECTOR( 0x100, "SRE" ):
b _entry
/* Common target of every ILLEGAL_VECTOR stub.  The stub reaches us via
 * "bl", so lr identifies the offending vector.  On a 64-bit CPU
 * (detected because the high 32 bits of r1+r1 are non-zero) clear
 * MSR_SF so we run as 32-bit code, then tail-call
 * unexpected_excep(lr) through ctr.  Does not return. */
trap_error:
lis r1, 0x8000 /* r1=0x80000000 */
add. r1,r1,r1 /* r1=r1+r1 (high 32bit !0) */
beq 1f
mfmsr r1 /* unset MSR_SF */
clrldi r1,r1,1
mtmsrd r1
1:
mflr r3
LOAD_REG_FUNC(r4, unexpected_excep)
mtctr r4
bctr
ILLEGAL_VECTOR( 0x200 )
/* 0x300: data storage interrupt (DSI) */
VECTOR( 0x300, "DSI" ):
b real_dsi
ILLEGAL_VECTOR( 0x380 )
/* 0x400: instruction storage interrupt (ISI) */
VECTOR( 0x400, "ISI" ):
b real_isi
ILLEGAL_VECTOR( 0x480 )
ILLEGAL_VECTOR( 0x500 )
ILLEGAL_VECTOR( 0x600 )
ILLEGAL_VECTOR( 0x700 )
/* 0x800: FP unavailable - set MSR_FP (0x2000) in SRR1 and rfi, so the
 * faulting instruction is retried with the FPU enabled.  r3 is
 * preserved via sprg1. */
VECTOR( 0x800, "FPU" ):
mtsprg1 r3
mfsrr1 r3
ori r3,r3,0x2000
mtsrr1 r3
mfsprg1 r3
RFI
/* all remaining vectors are unexpected and report via trap_error */
ILLEGAL_VECTOR( 0x900 )
ILLEGAL_VECTOR( 0xa00 )
ILLEGAL_VECTOR( 0xb00 )
ILLEGAL_VECTOR( 0xc00 )
ILLEGAL_VECTOR( 0xd00 )
ILLEGAL_VECTOR( 0xe00 )
ILLEGAL_VECTOR( 0xf00 )
ILLEGAL_VECTOR( 0xf20 )
ILLEGAL_VECTOR( 0x1000 )
ILLEGAL_VECTOR( 0x1100 )
ILLEGAL_VECTOR( 0x1200 )
ILLEGAL_VECTOR( 0x1300 )
ILLEGAL_VECTOR( 0x1400 )
ILLEGAL_VECTOR( 0x1500 )
ILLEGAL_VECTOR( 0x1600 )
ILLEGAL_VECTOR( 0x1700 )
#ifdef CONFIG_PPC_64BITSUPPORT
/* 64-bit flavours of the DSI/ISI handlers, placed at offsets
 * 0x2000/0x2200 inside the vector image.  They save/restore the full
 * 64-bit register set and return directly via the epilogue's rfi.
 * NOTE(review): 0x2000/0x2200 are not architected vector offsets;
 * presumably other code dispatches here - confirm against the callers
 * outside this file. */
VECTOR( 0x2000, "DSI_64" ):
EXCEPTION_PREAMBLE_64
LOAD_REG_IMMEDIATE(r3, dsi_exception)
mtctr r3
bctrl
EXCEPTION_EPILOGUE_64
VECTOR( 0x2200, "ISI_64" ):
EXCEPTION_PREAMBLE_64
LOAD_REG_IMMEDIATE(r3, isi_exception)
mtctr r3
bctrl
EXCEPTION_EPILOGUE_64
#endif
/* Default DSI/ISI handlers: save the volatile state, call the C
 * handler through ctr, then restore everything and rfi through the
 * shared exception_return path. */
real_dsi:
EXCEPTION_PREAMBLE
LOAD_REG_FUNC(r3, dsi_exception)
mtctr r3
bctrl
b exception_return
real_isi:
EXCEPTION_PREAMBLE
LOAD_REG_FUNC(r3, isi_exception)
mtctr r3
bctrl
b exception_return
/* common exit: EXCEPTION_EPILOGUE ends with rfi */
exception_return:
EXCEPTION_EPILOGUE
GLOBL(__vectors_end):
/************************************************************************/
/* entry */
/************************************************************************/
/* Firmware entry point (reached from the reset vector and from the
 * 0x100 system reset vector).  Disables the MMU (and MSR_SF on 64-bit
 * CPUs), installs the exception vectors at address 0, sizes RAM, lays
 * out the stacks/hash table at the top of RAM, then calls the C code. */
GLOBL(_entry):
#ifdef CONFIG_PPC_64BITSUPPORT
li r0,0
lis r3, 0x8000 /* r3=0x80000000 (sign-extended on a 64-bit CPU) */
add. r3,r3,r3 /* r3=r3+r3 (high 32bit !0) */
beq no_64bit /* only true when !MSR_SF */
/* clear MSR, disable MMU, SF */
mtmsrd r0
b real_entry
no_64bit:
/* clear MSR, disable MMU */
mtmsr r0
real_entry:
#endif
/* copy exception vectors */
/* Copy the __vectors image to address 0, 16 bytes per iteration,
 * flushing each destination block from the D-cache and invalidating
 * it in the I-cache so the new vectors are fetchable. */
LOAD_REG_IMMEDIATE(r3, __vectors)
li r4,0
li r5,__vectors_end - __vectors + 16
rlwinm r5,r5,0,0,28 /* round the byte count down (clear low bits) */
1: lwz r6,0(r3)
lwz r7,4(r3)
lwz r8,8(r3)
lwz r9,12(r3)
stw r6,0(r4)
stw r7,4(r4)
stw r8,8(r4)
stw r9,12(r4)
dcbst 0,r4
sync
icbi 0,r4
sync
addi r5,r5,-16
addi r3,r3,16
addi r4,r4,16
cmpwi r5,0
bgt 1b
isync
/* returns the RAM size in r3; freely clobbers r1 (no stack exists yet) */
bl compute_ramsize
/* Memory map:
 *
 * Top +-------------------------+
 * | |
 * | ROM into RAM (1 MB) |
 * | |
 * +-------------------------+
 * | |
 * | MMU Hash Table (64 kB) |
 * | |
 * +-------------------------+
 * | |
 * | Exception Stack (32 kB) |
 * | |
 * +-------------------------+
 * | |
 * | Stack (64 kB) |
 * | |
 * +-------------------------+
 * | |
 * | Client Stack (64 kB) |
 * | |
 * +-------------------------+
 * | |
 * | Malloc Zone (2 MiB) |
 * | |
 * +-------------------------+
 * : :
 * Bottom
 */
addis r1, r3, -16 /* ramsize - 1MB */
/* setup hash table */
addis r1, r1, -1 /* - 64 kB */
clrrwi r1, r1, 5*4 /* & ~0xfffff (align down to 1 MB) */
/* setup exception stack */
mtsprg0 r1
/* setup stack */
addi r1, r1, -32768 /* - 32 kB */
/* NOTE(review): historical comment said "save memory size in stack",
 * but no store follows; the RAM size is simply left in r3,
 * presumably as the argument to setup_mmu - confirm. */
#ifdef __powerpc64__
/* set up TOC pointer */
/* ELFv1 function descriptor: entry at offset 0, TOC at offset 8 */
LOAD_REG_IMMEDIATE(r2, setup_mmu)
ld r2, 8(r2)
#endif
bl BRANCH_LABEL(setup_mmu)
bl BRANCH_LABEL(entry)
/* entry() should never return; spin if it does */
1: nop
b 1b
/* According to IEEE 1275, PPC bindings:
 *
 * MSR = FP, ME + (DR|IR)
 * r1 = stack (32 K + 32 bytes link area above)
 * r5 = client interface handler
 * r6 = address of client program arguments (unused)
 * r7 = length of client program arguments (unused)
 *
 * Yaboot and Linux use r3 and r4 for initrd address and size
 */
.data
/* Firmware stack pointer, preserved across client program execution:
 * written by call_elf, read back by of_client_callback. */
saved_stack:
DATA_LONG(0)
.previous
/* void call_elf( arg1, arg2, entry ) */
/* Transfer control to a loaded client program per the IEEE 1275 entry
 * conditions documented above.  In: r3/r4 = client args, r5 = entry
 * point.  Saves our stack pointer in saved_stack, builds the client
 * stack below the exception stack (derived from the hash-table base in
 * SDR1), passes of_client_callback in r5, sets MSR to FP|ME|DR|IR and
 * jumps via blrl.  The client is not expected to return. */
_GLOBAL(call_elf):
mflr r0
PPC_STLU r1, -STACKFRAME_MINSIZE(r1)
PPC_STL r0, (STACKFRAME_MINSIZE + PPC_LR_STKOFF)(r1)
mtlr r5
LOAD_REG_IMMEDIATE(r8, saved_stack) // save our stack pointer
PPC_STL r1,0(r8)
mfsdr1 r1
addi r1, r1, -32768 /* - 32 KiB exception stack */
addis r1, r1, -1 /* - 64 KiB stack */
LOAD_REG_IMMEDIATE(r5, of_client_callback) // r5 = callback
li r6,0 // r6 = address of client program arguments (unused)
li r7,0 // r7 = length of client program arguments (unused)
li r0,MSR_FP | MSR_ME | MSR_DR | MSR_IR
MTMSRD(r0)
blrl
#ifdef CONFIG_PPC64
/* Restore SF bit */
LOAD_REG_IMMEDIATE(r0, MSR_SF | MSR_FP | MSR_ME | MSR_DR | MSR_IR)
MTMSRD(r0)
#endif
LOAD_REG_IMMEDIATE(r8, saved_stack) // restore stack pointer
/* NOTE(review): this sets r1 to the *address* of saved_stack rather
 * than loading the saved value through it (compare of_client_callback,
 * which loads through saved_stack) - verify; per the comment below
 * this path should never actually be taken. */
mr r1,r8
PPC_LL r0, (STACKFRAME_MINSIZE + PPC_LR_STKOFF)(r1)
mtlr r0
addi r1, r1, STACKFRAME_MINSIZE
// XXX: should restore r12-r31 etc..
// we should not really come here though
blr
#ifdef __powerpc64__
#define STKOFF STACKFRAME_MINSIZE
#define SAVE_SPACE 320
#else
#define STKOFF 8
#define SAVE_SPACE 144
#endif
/* Client interface handler, invoked by the client program with
 * r3 = address of the CI argument array.  Pushes a small frame on the
 * client's stack (saving r4 and lr), switches to the firmware stack
 * kept in saved_stack, saves r0, r2, r5-r31 and ctr/cr/xer there,
 * calls of_client_interface(r3), then restores everything and returns
 * to the client with the result in r3. */
GLOBL(of_client_callback):
#ifdef CONFIG_PPC64
PPC_STLU r1, -(STACKFRAME_MINSIZE + 16)(r1)
#else
PPC_STLU r1, -STACKFRAME_MINSIZE(r1) /* fits within alignment */
#endif
/* save r4 */
PPC_STL r4, STKOFF(r1)
/* save lr */
mflr r4
PPC_STL r4, PPC_LR_STKOFF(r1)
/* restore OF stack */
LOAD_REG_IMMEDIATE(r4, saved_stack)
PPC_LL r4, 0(r4)
PPC_STLU r4,-SAVE_SPACE(r4)
PPC_STL r1,(STKOFF)(r4) // save caller stack
mr r1,r4
PPC_STL r2, (STKOFF + 1 * ULONG_SIZE)(r1)
PPC_STL r0, (STKOFF + 2 * ULONG_SIZE)(r1)
/* save ctr, cr and xer */
mfctr r2
PPC_STL r2, (STKOFF + 3 * ULONG_SIZE)(r1)
mfcr r2
PPC_STL r2, (STKOFF + 4 * ULONG_SIZE)(r1)
mfxer r2
PPC_STL r2, (STKOFF + 5 * ULONG_SIZE)(r1)
/* save r5 - r31 */
PPC_STL r5, (STKOFF + 6 * ULONG_SIZE)(r1)
PPC_STL r6, (STKOFF + 7 * ULONG_SIZE)(r1)
PPC_STL r7, (STKOFF + 8 * ULONG_SIZE)(r1)
PPC_STL r8, (STKOFF + 9 * ULONG_SIZE)(r1)
PPC_STL r9, (STKOFF + 10 * ULONG_SIZE)(r1)
PPC_STL r10, (STKOFF + 11 * ULONG_SIZE)(r1)
PPC_STL r11, (STKOFF + 12 * ULONG_SIZE)(r1)
PPC_STL r12, (STKOFF + 13 * ULONG_SIZE)(r1)
PPC_STL r13, (STKOFF + 14 * ULONG_SIZE)(r1)
PPC_STL r14, (STKOFF + 15 * ULONG_SIZE)(r1)
PPC_STL r15, (STKOFF + 16 * ULONG_SIZE)(r1)
PPC_STL r16, (STKOFF + 17 * ULONG_SIZE)(r1)
PPC_STL r17, (STKOFF + 18 * ULONG_SIZE)(r1)
PPC_STL r18, (STKOFF + 19 * ULONG_SIZE)(r1)
PPC_STL r19, (STKOFF + 20 * ULONG_SIZE)(r1)
PPC_STL r20, (STKOFF + 21 * ULONG_SIZE)(r1)
PPC_STL r21, (STKOFF + 22 * ULONG_SIZE)(r1)
PPC_STL r22, (STKOFF + 23 * ULONG_SIZE)(r1)
PPC_STL r23, (STKOFF + 24 * ULONG_SIZE)(r1)
PPC_STL r24, (STKOFF + 25 * ULONG_SIZE)(r1)
PPC_STL r25, (STKOFF + 26 * ULONG_SIZE)(r1)
PPC_STL r26, (STKOFF + 27 * ULONG_SIZE)(r1)
PPC_STL r27, (STKOFF + 28 * ULONG_SIZE)(r1)
PPC_STL r28, (STKOFF + 29 * ULONG_SIZE)(r1)
PPC_STL r29, (STKOFF + 30 * ULONG_SIZE)(r1)
PPC_STL r30, (STKOFF + 31 * ULONG_SIZE)(r1)
PPC_STL r31, (STKOFF + 32 * ULONG_SIZE)(r1)
#ifdef CONFIG_PPC64
/* load the callee's TOC from its ELFv1 function descriptor */
LOAD_REG_IMMEDIATE(r2, of_client_interface)
ld r2, 8(r2)
#endif
bl BRANCH_LABEL(of_client_interface)
/* restore r5 - r31 */
PPC_LL r5, (STKOFF + 6 * ULONG_SIZE)(r1)
PPC_LL r6, (STKOFF + 7 * ULONG_SIZE)(r1)
PPC_LL r7, (STKOFF + 8 * ULONG_SIZE)(r1)
PPC_LL r8, (STKOFF + 9 * ULONG_SIZE)(r1)
PPC_LL r9, (STKOFF + 10 * ULONG_SIZE)(r1)
PPC_LL r10, (STKOFF + 11 * ULONG_SIZE)(r1)
PPC_LL r11, (STKOFF + 12 * ULONG_SIZE)(r1)
PPC_LL r12, (STKOFF + 13 * ULONG_SIZE)(r1)
PPC_LL r13, (STKOFF + 14 * ULONG_SIZE)(r1)
PPC_LL r14, (STKOFF + 15 * ULONG_SIZE)(r1)
PPC_LL r15, (STKOFF + 16 * ULONG_SIZE)(r1)
PPC_LL r16, (STKOFF + 17 * ULONG_SIZE)(r1)
PPC_LL r17, (STKOFF + 18 * ULONG_SIZE)(r1)
PPC_LL r18, (STKOFF + 19 * ULONG_SIZE)(r1)
PPC_LL r19, (STKOFF + 20 * ULONG_SIZE)(r1)
PPC_LL r20, (STKOFF + 21 * ULONG_SIZE)(r1)
PPC_LL r21, (STKOFF + 22 * ULONG_SIZE)(r1)
PPC_LL r22, (STKOFF + 23 * ULONG_SIZE)(r1)
PPC_LL r23, (STKOFF + 24 * ULONG_SIZE)(r1)
PPC_LL r24, (STKOFF + 25 * ULONG_SIZE)(r1)
PPC_LL r25, (STKOFF + 26 * ULONG_SIZE)(r1)
PPC_LL r26, (STKOFF + 27 * ULONG_SIZE)(r1)
PPC_LL r27, (STKOFF + 28 * ULONG_SIZE)(r1)
PPC_LL r28, (STKOFF + 29 * ULONG_SIZE)(r1)
PPC_LL r29, (STKOFF + 30 * ULONG_SIZE)(r1)
PPC_LL r30, (STKOFF + 31 * ULONG_SIZE)(r1)
PPC_LL r31, (STKOFF + 32 * ULONG_SIZE)(r1)
/* restore ctr, cr and xer */
PPC_LL r2, (STKOFF + 3 * ULONG_SIZE)(r1)
mtctr r2
PPC_LL r2, (STKOFF + 4 * ULONG_SIZE)(r1)
mtcr r2
PPC_LL r2, (STKOFF + 5 * ULONG_SIZE)(r1)
mtxer r2
/* restore r0 and r2 */
PPC_LL r2, (STKOFF + 1 * ULONG_SIZE)(r1)
PPC_LL r0, (STKOFF + 2 * ULONG_SIZE)(r1)
/* restore caller stack */
PPC_LL r1, (STKOFF)(r1)
PPC_LL r4, PPC_LR_STKOFF(r1)
mtlr r4
PPC_LL r4, STKOFF(r1)
PPC_LL r1, 0(r1)
blr
/* rtas glue (must be relocatable) */
/* RTAS entry stub, delimited by of_rtas_start/of_rtas_end - presumably
 * so other code can copy it elsewhere (hence "relocatable"); currently
 * a no-op that simply returns. */
GLOBL(of_rtas_start):
/* r3 = argument buffer, r4 = of_rtas_start */
/* according to the CHRP standard, cr must be preserved (cr0/cr1 too?) */
blr
GLOBL(of_rtas_end):
#define CACHE_LINE_SIZE 32
#define LG_CACHE_LINE_SIZE 5
/* flush_icache_range( unsigned long start, unsigned long stop) */
/* Make [start, stop) coherent for instruction fetch: write dirty
 * D-cache lines to memory (dcbst), wait for completion (sync), then
 * invalidate the matching I-cache lines (icbi).
 * In: r3 = start, r4 = stop.  Clobbers r3-r6, ctr. */
_GLOBAL(flush_icache_range):
li r5,CACHE_LINE_SIZE-1
andc r3,r3,r5 /* align start down to a cache line */
subf r4,r3,r4
add r4,r4,r5
srwi. r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of lines */
beqlr /* nothing to do */
mtctr r4
mr r6,r3 /* keep start for the icbi pass */
1: dcbst 0,r3
addi r3,r3,CACHE_LINE_SIZE
bdnz 1b
sync /* wait for dcbst's to get to ram */
mtctr r4
2: icbi 0,r6
addi r6,r6,CACHE_LINE_SIZE
bdnz 2b
sync /* additional sync needed on g4 */
isync
blr
/* Get RAM size from QEMU configuration device */
#define CFG_ADDR 0xf0000510
#define FW_CFG_RAM_SIZE 0x03
/* Query the QEMU fw_cfg device: write the FW_CFG_RAM_SIZE key to the
 * 16-bit selector port at CFG_ADDR, then read four successive bytes
 * from the data port at CFG_ADDR + 2 and assemble them little-endian.
 * Out: r3 = RAM size (low 32 bits only - TODO confirm whether the
 * value can exceed 4 GB here).  Clobbers r0, r1, r9; since r1 is the
 * stack pointer, this may only be called before the stack is set up
 * (as _entry does). */
compute_ramsize:
LOAD_REG_IMMEDIATE(r9, CFG_ADDR)
li r0,FW_CFG_RAM_SIZE
sth r0,0(r9) /* select the fw_cfg key */
LOAD_REG_IMMEDIATE(r9, CFG_ADDR + 2)
lbz r1,0(r9) /* byte 0 (LSB) */
lbz r0,0(r9)
slwi r0,r0,8 /* byte 1 */
or r1,r1,r0
lbz r0,0(r9)
slwi r0,r0,16 /* byte 2 */
or r1,r1,r0
lbz r0,0(r9)
slwi r0,r0,24 /* byte 3 (MSB) */
or r3,r1,r0
blr
/* Hard reset vector */
/* .romentry is presumably placed at the CPU reset address by the
 * linker script; "bl" (rather than "b") leaves the reset location in
 * lr for _entry. */
.section .romentry,"ax"
bl _entry
|