/* Copyright (c) 2002, 2005, 2006, 2007 Marek Michalkiewicz
   Copyright (c) 2006 Dmitry Xmelkov
   All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.

   * Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in
     the documentation and/or other materials provided with the
     distribution.

   * Neither the name of the copyright holders nor the names of
     contributors may be used to endorse or promote products derived
     from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
  AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
  LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE. */

/*
   macros.inc - macros for use in assembler sources

   Contributors:
     Created by Marek Michalkiewicz <marekm@linux.org.pl>
 */

#include <avr/io.h>
//#include "sectionname.h"

/* if not defined, assume old version with underscores */
#ifndef __USER_LABEL_PREFIX__
#define __USER_LABEL_PREFIX__ _
#endif

#ifndef __REGISTER_PREFIX__
#define __REGISTER_PREFIX__
#endif

/* the assembler line separator (just in case it ever changes) */
#define _L $

#define CONCAT1(a, b) CONCAT2(a, b)
#define CONCAT2(a, b) a ## b

#define _U(x) CONCAT1(__USER_LABEL_PREFIX__, x)

#define _R(x) CONCAT1(__REGISTER_PREFIX__, x)
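
/* Example (illustrative only): the _U() wrapper lets one source serve both
   prefix conventions.  With __USER_LABEL_PREFIX__ empty (current avr-gcc),
   _U(my_function) expands to "my_function"; with the old default "_" it
   expands to "_my_function".  "my_function" is a made-up name:

	.global	_U(my_function)
   _U(my_function):
	ret
 */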

/* these should help to fix the "can't have function named r1()" bug
   which may require adding '%' in front of register names.  */

#define r0 _R(r0)
#define r1 _R(r1)
#define r2 _R(r2)
#define r3 _R(r3)
#define r4 _R(r4)
#define r5 _R(r5)
#define r6 _R(r6)
#define r7 _R(r7)
#define r8 _R(r8)
#define r9 _R(r9)
#define r10 _R(r10)
#define r11 _R(r11)
#define r12 _R(r12)
#define r13 _R(r13)
#define r14 _R(r14)
#define r15 _R(r15)
#define r16 _R(r16)
#define r17 _R(r17)
#define r18 _R(r18)
#define r19 _R(r19)
#define r20 _R(r20)
#define r21 _R(r21)
#define r22 _R(r22)
#define r23 _R(r23)
#define r24 _R(r24)
#define r25 _R(r25)
#define r26 _R(r26)
#define r27 _R(r27)
#define r28 _R(r28)
#define r29 _R(r29)
#define r30 _R(r30)
#define r31 _R(r31)

#ifndef __tmp_reg__
#define __tmp_reg__ r0
#endif

#ifndef __zero_reg__
#define __zero_reg__ r1
#endif

#if __AVR_MEGA__
  #define XJMP jmp
  #define XCALL call
#else
  #define XJMP rjmp
  #define XCALL rcall
#endif
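
/* Example (illustrative only): XJMP/XCALL pick the long or relative form
   automatically, so shared sources assemble on both small and large parts.
   "hypothetical_helper" and "hypothetical_tail" are made-up symbols:

	XCALL	_U(hypothetical_helper)	; "call" where available, "rcall" otherwise
	XJMP	_U(hypothetical_tail)	; tail-jump: "jmp" or "rjmp"
 */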

/* used only by fplib/strtod.S - libgcc internal function calls */
#define PROLOGUE_SAVES(offset) XJMP (__prologue_saves__ + 2 * (offset))
#define EPILOGUE_RESTORES(offset) XJMP (__epilogue_restores__ + 2 * (offset))
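
/* Illustrative note (an assumption about libgcc internals, not verified
   against every version): __prologue_saves__ / __epilogue_restores__ are
   sequences of one-word (2-byte) push/pop instructions, so "2 * (offset)"
   skips the first 'offset' saves or restores; PROLOGUE_SAVES(0) would enter
   at the start and save the whole call-saved register set. */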

#if FLASHEND > 0x10000  /* ATmega103 */
  #define BIG_CODE 1
#else
  #define BIG_CODE 0
#endif

#ifndef __AVR_HAVE_MOVW__
#  if  defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#   define __AVR_HAVE_MOVW__ 1
#  endif
#endif

#ifndef __AVR_HAVE_LPMX__
# if  defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#  define __AVR_HAVE_LPMX__ 1
# endif
#endif

#ifndef __AVR_HAVE_MUL__
# if  defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#  define __AVR_HAVE_MUL__ 1
# endif
#endif
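
/* Example (illustrative only): these feature flags are intended to gate
   instruction selection in sources that include this file, e.g.

   #if defined(__AVR_HAVE_MUL__) && __AVR_HAVE_MUL__
	mul	r24, r22		; hardware 8x8->16 multiply into r1:r0
	mov	r24, r0			; keep the low byte of the product
	clr	__zero_reg__		; r1 must read as zero again afterwards
   #else
	; ...shift-and-add fallback for cores without MUL...
   #endif
 */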

/*
   Smart version of movw:
    - uses "movw" if possible (supported by MCU, and both registers even)
    - handles overlapping register pairs correctly
    - no instruction generated if source and destination are the same
   (may expand to 0, 1 or 2 instructions).
 */

.macro  X_movw dst src
	.L_movw_dst = -1
	.L_movw_src = -1
	.L_movw_n = 0
	.irp  reg,	r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, \
			r10,r11,r12,r13,r14,r15,r16,r17,r18,r19, \
			r20,r21,r22,r23,r24,r25,r26,r27,r28,r29, \
			r30,r31
		.ifc  \reg,\dst
			.L_movw_dst = .L_movw_n
		.endif
		.ifc  \reg,\src
			.L_movw_src = .L_movw_n
		.endif
		.L_movw_n = .L_movw_n + 1
	.endr
	.L_movw_n = 0
	.irp  reg,	R0, R1, R2, R3, R4, R5, R6, R7, R8, R9, \
			R10,R11,R12,R13,R14,R15,R16,R17,R18,R19, \
			R20,R21,R22,R23,R24,R25,R26,R27,R28,R29, \
			R30,R31
		.ifc  \reg,\dst
			.L_movw_dst = .L_movw_n
		.endif
		.ifc  \reg,\src
			.L_movw_src = .L_movw_n
		.endif
		.L_movw_n = .L_movw_n + 1
	.endr
	.if   .L_movw_dst < 0
		.L_movw_n = 0
		.rept   32
			.if \dst == .L_movw_n
				.L_movw_dst = .L_movw_n
			.endif
			.L_movw_n = .L_movw_n + 1
		.endr
	.endif
	.if   .L_movw_src < 0
		.L_movw_n = 0
		.rept   32
			.if \src == .L_movw_n
				.L_movw_src = .L_movw_n
			.endif
			.L_movw_n = .L_movw_n + 1
		.endr
	.endif
	.if   (.L_movw_dst < 0) || (.L_movw_src < 0)
		.err    ; Invalid 'X_movw' arg.
	.endif

	.if ((.L_movw_src) - (.L_movw_dst))  /* different registers */
		.if (((.L_movw_src) | (.L_movw_dst)) & 0x01)
			.if (((.L_movw_src)-(.L_movw_dst)) & 0x80) /* src < dest */
				mov     (.L_movw_dst)+1, (.L_movw_src)+1
				mov     (.L_movw_dst), (.L_movw_src)
			.else                                      /* src > dest */
				mov     (.L_movw_dst), (.L_movw_src)
				mov     (.L_movw_dst)+1, (.L_movw_src)+1
			.endif
		.else  /* both even -> overlap not possible */
#if  defined(__AVR_HAVE_MOVW__) && __AVR_HAVE_MOVW__
			movw    \dst, \src
#else
			mov     (.L_movw_dst), (.L_movw_src)
			mov     (.L_movw_dst)+1, (.L_movw_src)+1
#endif
		.endif
	.endif
.endm
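
/* Example (illustrative only): copy the 16-bit value in r23:r22 to r25:r24.
   On cores with MOVW this expands to a single "movw r24, r22"; on classic
   cores it expands to two "mov" instructions, and if dst == src it expands
   to nothing at all:

	X_movw	r24, r22
 */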

/* Macro 'X_lpm' emulates the enhanced 'lpm' instruction forms on classic chips.
   Usage:
	X_lpm	dst, src
   where
	dst	is 0..31, r0..r31 or R0..R31 (the destination register)
	src	is z, Z, z+ or Z+
   Both arguments may be omitted (defaults: dst=r0, src=Z).

   Possible results for classic chips:
	lpm
	lpm / mov Rd,r0
	lpm / adiw ZL,1
	lpm / mov Rd,r0 / adiw ZL,1

   For enhanced chips it is always a single instruction.

   ATTENTION:  unlike on enhanced chips, the SREG flags (S,V,N,Z,C)
   are changed when src is 'Z+', and R0 is used as scratch.
 */
.macro	X_lpm	dst=r0, src=Z

  /* dst evaluation	*/
  .L_lpm_dst = -1

  .L_lpm_n = 0
  .irp  reg,  r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, \
	     r10,r11,r12,r13,r14,r15,r16,r17,r18,r19, \
	     r20,r21,r22,r23,r24,r25,r26,r27,r28,r29, \
	     r30,r31
    .ifc  \reg,\dst
      .L_lpm_dst = .L_lpm_n
    .endif
    .L_lpm_n = .L_lpm_n + 1
  .endr

  .L_lpm_n = 0
  .irp  reg,  R0, R1, R2, R3, R4, R5, R6, R7, R8, R9, \
	     R10,R11,R12,R13,R14,R15,R16,R17,R18,R19, \
	     R20,R21,R22,R23,R24,R25,R26,R27,R28,R29, \
	     R30,R31
    .ifc  \reg,\dst
      .L_lpm_dst = .L_lpm_n
    .endif
    .L_lpm_n = .L_lpm_n + 1
  .endr

  .if  .L_lpm_dst < 0
    .L_lpm_n = 0
    .rept 32
      .if  \dst == .L_lpm_n
	.L_lpm_dst = .L_lpm_n
      .endif
      .L_lpm_n = .L_lpm_n + 1
    .endr
  .endif

  .if  (.L_lpm_dst < 0)
    .err	; Invalid dst arg of 'X_lpm' macro.
  .endif

  /* src evaluation	*/
  .L_lpm_src = -1
  .L_lpm_n = 0
  .irp  reg,  z,Z,z+,Z+
    .ifc  \reg,\src
      .L_lpm_src = .L_lpm_n
    .endif
    .L_lpm_n = .L_lpm_n + 1
  .endr

  .if  (.L_lpm_src < 0)
    .err	; Invalid src arg of 'X_lpm' macro.
  .endif

  /* instruction(s)	*/
  .if  .L_lpm_src < 2
    .if  .L_lpm_dst == 0
	lpm
    .else
#if  defined(__AVR_HAVE_LPMX__) && __AVR_HAVE_LPMX__
	lpm	.L_lpm_dst, Z
#else
	lpm
	mov	.L_lpm_dst, r0
#endif
    .endif
  .else
    .if  (.L_lpm_dst >= 30)
      .err	; Registers 30 and 31 are inhibited as 'X_lpm *,Z+' dst.
    .endif
#if  defined(__AVR_HAVE_LPMX__) && __AVR_HAVE_LPMX__
	lpm	.L_lpm_dst, Z+
#else
	lpm
    .if  .L_lpm_dst
	mov	.L_lpm_dst, r0
    .endif
	adiw	r30, 1
#endif
  .endif
.endm
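
/* Example (illustrative only): read one flash byte at Z into r24 and
   advance Z.  On enhanced cores this is a single "lpm r24, Z+"; on classic
   cores it expands to "lpm / mov r24, r0 / adiw r30, 1" (clobbering r0 and
   the arithmetic flags, as noted above):

	X_lpm	r24, Z+
 */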

/*
   LPM_R0_ZPLUS_INIT is used before the loop to initialize RAMPZ
   for future devices with RAMPZ:Z auto-increment - [e]lpm r0, Z+.

   LPM_R0_ZPLUS_NEXT is used inside the loop to load a byte from
   the program memory at [RAMPZ:]Z to R0, and increment [RAMPZ:]Z.

   The argument in both macros is a register that contains the
   high byte (bits 23-16) of the address; bits 15-0 should be in
   the Z (r31:r30) register.  It can be any register except for:
   r0, r1 (__zero_reg__ - assumed to always contain 0), r30, r31.
 */

	.macro	LPM_R0_ZPLUS_INIT hhi
#if __AVR_ENHANCED__
  #if BIG_CODE
	out	AVR_RAMPZ_ADDR, \hhi
  #endif
#endif
	.endm

	.macro	LPM_R0_ZPLUS_NEXT hhi
#if __AVR_ENHANCED__
  #if BIG_CODE
    /* ELPM with RAMPZ:Z post-increment, load RAMPZ only once */
	elpm	r0, Z+
  #else
    /* LPM with Z post-increment, max 64K, no RAMPZ (ATmega83/161/163/32) */
	lpm	r0, Z+
  #endif
#else
  #if BIG_CODE
    /* ELPM without post-increment, load RAMPZ each time (ATmega103) */
	out	AVR_RAMPZ_ADDR, \hhi
	elpm
	adiw	r30,1
	adc	\hhi, __zero_reg__
  #else
    /* LPM without post-increment, max 64K, no RAMPZ (AT90S*) */
	lpm
	adiw	r30,1
  #endif
#endif
	.endm
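
/* Example (illustrative only): a copy loop reading from program memory,
   with the byte address split as hhi:Z.  The register choices (r20 for the
   high address byte, r22 for the byte count, X as RAM destination) are
   made up for the sketch:

	LPM_R0_ZPLUS_INIT r20		; set up RAMPZ once, where applicable
1:	LPM_R0_ZPLUS_NEXT r20		; r0 = flash[hhi:Z], advance the pointer
	st	X+, r0
	dec	r22
	brne	1b
 */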