(may expand to 0, 1 or 2 instructions).
 */

        .macro  X_movw dst src
        .L_movw_dst = -1
        .L_movw_src = -1
        .L_movw_n = 0
-       .irp    reg,    r0, r1, r2, r3, r4, r5, r6, r7, \
-                       r8, r9, r10, r11, r12, r13, r14, r15, \
-                       r16, r17, r18, r19, r20, r21, r22, r23, \
-                       r24, r25, r26, r27, r28, r29, r30, r31
+       .irp    reg,    r0, r1, r2, r3, r4, r5, r6, r7, r8, r9,  \
+                       r10,r11,r12,r13,r14,r15,r16,r17,r18,r19, \
+                       r20,r21,r22,r23,r24,r25,r26,r27,r28,r29, \
+                       r30,r31
        .ifc    \reg,\dst
        .L_movw_dst = .L_movw_n
        .endif
        .ifc    \reg,\src
        .L_movw_src = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
+       .L_movw_n = 0
+       .irp    reg,    R0, R1, R2, R3, R4, R5, R6, R7, R8, R9,  \
+                       R10,R11,R12,R13,R14,R15,R16,R17,R18,R19, \
+                       R20,R21,R22,R23,R24,R25,R26,R27,R28,R29, \
+                       R30,R31
+       .ifc    \reg,\dst
+       .L_movw_dst = .L_movw_n
+       .endif
+       .ifc    \reg,\src
+       .L_movw_src = .L_movw_n
+       .endif
+       .L_movw_n = .L_movw_n + 1
+       .endr
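; The .irp loops above turn a register name given as a macro argument into
; its number: a counter walks the list while .ifc string-compares each list
; entry against the argument, once for the lower-case and once for the
; upper-case spelling.  A minimal sketch of the same technique, cut down to
; four registers; the macro name reg_index and the symbols .L_idx/.L_i are
; illustrative only, not part of the source.
        .macro  reg_index name          ; leaves the register number in .L_idx, -1 if unknown
        .L_idx = -1
        .L_i = 0
        .irp    r, r24, r25, r26, r27   ; shortened register list
        .ifc    \r,\name                ; literal string compare of entry and argument
        .L_idx = .L_i
        .endif
        .L_i = .L_i + 1
        .endr
        .endm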
        .if     .L_movw_dst < 0
        .L_movw_n = 0
        .rept   32
        .if     \dst == .L_movw_n
        .L_movw_dst = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        .endif
        .if     .L_movw_src < 0
        .L_movw_n = 0
        .rept   32
        .if     \src == .L_movw_n
        .L_movw_src = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        .endif
        .if     (.L_movw_dst < 0) || (.L_movw_src < 0)
-       .err    ; Invalid arg.
+       .err    ; Invalid 'X_movw' arg.
        .endif
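; The two .rept 32 loops above are a fallback for arguments that matched no
; register name: .if \dst == .L_movw_n evaluates the argument numerically, so
; a bare register number (or a symbol that evaluates to one) is accepted too.
; Illustrative call, not taken from the source; the expansion shown is the
; classic-core case, which uses the same numeric register operands as the
; mov (.L_movw_dst), (.L_movw_src) lines further below.
        X_movw  30, 24          ; resolved by the .rept loops, not the .irp scans;
                                ; classic core: mov 30,24 then mov 31,25 (Z <- r25:r24)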
|
        .if ((.L_movw_src) - (.L_movw_dst))             /* different registers */
        .if (((.L_movw_src) | (.L_movw_dst)) & 0x01)
        .if (((.L_movw_src)-(.L_movw_dst)) & 0x80)      /* src < dest */
        mov     (.L_movw_dst)+1, (.L_movw_src)+1
        mov     (.L_movw_dst),   (.L_movw_src)
        .else                                           /* src > dest */
        mov     (.L_movw_dst),   (.L_movw_src)
        mov     (.L_movw_dst)+1, (.L_movw_src)+1
        .endif
        .else   /* both even -> overlap not possible */
#if defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
        movw    \dst, \src
#else
        mov     (.L_movw_dst),   (.L_movw_src)
        mov     (.L_movw_dst)+1, (.L_movw_src)+1
#endif
        .endif
        .endif
        .endm
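; As the comment above the macro says, a call may expand to 0, 1 or 2
; instructions: nothing when dst and src resolve to the same register, a
; single movw on enhanced cores when both numbers are even, and two mov
; instructions otherwise, ordered by the sign of src - dst so that an
; overlapping odd-aligned pair is copied without clobbering itself.
; Usage sketch, illustrative and not taken from the source:
        X_movw  r30, r24        ; Z <- r25:r24: movw r30,r24 on enhanced cores,
                                ; mov r30,r24 then mov r31,r25 on classic ones
        X_movw  r24, r24        ; same register pair: expands to nothing
        X_movw  r25, r24        ; odd destination: the high copy (r26 <- r25) is
                                ; emitted before r25 is overwritten by r24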

-#if __AVR_ENHANCED__
-#define LOAD_X(lo, hi)  movw    r26, lo
-#define LOAD_Z(lo, hi)  movw    r30, lo
-#else
-#define LOAD_X(lo, hi)  \
-       mov     r26, lo $ \
-       mov     r27, hi
-#define LOAD_Z(lo, hi)  \
-       mov     r30, lo $ \
-       mov     r31, hi
-#endif
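; For a pointer load from an adjacent even/odd register pair, the LOAD_X/LOAD_Z
; helpers above and the X_movw macro generate the same instructions; the
; comparison below is an illustration, not a statement from the source
; (r26/r27 form the X pointer, r30/r31 the Z pointer).
        X_movw  r26, r22        ; X <- r23:r22, same code as LOAD_X(r22, r23)
        X_movw  r30, r22        ; Z <- r23:r22, same code as LOAD_Z(r22, r23)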
/* LOAD_X_CONST(p) loads constant P into pointer register X. */
#define LOAD_X_CONST(p) \