#
#
#            Nim's Runtime Library
#        (c) Copyright 2012 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

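# This module implements the RTTI driven generic assignment, initialization
# and reset operations. The ``compilerproc``s defined below are called from
# compiler-generated code whenever a value of a complex type has to be
# copied, initialized or reset at runtime.
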
include seqs_v2_reimpl

proc genericResetAux(dest: pointer, n: ptr TNimNode) {.benign.}

proc genericAssignAux(dest, src: pointer, mt: PNimType, shallow: bool) {.benign.}
proc genericAssignAux(dest, src: pointer, n: ptr TNimNode,
                      shallow: bool) {.benign.} =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  case n.kind
  of nkSlot:
    genericAssignAux(cast[pointer](d +% n.offset),
                     cast[pointer](s +% n.offset), n.typ, shallow)
  of nkList:
    for i in 0..n.len-1:
      genericAssignAux(dest, src, n.sons[i], shallow)
  of nkCase:
    var dd = selectBranch(dest, n)
    var m = selectBranch(src, n)
    # reset if different branches are in use; note that different branches
    # also imply that this is not a self-assignment (``x = x``)!
    if m != dd and dd != nil:
      genericResetAux(dest, dd)
    copyMem(cast[pointer](d +% n.offset), cast[pointer](s +% n.offset),
            n.typ.size)
    if m != nil:
      genericAssignAux(dest, src, m, shallow)
  of nkNone: sysAssert(false, "genericAssignAux")
  #else:
  #  echo "ugh memory corruption! ", n.kind
  #  quit 1

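# Deep-copies a seq under the seqs v2 layout: the destination gets a freshly
# allocated payload and ``operation`` is applied to every element pair.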
template deepSeqAssignImpl(operation, additionalArg) {.dirty.} =
  var d = cast[ptr NimSeqV2Reimpl](dest)
  var s = cast[ptr NimSeqV2Reimpl](src)
  d.len = s.len
  let elem = mt.base
  d.p = cast[ptr NimSeqPayloadReimpl](newSeqPayload(s.len, elem.size, elem.align))

  let bs = elem.size
  let ba = elem.align
  let headerSize = align(sizeof(NimSeqPayloadBase), ba)

  for i in 0..d.len-1:
    operation(d.p +! (headerSize+i*bs), s.p +! (headerSize+i*bs), mt.base, additionalArg)

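# Performs the deep (or shallow) copy of a single value: dispatches on the
# runtime type kind and recurses into strings, seqs, objects, tuples and
# arrays; everything else is copied bitwise.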
proc genericAssignAux(dest, src: pointer, mt: PNimType, shallow: bool) =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  sysAssert(mt != nil, "genericAssignAux 2")
  case mt.kind
  of tyString:
    when defined(nimSeqsV2):
      var x = cast[ptr NimStringV2](dest)
      var s2 = cast[ptr NimStringV2](s)[]
      nimAsgnStrV2(x[], s2)
    else:
      var x = cast[PPointer](dest)
      var s2 = cast[PPointer](s)[]
      if s2 == nil or shallow or (
          cast[PGenericSeq](s2).reserved and seqShallowFlag) != 0:
        unsureAsgnRef(x, s2)
      else:
        unsureAsgnRef(x, copyString(cast[NimString](s2)))
  of tySequence:
    when defined(nimSeqsV2):
      deepSeqAssignImpl(genericAssignAux, shallow)
    else:
      var s2 = cast[PPointer](src)[]
      var seq = cast[PGenericSeq](s2)
      var x = cast[PPointer](dest)
      if s2 == nil or shallow or (seq.reserved and seqShallowFlag) != 0:
        # this can happen! nil sequences are allowed
        unsureAsgnRef(x, s2)
        return
      sysAssert(dest != nil, "genericAssignAux 3")
      if ntfNoRefs in mt.base.flags:
        var ss = nimNewSeqOfCap(mt, seq.len)
        cast[PGenericSeq](ss).len = seq.len
        unsureAsgnRef(x, ss)
        var dst = cast[ByteAddress](cast[PPointer](dest)[])
        copyMem(cast[pointer](dst +% align(GenericSeqSize, mt.base.align)),
                cast[pointer](cast[ByteAddress](s2) +% align(GenericSeqSize, mt.base.align)),
                seq.len *% mt.base.size)
      else:
        unsureAsgnRef(x, newSeq(mt, seq.len))
        var dst = cast[ByteAddress](cast[PPointer](dest)[])
        for i in 0..seq.len-1:
          genericAssignAux(
            cast[pointer](dst +% align(GenericSeqSize, mt.base.align) +% i *% mt.base.size),
            cast[pointer](cast[ByteAddress](s2) +% align(GenericSeqSize, mt.base.align) +% i *% mt.base.size),
            mt.base, shallow)
  of tyObject:
    var it = mt.base
    # don't use recursion here on the PNimType because the subtype
    # check should only be done at the very end:
    while it != nil:
      genericAssignAux(dest, src, it.node, shallow)
      it = it.base
    genericAssignAux(dest, src, mt.node, shallow)
    # we need to copy the m_type field for tyObject, as it could be empty for
    # sequence reallocations:
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      #chckObjAsgn(cast[ptr PNimTypeV2](src)[].typeInfoV2, mt)
      pint[] = cast[PNimTypeV2](mt.typeInfoV2)
    else:
      var pint = cast[ptr PNimType](dest)
      # We need to copy the *static* type, not the dynamic type:
      #   if p of TB:
      #     var tbObj = TB(p)
      #     tbObj of TC # needs to be false!
      #c_fprintf(stdout, "%s %s\n", pint[].name, mt.name)
      let srcType = cast[ptr PNimType](src)[]
      if srcType != nil:
        # `!= nil` needed for cases where the object is not initialized properly (see bug #16706)
        # note that you can have `srcType == nil` yet `src != nil`
        chckObjAsgn(srcType, mt)
      pint[] = mt # cast[ptr PNimType](src)[]
  of tyTuple:
    genericAssignAux(dest, src, mt.node, shallow)
  of tyArray, tyArrayConstr:
    for i in 0..(mt.size div mt.base.size)-1:
      genericAssignAux(cast[pointer](d +% i *% mt.base.size),
                       cast[pointer](s +% i *% mt.base.size), mt.base, shallow)
  of tyRef:
    unsureAsgnRef(cast[PPointer](dest), cast[PPointer](s)[])
  else:
    copyMem(dest, src, mt.size) # copy raw bits

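# Deep assignment entry point emitted by the compiler for ``dest = src``
# when the type requires runtime type information.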
proc genericAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  genericAssignAux(dest, src, mt, false)

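# Like ``genericAssign``, but string and seq buffers are shared instead of
# copied where the runtime permits it.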
proc genericShallowAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  genericAssignAux(dest, src, mt, true)

when false:
  proc debugNimType(t: PNimType) =
    if t.isNil:
      cprintf("nil!")
      return
    var k: cstring
    case t.kind
    of tyBool: k = "bool"
    of tyChar: k = "char"
    of tyEnum: k = "enum"
    of tyArray: k = "array"
    of tyObject: k = "object"
    of tyTuple: k = "tuple"
    of tyRange: k = "range"
    of tyPtr: k = "ptr"
    of tyRef: k = "ref"
    of tyVar: k = "var"
    of tySequence: k = "seq"
    of tyProc: k = "proc"
    of tyPointer: k = "pointer"
    of tyOpenArray: k = "openarray"
    of tyString: k = "string"
    of tyCstring: k = "cstring"
    of tyInt: k = "int"
    of tyInt32: k = "int32"
    else: k = "other"
    cprintf("%s %ld\n", k, t.size)
    debugNimType(t.base)

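# Assigns a seq value: ``src`` holds the seq pointer itself, so a pointer to
# it is passed on to ``genericAssign``.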
proc genericSeqAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  var src = src # copy to a local so that its address can be taken below
  genericAssign(dest, addr(src), mt)

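# Element-wise deep assignment for open arrays; ``mt.base`` describes the
# element type.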
proc genericAssignOpenArray(dest, src: pointer, len: int,
                            mt: PNimType) {.compilerproc.} =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  for i in 0..len-1:
    genericAssign(cast[pointer](d +% i *% mt.base.size),
                  cast[pointer](s +% i *% mt.base.size), mt.base)

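# ``objectInit``/``objectInitAux`` write the type field of objects and
# recurse into fields, tuples and array elements.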
proc objectInit(dest: pointer, typ: PNimType) {.compilerproc, benign.}
proc objectInitAux(dest: pointer, n: ptr TNimNode) {.benign.} =
  var d = cast[ByteAddress](dest)
  case n.kind
  of nkNone: sysAssert(false, "objectInitAux")
  of nkSlot: objectInit(cast[pointer](d +% n.offset), n.typ)
  of nkList:
    for i in 0..n.len-1:
      objectInitAux(dest, n.sons[i])
  of nkCase:
    var m = selectBranch(dest, n)
    if m != nil: objectInitAux(dest, m)

proc objectInit(dest: pointer, typ: PNimType) =
  # the generic init proc that takes care of initialization of complex
  # objects on the stack or heap
  var d = cast[ByteAddress](dest)
  case typ.kind
  of tyObject:
    # iterate over any structural type
    # here we have to init the type field:
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      pint[] = cast[PNimTypeV2](typ.typeInfoV2)
    else:
      var pint = cast[ptr PNimType](dest)
      pint[] = typ
    objectInitAux(dest, typ.node)
  of tyTuple:
    objectInitAux(dest, typ.node)
  of tyArray, tyArrayConstr:
    for i in 0..(typ.size div typ.base.size)-1:
      objectInit(cast[pointer](d +% i * typ.base.size), typ.base)
  else: discard # nothing to do

# ---------------------- assign zero -----------------------------------------

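# ``genericReset`` resets ``dest`` to the default value of its type: refs,
# strings and seqs are released, compound types are reset field by field and
# everything else is zeroed out.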
proc genericReset(dest: pointer, mt: PNimType) {.compilerproc, benign.}
proc genericResetAux(dest: pointer, n: ptr TNimNode) =
  var d = cast[ByteAddress](dest)
  case n.kind
  of nkNone: sysAssert(false, "genericResetAux")
  of nkSlot: genericReset(cast[pointer](d +% n.offset), n.typ)
  of nkList:
    for i in 0..n.len-1: genericResetAux(dest, n.sons[i])
  of nkCase:
    var m = selectBranch(dest, n)
    if m != nil: genericResetAux(dest, m)
    zeroMem(cast[pointer](d +% n.offset), n.typ.size)

proc genericReset(dest: pointer, mt: PNimType) =
  var d = cast[ByteAddress](dest)
  sysAssert(mt != nil, "genericReset 2")
  case mt.kind
  of tyRef:
    unsureAsgnRef(cast[PPointer](dest), nil)
  of tyString:
    when defined(nimSeqsV2):
      var s = cast[ptr NimStringV2](dest)
      frees(s[])
      zeroMem(dest, mt.size)
    else:
      unsureAsgnRef(cast[PPointer](dest), nil)
  of tySequence:
    when defined(nimSeqsV2):
      frees(cast[ptr NimSeqV2Reimpl](dest)[])
      zeroMem(dest, mt.size)
    else:
      unsureAsgnRef(cast[PPointer](dest), nil)
  of tyTuple:
    genericResetAux(dest, mt.node)
  of tyObject:
    genericResetAux(dest, mt.node)
    # also reset the type field for tyObject, for correct branch switching!
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      pint[] = nil
    else:
      var pint = cast[ptr PNimType](dest)
      pint[] = nil
  of tyArray, tyArrayConstr:
    for i in 0..(mt.size div mt.base.size)-1:
      genericReset(cast[pointer](d +% i *% mt.base.size), mt.base)
  else:
    zeroMem(dest, mt.size) # set raw bits to zero

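# Maps a discriminant value to the TNimNode of the branch it selects.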
proc selectBranch(discVal, L: int,
                  a: ptr array[0x7fff, ptr TNimNode]): ptr TNimNode =
  result = a[L] # a[L] contains the ``else`` part (but may be nil)
  if discVal <% L:
    let x = a[discVal]
    if x != nil: result = x

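# Called by compiler-generated code before an assignment to the discriminant
# of a case object; raises a ``FieldDefect`` if the assignment would switch
# the active branch. With ``-d:nimOldCaseObjects`` the check is skipped when
# the old discriminant is still zero.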
proc FieldDiscriminantCheck(oldDiscVal, newDiscVal: int,
                            a: ptr array[0x7fff, ptr TNimNode],
                            L: int) {.compilerproc.} =
  let oldBranch = selectBranch(oldDiscVal, L, a)
  let newBranch = selectBranch(newDiscVal, L, a)
  when defined(nimOldCaseObjects):
    if newBranch != oldBranch and oldDiscVal != 0:
      sysFatal(FieldDefect, "assignment to discriminant changes object branch")
  else:
    if newBranch != oldBranch:
      if oldDiscVal != 0:
        sysFatal(FieldDefect, "assignment to discriminant changes object branch")
      else:
        sysFatal(FieldDefect, "assignment to discriminant changes object branch; compile with -d:nimOldCaseObjects for a transition period")